2026-03-31T22:31:22.447 INFO:root:teuthology version: 1.2.4.dev37+ga59626679
2026-03-31T22:31:22.452 DEBUG:teuthology.report:Pushing job info to http://localhost:8080
2026-03-31T22:31:22.471 INFO:teuthology.run:Config:
archive_path: /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360
branch: tentacle
description: rados/cephadm/workunits/{0-distro/centos_9.stream agent/on mon_election/connectivity task/test_monitoring_stack_basic}
email: null
first_in_suite: false
flavor: default
job_id: '4360'
last_in_suite: false
machine_type: vps
name: kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps
no_nested_subset: false
os_type: centos
os_version: 9.stream
overrides:
  admin_socket:
    branch: tentacle
  ansible.cephlab:
    branch: main
    repo: https://github.com/kshtsk/ceph-cm-ansible.git
    skip_tags: nagios,monitoring-scripts,hostname,pubkeys,zap,sudoers,kerberos,ntp-client,resolvconf,cpan,nfs
    vars:
      logical_volumes:
        lv_1:
          scratch_dev: true
          size: 25%VG
          vg: vg_nvme
        lv_2:
          scratch_dev: true
          size: 25%VG
          vg: vg_nvme
        lv_3:
          scratch_dev: true
          size: 25%VG
          vg: vg_nvme
        lv_4:
          scratch_dev: true
          size: 25%VG
          vg: vg_nvme
      timezone: UTC
      volume_groups:
        vg_nvme:
          pvs: /dev/vdb,/dev/vdc,/dev/vdd,/dev/vde
  ceph:
    conf:
      global:
        mon election default strategy: 3
      mgr:
        debug mgr: 20
        debug ms: 1
        mgr/cephadm/use_agent: true
      mon:
        debug mon: 20
        debug ms: 1
        debug paxos: 20
      osd:
        debug ms: 1
        debug osd: 20
        osd mclock iops capacity threshold hdd: 49000
    flavor: default
    log-ignorelist:
    - \(MDS_ALL_DOWN\)
    - \(MDS_UP_LESS_THAN_MAX\)
    - MON_DOWN
    - mons down
    - mon down
    - out of quorum
    - CEPHADM_STRAY_DAEMON
    - CEPHADM_FAILED_DAEMON
    log-only-match:
    - CEPHADM_
    sha1: 5bb3278730741031382ca9c3dc9d221a942e06a2
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon: {}
  cephadm:
    cephadm_binary_url: https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm
  install:
    ceph:
      flavor: default
      sha1: 5bb3278730741031382ca9c3dc9d221a942e06a2
    extra_system_packages:
      deb:
      - python3-jmespath
      - python3-xmltodict
      - s3cmd
      rpm:
      - bzip2
      - perl-Test-Harness
      - python3-jmespath
      - python3-xmltodict
      - s3cmd
  selinux:
    allowlist:
    - scontext=system_u:system_r:logrotate_t:s0
    - scontext=system_u:system_r:getty_t:s0
  workunit:
    branch: tt-tentacle
    sha1: 0392f78529848ec72469e8e431875cb98d3a5fb4
owner: kyr
priority: 1000
repo: https://github.com/ceph/ceph.git
roles:
- - host.a
  - mon.a
  - mgr.a
  - osd.0
- - host.b
  - mon.b
  - mgr.b
  - osd.1
- - host.c
  - mon.c
  - osd.2
seed: 6407
sha1: 5bb3278730741031382ca9c3dc9d221a942e06a2
sleep_before_teardown: 0
subset: 1/100000
suite: rados
suite_branch: tt-tentacle
suite_path: /home/teuthos/src/github.com_kshtsk_ceph_0392f78529848ec72469e8e431875cb98d3a5fb4/qa
suite_relpath: qa
suite_repo: https://github.com/kshtsk/ceph.git
suite_sha1: 0392f78529848ec72469e8e431875cb98d3a5fb4
targets:
  vm00.local: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPzgsvVwqL3j6I5JcMmol58A32SxrQsjE0Y2P+uqD1aykQAUQMavX+7l3Lea3fWrTSmrP38KjviIfbGlupu+NkY=
  vm05.local: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNrFbcYshVCQzIrwVAuCemvU+msHoFpbwoACSq9Kk4ZbG98mF7jsiU/8QisIOVbCUX+4KX1wHetXoqpuq7kJIww=
  vm09.local: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBAWYIBbezg5YRU+pomTjvL8HT/DilpbHB+1J70SBc09h3UVK11iRKI8iMTKy5tdWGl7wuFoGg1HlycaCMDlt41c=
tasks:
- pexec:
    all:
    - sudo dnf remove nvme-cli -y
    - sudo dnf install nvmetcli nvme-cli -y
- install: null
- cephadm: null
- cephadm.shell:
    host.a:
    - |
      set -e
      set -x
      ceph orch apply node-exporter
      ceph orch apply grafana
      ceph orch apply alertmanager
      ceph orch apply prometheus
      sleep 240
      ceph orch ls
      ceph orch ps
      ceph orch host ls
      MON_DAEMON=$(ceph orch ps --daemon-type mon -f json | jq -r 'last | .daemon_name')
      GRAFANA_HOST=$(ceph orch ps --daemon-type grafana -f json | jq -e '.[]' | jq -r '.hostname')
      PROM_HOST=$(ceph orch ps --daemon-type prometheus -f json | jq -e '.[]' | jq -r '.hostname')
      ALERTM_HOST=$(ceph orch ps --daemon-type alertmanager -f json | jq -e '.[]' | jq -r '.hostname')
      GRAFANA_IP=$(ceph orch host ls -f json | jq -r --arg GRAFANA_HOST "$GRAFANA_HOST" '.[] | select(.hostname==$GRAFANA_HOST) | .addr')
      PROM_IP=$(ceph orch host ls -f json | jq -r --arg PROM_HOST "$PROM_HOST" '.[] | select(.hostname==$PROM_HOST) | .addr')
      ALERTM_IP=$(ceph orch host ls -f json | jq -r --arg ALERTM_HOST "$ALERTM_HOST" '.[] | select(.hostname==$ALERTM_HOST) | .addr')
      # check each host node-exporter metrics endpoint is responsive
      ALL_HOST_IPS=$(ceph orch host ls -f json | jq -r '.[] | .addr')
      for ip in $ALL_HOST_IPS; do
        curl -s http://${ip}:9100/metric
      done
      # check grafana endpoints are responsive and database health is okay
      curl -k -s https://${GRAFANA_IP}:3000/api/health
      curl -k -s https://${GRAFANA_IP}:3000/api/health | jq -e '.database == "ok"'
      # stop mon daemon in order to trigger an alert
      ceph orch daemon stop $MON_DAEMON
      sleep 120
      # check prometheus endpoints are responsive and mon down alert is firing
      curl -s http://${PROM_IP}:9095/api/v1/status/config
      curl -s http://${PROM_IP}:9095/api/v1/status/config | jq -e '.status == "success"'
      curl -s http://${PROM_IP}:9095/api/v1/alerts
      curl -s http://${PROM_IP}:9095/api/v1/alerts | jq -e '.data | .alerts | .[] | select(.labels | .alertname == "CephMonDown") | .state == "firing"'
      # check alertmanager endpoints are responsive and mon down alert is active
      curl -s http://${ALERTM_IP}:9093/api/v2/status
      curl -s http://${ALERTM_IP}:9093/api/v2/alerts
      curl -s http://${ALERTM_IP}:9093/api/v2/alerts | jq -e '.[] | select(.labels | .alertname == "CephMonDown") | .status | .state == "active"'
      # check prometheus metrics endpoint is not empty and make sure we can get metrics
      METRICS_URL=$(ceph mgr services | jq -r .prometheus)
      [ -n "$METRICS_URL" ] || exit 1
      curl -s "${METRICS_URL}metrics" | grep -q '^ceph_health_status'
teuthology:
  fragments_dropped: []
  meta: {}
  postmerge: []
teuthology_branch: uv2
teuthology_repo: https://github.com/kshtsk/teuthology
teuthology_sha1: a59626679648f962bca99d20d35578f2998c8f37
timestamp: 2026-03-31_11:18:10
tube: vps
user: kyr
verbose: false
worker_log: /home/teuthos/.teuthology/dispatcher/dispatcher.vps.282426
2026-03-31T22:31:22.471 INFO:teuthology.run:suite_path is set to /home/teuthos/src/github.com_kshtsk_ceph_0392f78529848ec72469e8e431875cb98d3a5fb4/qa; will attempt to use it
2026-03-31T22:31:22.472 INFO:teuthology.run:Found tasks at /home/teuthos/src/github.com_kshtsk_ceph_0392f78529848ec72469e8e431875cb98d3a5fb4/qa/tasks
2026-03-31T22:31:22.472 INFO:teuthology.run_tasks:Running task internal.check_packages...
2026-03-31T22:31:22.472 INFO:teuthology.task.internal:Checking packages...
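The cephadm.shell block above carries the whole test: deploy the four monitoring services, wait for them to settle, resolve each daemon's host address from ceph orch host ls -f json, then probe the HTTP endpoints, relying on jq -e so that the exit status carries each assertion. A minimal standalone sketch of the central check, the CephMonDown alert, assuming a reachable cluster and the same default Prometheus port 9095 used by the task:

    # Hedged sketch: re-run the task's CephMonDown assertion by hand.
    # Assumes `ceph` and `jq` are on PATH and prometheus listens on 9095.
    PROM_HOST=$(ceph orch ps --daemon-type prometheus -f json | jq -r '.[0].hostname')
    PROM_IP=$(ceph orch host ls -f json | jq -r --arg h "$PROM_HOST" '.[] | select(.hostname==$h) | .addr')
    curl -s "http://${PROM_IP}:9095/api/v1/alerts" |
      jq -e '.data.alerts[] | select(.labels.alertname == "CephMonDown") | .state == "firing"'

jq -e exits non-zero unless the last value it produces is true, so the pipeline fails exactly when the alert is absent or not yet firing.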
2026-03-31T22:31:22.472 INFO:teuthology.task.internal:Checking packages for os_type 'centos', flavor 'default' and ceph hash '5bb3278730741031382ca9c3dc9d221a942e06a2'
2026-03-31T22:31:22.472 WARNING:teuthology.packaging:More than one of ref, tag, branch, or sha1 supplied; using branch
2026-03-31T22:31:22.472 INFO:teuthology.packaging:ref: None
2026-03-31T22:31:22.472 INFO:teuthology.packaging:tag: None
2026-03-31T22:31:22.472 INFO:teuthology.packaging:branch: tentacle
2026-03-31T22:31:22.472 INFO:teuthology.packaging:sha1: 5bb3278730741031382ca9c3dc9d221a942e06a2
2026-03-31T22:31:22.472 DEBUG:teuthology.packaging:Querying https://shaman.ceph.com/api/search?status=ready&project=ceph&flavor=default&distros=centos%2F9%2Fx86_64&ref=tentacle
2026-03-31T22:31:23.194 INFO:teuthology.task.internal:Found packages for ceph version 20.2.0-721.g5bb32787
2026-03-31T22:31:23.195 INFO:teuthology.run_tasks:Running task internal.buildpackages_prep...
2026-03-31T22:31:23.202 INFO:teuthology.task.internal:no buildpackages task found
2026-03-31T22:31:23.202 INFO:teuthology.run_tasks:Running task internal.save_config...
2026-03-31T22:31:23.220 INFO:teuthology.task.internal:Saving configuration
2026-03-31T22:31:23.226 INFO:teuthology.run_tasks:Running task internal.check_lock...
2026-03-31T22:31:23.226 INFO:teuthology.task.internal.check_lock:Checking locks...
2026-03-31T22:31:23.233 DEBUG:teuthology.task.internal.check_lock:machine status is {'name': 'vm00.local', 'description': '/archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360', 'up': True, 'machine_type': 'vps', 'is_vm': True, 'vm_host': {'name': 'localhost', 'description': None, 'up': True, 'machine_type': 'libvirt', 'is_vm': False, 'vm_host': None, 'os_type': None, 'os_version': None, 'arch': None, 'locked': True, 'locked_since': None, 'locked_by': None, 'mac_address': None, 'ssh_pub_key': None}, 'os_type': 'centos', 'os_version': '9.stream', 'arch': 'x86_64', 'locked': True, 'locked_since': '2026-03-31 22:30:45.243160', 'locked_by': 'kyr', 'mac_address': '52:55:00:00:00:00', 'ssh_pub_key': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPzgsvVwqL3j6I5JcMmol58A32SxrQsjE0Y2P+uqD1aykQAUQMavX+7l3Lea3fWrTSmrP38KjviIfbGlupu+NkY='}
2026-03-31T22:31:23.237 DEBUG:teuthology.task.internal.check_lock:machine status is {'name': 'vm05.local', 'description': '/archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360', 'up': True, 'machine_type': 'vps', 'is_vm': True, 'vm_host': {'name': 'localhost', 'description': None, 'up': True, 'machine_type': 'libvirt', 'is_vm': False, 'vm_host': None, 'os_type': None, 'os_version': None, 'arch': None, 'locked': True, 'locked_since': None, 'locked_by': None, 'mac_address': None, 'ssh_pub_key': None}, 'os_type': 'centos', 'os_version': '9.stream', 'arch': 'x86_64', 'locked': True, 'locked_since': '2026-03-31 22:29:41.830501', 'locked_by': 'kyr', 'mac_address': '52:55:00:00:00:05', 'ssh_pub_key': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNrFbcYshVCQzIrwVAuCemvU+msHoFpbwoACSq9Kk4ZbG98mF7jsiU/8QisIOVbCUX+4KX1wHetXoqpuq7kJIww='}
2026-03-31T22:31:23.242 DEBUG:teuthology.task.internal.check_lock:machine status is {'name': 'vm09.local', 'description': '/archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360', 'up': True, 'machine_type': 'vps', 'is_vm': True, 'vm_host': {'name': 'localhost', 'description': None, 'up': True, 'machine_type': 'libvirt', 'is_vm': False, 'vm_host': None, 'os_type': None, 'os_version': None, 'arch': None, 'locked': True, 'locked_since': None, 'locked_by': None, 'mac_address': None, 'ssh_pub_key': None}, 'os_type': 'centos', 'os_version': '9.stream', 'arch': 'x86_64', 'locked': True, 'locked_since': '2026-03-31 22:29:41.830049', 'locked_by': 'kyr', 'mac_address': '52:55:00:00:00:09', 'ssh_pub_key': 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBAWYIBbezg5YRU+pomTjvL8HT/DilpbHB+1J70SBc09h3UVK11iRKI8iMTKy5tdWGl7wuFoGg1HlycaCMDlt41c='}
2026-03-31T22:31:23.242 INFO:teuthology.run_tasks:Running task internal.add_remotes...
2026-03-31T22:31:23.243 INFO:teuthology.task.internal:roles: ubuntu@vm00.local - ['host.a', 'mon.a', 'mgr.a', 'osd.0']
2026-03-31T22:31:23.243 INFO:teuthology.task.internal:roles: ubuntu@vm05.local - ['host.b', 'mon.b', 'mgr.b', 'osd.1']
2026-03-31T22:31:23.243 INFO:teuthology.task.internal:roles: ubuntu@vm09.local - ['host.c', 'mon.c', 'osd.2']
2026-03-31T22:31:23.243 INFO:teuthology.run_tasks:Running task console_log...
2026-03-31T22:31:23.249 DEBUG:teuthology.task.console_log:vm00 does not support IPMI; excluding
2026-03-31T22:31:23.254 DEBUG:teuthology.task.console_log:vm05 does not support IPMI; excluding
2026-03-31T22:31:23.260 DEBUG:teuthology.task.console_log:vm09 does not support IPMI; excluding
2026-03-31T22:31:23.261 DEBUG:teuthology.exit:Installing handler: Handler(exiter=, func=.kill_console_loggers at 0x7f11ce1a2290>, signals=[15])
2026-03-31T22:31:23.261 INFO:teuthology.run_tasks:Running task internal.connect...
2026-03-31T22:31:23.262 INFO:teuthology.task.internal:Opening connections...
2026-03-31T22:31:23.262 DEBUG:teuthology.task.internal:connecting to ubuntu@vm00.local
2026-03-31T22:31:23.262 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm00.local', 'username': 'ubuntu', 'timeout': 60}
2026-03-31T22:31:23.322 DEBUG:teuthology.task.internal:connecting to ubuntu@vm05.local
2026-03-31T22:31:23.322 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm05.local', 'username': 'ubuntu', 'timeout': 60}
2026-03-31T22:31:23.393 DEBUG:teuthology.task.internal:connecting to ubuntu@vm09.local
2026-03-31T22:31:23.393 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm09.local', 'username': 'ubuntu', 'timeout': 60}
2026-03-31T22:31:23.451 INFO:teuthology.run_tasks:Running task internal.push_inventory...
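check_packages resolves the branch against the shaman build index before any node is touched; the same query the log shows can be issued directly from a shell. A sketch, assuming only that the endpoint returns a JSON array of build records (the exact field set is not visible in this log):

    # Hedged sketch: ask shaman whether ready builds exist for this ref/distro/flavor.
    curl -s 'https://shaman.ceph.com/api/search?status=ready&project=ceph&flavor=default&distros=centos%2F9%2Fx86_64&ref=tentacle' |
      jq -e 'length > 0'   # non-empty array means packages were found

An empty array here is what would have made the task fail instead of reporting "Found packages for ceph version 20.2.0-721.g5bb32787".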
2026-03-31T22:31:23.453 DEBUG:teuthology.orchestra.run.vm00:> uname -m
2026-03-31T22:31:23.467 INFO:teuthology.orchestra.run.vm00.stdout:x86_64
2026-03-31T22:31:23.467 DEBUG:teuthology.orchestra.run.vm00:> cat /etc/os-release
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:NAME="CentOS Stream"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:VERSION="9"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:ID="centos"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:ID_LIKE="rhel fedora"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:VERSION_ID="9"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:PLATFORM_ID="platform:el9"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:PRETTY_NAME="CentOS Stream 9"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:ANSI_COLOR="0;31"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:LOGO="fedora-logo-icon"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:CPE_NAME="cpe:/o:centos:centos:9"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:HOME_URL="https://centos.org/"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:BUG_REPORT_URL="https://issues.redhat.com/"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:REDHAT_SUPPORT_PRODUCT="Red Hat Enterprise Linux 9"
2026-03-31T22:31:23.522 INFO:teuthology.orchestra.run.vm00.stdout:REDHAT_SUPPORT_PRODUCT_VERSION="CentOS Stream"
2026-03-31T22:31:23.523 INFO:teuthology.lock.ops:Updating vm00.local on lock server
2026-03-31T22:31:23.527 DEBUG:teuthology.orchestra.run.vm05:> uname -m
2026-03-31T22:31:23.544 INFO:teuthology.orchestra.run.vm05.stdout:x86_64
2026-03-31T22:31:23.544 DEBUG:teuthology.orchestra.run.vm05:> cat /etc/os-release
2026-03-31T22:31:23.600 INFO:teuthology.orchestra.run.vm05.stdout:NAME="CentOS Stream"
2026-03-31T22:31:23.600 INFO:teuthology.orchestra.run.vm05.stdout:VERSION="9"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:ID="centos"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:ID_LIKE="rhel fedora"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:VERSION_ID="9"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:PLATFORM_ID="platform:el9"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:PRETTY_NAME="CentOS Stream 9"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:ANSI_COLOR="0;31"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:LOGO="fedora-logo-icon"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:CPE_NAME="cpe:/o:centos:centos:9"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:HOME_URL="https://centos.org/"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:BUG_REPORT_URL="https://issues.redhat.com/"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:REDHAT_SUPPORT_PRODUCT="Red Hat Enterprise Linux 9"
2026-03-31T22:31:23.601 INFO:teuthology.orchestra.run.vm05.stdout:REDHAT_SUPPORT_PRODUCT_VERSION="CentOS Stream"
2026-03-31T22:31:23.601 INFO:teuthology.lock.ops:Updating vm05.local on lock server
2026-03-31T22:31:23.605 DEBUG:teuthology.orchestra.run.vm09:> uname -m
2026-03-31T22:31:23.622 INFO:teuthology.orchestra.run.vm09.stdout:x86_64
2026-03-31T22:31:23.622 DEBUG:teuthology.orchestra.run.vm09:> cat /etc/os-release
2026-03-31T22:31:23.677 INFO:teuthology.orchestra.run.vm09.stdout:NAME="CentOS Stream"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:VERSION="9"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:ID="centos"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:ID_LIKE="rhel fedora"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:VERSION_ID="9"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:PLATFORM_ID="platform:el9"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:PRETTY_NAME="CentOS Stream 9"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:ANSI_COLOR="0;31"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:LOGO="fedora-logo-icon"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:CPE_NAME="cpe:/o:centos:centos:9"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:HOME_URL="https://centos.org/"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:BUG_REPORT_URL="https://issues.redhat.com/"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:REDHAT_SUPPORT_PRODUCT="Red Hat Enterprise Linux 9"
2026-03-31T22:31:23.678 INFO:teuthology.orchestra.run.vm09.stdout:REDHAT_SUPPORT_PRODUCT_VERSION="CentOS Stream"
2026-03-31T22:31:23.678 INFO:teuthology.lock.ops:Updating vm09.local on lock server
2026-03-31T22:31:23.682 INFO:teuthology.run_tasks:Running task internal.serialize_remote_roles...
2026-03-31T22:31:23.684 INFO:teuthology.run_tasks:Running task internal.check_conflict...
2026-03-31T22:31:23.685 INFO:teuthology.task.internal:Checking for old test directory...
2026-03-31T22:31:23.685 DEBUG:teuthology.orchestra.run.vm00:> test '!' -e /home/ubuntu/cephtest
2026-03-31T22:31:23.687 DEBUG:teuthology.orchestra.run.vm05:> test '!' -e /home/ubuntu/cephtest
2026-03-31T22:31:23.689 DEBUG:teuthology.orchestra.run.vm09:> test '!' -e /home/ubuntu/cephtest
2026-03-31T22:31:23.733 INFO:teuthology.run_tasks:Running task internal.check_ceph_data...
2026-03-31T22:31:23.734 INFO:teuthology.task.internal:Checking for non-empty /var/lib/ceph...
2026-03-31T22:31:23.734 DEBUG:teuthology.orchestra.run.vm00:> test -z $(ls -A /var/lib/ceph)
2026-03-31T22:31:23.743 DEBUG:teuthology.orchestra.run.vm05:> test -z $(ls -A /var/lib/ceph)
2026-03-31T22:31:23.747 DEBUG:teuthology.orchestra.run.vm09:> test -z $(ls -A /var/lib/ceph)
2026-03-31T22:31:23.756 INFO:teuthology.orchestra.run.vm00.stderr:ls: cannot access '/var/lib/ceph': No such file or directory
2026-03-31T22:31:23.763 INFO:teuthology.orchestra.run.vm05.stderr:ls: cannot access '/var/lib/ceph': No such file or directory
2026-03-31T22:31:23.789 INFO:teuthology.orchestra.run.vm09.stderr:ls: cannot access '/var/lib/ceph': No such file or directory
2026-03-31T22:31:23.790 INFO:teuthology.run_tasks:Running task internal.vm_setup...
2026-03-31T22:31:23.796 DEBUG:teuthology.orchestra.run.vm00:> test -e /ceph-qa-ready
2026-03-31T22:31:23.810 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T22:31:24.021 DEBUG:teuthology.orchestra.run.vm05:> test -e /ceph-qa-ready
2026-03-31T22:31:24.036 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T22:31:24.226 DEBUG:teuthology.orchestra.run.vm09:> test -e /ceph-qa-ready
2026-03-31T22:31:24.241 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T22:31:24.425 INFO:teuthology.run_tasks:Running task internal.base...
2026-03-31T22:31:24.426 INFO:teuthology.task.internal:Creating test directory...
2026-03-31T22:31:24.426 DEBUG:teuthology.orchestra.run.vm00:> mkdir -p -m0755 -- /home/ubuntu/cephtest
2026-03-31T22:31:24.428 DEBUG:teuthology.orchestra.run.vm05:> mkdir -p -m0755 -- /home/ubuntu/cephtest
2026-03-31T22:31:24.430 DEBUG:teuthology.orchestra.run.vm09:> mkdir -p -m0755 -- /home/ubuntu/cephtest
2026-03-31T22:31:24.447 INFO:teuthology.run_tasks:Running task internal.archive_upload...
2026-03-31T22:31:24.448 INFO:teuthology.run_tasks:Running task internal.archive...
2026-03-31T22:31:24.449 INFO:teuthology.task.internal:Creating archive directory...
2026-03-31T22:31:24.449 DEBUG:teuthology.orchestra.run.vm00:> install -d -m0755 -- /home/ubuntu/cephtest/archive
2026-03-31T22:31:24.484 DEBUG:teuthology.orchestra.run.vm05:> install -d -m0755 -- /home/ubuntu/cephtest/archive
2026-03-31T22:31:24.488 DEBUG:teuthology.orchestra.run.vm09:> install -d -m0755 -- /home/ubuntu/cephtest/archive
2026-03-31T22:31:24.506 INFO:teuthology.run_tasks:Running task internal.coredump...
2026-03-31T22:31:24.508 INFO:teuthology.task.internal:Enabling coredump saving...
2026-03-31T22:31:24.508 DEBUG:teuthology.orchestra.run.vm00:> test -f /run/.containerenv -o -f /.dockerenv
2026-03-31T22:31:24.552 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T22:31:24.552 DEBUG:teuthology.orchestra.run.vm05:> test -f /run/.containerenv -o -f /.dockerenv
2026-03-31T22:31:24.568 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T22:31:24.569 DEBUG:teuthology.orchestra.run.vm09:> test -f /run/.containerenv -o -f /.dockerenv
2026-03-31T22:31:24.583 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T22:31:24.583 DEBUG:teuthology.orchestra.run.vm00:> install -d -m0755 -- /home/ubuntu/cephtest/archive/coredump && sudo sysctl -w kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core && echo kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core | sudo tee -a /etc/sysctl.conf
2026-03-31T22:31:24.596 DEBUG:teuthology.orchestra.run.vm05:> install -d -m0755 -- /home/ubuntu/cephtest/archive/coredump && sudo sysctl -w kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core && echo kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core | sudo tee -a /etc/sysctl.conf
2026-03-31T22:31:24.611 DEBUG:teuthology.orchestra.run.vm09:> install -d -m0755 -- /home/ubuntu/cephtest/archive/coredump && sudo sysctl -w kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core && echo kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core | sudo tee -a /etc/sysctl.conf
2026-03-31T22:31:24.617 INFO:teuthology.orchestra.run.vm00.stdout:kernel.core_pattern = /home/ubuntu/cephtest/archive/coredump/%t.%p.core
2026-03-31T22:31:24.626 INFO:teuthology.orchestra.run.vm00.stdout:kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core
2026-03-31T22:31:24.636 INFO:teuthology.orchestra.run.vm05.stdout:kernel.core_pattern = /home/ubuntu/cephtest/archive/coredump/%t.%p.core
2026-03-31T22:31:24.646 INFO:teuthology.orchestra.run.vm05.stdout:kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core
2026-03-31T22:31:24.649 INFO:teuthology.orchestra.run.vm09.stdout:kernel.core_pattern = /home/ubuntu/cephtest/archive/coredump/%t.%p.core
2026-03-31T22:31:24.658 INFO:teuthology.orchestra.run.vm09.stdout:kernel.core_pattern=/home/ubuntu/cephtest/archive/coredump/%t.%p.core
2026-03-31T22:31:24.659 INFO:teuthology.run_tasks:Running task internal.sudo...
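The coredump task rewrites kernel.core_pattern so that any crash during the run lands in the archive directory with a %t.%p (epoch.pid) name. A quick way to confirm the pattern took effect on a node; the forced segfault is only an illustration, not part of the task:

    # Hedged sketch: verify the core pattern and exercise it once.
    sysctl kernel.core_pattern                   # should print the cephtest coredump path
    ulimit -c unlimited                          # allow core files in this shell
    bash -c 'kill -SEGV $$' || true              # crash a throwaway child shell
    ls /home/ubuntu/cephtest/archive/coredump/   # expect a <epoch>.<pid>.core file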
2026-03-31T22:31:24.660 INFO:teuthology.task.internal:Configuring sudo...
2026-03-31T22:31:24.661 DEBUG:teuthology.orchestra.run.vm00:> sudo sed -i.orig.teuthology -e 's/^\([^#]*\) \(requiretty\)/\1 !\2/g' -e 's/^\([^#]*\) !\(visiblepw\)/\1 \2/g' /etc/sudoers
2026-03-31T22:31:24.669 DEBUG:teuthology.orchestra.run.vm05:> sudo sed -i.orig.teuthology -e 's/^\([^#]*\) \(requiretty\)/\1 !\2/g' -e 's/^\([^#]*\) !\(visiblepw\)/\1 \2/g' /etc/sudoers
2026-03-31T22:31:24.689 DEBUG:teuthology.orchestra.run.vm09:> sudo sed -i.orig.teuthology -e 's/^\([^#]*\) \(requiretty\)/\1 !\2/g' -e 's/^\([^#]*\) !\(visiblepw\)/\1 \2/g' /etc/sudoers
2026-03-31T22:31:24.725 INFO:teuthology.run_tasks:Running task internal.syslog...
2026-03-31T22:31:24.727 INFO:teuthology.task.internal.syslog:Starting syslog monitoring...
2026-03-31T22:31:24.727 DEBUG:teuthology.orchestra.run.vm00:> mkdir -p -m0755 -- /home/ubuntu/cephtest/archive/syslog
2026-03-31T22:31:24.732 DEBUG:teuthology.orchestra.run.vm05:> mkdir -p -m0755 -- /home/ubuntu/cephtest/archive/syslog
2026-03-31T22:31:24.755 DEBUG:teuthology.orchestra.run.vm09:> mkdir -p -m0755 -- /home/ubuntu/cephtest/archive/syslog
2026-03-31T22:31:24.781 DEBUG:teuthology.orchestra.run.vm00:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/kern.log
2026-03-31T22:31:24.806 DEBUG:teuthology.orchestra.run.vm00:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/misc.log
2026-03-31T22:31:24.862 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:31:24.862 DEBUG:teuthology.orchestra.run.vm00:> sudo dd of=/etc/rsyslog.d/80-cephtest.conf
2026-03-31T22:31:24.919 DEBUG:teuthology.orchestra.run.vm05:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/kern.log
2026-03-31T22:31:24.940 DEBUG:teuthology.orchestra.run.vm05:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/misc.log
2026-03-31T22:31:24.994 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:31:24.994 DEBUG:teuthology.orchestra.run.vm05:> sudo dd of=/etc/rsyslog.d/80-cephtest.conf
2026-03-31T22:31:25.055 DEBUG:teuthology.orchestra.run.vm09:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/kern.log
2026-03-31T22:31:25.077 DEBUG:teuthology.orchestra.run.vm09:> install -m 666 /dev/null /home/ubuntu/cephtest/archive/syslog/misc.log
2026-03-31T22:31:25.135 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:31:25.135 DEBUG:teuthology.orchestra.run.vm09:> sudo dd of=/etc/rsyslog.d/80-cephtest.conf
2026-03-31T22:31:25.196 DEBUG:teuthology.orchestra.run.vm00:> sudo service rsyslog restart
2026-03-31T22:31:25.198 DEBUG:teuthology.orchestra.run.vm05:> sudo service rsyslog restart
2026-03-31T22:31:25.200 DEBUG:teuthology.orchestra.run.vm09:> sudo service rsyslog restart
2026-03-31T22:31:25.224 INFO:teuthology.orchestra.run.vm00.stderr:Redirecting to /bin/systemctl restart rsyslog.service
2026-03-31T22:31:25.229 INFO:teuthology.orchestra.run.vm05.stderr:Redirecting to /bin/systemctl restart rsyslog.service
2026-03-31T22:31:25.265 INFO:teuthology.orchestra.run.vm09.stderr:Redirecting to /bin/systemctl restart rsyslog.service
2026-03-31T22:31:25.700 INFO:teuthology.run_tasks:Running task internal.timer...
2026-03-31T22:31:25.702 INFO:teuthology.task.internal:Starting timer...
2026-03-31T22:31:25.702 INFO:teuthology.run_tasks:Running task pcp...
2026-03-31T22:31:25.705 INFO:teuthology.run_tasks:Running task selinux...
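The dd of=/etc/rsyslog.d/80-cephtest.conf invocation above receives the file content on stdin, so the rules themselves never appear in the log. They would be ordinary rsyslog selector rules routing kernel messages and everything else into the two files created just before. An assumed example of that shape (standard rsyslog selector syntax, not the actual teuthology-generated content):

    # Hedged sketch of what an 80-cephtest.conf could look like.
    # "-" before the path requests asynchronous writes.
    kern.*          -/home/ubuntu/cephtest/archive/syslog/kern.log
    *.*;kern.none   -/home/ubuntu/cephtest/archive/syslog/misc.log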
2026-03-31T22:31:25.707 DEBUG:teuthology.task:Applying overrides for task selinux: {'allowlist': ['scontext=system_u:system_r:logrotate_t:s0', 'scontext=system_u:system_r:getty_t:s0']}
2026-03-31T22:31:25.707 INFO:teuthology.task.selinux:Excluding vm00: VMs are not yet supported
2026-03-31T22:31:25.707 INFO:teuthology.task.selinux:Excluding vm05: VMs are not yet supported
2026-03-31T22:31:25.707 INFO:teuthology.task.selinux:Excluding vm09: VMs are not yet supported
2026-03-31T22:31:25.707 DEBUG:teuthology.task.selinux:Getting current SELinux state
2026-03-31T22:31:25.707 DEBUG:teuthology.task.selinux:Existing SELinux modes: {}
2026-03-31T22:31:25.707 INFO:teuthology.task.selinux:Putting SELinux into permissive mode
2026-03-31T22:31:25.707 INFO:teuthology.run_tasks:Running task ansible.cephlab...
2026-03-31T22:31:25.709 DEBUG:teuthology.task:Applying overrides for task ansible.cephlab: {'branch': 'main', 'repo': 'https://github.com/kshtsk/ceph-cm-ansible.git', 'skip_tags': 'nagios,monitoring-scripts,hostname,pubkeys,zap,sudoers,kerberos,ntp-client,resolvconf,cpan,nfs', 'vars': {'logical_volumes': {'lv_1': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_2': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_3': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}, 'lv_4': {'scratch_dev': True, 'size': '25%VG', 'vg': 'vg_nvme'}}, 'timezone': 'UTC', 'volume_groups': {'vg_nvme': {'pvs': '/dev/vdb,/dev/vdc,/dev/vdd,/dev/vde'}}}}
2026-03-31T22:31:25.709 DEBUG:teuthology.repo_utils:Setting repo remote to https://github.com/kshtsk/ceph-cm-ansible.git
2026-03-31T22:31:25.713 INFO:teuthology.repo_utils:Fetching github.com_kshtsk_ceph-cm-ansible_main from origin
2026-03-31T22:31:26.515 DEBUG:teuthology.repo_utils:Resetting repo at /home/teuthos/src/github.com_kshtsk_ceph-cm-ansible_main to origin/main
2026-03-31T22:31:26.521 INFO:teuthology.task.ansible:Playbook: [{'import_playbook': 'ansible_managed.yml'}, {'import_playbook': 'teuthology.yml'}, {'hosts': 'testnodes', 'tasks': [{'set_fact': {'ran_from_cephlab_playbook': True}}]}, {'import_playbook': 'testnodes.yml'}, {'import_playbook': 'container-host.yml'}, {'import_playbook': 'cobbler.yml'}, {'import_playbook': 'paddles.yml'}, {'import_playbook': 'pulpito.yml'}, {'hosts': 'testnodes', 'become': True, 'tasks': [{'name': 'Touch /ceph-qa-ready', 'file': {'path': '/ceph-qa-ready', 'state': 'touch'}, 'when': 'ran_from_cephlab_playbook|bool'}]}]
2026-03-31T22:31:26.522 DEBUG:teuthology.task.ansible:Running ansible-playbook -v --extra-vars '{"ansible_ssh_user": "ubuntu", "logical_volumes": {"lv_1": {"scratch_dev": true, "size": "25%VG", "vg": "vg_nvme"}, "lv_2": {"scratch_dev": true, "size": "25%VG", "vg": "vg_nvme"}, "lv_3": {"scratch_dev": true, "size": "25%VG", "vg": "vg_nvme"}, "lv_4": {"scratch_dev": true, "size": "25%VG", "vg": "vg_nvme"}}, "timezone": "UTC", "volume_groups": {"vg_nvme": {"pvs": "/dev/vdb,/dev/vdc,/dev/vdd,/dev/vde"}}}' -i /tmp/teuth_ansible_inventorysy0_njv_ --limit vm00.local,vm05.local,vm09.local /home/teuthos/src/github.com_kshtsk_ceph-cm-ansible_main/cephlab.yml --skip-tags nagios,monitoring-scripts,hostname,pubkeys,zap,sudoers,kerberos,ntp-client,resolvconf,cpan,nfs
2026-03-31T22:52:54.311 DEBUG:teuthology.task.ansible:Reconnecting to [Remote(name='ubuntu@vm00.local'), Remote(name='ubuntu@vm05.local'), Remote(name='ubuntu@vm09.local')]
2026-03-31T22:52:54.312 INFO:teuthology.orchestra.remote:Trying to reconnect to host 'ubuntu@vm00.local'
2026-03-31T22:52:54.312 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm00.local', 'username': 'ubuntu', 'timeout': 60}
2026-03-31T22:52:54.372 DEBUG:teuthology.orchestra.run.vm00:> true
2026-03-31T22:52:54.460 INFO:teuthology.orchestra.remote:Successfully reconnected to host 'ubuntu@vm00.local'
2026-03-31T22:52:54.461 INFO:teuthology.orchestra.remote:Trying to reconnect to host 'ubuntu@vm05.local'
2026-03-31T22:52:54.461 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm05.local', 'username': 'ubuntu', 'timeout': 60}
2026-03-31T22:52:54.519 DEBUG:teuthology.orchestra.run.vm05:> true
2026-03-31T22:52:54.596 INFO:teuthology.orchestra.remote:Successfully reconnected to host 'ubuntu@vm05.local'
2026-03-31T22:52:54.596 INFO:teuthology.orchestra.remote:Trying to reconnect to host 'ubuntu@vm09.local'
2026-03-31T22:52:54.597 DEBUG:teuthology.orchestra.connection:{'hostname': 'vm09.local', 'username': 'ubuntu', 'timeout': 60}
2026-03-31T22:52:54.656 DEBUG:teuthology.orchestra.run.vm09:> true
2026-03-31T22:52:54.741 INFO:teuthology.orchestra.remote:Successfully reconnected to host 'ubuntu@vm09.local'
2026-03-31T22:52:54.741 INFO:teuthology.run_tasks:Running task clock...
2026-03-31T22:52:54.744 INFO:teuthology.task.clock:Syncing clocks and checking initial clock skew...
2026-03-31T22:52:54.744 INFO:teuthology.orchestra.run:Running command with timeout 360
2026-03-31T22:52:54.744 DEBUG:teuthology.orchestra.run.vm00:> sudo systemctl stop ntp.service || sudo systemctl stop ntpd.service || sudo systemctl stop chronyd.service ; sudo ntpd -gq || sudo chronyc makestep ; sudo systemctl start ntp.service || sudo systemctl start ntpd.service || sudo systemctl start chronyd.service ; PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true
2026-03-31T22:52:54.746 INFO:teuthology.orchestra.run:Running command with timeout 360
2026-03-31T22:52:54.746 DEBUG:teuthology.orchestra.run.vm05:> sudo systemctl stop ntp.service || sudo systemctl stop ntpd.service || sudo systemctl stop chronyd.service ; sudo ntpd -gq || sudo chronyc makestep ; sudo systemctl start ntp.service || sudo systemctl start ntpd.service || sudo systemctl start chronyd.service ; PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true
2026-03-31T22:52:54.747 INFO:teuthology.orchestra.run:Running command with timeout 360
2026-03-31T22:52:54.747 DEBUG:teuthology.orchestra.run.vm09:> sudo systemctl stop ntp.service || sudo systemctl stop ntpd.service || sudo systemctl stop chronyd.service ; sudo ntpd -gq || sudo chronyc makestep ; sudo systemctl start ntp.service || sudo systemctl start ntpd.service || sudo systemctl start chronyd.service ; PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true
2026-03-31T22:52:54.769 INFO:teuthology.orchestra.run.vm00.stderr:Failed to stop ntp.service: Unit ntp.service not loaded.
2026-03-31T22:52:54.774 INFO:teuthology.orchestra.run.vm05.stderr:Failed to stop ntp.service: Unit ntp.service not loaded.
2026-03-31T22:52:54.781 INFO:teuthology.orchestra.run.vm00.stderr:Failed to stop ntpd.service: Unit ntpd.service not loaded.
2026-03-31T22:52:54.787 INFO:teuthology.orchestra.run.vm05.stderr:Failed to stop ntpd.service: Unit ntpd.service not loaded.
2026-03-31T22:52:54.799 INFO:teuthology.orchestra.run.vm00.stderr:sudo: ntpd: command not found
2026-03-31T22:52:54.808 INFO:teuthology.orchestra.run.vm00.stdout:506 Cannot talk to daemon
2026-03-31T22:52:54.810 INFO:teuthology.orchestra.run.vm05.stderr:sudo: ntpd: command not found
2026-03-31T22:52:54.811 INFO:teuthology.orchestra.run.vm09.stderr:Failed to stop ntp.service: Unit ntp.service not loaded.
2026-03-31T22:52:54.819 INFO:teuthology.orchestra.run.vm00.stderr:Failed to start ntp.service: Unit ntp.service not found.
2026-03-31T22:52:54.820 INFO:teuthology.orchestra.run.vm05.stdout:506 Cannot talk to daemon
2026-03-31T22:52:54.823 INFO:teuthology.orchestra.run.vm09.stderr:Failed to stop ntpd.service: Unit ntpd.service not loaded.
2026-03-31T22:52:54.832 INFO:teuthology.orchestra.run.vm00.stderr:Failed to start ntpd.service: Unit ntpd.service not found.
2026-03-31T22:52:54.834 INFO:teuthology.orchestra.run.vm05.stderr:Failed to start ntp.service: Unit ntp.service not found.
2026-03-31T22:52:54.846 INFO:teuthology.orchestra.run.vm09.stderr:sudo: ntpd: command not found
2026-03-31T22:52:54.847 INFO:teuthology.orchestra.run.vm05.stderr:Failed to start ntpd.service: Unit ntpd.service not found.
2026-03-31T22:52:54.857 INFO:teuthology.orchestra.run.vm09.stdout:506 Cannot talk to daemon
2026-03-31T22:52:54.870 INFO:teuthology.orchestra.run.vm09.stderr:Failed to start ntp.service: Unit ntp.service not found.
2026-03-31T22:52:54.883 INFO:teuthology.orchestra.run.vm09.stderr:Failed to start ntpd.service: Unit ntpd.service not found.
2026-03-31T22:52:54.893 INFO:teuthology.orchestra.run.vm00.stderr:bash: line 1: ntpq: command not found
2026-03-31T22:52:54.895 INFO:teuthology.orchestra.run.vm00.stdout:MS Name/IP address         Stratum Poll Reach LastRx Last sample
2026-03-31T22:52:54.895 INFO:teuthology.orchestra.run.vm00.stdout:===============================================================================
2026-03-31T22:52:54.897 INFO:teuthology.orchestra.run.vm05.stderr:bash: line 1: ntpq: command not found
2026-03-31T22:52:54.899 INFO:teuthology.orchestra.run.vm05.stdout:MS Name/IP address         Stratum Poll Reach LastRx Last sample
2026-03-31T22:52:54.899 INFO:teuthology.orchestra.run.vm05.stdout:===============================================================================
2026-03-31T22:52:54.939 INFO:teuthology.orchestra.run.vm09.stderr:bash: line 1: ntpq: command not found
2026-03-31T22:52:54.941 INFO:teuthology.orchestra.run.vm09.stdout:MS Name/IP address         Stratum Poll Reach LastRx Last sample
2026-03-31T22:52:54.941 INFO:teuthology.orchestra.run.vm09.stdout:===============================================================================
2026-03-31T22:52:54.942 INFO:teuthology.run_tasks:Running task pexec...
2026-03-31T22:52:54.944 INFO:teuthology.task.pexec:Executing custom commands...
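The clock task's stop/step/start pipeline is written to work with either ntpd or chrony; on these CentOS 9 nodes only chronyd exists, so the ntp.service and ntpd failures above are harmless fallthrough noise, and "506 Cannot talk to daemon" appears because chronyc makestep is attempted while chronyd is stopped. The chrony-only path through the same command, for reference (a trimmed sketch, not a replacement for the task):

    # Hedged sketch: the chrony-only branch of the clock sync command.
    sudo systemctl stop chronyd.service
    sudo chronyc makestep       # prints "506 Cannot talk to daemon" once chronyd is down
    sudo systemctl start chronyd.service
    PATH=/usr/bin:/usr/sbin chronyc sources   # the "MS Name/IP address ..." table seen above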
2026-03-31T22:52:54.944 DEBUG:teuthology.orchestra.run.vm00:> TESTDIR=/home/ubuntu/cephtest bash -s
2026-03-31T22:52:54.945 DEBUG:teuthology.orchestra.run.vm05:> TESTDIR=/home/ubuntu/cephtest bash -s
2026-03-31T22:52:54.945 DEBUG:teuthology.orchestra.run.vm09:> TESTDIR=/home/ubuntu/cephtest bash -s
2026-03-31T22:52:54.946 INFO:teuthology.task.pexec:Running commands on host ubuntu@vm00.local
2026-03-31T22:52:54.946 INFO:teuthology.task.pexec:sudo dnf remove nvme-cli -y
2026-03-31T22:52:54.946 INFO:teuthology.task.pexec:sudo dnf install nvmetcli nvme-cli -y
2026-03-31T22:52:54.947 INFO:teuthology.task.pexec:Running commands on host ubuntu@vm05.local
2026-03-31T22:52:54.947 INFO:teuthology.task.pexec:sudo dnf remove nvme-cli -y
2026-03-31T22:52:54.947 INFO:teuthology.task.pexec:sudo dnf install nvmetcli nvme-cli -y
2026-03-31T22:52:54.984 INFO:teuthology.task.pexec:Running commands on host ubuntu@vm09.local
2026-03-31T22:52:54.984 INFO:teuthology.task.pexec:sudo dnf remove nvme-cli -y
2026-03-31T22:52:54.984 INFO:teuthology.task.pexec:sudo dnf install nvmetcli nvme-cli -y
2026-03-31T22:52:55.132 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: nvme-cli
2026-03-31T22:52:55.132 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T22:52:55.134 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T22:52:55.135 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T22:52:55.135 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T22:52:55.135 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: nvme-cli
2026-03-31T22:52:55.135 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T22:52:55.138 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T22:52:55.139 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T22:52:55.139 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T22:52:55.174 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: nvme-cli
2026-03-31T22:52:55.174 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T22:52:55.177 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T22:52:55.178 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T22:52:55.178 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T22:52:55.499 INFO:teuthology.orchestra.run.vm05.stdout:Last metadata expiration check: 0:15:53 ago on Tue 31 Mar 2026 10:37:02 PM UTC.
2026-03-31T22:52:55.510 INFO:teuthology.orchestra.run.vm00.stdout:Last metadata expiration check: 0:19:00 ago on Tue 31 Mar 2026 10:33:55 PM UTC.
2026-03-31T22:52:55.538 INFO:teuthology.orchestra.run.vm09.stdout:Last metadata expiration check: 0:00:20 ago on Tue 31 Mar 2026 10:52:35 PM UTC.
2026-03-31T22:52:55.590 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout: Package               Architecture   Version          Repository     Size
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:Installing:
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout: nvme-cli              x86_64         2.16-1.el9       baseos         1.2 M
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout: nvmetcli              noarch         0.8-3.el9        baseos         44 k
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:Installing dependencies:
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout: python3-configshell   noarch         1:1.1.30-1.el9   baseos         72 k
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout: python3-kmod          x86_64         0.9-32.el9       baseos         84 k
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyparsing     noarch         2.4.7-9.el9      baseos         150 k
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout: python3-urwid         x86_64         2.1.2-4.el9      baseos         837 k
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:Install  6 Packages
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:Total download size: 2.3 M
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:Installed size: 11 M
2026-03-31T22:52:55.591 INFO:teuthology.orchestra.run.vm05.stdout:Downloading Packages:
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout: Package               Architecture   Version          Repository     Size
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:Installing:
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout: nvme-cli              x86_64         2.16-1.el9       baseos         1.2 M
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout: nvmetcli              noarch         0.8-3.el9        baseos         44 k
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:Installing dependencies:
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout: python3-configshell   noarch         1:1.1.30-1.el9   baseos         72 k
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout: python3-kmod          x86_64         0.9-32.el9       baseos         84 k
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyparsing     noarch         2.4.7-9.el9      baseos         150 k
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout: python3-urwid         x86_64         2.1.2-4.el9      baseos         837 k
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:Install  6 Packages
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:Total download size: 2.3 M
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:Installed size: 11 M
2026-03-31T22:52:55.605 INFO:teuthology.orchestra.run.vm00.stdout:Downloading Packages:
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout: Package               Architecture   Version          Repository     Size
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:Installing:
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout: nvme-cli              x86_64         2.16-1.el9       baseos         1.2 M
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout: nvmetcli              noarch         0.8-3.el9        baseos         44 k
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:Installing dependencies:
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout: python3-configshell   noarch         1:1.1.30-1.el9   baseos         72 k
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout: python3-kmod          x86_64         0.9-32.el9       baseos         84 k
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyparsing     noarch         2.4.7-9.el9      baseos         150 k
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout: python3-urwid         x86_64         2.1.2-4.el9      baseos         837 k
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:Install  6 Packages
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:52:55.630 INFO:teuthology.orchestra.run.vm09.stdout:Total download size: 2.3 M
2026-03-31T22:52:55.631 INFO:teuthology.orchestra.run.vm09.stdout:Installed size: 11 M
2026-03-31T22:52:55.631 INFO:teuthology.orchestra.run.vm09.stdout:Downloading Packages:
2026-03-31T22:52:55.980 INFO:teuthology.orchestra.run.vm05.stdout:(1/6): nvmetcli-0.8-3.el9.noarch.rpm 173 kB/s | 44 kB 00:00
2026-03-31T22:52:56.041 INFO:teuthology.orchestra.run.vm09.stdout:(1/6): python3-configshell-1.1.30-1.el9.noarch. 233 kB/s | 72 kB 00:00
2026-03-31T22:52:56.042 INFO:teuthology.orchestra.run.vm09.stdout:(2/6): nvmetcli-0.8-3.el9.noarch.rpm 142 kB/s | 44 kB 00:00
2026-03-31T22:52:56.054 INFO:teuthology.orchestra.run.vm05.stdout:(2/6): python3-configshell-1.1.30-1.el9.noarch. 219 kB/s | 72 kB 00:00
2026-03-31T22:52:56.136 INFO:teuthology.orchestra.run.vm09.stdout:(3/6): python3-kmod-0.9-32.el9.x86_64.rpm 884 kB/s | 84 kB 00:00
2026-03-31T22:52:56.138 INFO:teuthology.orchestra.run.vm09.stdout:(4/6): python3-pyparsing-2.4.7-9.el9.noarch.rpm 1.5 MB/s | 150 kB 00:00
2026-03-31T22:52:56.225 INFO:teuthology.orchestra.run.vm09.stdout:(5/6): nvme-cli-2.16-1.el9.x86_64.rpm 2.3 MB/s | 1.2 MB 00:00
2026-03-31T22:52:56.284 INFO:teuthology.orchestra.run.vm09.stdout:(6/6): python3-urwid-2.1.2-4.el9.x86_64.rpm 5.6 MB/s | 837 kB 00:00
2026-03-31T22:52:56.284 INFO:teuthology.orchestra.run.vm09.stdout:--------------------------------------------------------------------------------
2026-03-31T22:52:56.284 INFO:teuthology.orchestra.run.vm09.stdout:Total 3.5 MB/s | 2.3 MB 00:00
2026-03-31T22:52:56.285 INFO:teuthology.orchestra.run.vm05.stdout:(3/6): python3-kmod-0.9-32.el9.x86_64.rpm 276 kB/s | 84 kB 00:00
2026-03-31T22:52:56.333 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T22:52:56.339 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T22:52:56.339 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T22:52:56.358 INFO:teuthology.orchestra.run.vm05.stdout:(4/6): nvme-cli-2.16-1.el9.x86_64.rpm 1.8 MB/s | 1.2 MB 00:00
2026-03-31T22:52:56.363 INFO:teuthology.orchestra.run.vm05.stdout:(5/6): python3-pyparsing-2.4.7-9.el9.noarch.rpm 488 kB/s | 150 kB 00:00
2026-03-31T22:52:56.389 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T22:52:56.389 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T22:52:56.485 INFO:teuthology.orchestra.run.vm05.stdout:(6/6): python3-urwid-2.1.2-4.el9.x86_64.rpm 4.1 MB/s | 837 kB 00:00
2026-03-31T22:52:56.487 INFO:teuthology.orchestra.run.vm05.stdout:--------------------------------------------------------------------------------
2026-03-31T22:52:56.487 INFO:teuthology.orchestra.run.vm05.stdout:Total 2.6 MB/s | 2.3 MB 00:00
2026-03-31T22:52:56.532 INFO:teuthology.orchestra.run.vm09.stdout:  Preparing        :                                                        1/1
2026-03-31T22:52:56.545 INFO:teuthology.orchestra.run.vm09.stdout:  Installing       : python3-urwid-2.1.2-4.el9.x86_64                      1/6
2026-03-31T22:52:56.546 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T22:52:56.554 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T22:52:56.554 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T22:52:56.555 INFO:teuthology.orchestra.run.vm09.stdout:  Installing       : python3-pyparsing-2.4.7-9.el9.noarch                  2/6
2026-03-31T22:52:56.562 INFO:teuthology.orchestra.run.vm09.stdout:  Installing       : python3-configshell-1:1.1.30-1.el9.noarch             3/6
2026-03-31T22:52:56.569 INFO:teuthology.orchestra.run.vm09.stdout:  Installing       : python3-kmod-0.9-32.el9.x86_64                        4/6
2026-03-31T22:52:56.571 INFO:teuthology.orchestra.run.vm09.stdout:  Installing       : nvmetcli-0.8-3.el9.noarch                             5/6
2026-03-31T22:52:56.586 INFO:teuthology.orchestra.run.vm00.stdout:(1/6): python3-configshell-1.1.30-1.el9.noarch. 166 kB/s | 72 kB 00:00
2026-03-31T22:52:56.593 INFO:teuthology.orchestra.run.vm00.stdout:(2/6): nvmetcli-0.8-3.el9.noarch.rpm 100 kB/s | 44 kB 00:00
2026-03-31T22:52:56.605 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T22:52:56.605 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T22:52:56.694 INFO:teuthology.orchestra.run.vm00.stdout:(3/6): python3-kmod-0.9-32.el9.x86_64.rpm 781 kB/s | 84 kB 00:00
2026-03-31T22:52:56.725 INFO:teuthology.orchestra.run.vm09.stdout:  Running scriptlet: nvmetcli-0.8-3.el9.noarch                             5/6
2026-03-31T22:52:56.729 INFO:teuthology.orchestra.run.vm09.stdout:  Installing       : nvme-cli-2.16-1.el9.x86_64                            6/6
2026-03-31T22:52:56.753 INFO:teuthology.orchestra.run.vm05.stdout:  Preparing        :                                                        1/1
2026-03-31T22:52:56.766 INFO:teuthology.orchestra.run.vm05.stdout:  Installing       : python3-urwid-2.1.2-4.el9.x86_64                      1/6
2026-03-31T22:52:56.779 INFO:teuthology.orchestra.run.vm00.stdout:(4/6): python3-pyparsing-2.4.7-9.el9.noarch.rpm 810 kB/s | 150 kB 00:00
2026-03-31T22:52:56.780 INFO:teuthology.orchestra.run.vm05.stdout:  Installing       : python3-pyparsing-2.4.7-9.el9.noarch                  2/6
2026-03-31T22:52:56.790 INFO:teuthology.orchestra.run.vm05.stdout:  Installing       : python3-configshell-1:1.1.30-1.el9.noarch             3/6
2026-03-31T22:52:56.798 INFO:teuthology.orchestra.run.vm05.stdout:  Installing       : python3-kmod-0.9-32.el9.x86_64                        4/6
2026-03-31T22:52:56.800 INFO:teuthology.orchestra.run.vm05.stdout:  Installing       : nvmetcli-0.8-3.el9.noarch                             5/6
2026-03-31T22:52:56.911 INFO:teuthology.orchestra.run.vm00.stdout:(5/6): python3-urwid-2.1.2-4.el9.x86_64.rpm 3.8 MB/s | 837 kB 00:00
2026-03-31T22:52:56.947 INFO:teuthology.orchestra.run.vm05.stdout:  Running scriptlet: nvmetcli-0.8-3.el9.noarch                             5/6
2026-03-31T22:52:57.005 INFO:teuthology.orchestra.run.vm09.stdout:  Running scriptlet: nvme-cli-2.16-1.el9.x86_64                            6/6
2026-03-31T22:52:57.007 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/default.target.wants/nvmefc-boot-connections.service → /usr/lib/systemd/system/nvmefc-boot-connections.service.
2026-03-31T22:52:57.007 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:52:57.045 INFO:teuthology.orchestra.run.vm05.stdout:  Installing       : nvme-cli-2.16-1.el9.x86_64                            6/6
2026-03-31T22:52:57.333 INFO:teuthology.orchestra.run.vm09.stdout:  Verifying        : nvme-cli-2.16-1.el9.x86_64                            1/6
2026-03-31T22:52:57.333 INFO:teuthology.orchestra.run.vm09.stdout:  Verifying        : nvmetcli-0.8-3.el9.noarch                             2/6
2026-03-31T22:52:57.333 INFO:teuthology.orchestra.run.vm09.stdout:  Verifying        : python3-configshell-1:1.1.30-1.el9.noarch             3/6
2026-03-31T22:52:57.333 INFO:teuthology.orchestra.run.vm09.stdout:  Verifying        : python3-kmod-0.9-32.el9.x86_64                        4/6
2026-03-31T22:52:57.333 INFO:teuthology.orchestra.run.vm09.stdout:  Verifying        : python3-pyparsing-2.4.7-9.el9.noarch                  5/6
2026-03-31T22:52:57.335 INFO:teuthology.orchestra.run.vm05.stdout:  Running scriptlet: nvme-cli-2.16-1.el9.x86_64                            6/6
2026-03-31T22:52:57.335 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/default.target.wants/nvmefc-boot-connections.service → /usr/lib/systemd/system/nvmefc-boot-connections.service.
2026-03-31T22:52:57.335 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:52:57.428 INFO:teuthology.orchestra.run.vm09.stdout:  Verifying        : python3-urwid-2.1.2-4.el9.x86_64                      6/6
2026-03-31T22:52:57.428 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:52:57.428 INFO:teuthology.orchestra.run.vm09.stdout:Installed:
2026-03-31T22:52:57.428 INFO:teuthology.orchestra.run.vm09.stdout:  nvme-cli-2.16-1.el9.x86_64                 nvmetcli-0.8-3.el9.noarch
2026-03-31T22:52:57.428 INFO:teuthology.orchestra.run.vm09.stdout:  python3-configshell-1:1.1.30-1.el9.noarch  python3-kmod-0.9-32.el9.x86_64
2026-03-31T22:52:57.428 INFO:teuthology.orchestra.run.vm09.stdout:  python3-pyparsing-2.4.7-9.el9.noarch       python3-urwid-2.1.2-4.el9.x86_64
2026-03-31T22:52:57.428 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:52:57.428 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T22:52:57.525 DEBUG:teuthology.parallel:result is None
2026-03-31T22:52:57.771 INFO:teuthology.orchestra.run.vm05.stdout:  Verifying        : nvme-cli-2.16-1.el9.x86_64                            1/6
2026-03-31T22:52:57.771 INFO:teuthology.orchestra.run.vm05.stdout:  Verifying        : nvmetcli-0.8-3.el9.noarch                             2/6
2026-03-31T22:52:57.771 INFO:teuthology.orchestra.run.vm05.stdout:  Verifying        : python3-configshell-1:1.1.30-1.el9.noarch             3/6
2026-03-31T22:52:57.771 INFO:teuthology.orchestra.run.vm05.stdout:  Verifying        : python3-kmod-0.9-32.el9.x86_64                        4/6
2026-03-31T22:52:57.771 INFO:teuthology.orchestra.run.vm05.stdout:  Verifying        : python3-pyparsing-2.4.7-9.el9.noarch                  5/6
2026-03-31T22:52:57.827 INFO:teuthology.orchestra.run.vm05.stdout:  Verifying        : python3-urwid-2.1.2-4.el9.x86_64                      6/6
2026-03-31T22:52:57.827 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:52:57.827 INFO:teuthology.orchestra.run.vm05.stdout:Installed:
2026-03-31T22:52:57.827 INFO:teuthology.orchestra.run.vm05.stdout:  nvme-cli-2.16-1.el9.x86_64                 nvmetcli-0.8-3.el9.noarch
2026-03-31T22:52:57.827 INFO:teuthology.orchestra.run.vm05.stdout:  python3-configshell-1:1.1.30-1.el9.noarch  python3-kmod-0.9-32.el9.x86_64
2026-03-31T22:52:57.827 INFO:teuthology.orchestra.run.vm05.stdout:  python3-pyparsing-2.4.7-9.el9.noarch       python3-urwid-2.1.2-4.el9.x86_64
2026-03-31T22:52:57.827 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:52:57.827 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T22:52:57.864 DEBUG:teuthology.parallel:result is None
2026-03-31T22:52:58.906 INFO:teuthology.orchestra.run.vm00.stdout:(6/6): nvme-cli-2.16-1.el9.x86_64.rpm 428 kB/s | 1.2 MB 00:02
2026-03-31T22:52:58.906 INFO:teuthology.orchestra.run.vm00.stdout:--------------------------------------------------------------------------------
2026-03-31T22:52:58.906 INFO:teuthology.orchestra.run.vm00.stdout:Total 717 kB/s | 2.3 MB 00:03
2026-03-31T22:52:58.954 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T22:52:58.961 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T22:52:58.961 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T22:52:59.006 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T22:52:59.006 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T22:52:59.142 INFO:teuthology.orchestra.run.vm00.stdout:  Preparing        :                                                        1/1
2026-03-31T22:52:59.203 INFO:teuthology.orchestra.run.vm00.stdout:  Installing       : python3-urwid-2.1.2-4.el9.x86_64                      1/6
2026-03-31T22:52:59.213 INFO:teuthology.orchestra.run.vm00.stdout:  Installing       : python3-pyparsing-2.4.7-9.el9.noarch                  2/6
2026-03-31T22:52:59.219 INFO:teuthology.orchestra.run.vm00.stdout:  Installing       : python3-configshell-1:1.1.30-1.el9.noarch             3/6
2026-03-31T22:52:59.227 INFO:teuthology.orchestra.run.vm00.stdout:  Installing       : python3-kmod-0.9-32.el9.x86_64                        4/6
2026-03-31T22:52:59.228 INFO:teuthology.orchestra.run.vm00.stdout:  Installing       : nvmetcli-0.8-3.el9.noarch                             5/6
2026-03-31T22:52:59.372 INFO:teuthology.orchestra.run.vm00.stdout:  Running scriptlet: nvmetcli-0.8-3.el9.noarch                             5/6
2026-03-31T22:52:59.376 INFO:teuthology.orchestra.run.vm00.stdout:  Installing       : nvme-cli-2.16-1.el9.x86_64                            6/6
2026-03-31T22:52:59.640 INFO:teuthology.orchestra.run.vm00.stdout:  Running scriptlet: nvme-cli-2.16-1.el9.x86_64                            6/6
2026-03-31T22:52:59.640 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/default.target.wants/nvmefc-boot-connections.service → /usr/lib/systemd/system/nvmefc-boot-connections.service.
2026-03-31T22:52:59.640 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:52:59.929 INFO:teuthology.orchestra.run.vm00.stdout:  Verifying        : nvme-cli-2.16-1.el9.x86_64                            1/6
2026-03-31T22:52:59.930 INFO:teuthology.orchestra.run.vm00.stdout:  Verifying        : nvmetcli-0.8-3.el9.noarch                             2/6
2026-03-31T22:52:59.930 INFO:teuthology.orchestra.run.vm00.stdout:  Verifying        : python3-configshell-1:1.1.30-1.el9.noarch             3/6
2026-03-31T22:52:59.930 INFO:teuthology.orchestra.run.vm00.stdout:  Verifying        : python3-kmod-0.9-32.el9.x86_64                        4/6
2026-03-31T22:52:59.930 INFO:teuthology.orchestra.run.vm00.stdout:  Verifying        : python3-pyparsing-2.4.7-9.el9.noarch                  5/6
2026-03-31T22:52:59.982 INFO:teuthology.orchestra.run.vm00.stdout:  Verifying        : python3-urwid-2.1.2-4.el9.x86_64                      6/6
2026-03-31T22:52:59.982 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:52:59.982 INFO:teuthology.orchestra.run.vm00.stdout:Installed:
2026-03-31T22:52:59.982 INFO:teuthology.orchestra.run.vm00.stdout:  nvme-cli-2.16-1.el9.x86_64                 nvmetcli-0.8-3.el9.noarch
2026-03-31T22:52:59.982 INFO:teuthology.orchestra.run.vm00.stdout:  python3-configshell-1:1.1.30-1.el9.noarch  python3-kmod-0.9-32.el9.x86_64
2026-03-31T22:52:59.982 INFO:teuthology.orchestra.run.vm00.stdout:  python3-pyparsing-2.4.7-9.el9.noarch       python3-urwid-2.1.2-4.el9.x86_64
2026-03-31T22:52:59.982 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:52:59.982 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T22:53:00.022 DEBUG:teuthology.parallel:result is None
2026-03-31T22:53:00.022 INFO:teuthology.run_tasks:Running task install...
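With the pexec task finished on all three hosts, each node should now report both NVMe tools present; rpm -q is a cheap assertion for that (a sketch, run on any of the VMs, not part of the job itself):

    # Hedged sketch: confirm the pexec task left the NVMe tooling in place.
    rpm -q nvme-cli nvmetcli   # exits non-zero if either package is missing
    nvme version               # basic smoke test of the installed CLI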
2026-03-31T22:53:00.024 DEBUG:teuthology.task.install:project ceph
2026-03-31T22:53:00.024 DEBUG:teuthology.task.install:INSTALL overrides: {'ceph': {'flavor': 'default', 'sha1': '5bb3278730741031382ca9c3dc9d221a942e06a2'}, 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}}
2026-03-31T22:53:00.024 DEBUG:teuthology.task.install:config {'flavor': 'default', 'sha1': '5bb3278730741031382ca9c3dc9d221a942e06a2', 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}}
2026-03-31T22:53:00.024 INFO:teuthology.task.install:Using flavor: default
2026-03-31T22:53:00.027 DEBUG:teuthology.task.install:Package list is: {'deb': ['ceph', 'cephadm', 'ceph-mds', 'ceph-mgr', 'ceph-common', 'ceph-fuse', 'ceph-test', 'ceph-volume', 'radosgw', 'python3-rados', 'python3-rgw', 'python3-cephfs', 'python3-rbd', 'libcephfs2', 'libcephfs-dev', 'librados2', 'librbd1', 'rbd-fuse'], 'rpm': ['ceph-radosgw', 'ceph-test', 'ceph', 'ceph-base', 'cephadm', 'ceph-immutable-object-cache', 'ceph-mgr', 'ceph-mgr-dashboard', 'ceph-mgr-diskprediction-local', 'ceph-mgr-rook', 'ceph-mgr-cephadm', 'ceph-fuse', 'ceph-volume', 'librados-devel', 'libcephfs2', 'libcephfs-devel', 'librados2', 'librbd1', 'python3-rados', 'python3-rgw', 'python3-cephfs', 'python3-rbd', 'rbd-fuse', 'rbd-mirror', 'rbd-nbd']}
2026-03-31T22:53:00.027 INFO:teuthology.task.install:extra packages: []
2026-03-31T22:53:00.027 DEBUG:teuthology.task.install.rpm:_update_package_list_and_install: config is {'branch': None, 'cleanup': None, 'debuginfo': None, 'downgrade_packages': [], 'exclude_packages': [], 'extra_packages': [], 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}, 'extras': None, 'enable_coprs': [], 'flavor': 'default', 'install_ceph_packages': True, 'packages': {}, 'project': 'ceph', 'repos_only': False, 'sha1': '5bb3278730741031382ca9c3dc9d221a942e06a2', 'tag': None, 'wait_for_package': False}
2026-03-31T22:53:00.027 DEBUG:teuthology.packaging:Querying https://shaman.ceph.com/api/search?status=ready&project=ceph&flavor=default&distros=centos%2F9%2Fx86_64&sha1=5bb3278730741031382ca9c3dc9d221a942e06a2
2026-03-31T22:53:00.028 DEBUG:teuthology.task.install.rpm:_update_package_list_and_install: config is {'branch': None, 'cleanup': None, 'debuginfo': None, 'downgrade_packages': [], 'exclude_packages': [], 'extra_packages': [], 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}, 'extras': None, 'enable_coprs': [], 'flavor': 'default', 'install_ceph_packages': True, 'packages': {}, 'project': 'ceph', 'repos_only': False, 'sha1': '5bb3278730741031382ca9c3dc9d221a942e06a2', 'tag': None, 'wait_for_package': False}
2026-03-31T22:53:00.028 DEBUG:teuthology.packaging:Querying https://shaman.ceph.com/api/search?status=ready&project=ceph&flavor=default&distros=centos%2F9%2Fx86_64&sha1=5bb3278730741031382ca9c3dc9d221a942e06a2
2026-03-31T22:53:00.028 DEBUG:teuthology.task.install.rpm:_update_package_list_and_install: config is {'branch': None, 'cleanup': None, 'debuginfo': None, 'downgrade_packages': [], 'exclude_packages': [], 'extra_packages': [], 'extra_system_packages': {'deb': ['python3-jmespath', 'python3-xmltodict', 's3cmd'], 'rpm': ['bzip2', 'perl-Test-Harness', 'python3-jmespath', 'python3-xmltodict', 's3cmd']}, 'extras': None, 'enable_coprs': [], 'flavor': 'default', 'install_ceph_packages': True, 'packages': {}, 'project': 'ceph', 'repos_only': False, 'sha1': '5bb3278730741031382ca9c3dc9d221a942e06a2', 'tag': None, 'wait_for_package': False}
2026-03-31T22:53:00.029 DEBUG:teuthology.packaging:Querying https://shaman.ceph.com/api/search?status=ready&project=ceph&flavor=default&distros=centos%2F9%2Fx86_64&sha1=5bb3278730741031382ca9c3dc9d221a942e06a2
2026-03-31T22:53:00.667 INFO:teuthology.task.install.rpm:Pulling from https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/
2026-03-31T22:53:00.667 INFO:teuthology.task.install.rpm:Package version is 20.2.0-721.g5bb32787
2026-03-31T22:53:00.742 INFO:teuthology.task.install.rpm:Pulling from https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/
2026-03-31T22:53:00.742 INFO:teuthology.task.install.rpm:Package version is 20.2.0-721.g5bb32787
2026-03-31T22:53:00.775 INFO:teuthology.task.install.rpm:Pulling from https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/
2026-03-31T22:53:00.775 INFO:teuthology.task.install.rpm:Package version is 20.2.0-721.g5bb32787
2026-03-31T22:53:01.192 INFO:teuthology.packaging:Writing yum repo: [ceph]
name=ceph packages for $basearch
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/$basearch
enabled=1
gpgcheck=0
type=rpm-md
[ceph-noarch]
name=ceph noarch packages
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/noarch
enabled=1
gpgcheck=0
type=rpm-md
[ceph-source]
name=ceph source packages
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/SRPMS
enabled=1
gpgcheck=0
type=rpm-md
2026-03-31T22:53:01.192 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:53:01.192 DEBUG:teuthology.orchestra.run.vm00:> sudo dd of=/etc/yum.repos.d/ceph.repo
2026-03-31T22:53:01.216 INFO:teuthology.task.install.rpm:Installing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd, bzip2, perl-Test-Harness, python3-jmespath, python3-xmltodict, s3cmd on remote rpm x86_64
2026-03-31T22:53:01.217 DEBUG:teuthology.orchestra.run.vm00:> if test -f /etc/yum.repos.d/ceph.repo ; then sudo sed -i -e ':a;N;$!ba;s/enabled=1\ngpg/enabled=1\npriority=1\ngpg/g' /etc/yum.repos.d/ceph.repo ; fi
2026-03-31T22:53:01.251 INFO:teuthology.packaging:Writing yum repo: [ceph]
name=ceph packages for $basearch
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/$basearch
enabled=1
gpgcheck=0
type=rpm-md
[ceph-noarch]
name=ceph noarch packages
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/noarch
enabled=1
gpgcheck=0
type=rpm-md
[ceph-source]
name=ceph source packages
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/SRPMS
enabled=1
gpgcheck=0
type=rpm-md
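Note: the Querying lines show how the install task resolves binaries for a given sha1: it asks shaman for ready builds, then points dnf at the matching chacra repository (the "Pulling from" URL). A rough equivalent of that lookup (requests-based sketch; the "url" field name in the response is an assumption, inspect the live JSON payload):

    import requests

    params = {
        "status": "ready",
        "project": "ceph",
        "flavor": "default",
        "distros": "centos/9/x86_64",
        "sha1": "5bb3278730741031382ca9c3dc9d221a942e06a2",
    }
    resp = requests.get("https://shaman.ceph.com/api/search", params=params, timeout=30)
    resp.raise_for_status()
    builds = resp.json()  # list of ready builds for this sha1/distro/flavor
    if not builds:
        raise SystemExit("no ready build for this sha1")
    # Assumed field name; the repo ends up looking like the chacra URL logged above.
    print(builds[0]["url"])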
2026-03-31T22:53:01.252 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:53:01.252 DEBUG:teuthology.orchestra.run.vm09:> sudo dd of=/etc/yum.repos.d/ceph.repo
2026-03-31T22:53:01.280 INFO:teuthology.task.install.rpm:Installing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd, bzip2, perl-Test-Harness, python3-jmespath, python3-xmltodict, s3cmd on remote rpm x86_64
2026-03-31T22:53:01.280 DEBUG:teuthology.orchestra.run.vm09:> if test -f /etc/yum.repos.d/ceph.repo ; then sudo sed -i -e ':a;N;$!ba;s/enabled=1\ngpg/enabled=1\npriority=1\ngpg/g' /etc/yum.repos.d/ceph.repo ; fi
2026-03-31T22:53:01.281 DEBUG:teuthology.orchestra.run.vm00:> sudo touch -a /etc/yum/pluginconf.d/priorities.conf ; test -e /etc/yum/pluginconf.d/priorities.conf.orig || sudo cp -af /etc/yum/pluginconf.d/priorities.conf /etc/yum/pluginconf.d/priorities.conf.orig
2026-03-31T22:53:01.319 INFO:teuthology.packaging:Writing yum repo: [ceph]
name=ceph packages for $basearch
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/$basearch
enabled=1
gpgcheck=0
type=rpm-md
[ceph-noarch]
name=ceph noarch packages
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/noarch
enabled=1
gpgcheck=0
type=rpm-md
[ceph-source]
name=ceph source packages
baseurl=https://2.chacra.ceph.com/r/ceph/tentacle-release/5bb3278730741031382ca9c3dc9d221a942e06a2/centos/9/flavors/default/SRPMS
enabled=1
gpgcheck=0
type=rpm-md
2026-03-31T22:53:01.319 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:53:01.319 DEBUG:teuthology.orchestra.run.vm05:> sudo dd of=/etc/yum.repos.d/ceph.repo
2026-03-31T22:53:01.346 INFO:teuthology.task.install.rpm:Installing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd, bzip2, perl-Test-Harness, python3-jmespath, python3-xmltodict, s3cmd on remote rpm x86_64
2026-03-31T22:53:01.346 DEBUG:teuthology.orchestra.run.vm05:> if test -f /etc/yum.repos.d/ceph.repo ; then sudo sed -i -e ':a;N;$!ba;s/enabled=1\ngpg/enabled=1\npriority=1\ngpg/g' /etc/yum.repos.d/ceph.repo ; fi
2026-03-31T22:53:01.346 DEBUG:teuthology.orchestra.run.vm09:> sudo touch -a /etc/yum/pluginconf.d/priorities.conf ; test -e /etc/yum/pluginconf.d/priorities.conf.orig || sudo cp -af /etc/yum/pluginconf.d/priorities.conf /etc/yum/pluginconf.d/priorities.conf.orig
2026-03-31T22:53:01.354 DEBUG:teuthology.orchestra.run.vm00:> grep check_obsoletes /etc/yum/pluginconf.d/priorities.conf && sudo sed -i 's/check_obsoletes.*0/check_obsoletes = 1/g' /etc/yum/pluginconf.d/priorities.conf || echo 'check_obsoletes = 1' | sudo tee -a /etc/yum/pluginconf.d/priorities.conf
2026-03-31T22:53:01.411 DEBUG:teuthology.orchestra.run.vm05:> sudo touch -a /etc/yum/pluginconf.d/priorities.conf ; test -e /etc/yum/pluginconf.d/priorities.conf.orig || sudo cp -af /etc/yum/pluginconf.d/priorities.conf /etc/yum/pluginconf.d/priorities.conf.orig
2026-03-31T22:53:01.418 INFO:teuthology.orchestra.run.vm00.stdout:check_obsoletes = 1
2026-03-31T22:53:01.419 DEBUG:teuthology.orchestra.run.vm00:> sudo yum clean all
2026-03-31T22:53:01.422 DEBUG:teuthology.orchestra.run.vm09:> grep check_obsoletes /etc/yum/pluginconf.d/priorities.conf && sudo sed -i 's/check_obsoletes.*0/check_obsoletes = 1/g' /etc/yum/pluginconf.d/priorities.conf || echo 'check_obsoletes = 1' | sudo tee -a /etc/yum/pluginconf.d/priorities.conf
2026-03-31T22:53:01.486 DEBUG:teuthology.orchestra.run.vm05:> grep check_obsoletes /etc/yum/pluginconf.d/priorities.conf && sudo sed -i 's/check_obsoletes.*0/check_obsoletes = 1/g' /etc/yum/pluginconf.d/priorities.conf || echo 'check_obsoletes = 1' | sudo tee -a /etc/yum/pluginconf.d/priorities.conf
2026-03-31T22:53:01.487 INFO:teuthology.orchestra.run.vm09.stdout:check_obsoletes = 1
2026-03-31T22:53:01.488 DEBUG:teuthology.orchestra.run.vm09:> sudo yum clean all
2026-03-31T22:53:01.551 INFO:teuthology.orchestra.run.vm05.stdout:check_obsoletes = 1
2026-03-31T22:53:01.552 DEBUG:teuthology.orchestra.run.vm05:> sudo yum clean all
2026-03-31T22:53:01.581 INFO:teuthology.orchestra.run.vm00.stdout:41 files removed
2026-03-31T22:53:01.602 DEBUG:teuthology.orchestra.run.vm00:> sudo yum -y install ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd bzip2 perl-Test-Harness python3-jmespath python3-xmltodict s3cmd
2026-03-31T22:53:01.651 INFO:teuthology.orchestra.run.vm09.stdout:41 files removed
2026-03-31T22:53:01.671 DEBUG:teuthology.orchestra.run.vm09:> sudo yum -y install ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd bzip2 perl-Test-Harness python3-jmespath python3-xmltodict s3cmd
2026-03-31T22:53:01.717 INFO:teuthology.orchestra.run.vm05.stdout:41 files removed
2026-03-31T22:53:01.738 DEBUG:teuthology.orchestra.run.vm05:> sudo yum -y install ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd bzip2 perl-Test-Harness python3-jmespath python3-xmltodict s3cmd
2026-03-31T22:53:02.937 INFO:teuthology.orchestra.run.vm00.stdout:ceph packages for x86_64 75 kB/s | 89 kB 00:01
2026-03-31T22:53:02.951 INFO:teuthology.orchestra.run.vm09.stdout:ceph packages for x86_64 79 kB/s | 89 kB 00:01
2026-03-31T22:53:03.055 INFO:teuthology.orchestra.run.vm05.stdout:ceph packages for x86_64 76 kB/s | 89 kB 00:01
2026-03-31T22:53:03.966 INFO:teuthology.orchestra.run.vm09.stdout:ceph noarch packages 19 kB/s | 19 kB 00:00
2026-03-31T22:53:04.000 INFO:teuthology.orchestra.run.vm00.stdout:ceph noarch packages 18 kB/s | 19 kB 00:01
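Note: the sed one-liner issued on each host above is the standard idiom for a substitution that spans a newline: ':a;N;$!ba' slurps the whole repo file into the pattern space, so 's/enabled=1\ngpg/enabled=1\npriority=1\ngpg/g' can insert priority=1 between the enabled=1 and gpgcheck lines of every section, making the dnf priorities plugin prefer the ceph repos over the distro and EPEL ones. The same edit in Python, operating on the file written above (a sketch; run as root or point it at a copy):

    import re
    from pathlib import Path

    repo = Path("/etc/yum.repos.d/ceph.repo")
    text = repo.read_text()
    # Insert priority=1 after every enabled=1 that precedes a gpgcheck line.
    patched = re.sub(r"enabled=1\ngpg", "enabled=1\npriority=1\ngpg", text)
    repo.write_text(patched)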
2026-03-31T22:53:04.097 INFO:teuthology.orchestra.run.vm05.stdout:ceph noarch packages 18 kB/s | 19 kB 00:01
2026-03-31T22:53:04.935 INFO:teuthology.orchestra.run.vm00.stdout:ceph source packages 2.1 kB/s | 1.9 kB 00:00
2026-03-31T22:53:04.936 INFO:teuthology.orchestra.run.vm09.stdout:ceph source packages 2.0 kB/s | 1.9 kB 00:00
2026-03-31T22:53:05.056 INFO:teuthology.orchestra.run.vm05.stdout:ceph source packages 2.1 kB/s | 1.9 kB 00:00
2026-03-31T22:53:06.202 INFO:teuthology.orchestra.run.vm05.stdout:CentOS Stream 9 - BaseOS 7.9 MB/s | 8.9 MB 00:01
2026-03-31T22:53:06.287 INFO:teuthology.orchestra.run.vm00.stdout:CentOS Stream 9 - BaseOS 6.7 MB/s | 8.9 MB 00:01
2026-03-31T22:53:06.980 INFO:teuthology.orchestra.run.vm09.stdout:CentOS Stream 9 - BaseOS 4.4 MB/s | 8.9 MB 00:02
2026-03-31T22:53:08.376 INFO:teuthology.orchestra.run.vm05.stdout:CentOS Stream 9 - AppStream 17 MB/s | 27 MB 00:01
2026-03-31T22:53:08.508 INFO:teuthology.orchestra.run.vm09.stdout:CentOS Stream 9 - AppStream 29 MB/s | 27 MB 00:00
2026-03-31T22:53:08.959 INFO:teuthology.orchestra.run.vm00.stdout:CentOS Stream 9 - AppStream 13 MB/s | 27 MB 00:02
2026-03-31T22:53:11.972 INFO:teuthology.orchestra.run.vm05.stdout:CentOS Stream 9 - CRB 7.9 MB/s | 8.0 MB 00:01
2026-03-31T22:53:12.009 INFO:teuthology.orchestra.run.vm09.stdout:CentOS Stream 9 - CRB 8.7 MB/s | 8.0 MB 00:00
2026-03-31T22:53:12.291 INFO:teuthology.orchestra.run.vm00.stdout:CentOS Stream 9 - CRB 11 MB/s | 8.0 MB 00:00
2026-03-31T22:53:13.118 INFO:teuthology.orchestra.run.vm05.stdout:CentOS Stream 9 - Extras packages 60 kB/s | 21 kB 00:00
2026-03-31T22:53:13.559 INFO:teuthology.orchestra.run.vm05.stdout:Extra Packages for Enterprise Linux 53 MB/s | 20 MB 00:00
2026-03-31T22:53:13.626 INFO:teuthology.orchestra.run.vm00.stdout:CentOS Stream 9 - Extras packages 38 kB/s | 21 kB 00:00
2026-03-31T22:53:13.746 INFO:teuthology.orchestra.run.vm09.stdout:CentOS Stream 9 - Extras packages 22 kB/s | 21 kB 00:00
2026-03-31T22:53:14.047 INFO:teuthology.orchestra.run.vm00.stdout:Extra Packages for Enterprise Linux 56 MB/s | 20 MB 00:00
2026-03-31T22:53:14.242 INFO:teuthology.orchestra.run.vm09.stdout:Extra Packages for Enterprise Linux 47 MB/s | 20 MB 00:00
2026-03-31T22:53:18.630 INFO:teuthology.orchestra.run.vm05.stdout:lab-extras 64 kB/s | 50 kB 00:00
2026-03-31T22:53:19.432 INFO:teuthology.orchestra.run.vm00.stdout:lab-extras 64 kB/s | 50 kB 00:00
2026-03-31T22:53:19.881 INFO:teuthology.orchestra.run.vm05.stdout:Package librados2-2:16.2.4-5.el9.x86_64 is already installed.
2026-03-31T22:53:19.881 INFO:teuthology.orchestra.run.vm05.stdout:Package librbd1-2:16.2.4-5.el9.x86_64 is already installed.
2026-03-31T22:53:19.905 INFO:teuthology.orchestra.run.vm09.stdout:lab-extras 48 kB/s | 50 kB 00:01
2026-03-31T22:53:19.912 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout:======================================================================================
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repository Size
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout:======================================================================================
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout:Installing:
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: bzip2 x86_64 1.0.8-11.el9 baseos 55 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph x86_64 2:20.2.0-721.g5bb32787.el9 ceph 6.5 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-base x86_64 2:20.2.0-721.g5bb32787.el9 ceph 5.9 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-fuse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 940 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-721.g5bb32787.el9 ceph 154 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr x86_64 2:20.2.0-721.g5bb32787.el9 ceph 961 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-cephadm noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 173 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-dashboard noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 11 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 7.4 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-rook noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 50 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-radosgw x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-test x86_64 2:20.2.0-721.g5bb32787.el9 ceph 84 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-volume noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 298 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: cephadm noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 1.0 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-devel x86_64 2:20.2.0-721.g5bb32787.el9 ceph 34 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 867 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: librados-devel x86_64 2:20.2.0-721.g5bb32787.el9 ceph 126 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: perl-Test-Harness noarch 1:3.42-461.el9 appstream 295 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: python3-cephfs x86_64 2:20.2.0-721.g5bb32787.el9 ceph 163 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: python3-jmespath noarch 1.0.1-1.el9 appstream 48 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: python3-rados x86_64 2:20.2.0-721.g5bb32787.el9 ceph 323 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: python3-rbd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 304 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: python3-rgw x86_64 2:20.2.0-721.g5bb32787.el9 ceph 99 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: python3-xmltodict noarch 0.12.0-15.el9 epel 22 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: rbd-fuse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 91 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: rbd-mirror x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.9 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: rbd-nbd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 179 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: s3cmd noarch 2.4.0-1.el9 epel 206 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout:Upgrading:
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: librados2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 3.5 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: librbd1 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.8 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout:Installing dependencies:
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: abseil-cpp x86_64 20211102.0-4.el9 epel 551 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: boost-program-options x86_64 1.75.0-13.el9 appstream 104 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-grafana-dashboards noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 43 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mds x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.3 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-modules-core noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 290 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mon x86_64 2:20.2.0-721.g5bb32787.el9 ceph 5.0 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-osd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 17 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-prometheus-alerts noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 17 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ceph-selinux x86_64 2:20.2.0-721.g5bb32787.el9 ceph 25 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: cryptsetup x86_64 2.8.1-3.el9 baseos 351 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas x86_64 3.0.4-9.el9 appstream 30 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-netlib x86_64 3.0.4-9.el9 appstream 3.0 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-openblas-openmp x86_64 3.0.4-9.el9 appstream 15 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: fuse x86_64 2.9.9-17.el9 baseos 80 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: gperftools-libs x86_64 2.9.1-3.el9 epel 308 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: grpc-data noarch 1.46.7-10.el9 epel 19 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: ledmon-libs x86_64 1.1.0-3.el9 baseos 40 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: libarrow x86_64 9.0.0-15.el9 epel 4.4 M
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: libarrow-doc noarch 9.0.0-15.el9 epel 25 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-proxy2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 k
2026-03-31T22:53:19.917 INFO:teuthology.orchestra.run.vm05.stdout: libcephsqlite x86_64 2:20.2.0-721.g5bb32787.el9 ceph 164 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libconfig x86_64 1.7.2-9.el9 baseos 72 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libgfortran x86_64 11.5.0-14.el9 baseos 794 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libnbd x86_64 1.20.3-4.el9 appstream 164 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: liboath x86_64 2.6.12-1.el9 epel 49 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libpmemobj x86_64 1.12.1-1.el9 appstream 160 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libquadmath x86_64 11.5.0-14.el9 baseos 184 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: librabbitmq x86_64 0.11.0-7.el9 appstream 45 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libradosstriper1 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 250 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: librdkafka x86_64 1.6.1-102.el9 appstream 662 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: librgw2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 6.4 M
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libstoragemgmt x86_64 1.10.1-1.el9 appstream 246 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libunwind x86_64 1.6.2-1.el9 epel 67 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: libxslt x86_64 1.1.34-12.el9 appstream 233 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: lttng-ust x86_64 2.12.0-6.el9 appstream 292 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: lua x86_64 5.4.4-4.el9 appstream 188 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: lua-devel x86_64 5.4.4-4.el9 crb 22 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: luarocks noarch 3.9.2-5.el9 epel 151 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: mailcap noarch 2.1.49-5.el9 baseos 33 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: openblas x86_64 0.3.29-1.el9 appstream 42 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: openblas-openmp x86_64 0.3.29-1.el9 appstream 5.3 M
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: parquet-libs x86_64 9.0.0-15.el9 epel 838 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: pciutils x86_64 3.7.0-7.el9 baseos 93 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: perl-Benchmark noarch 1.23-483.el9 appstream 26 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: protobuf x86_64 3.14.0-17.el9 appstream 1.0 M
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: protobuf-compiler x86_64 3.14.0-17.el9 crb 862 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-asyncssh noarch 2.13.2-5.el9 epel 548 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-autocommand noarch 2.2.2-8.el9 epel 29 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-babel noarch 2.9.1-2.el9 appstream 6.0 M
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 epel 60 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-bcrypt x86_64 3.2.2-1.el9 epel 43 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-cachetools noarch 4.2.4-1.el9 epel 32 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-ceph-argparse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 45 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 ceph 175 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-certifi noarch 2023.05.07-4.el9 epel 14 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-cffi x86_64 1.14.5-5.el9 baseos 253 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-cheroot noarch 10.0.1-5.el9 epel 173 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-cherrypy noarch 18.10.0-5.el9 epel 290 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-cryptography x86_64 36.0.1-5.el9 baseos 1.2 M
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-devel x86_64 3.9.25-3.el9 appstream 244 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-google-auth noarch 1:2.45.0-1.el9 epel 254 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-grpcio x86_64 1.46.7-10.el9 epel 2.0 M
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 epel 144 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-isodate noarch 0.6.1-3.el9 epel 56 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco noarch 8.2.1-3.el9 epel 11 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 epel 18 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 epel 23 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-context noarch 6.0.1-3.el9 epel 20 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 epel 19 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-text noarch 4.0.0-2.el9 epel 26 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-jinja2 noarch 2.11.3-8.el9 appstream 249 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 epel 1.0 M
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 appstream 177 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-lxml x86_64 4.6.5-3.el9 appstream 1.2 M
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-markupsafe x86_64 1.1.1-12.el9 appstream 35 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-more-itertools noarch 8.12.0-2.el9 epel 79 k
2026-03-31T22:53:19.918 INFO:teuthology.orchestra.run.vm05.stdout: python3-msgpack x86_64 1.0.3-2.el9 epel 86 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-natsort noarch 7.1.1-5.el9 epel 58 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-numpy x86_64 1:1.23.5-2.el9 appstream 6.1 M
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9 appstream 442 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-packaging noarch 20.9-5.el9 appstream 77 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-ply noarch 3.11-14.el9 baseos 106 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-portend noarch 3.1.0-2.el9 epel 16 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-protobuf noarch 3.14.0-17.el9 appstream 267 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 epel 90 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyasn1 noarch 0.4.8-7.el9 appstream 157 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9 appstream 277 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-pycparser noarch 2.20-6.el9 baseos 135 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-repoze-lru noarch 0.7-16.el9 epel 31 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-requests noarch 2.25.1-10.el9 baseos 126 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 appstream 54 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-routes noarch 2.5.1-5.el9 epel 188 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-rsa noarch 4.9-2.el9 epel 59 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-scipy x86_64 1.9.3-2.el9 appstream 19 M
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-tempora noarch 5.0.0-2.el9 epel 36 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-toml noarch 0.10.2-6.el9 appstream 42 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-typing-extensions noarch 4.15.0-1.el9 epel 86 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-urllib3 noarch 1.26.5-7.el9 baseos 218 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-websocket-client noarch 1.2.3-2.el9 epel 90 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-xmlsec x86_64 1.3.13-1.el9 epel 48 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-zc-lockfile noarch 2.0-10.el9 epel 20 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: qatlib x86_64 25.08.0-2.el9 appstream 240 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: qatzip-libs x86_64 1.3.1-1.el9 appstream 66 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: re2 x86_64 1:20211101-20.el9 epel 191 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: socat x86_64 1.7.4.1-8.el9 appstream 303 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: thrift x86_64 0.15.0-4.el9 epel 1.6 M
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: unzip x86_64 6.0-59.el9 baseos 182 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: xmlsec1 x86_64 1.2.29-13.el9 appstream 189 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 appstream 90 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: xmlstarlet x86_64 1.6.1-20.el9 appstream 64 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: zip x86_64 3.0-35.el9 baseos 266 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:Installing weak dependencies:
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 22 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-daemon x86_64 2:20.2.0-721.g5bb32787.el9 ceph 35 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-influxdb noarch 5.3.1-1.el9 epel 139 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: python3-saml noarch 1.16.0-1.el9 epel 125 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: qatlib-service x86_64 25.08.0-2.el9 appstream 37 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout: smartmontools x86_64 1:7.2-10.el9 baseos 556 k
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:======================================================================================
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:Install 146 Packages
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:Upgrade 2 Packages
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:Total download size: 269 M
2026-03-31T22:53:19.919 INFO:teuthology.orchestra.run.vm05.stdout:Downloading Packages:
2026-03-31T22:53:20.695 INFO:teuthology.orchestra.run.vm00.stdout:Package librados2-2:16.2.4-5.el9.x86_64 is already installed.
2026-03-31T22:53:20.695 INFO:teuthology.orchestra.run.vm00.stdout:Package librbd1-2:16.2.4-5.el9.x86_64 is already installed.
2026-03-31T22:53:20.726 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout:======================================================================================
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repository Size
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout:======================================================================================
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout:Installing:
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: bzip2 x86_64 1.0.8-11.el9 baseos 55 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph x86_64 2:20.2.0-721.g5bb32787.el9 ceph 6.5 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-base x86_64 2:20.2.0-721.g5bb32787.el9 ceph 5.9 M
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-fuse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 940 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-721.g5bb32787.el9 ceph 154 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr x86_64 2:20.2.0-721.g5bb32787.el9 ceph 961 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-cephadm noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 173 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-dashboard noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 11 M
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 7.4 M
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-rook noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 50 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-radosgw x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 M
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-test x86_64 2:20.2.0-721.g5bb32787.el9 ceph 84 M
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: ceph-volume noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 298 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: cephadm noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 1.0 M
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-devel x86_64 2:20.2.0-721.g5bb32787.el9 ceph 34 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 867 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: librados-devel x86_64 2:20.2.0-721.g5bb32787.el9 ceph 126 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: perl-Test-Harness noarch 1:3.42-461.el9 appstream 295 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: python3-cephfs x86_64 2:20.2.0-721.g5bb32787.el9 ceph 163 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: python3-jmespath noarch 1.0.1-1.el9 appstream 48 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: python3-rados x86_64 2:20.2.0-721.g5bb32787.el9 ceph 323 k
2026-03-31T22:53:20.731 INFO:teuthology.orchestra.run.vm00.stdout: python3-rbd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 304 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-rgw x86_64 2:20.2.0-721.g5bb32787.el9 ceph 99 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-xmltodict noarch 0.12.0-15.el9 epel 22 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: rbd-fuse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 91 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: rbd-mirror x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.9 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: rbd-nbd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 179 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: s3cmd noarch 2.4.0-1.el9 epel 206 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout:Upgrading:
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: librados2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 3.5 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: librbd1 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.8 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout:Installing dependencies:
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: abseil-cpp x86_64 20211102.0-4.el9 epel 551 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: boost-program-options x86_64 1.75.0-13.el9 appstream 104 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ceph-grafana-dashboards noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 43 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mds x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.3 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-modules-core noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 290 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mon x86_64 2:20.2.0-721.g5bb32787.el9 ceph 5.0 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ceph-osd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 17 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ceph-prometheus-alerts noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 17 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ceph-selinux x86_64 2:20.2.0-721.g5bb32787.el9 ceph 25 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: cryptsetup x86_64 2.8.1-3.el9 baseos 351 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas x86_64 3.0.4-9.el9 appstream 30 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-netlib x86_64 3.0.4-9.el9 appstream 3.0 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-openblas-openmp x86_64 3.0.4-9.el9 appstream 15 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: fuse x86_64 2.9.9-17.el9 baseos 80 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: gperftools-libs x86_64 2.9.1-3.el9 epel 308 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: grpc-data noarch 1.46.7-10.el9 epel 19 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: ledmon-libs x86_64 1.1.0-3.el9 baseos 40 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libarrow x86_64 9.0.0-15.el9 epel 4.4 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libarrow-doc noarch 9.0.0-15.el9 epel 25 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-proxy2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libcephsqlite x86_64 2:20.2.0-721.g5bb32787.el9 ceph 164 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libconfig x86_64 1.7.2-9.el9 baseos 72 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libgfortran x86_64 11.5.0-14.el9 baseos 794 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libnbd x86_64 1.20.3-4.el9 appstream 164 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: liboath x86_64 2.6.12-1.el9 epel 49 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libpmemobj x86_64 1.12.1-1.el9 appstream 160 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libquadmath x86_64 11.5.0-14.el9 baseos 184 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: librabbitmq x86_64 0.11.0-7.el9 appstream 45 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libradosstriper1 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 250 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: librdkafka x86_64 1.6.1-102.el9 appstream 662 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: librgw2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 6.4 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libstoragemgmt x86_64 1.10.1-1.el9 appstream 246 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libunwind x86_64 1.6.2-1.el9 epel 67 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: libxslt x86_64 1.1.34-12.el9 appstream 233 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: lttng-ust x86_64 2.12.0-6.el9 appstream 292 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: lua x86_64 5.4.4-4.el9 appstream 188 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: lua-devel x86_64 5.4.4-4.el9 crb 22 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: luarocks noarch 3.9.2-5.el9 epel 151 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: mailcap noarch 2.1.49-5.el9 baseos 33 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: openblas x86_64 0.3.29-1.el9 appstream 42 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: openblas-openmp x86_64 0.3.29-1.el9 appstream 5.3 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: parquet-libs x86_64 9.0.0-15.el9 epel 838 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: pciutils x86_64 3.7.0-7.el9 baseos 93 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: perl-Benchmark noarch 1.23-483.el9 appstream 26 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: protobuf x86_64 3.14.0-17.el9 appstream 1.0 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: protobuf-compiler x86_64 3.14.0-17.el9 crb 862 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-asyncssh noarch 2.13.2-5.el9 epel 548 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-autocommand noarch 2.2.2-8.el9 epel 29 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-babel noarch 2.9.1-2.el9 appstream 6.0 M
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 epel 60 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-bcrypt x86_64 3.2.2-1.el9 epel 43 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-cachetools noarch 4.2.4-1.el9 epel 32 k
2026-03-31T22:53:20.732 INFO:teuthology.orchestra.run.vm00.stdout: python3-ceph-argparse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 45 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 ceph 175 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-certifi noarch 2023.05.07-4.el9 epel 14 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-cffi x86_64 1.14.5-5.el9 baseos 253 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-cheroot noarch 10.0.1-5.el9 epel 173 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-cherrypy noarch 18.10.0-5.el9 epel 290 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-cryptography x86_64 36.0.1-5.el9 baseos 1.2 M
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-devel x86_64 3.9.25-3.el9 appstream 244 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-google-auth noarch 1:2.45.0-1.el9 epel 254 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-grpcio x86_64 1.46.7-10.el9 epel 2.0 M
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 epel 144 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-isodate noarch 0.6.1-3.el9 epel 56 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco noarch 8.2.1-3.el9 epel 11 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 epel 18 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 epel 23 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-context noarch 6.0.1-3.el9 epel 20 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 epel 19 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-text noarch 4.0.0-2.el9 epel 26 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-jinja2 noarch 2.11.3-8.el9 appstream 249 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 epel 1.0 M
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 appstream 177 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-lxml x86_64 4.6.5-3.el9 appstream 1.2 M
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-markupsafe x86_64 1.1.1-12.el9 appstream 35 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-more-itertools noarch 8.12.0-2.el9 epel 79 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-msgpack x86_64 1.0.3-2.el9 epel 86 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-natsort noarch 7.1.1-5.el9 epel 58 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-numpy x86_64 1:1.23.5-2.el9 appstream 6.1 M
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9 appstream 442 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-packaging noarch 20.9-5.el9 appstream 77 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-ply noarch 3.11-14.el9 baseos 106 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-portend noarch 3.1.0-2.el9 epel 16 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-protobuf noarch 3.14.0-17.el9 appstream 267 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 epel 90 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyasn1 noarch 0.4.8-7.el9 appstream 157 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9 appstream 277 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-pycparser noarch 2.20-6.el9 baseos 135 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-repoze-lru noarch 0.7-16.el9 epel 31 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-requests noarch 2.25.1-10.el9 baseos 126 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 appstream 54 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-routes noarch 2.5.1-5.el9 epel 188 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-rsa noarch 4.9-2.el9 epel 59 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-scipy x86_64 1.9.3-2.el9 appstream 19 M
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-tempora noarch 5.0.0-2.el9 epel 36 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-toml noarch 0.10.2-6.el9 appstream 42 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-typing-extensions noarch 4.15.0-1.el9 epel 86 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-urllib3 noarch 1.26.5-7.el9 baseos 218 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-websocket-client noarch 1.2.3-2.el9 epel 90 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-xmlsec x86_64 1.3.13-1.el9 epel 48 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-zc-lockfile noarch 2.0-10.el9 epel 20 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: qatlib x86_64 25.08.0-2.el9 appstream 240 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: qatzip-libs x86_64 1.3.1-1.el9 appstream 66 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: re2 x86_64 1:20211101-20.el9 epel 191 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: socat x86_64 1.7.4.1-8.el9 appstream 303 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: thrift x86_64 0.15.0-4.el9 epel 1.6 M
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: unzip x86_64 6.0-59.el9 baseos 182 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: xmlsec1 x86_64 1.2.29-13.el9 appstream 189 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 appstream 90 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: xmlstarlet x86_64 1.6.1-20.el9 appstream 64 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: zip x86_64 3.0-35.el9 baseos 266 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout:Installing weak dependencies:
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 22 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-daemon x86_64 2:20.2.0-721.g5bb32787.el9 ceph 35 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-influxdb noarch 5.3.1-1.el9 epel 139 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: python3-saml noarch 1.16.0-1.el9 epel 125 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: qatlib-service x86_64 25.08.0-2.el9 appstream 37 k
2026-03-31T22:53:20.733 INFO:teuthology.orchestra.run.vm00.stdout: smartmontools x86_64 1:7.2-10.el9 baseos 556 k
2026-03-31T22:53:20.734 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:20.734 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T22:53:20.734 INFO:teuthology.orchestra.run.vm00.stdout:======================================================================================
2026-03-31T22:53:20.734 INFO:teuthology.orchestra.run.vm00.stdout:Install 146 Packages
2026-03-31T22:53:20.734 INFO:teuthology.orchestra.run.vm00.stdout:Upgrade 2 Packages
2026-03-31T22:53:20.734 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:20.734 INFO:teuthology.orchestra.run.vm00.stdout:Total download size: 269 M
2026-03-31T22:53:20.734 INFO:teuthology.orchestra.run.vm00.stdout:Downloading Packages:
2026-03-31T22:53:21.172 INFO:teuthology.orchestra.run.vm09.stdout:Package librados2-2:16.2.4-5.el9.x86_64 is already installed.
2026-03-31T22:53:21.172 INFO:teuthology.orchestra.run.vm09.stdout:Package librbd1-2:16.2.4-5.el9.x86_64 is already installed.
2026-03-31T22:53:21.203 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
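Note: vm05 and vm00 resolve identical transactions: 146 installs plus upgrades of the preinstalled librados2/librbd1 2:16.2.4-5.el9 packages to the 2:20.2.0-721.g5bb32787.el9 build, 269 M per host. When reviewing runs like this it can help to assert the summary programmatically; a small sketch over captured dnf output (the counts below are the ones reported above, embedded as sample data):

    import re

    dnf_output = """\
    Install 146 Packages
    Upgrade 2 Packages

    Total download size: 269 M
    """

    summary = {}
    for line in dnf_output.splitlines():
        # Match the "Install N Packages" / "Upgrade N Packages" summary lines.
        m = re.match(r"(Install|Upgrade)\s+(\d+)\s+Packages?", line.strip())
        if m:
            summary[m.group(1)] = int(m.group(2))
    assert summary == {"Install": 146, "Upgrade": 2}
    print(summary)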
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout:======================================================================================
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repository Size
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout:======================================================================================
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout:Installing:
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: bzip2 x86_64 1.0.8-11.el9 baseos 55 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph x86_64 2:20.2.0-721.g5bb32787.el9 ceph 6.5 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-base x86_64 2:20.2.0-721.g5bb32787.el9 ceph 5.9 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-fuse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 940 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-721.g5bb32787.el9 ceph 154 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr x86_64 2:20.2.0-721.g5bb32787.el9 ceph 961 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-cephadm noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 173 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-dashboard noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 11 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 7.4 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-rook noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 50 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-radosgw x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-test x86_64 2:20.2.0-721.g5bb32787.el9 ceph 84 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-volume noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 298 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: cephadm noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 1.0 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-devel x86_64 2:20.2.0-721.g5bb32787.el9 ceph 34 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 867 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: librados-devel x86_64 2:20.2.0-721.g5bb32787.el9 ceph 126 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: perl-Test-Harness noarch 1:3.42-461.el9 appstream 295 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: python3-cephfs x86_64 2:20.2.0-721.g5bb32787.el9 ceph 163 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: python3-jmespath noarch 1.0.1-1.el9 appstream 48 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: python3-rados x86_64 2:20.2.0-721.g5bb32787.el9 ceph 323 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: python3-rbd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 304 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: python3-rgw x86_64 2:20.2.0-721.g5bb32787.el9 ceph 99 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: python3-xmltodict noarch 0.12.0-15.el9 epel 22 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: rbd-fuse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 91 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: rbd-mirror x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.9 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: rbd-nbd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 179 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: s3cmd noarch 2.4.0-1.el9 epel 206 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout:Upgrading:
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: librados2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 3.5 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: librbd1 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.8 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout:Installing dependencies:
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: abseil-cpp x86_64 20211102.0-4.el9 epel 551 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: boost-program-options x86_64 1.75.0-13.el9 appstream 104 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-grafana-dashboards noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 43 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mds x86_64 2:20.2.0-721.g5bb32787.el9 ceph 2.3 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-modules-core noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 290 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mon x86_64 2:20.2.0-721.g5bb32787.el9 ceph 5.0 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-osd x86_64 2:20.2.0-721.g5bb32787.el9 ceph 17 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-prometheus-alerts noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 17 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: ceph-selinux x86_64 2:20.2.0-721.g5bb32787.el9 ceph 25 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: cryptsetup x86_64 2.8.1-3.el9 baseos 351 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas x86_64 3.0.4-9.el9 appstream 30 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-netlib x86_64 3.0.4-9.el9 appstream 3.0 M
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-openblas-openmp x86_64 3.0.4-9.el9 appstream 15 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: fuse x86_64 2.9.9-17.el9 baseos 80 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: gperftools-libs x86_64 2.9.1-3.el9 epel 308 k
2026-03-31T22:53:21.208 INFO:teuthology.orchestra.run.vm09.stdout: grpc-data noarch 1.46.7-10.el9 epel 19 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: ledmon-libs x86_64 1.1.0-3.el9 baseos 40 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libarrow x86_64 9.0.0-15.el9 epel 4.4 M
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libarrow-doc noarch 9.0.0-15.el9 epel 25 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-proxy2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 24 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libcephsqlite x86_64 2:20.2.0-721.g5bb32787.el9 ceph 164 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libconfig x86_64 1.7.2-9.el9 baseos 72 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libgfortran x86_64 11.5.0-14.el9 baseos 794 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libnbd x86_64 1.20.3-4.el9 appstream 164 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: liboath x86_64 2.6.12-1.el9 epel 49 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libpmemobj x86_64 1.12.1-1.el9 appstream 160 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libquadmath x86_64 11.5.0-14.el9 baseos 184 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: librabbitmq x86_64 0.11.0-7.el9 appstream 45 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libradosstriper1 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 250 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: librdkafka x86_64 1.6.1-102.el9 appstream 662 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: librgw2 x86_64 2:20.2.0-721.g5bb32787.el9 ceph 6.4 M
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libstoragemgmt x86_64 1.10.1-1.el9 appstream 246 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libunwind x86_64 1.6.2-1.el9 epel 67 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: libxslt x86_64 1.1.34-12.el9 appstream 233 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: lttng-ust x86_64 2.12.0-6.el9 appstream 292 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: lua x86_64 5.4.4-4.el9 appstream 188 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: lua-devel x86_64 5.4.4-4.el9 crb 22 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: luarocks noarch 3.9.2-5.el9 epel 151 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: mailcap noarch 2.1.49-5.el9 baseos 33 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: openblas x86_64 0.3.29-1.el9 appstream 42 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: openblas-openmp x86_64 0.3.29-1.el9 appstream 5.3 M
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: parquet-libs x86_64 9.0.0-15.el9 epel 838 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: pciutils x86_64 3.7.0-7.el9 baseos 93 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: perl-Benchmark noarch 1.23-483.el9 appstream 26 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: protobuf x86_64 3.14.0-17.el9 appstream 1.0 M
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: protobuf-compiler x86_64 3.14.0-17.el9 crb 862 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-asyncssh noarch 2.13.2-5.el9 epel 548 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-autocommand noarch 2.2.2-8.el9 epel 29 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-babel noarch 2.9.1-2.el9 appstream 6.0 M
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 epel 60 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-bcrypt x86_64 3.2.2-1.el9 epel 43 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-cachetools noarch 4.2.4-1.el9 epel 32 k
2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout:
python3-ceph-argparse x86_64 2:20.2.0-721.g5bb32787.el9 ceph 45 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 ceph 175 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-certifi noarch 2023.05.07-4.el9 epel 14 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-cffi x86_64 1.14.5-5.el9 baseos 253 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-cheroot noarch 10.0.1-5.el9 epel 173 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-cherrypy noarch 18.10.0-5.el9 epel 290 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-cryptography x86_64 36.0.1-5.el9 baseos 1.2 M 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-devel x86_64 3.9.25-3.el9 appstream 244 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-google-auth noarch 1:2.45.0-1.el9 epel 254 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-grpcio x86_64 1.46.7-10.el9 epel 2.0 M 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 epel 144 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-isodate noarch 0.6.1-3.el9 epel 56 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco noarch 8.2.1-3.el9 epel 11 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 epel 18 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 epel 23 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-context noarch 6.0.1-3.el9 epel 20 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 epel 19 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-text noarch 4.0.0-2.el9 epel 26 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-jinja2 noarch 2.11.3-8.el9 appstream 249 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 epel 1.0 M 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 appstream 177 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-lxml x86_64 4.6.5-3.el9 appstream 1.2 M 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-markupsafe x86_64 1.1.1-12.el9 appstream 35 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-more-itertools noarch 8.12.0-2.el9 epel 79 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-msgpack x86_64 1.0.3-2.el9 epel 86 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-natsort noarch 7.1.1-5.el9 epel 58 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-numpy x86_64 1:1.23.5-2.el9 appstream 6.1 M 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9 appstream 442 k 2026-03-31T22:53:21.209 INFO:teuthology.orchestra.run.vm09.stdout: python3-packaging noarch 20.9-5.el9 appstream 77 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-ply noarch 3.11-14.el9 baseos 106 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: 
python3-portend noarch 3.1.0-2.el9 epel 16 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-protobuf noarch 3.14.0-17.el9 appstream 267 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 epel 90 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyasn1 noarch 0.4.8-7.el9 appstream 157 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9 appstream 277 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-pycparser noarch 2.20-6.el9 baseos 135 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-repoze-lru noarch 0.7-16.el9 epel 31 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-requests noarch 2.25.1-10.el9 baseos 126 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 appstream 54 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-routes noarch 2.5.1-5.el9 epel 188 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-rsa noarch 4.9-2.el9 epel 59 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-scipy x86_64 1.9.3-2.el9 appstream 19 M 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-tempora noarch 5.0.0-2.el9 epel 36 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-toml noarch 0.10.2-6.el9 appstream 42 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-typing-extensions noarch 4.15.0-1.el9 epel 86 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-urllib3 noarch 1.26.5-7.el9 baseos 218 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-websocket-client noarch 1.2.3-2.el9 epel 90 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-xmlsec x86_64 1.3.13-1.el9 epel 48 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-zc-lockfile noarch 2.0-10.el9 epel 20 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: qatlib x86_64 25.08.0-2.el9 appstream 240 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: qatzip-libs x86_64 1.3.1-1.el9 appstream 66 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: re2 x86_64 1:20211101-20.el9 epel 191 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: socat x86_64 1.7.4.1-8.el9 appstream 303 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: thrift x86_64 0.15.0-4.el9 epel 1.6 M 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: unzip x86_64 6.0-59.el9 baseos 182 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: xmlsec1 x86_64 1.2.29-13.el9 appstream 189 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 appstream 90 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: xmlstarlet x86_64 1.6.1-20.el9 appstream 64 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: zip x86_64 3.0-35.el9 baseos 266 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout:Installing weak dependencies: 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-721.g5bb32787.el9 ceph-noarch 22 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: 
libcephfs-daemon x86_64 2:20.2.0-721.g5bb32787.el9 ceph 35 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-influxdb noarch 5.3.1-1.el9 epel 139 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: python3-saml noarch 1.16.0-1.el9 epel 125 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: qatlib-service x86_64 25.08.0-2.el9 appstream 37 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: smartmontools x86_64 1:7.2-10.el9 baseos 556 k 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout:====================================================================================== 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout:Install 146 Packages 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout:Upgrade 2 Packages 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout:Total download size: 269 M 2026-03-31T22:53:21.210 INFO:teuthology.orchestra.run.vm09.stdout:Downloading Packages: 2026-03-31T22:53:22.041 INFO:teuthology.orchestra.run.vm00.stdout:(1/148): ceph-20.2.0-721.g5bb32787.el9.x86_64.r 14 kB/s | 6.5 kB 00:00 2026-03-31T22:53:22.615 INFO:teuthology.orchestra.run.vm05.stdout:(1/148): ceph-20.2.0-721.g5bb32787.el9.x86_64.r 14 kB/s | 6.5 kB 00:00 2026-03-31T22:53:22.851 INFO:teuthology.orchestra.run.vm00.stdout:(2/148): ceph-fuse-20.2.0-721.g5bb32787.el9.x86 1.1 MB/s | 940 kB 00:00 2026-03-31T22:53:22.908 INFO:teuthology.orchestra.run.vm09.stdout:(1/148): ceph-20.2.0-721.g5bb32787.el9.x86_64.r 14 kB/s | 6.5 kB 00:00 2026-03-31T22:53:22.973 INFO:teuthology.orchestra.run.vm00.stdout:(3/148): ceph-immutable-object-cache-20.2.0-721 1.2 MB/s | 154 kB 00:00 2026-03-31T22:53:23.096 INFO:teuthology.orchestra.run.vm00.stdout:(4/148): ceph-base-20.2.0-721.g5bb32787.el9.x86 3.9 MB/s | 5.9 MB 00:01 2026-03-31T22:53:23.225 INFO:teuthology.orchestra.run.vm00.stdout:(5/148): ceph-mgr-20.2.0-721.g5bb32787.el9.x86_ 7.3 MB/s | 961 kB 00:00 2026-03-31T22:53:23.248 INFO:teuthology.orchestra.run.vm00.stdout:(6/148): ceph-mds-20.2.0-721.g5bb32787.el9.x86_ 8.5 MB/s | 2.3 MB 00:00 2026-03-31T22:53:23.443 INFO:teuthology.orchestra.run.vm05.stdout:(2/148): ceph-fuse-20.2.0-721.g5bb32787.el9.x86 1.1 MB/s | 940 kB 00:00 2026-03-31T22:53:23.574 INFO:teuthology.orchestra.run.vm05.stdout:(3/148): ceph-immutable-object-cache-20.2.0-721 1.2 MB/s | 154 kB 00:00 2026-03-31T22:53:23.615 INFO:teuthology.orchestra.run.vm00.stdout:(7/148): ceph-mon-20.2.0-721.g5bb32787.el9.x86_ 13 MB/s | 5.0 MB 00:00 2026-03-31T22:53:23.993 INFO:teuthology.orchestra.run.vm05.stdout:(4/148): ceph-mds-20.2.0-721.g5bb32787.el9.x86_ 5.6 MB/s | 2.3 MB 00:00 2026-03-31T22:53:24.250 INFO:teuthology.orchestra.run.vm09.stdout:(2/148): ceph-fuse-20.2.0-721.g5bb32787.el9.x86 700 kB/s | 940 kB 00:01 2026-03-31T22:53:24.273 INFO:teuthology.orchestra.run.vm05.stdout:(5/148): ceph-mgr-20.2.0-721.g5bb32787.el9.x86_ 3.4 MB/s | 961 kB 00:00 2026-03-31T22:53:24.324 INFO:teuthology.orchestra.run.vm00.stdout:(8/148): ceph-common-20.2.0-721.g5bb32787.el9.x 8.7 MB/s | 24 MB 00:02 2026-03-31T22:53:24.444 INFO:teuthology.orchestra.run.vm00.stdout:(9/148): ceph-selinux-20.2.0-721.g5bb32787.el9. 
210 kB/s | 25 kB 00:00 2026-03-31T22:53:24.483 INFO:teuthology.orchestra.run.vm09.stdout:(3/148): ceph-immutable-object-cache-20.2.0-721 663 kB/s | 154 kB 00:00 2026-03-31T22:53:24.905 INFO:teuthology.orchestra.run.vm05.stdout:(6/148): ceph-mon-20.2.0-721.g5bb32787.el9.x86_ 8.0 MB/s | 5.0 MB 00:00 2026-03-31T22:53:25.265 INFO:teuthology.orchestra.run.vm00.stdout:(10/148): ceph-osd-20.2.0-721.g5bb32787.el9.x86 8.4 MB/s | 17 MB 00:02 2026-03-31T22:53:25.319 INFO:teuthology.orchestra.run.vm00.stdout:(11/148): ceph-radosgw-20.2.0-721.g5bb32787.el9 14 MB/s | 24 MB 00:01 2026-03-31T22:53:25.382 INFO:teuthology.orchestra.run.vm00.stdout:(12/148): libcephfs-daemon-20.2.0-721.g5bb32787 303 kB/s | 35 kB 00:00 2026-03-31T22:53:25.437 INFO:teuthology.orchestra.run.vm00.stdout:(13/148): libcephfs-devel-20.2.0-721.g5bb32787. 293 kB/s | 34 kB 00:00 2026-03-31T22:53:25.499 INFO:teuthology.orchestra.run.vm00.stdout:(14/148): libcephfs-proxy2-20.2.0-721.g5bb32787 208 kB/s | 24 kB 00:00 2026-03-31T22:53:25.574 INFO:teuthology.orchestra.run.vm00.stdout:(15/148): libcephfs2-20.2.0-721.g5bb32787.el9.x 6.2 MB/s | 867 kB 00:00 2026-03-31T22:53:25.574 INFO:teuthology.orchestra.run.vm05.stdout:(7/148): ceph-common-20.2.0-721.g5bb32787.el9.x 7.0 MB/s | 24 MB 00:03 2026-03-31T22:53:25.616 INFO:teuthology.orchestra.run.vm00.stdout:(16/148): libcephsqlite-20.2.0-721.g5bb32787.el 1.4 MB/s | 164 kB 00:00 2026-03-31T22:53:25.725 INFO:teuthology.orchestra.run.vm00.stdout:(17/148): librados-devel-20.2.0-721.g5bb32787.e 832 kB/s | 126 kB 00:00 2026-03-31T22:53:25.763 INFO:teuthology.orchestra.run.vm00.stdout:(18/148): libradosstriper1-20.2.0-721.g5bb32787 1.7 MB/s | 250 kB 00:00 2026-03-31T22:53:25.916 INFO:teuthology.orchestra.run.vm00.stdout:(19/148): python3-ceph-argparse-20.2.0-721.g5bb 295 kB/s | 45 kB 00:00 2026-03-31T22:53:26.018 INFO:teuthology.orchestra.run.vm09.stdout:(4/148): ceph-mds-20.2.0-721.g5bb32787.el9.x86_ 1.5 MB/s | 2.3 MB 00:01 2026-03-31T22:53:26.071 INFO:teuthology.orchestra.run.vm00.stdout:(20/148): python3-ceph-common-20.2.0-721.g5bb32 1.1 MB/s | 175 kB 00:00 2026-03-31T22:53:26.271 INFO:teuthology.orchestra.run.vm00.stdout:(21/148): python3-cephfs-20.2.0-721.g5bb32787.e 819 kB/s | 163 kB 00:00 2026-03-31T22:53:26.329 INFO:teuthology.orchestra.run.vm00.stdout:(22/148): librgw2-20.2.0-721.g5bb32787.el9.x86_ 11 MB/s | 6.4 MB 00:00 2026-03-31T22:53:26.417 INFO:teuthology.orchestra.run.vm00.stdout:(23/148): python3-rados-20.2.0-721.g5bb32787.el 2.2 MB/s | 323 kB 00:00 2026-03-31T22:53:26.463 INFO:teuthology.orchestra.run.vm00.stdout:(24/148): python3-rbd-20.2.0-721.g5bb32787.el9. 2.2 MB/s | 304 kB 00:00 2026-03-31T22:53:26.488 INFO:teuthology.orchestra.run.vm05.stdout:(8/148): ceph-osd-20.2.0-721.g5bb32787.el9.x86_ 11 MB/s | 17 MB 00:01 2026-03-31T22:53:26.533 INFO:teuthology.orchestra.run.vm00.stdout:(25/148): python3-rgw-20.2.0-721.g5bb32787.el9. 852 kB/s | 99 kB 00:00 2026-03-31T22:53:26.563 INFO:teuthology.orchestra.run.vm09.stdout:(5/148): ceph-mgr-20.2.0-721.g5bb32787.el9.x86_ 1.7 MB/s | 961 kB 00:00 2026-03-31T22:53:26.580 INFO:teuthology.orchestra.run.vm00.stdout:(26/148): rbd-fuse-20.2.0-721.g5bb32787.el9.x86 779 kB/s | 91 kB 00:00 2026-03-31T22:53:26.606 INFO:teuthology.orchestra.run.vm05.stdout:(9/148): ceph-selinux-20.2.0-721.g5bb32787.el9. 
214 kB/s | 25 kB 00:00 2026-03-31T22:53:26.709 INFO:teuthology.orchestra.run.vm09.stdout:(6/148): ceph-base-20.2.0-721.g5bb32787.el9.x86 1.4 MB/s | 5.9 MB 00:04 2026-03-31T22:53:26.718 INFO:teuthology.orchestra.run.vm00.stdout:(27/148): rbd-nbd-20.2.0-721.g5bb32787.el9.x86_ 1.3 MB/s | 179 kB 00:00 2026-03-31T22:53:26.828 INFO:teuthology.orchestra.run.vm00.stdout:(28/148): rbd-mirror-20.2.0-721.g5bb32787.el9.x 9.9 MB/s | 2.9 MB 00:00 2026-03-31T22:53:26.871 INFO:teuthology.orchestra.run.vm00.stdout:(29/148): ceph-grafana-dashboards-20.2.0-721.g5 282 kB/s | 43 kB 00:00 2026-03-31T22:53:26.963 INFO:teuthology.orchestra.run.vm00.stdout:(30/148): ceph-mgr-cephadm-20.2.0-721.g5bb32787 1.3 MB/s | 173 kB 00:00 2026-03-31T22:53:27.111 INFO:teuthology.orchestra.run.vm05.stdout:(10/148): ceph-base-20.2.0-721.g5bb32787.el9.x8 1.2 MB/s | 5.9 MB 00:04 2026-03-31T22:53:27.283 INFO:teuthology.orchestra.run.vm05.stdout:(11/148): libcephfs-daemon-20.2.0-721.g5bb32787 206 kB/s | 35 kB 00:00 2026-03-31T22:53:27.391 INFO:teuthology.orchestra.run.vm05.stdout:(12/148): ceph-radosgw-20.2.0-721.g5bb32787.el9 13 MB/s | 24 MB 00:01 2026-03-31T22:53:27.404 INFO:teuthology.orchestra.run.vm05.stdout:(13/148): libcephfs-devel-20.2.0-721.g5bb32787. 285 kB/s | 34 kB 00:00 2026-03-31T22:53:27.505 INFO:teuthology.orchestra.run.vm05.stdout:(14/148): libcephfs-proxy2-20.2.0-721.g5bb32787 212 kB/s | 24 kB 00:00 2026-03-31T22:53:27.640 INFO:teuthology.orchestra.run.vm05.stdout:(15/148): libcephsqlite-20.2.0-721.g5bb32787.el 1.2 MB/s | 164 kB 00:00 2026-03-31T22:53:27.785 INFO:teuthology.orchestra.run.vm05.stdout:(16/148): librados-devel-20.2.0-721.g5bb32787.e 868 kB/s | 126 kB 00:00 2026-03-31T22:53:27.797 INFO:teuthology.orchestra.run.vm00.stdout:(31/148): ceph-mgr-dashboard-20.2.0-721.g5bb327 11 MB/s | 11 MB 00:00 2026-03-31T22:53:27.852 INFO:teuthology.orchestra.run.vm05.stdout:(17/148): libcephfs2-20.2.0-721.g5bb32787.el9.x 1.9 MB/s | 867 kB 00:00 2026-03-31T22:53:27.907 INFO:teuthology.orchestra.run.vm05.stdout:(18/148): libradosstriper1-20.2.0-721.g5bb32787 2.0 MB/s | 250 kB 00:00 2026-03-31T22:53:27.929 INFO:teuthology.orchestra.run.vm00.stdout:(32/148): ceph-mgr-k8sevents-20.2.0-721.g5bb327 168 kB/s | 22 kB 00:00 2026-03-31T22:53:28.014 INFO:teuthology.orchestra.run.vm00.stdout:(33/148): ceph-mgr-diskprediction-local-20.2.0- 7.1 MB/s | 7.4 MB 00:01 2026-03-31T22:53:28.016 INFO:teuthology.orchestra.run.vm05.stdout:(19/148): python3-ceph-argparse-20.2.0-721.g5bb 412 kB/s | 45 kB 00:00 2026-03-31T22:53:28.050 INFO:teuthology.orchestra.run.vm00.stdout:(34/148): ceph-mgr-modules-core-20.2.0-721.g5bb 2.3 MB/s | 290 kB 00:00 2026-03-31T22:53:28.126 INFO:teuthology.orchestra.run.vm05.stdout:(20/148): python3-ceph-common-20.2.0-721.g5bb32 1.6 MB/s | 175 kB 00:00 2026-03-31T22:53:28.130 INFO:teuthology.orchestra.run.vm00.stdout:(35/148): ceph-mgr-rook-20.2.0-721.g5bb32787.el 433 kB/s | 50 kB 00:00 2026-03-31T22:53:28.166 INFO:teuthology.orchestra.run.vm00.stdout:(36/148): ceph-prometheus-alerts-20.2.0-721.g5b 150 kB/s | 17 kB 00:00 2026-03-31T22:53:28.235 INFO:teuthology.orchestra.run.vm05.stdout:(21/148): python3-cephfs-20.2.0-721.g5bb32787.e 1.5 MB/s | 163 kB 00:00 2026-03-31T22:53:28.249 INFO:teuthology.orchestra.run.vm00.stdout:(37/148): ceph-volume-20.2.0-721.g5bb32787.el9. 
2.5 MB/s | 298 kB 00:00 2026-03-31T22:53:28.257 INFO:teuthology.orchestra.run.vm09.stdout:(7/148): ceph-mon-20.2.0-721.g5bb32787.el9.x86_ 3.0 MB/s | 5.0 MB 00:01 2026-03-31T22:53:28.302 INFO:teuthology.orchestra.run.vm00.stdout:(38/148): cephadm-20.2.0-721.g5bb32787.el9.noar 7.4 MB/s | 1.0 MB 00:00 2026-03-31T22:53:28.353 INFO:teuthology.orchestra.run.vm05.stdout:(22/148): python3-rados-20.2.0-721.g5bb32787.el 2.7 MB/s | 323 kB 00:00 2026-03-31T22:53:28.395 INFO:teuthology.orchestra.run.vm00.stdout:(39/148): bzip2-1.0.8-11.el9.x86_64.rpm 375 kB/s | 55 kB 00:00 2026-03-31T22:53:28.448 INFO:teuthology.orchestra.run.vm00.stdout:(40/148): fuse-2.9.9-17.el9.x86_64.rpm 1.5 MB/s | 80 kB 00:00 2026-03-31T22:53:28.464 INFO:teuthology.orchestra.run.vm05.stdout:(23/148): python3-rbd-20.2.0-721.g5bb32787.el9. 2.7 MB/s | 304 kB 00:00 2026-03-31T22:53:28.479 INFO:teuthology.orchestra.run.vm00.stdout:(41/148): ledmon-libs-1.1.0-3.el9.x86_64.rpm 1.3 MB/s | 40 kB 00:00 2026-03-31T22:53:28.514 INFO:teuthology.orchestra.run.vm00.stdout:(42/148): libconfig-1.7.2-9.el9.x86_64.rpm 2.0 MB/s | 72 kB 00:00 2026-03-31T22:53:28.519 INFO:teuthology.orchestra.run.vm00.stdout:(43/148): cryptsetup-2.8.1-3.el9.x86_64.rpm 1.6 MB/s | 351 kB 00:00 2026-03-31T22:53:28.573 INFO:teuthology.orchestra.run.vm05.stdout:(24/148): python3-rgw-20.2.0-721.g5bb32787.el9. 914 kB/s | 99 kB 00:00 2026-03-31T22:53:28.587 INFO:teuthology.orchestra.run.vm00.stdout:(44/148): libquadmath-11.5.0-14.el9.x86_64.rpm 2.7 MB/s | 184 kB 00:00 2026-03-31T22:53:28.629 INFO:teuthology.orchestra.run.vm00.stdout:(45/148): libgfortran-11.5.0-14.el9.x86_64.rpm 6.8 MB/s | 794 kB 00:00 2026-03-31T22:53:28.656 INFO:teuthology.orchestra.run.vm00.stdout:(46/148): pciutils-3.7.0-7.el9.x86_64.rpm 3.4 MB/s | 93 kB 00:00 2026-03-31T22:53:28.662 INFO:teuthology.orchestra.run.vm00.stdout:(47/148): mailcap-2.1.49-5.el9.noarch.rpm 441 kB/s | 33 kB 00:00 2026-03-31T22:53:28.682 INFO:teuthology.orchestra.run.vm05.stdout:(25/148): rbd-fuse-20.2.0-721.g5bb32787.el9.x86 839 kB/s | 91 kB 00:00 2026-03-31T22:53:28.699 INFO:teuthology.orchestra.run.vm00.stdout:(48/148): python3-cffi-1.14.5-5.el9.x86_64.rpm 5.8 MB/s | 253 kB 00:00 2026-03-31T22:53:28.728 INFO:teuthology.orchestra.run.vm00.stdout:(49/148): python3-ply-3.11-14.el9.noarch.rpm 3.6 MB/s | 106 kB 00:00 2026-03-31T22:53:28.758 INFO:teuthology.orchestra.run.vm00.stdout:(50/148): python3-pycparser-2.20-6.el9.noarch.r 4.5 MB/s | 135 kB 00:00 2026-03-31T22:53:28.787 INFO:teuthology.orchestra.run.vm00.stdout:(51/148): python3-requests-2.25.1-10.el9.noarch 4.2 MB/s | 126 kB 00:00 2026-03-31T22:53:28.809 INFO:teuthology.orchestra.run.vm00.stdout:(52/148): python3-cryptography-36.0.1-5.el9.x86 8.5 MB/s | 1.2 MB 00:00 2026-03-31T22:53:28.831 INFO:teuthology.orchestra.run.vm00.stdout:(53/148): python3-urllib3-1.26.5-7.el9.noarch.r 4.9 MB/s | 218 kB 00:00 2026-03-31T22:53:28.862 INFO:teuthology.orchestra.run.vm00.stdout:(54/148): unzip-6.0-59.el9.x86_64.rpm 5.8 MB/s | 182 kB 00:00 2026-03-31T22:53:28.895 INFO:teuthology.orchestra.run.vm00.stdout:(55/148): smartmontools-7.2-10.el9.x86_64.rpm 6.4 MB/s | 556 kB 00:00 2026-03-31T22:53:28.896 INFO:teuthology.orchestra.run.vm00.stdout:(56/148): zip-3.0-35.el9.x86_64.rpm 7.6 MB/s | 266 kB 00:00 2026-03-31T22:53:28.924 INFO:teuthology.orchestra.run.vm00.stdout:(57/148): boost-program-options-1.75.0-13.el9.x 3.5 MB/s | 104 kB 00:00 2026-03-31T22:53:28.945 INFO:teuthology.orchestra.run.vm00.stdout:(58/148): flexiblas-3.0.4-9.el9.x86_64.rpm 603 kB/s | 30 kB 00:00 2026-03-31T22:53:28.963 
INFO:teuthology.orchestra.run.vm05.stdout:(26/148): rbd-mirror-20.2.0-721.g5bb32787.el9.x 10 MB/s | 2.9 MB 00:00 2026-03-31T22:53:28.983 INFO:teuthology.orchestra.run.vm00.stdout:(59/148): flexiblas-openblas-openmp-3.0.4-9.el9 397 kB/s | 15 kB 00:00 2026-03-31T22:53:29.014 INFO:teuthology.orchestra.run.vm00.stdout:(60/148): libnbd-1.20.3-4.el9.x86_64.rpm 5.2 MB/s | 164 kB 00:00 2026-03-31T22:53:29.030 INFO:teuthology.orchestra.run.vm00.stdout:(61/148): flexiblas-netlib-3.0.4-9.el9.x86_64.r 28 MB/s | 3.0 MB 00:00 2026-03-31T22:53:29.044 INFO:teuthology.orchestra.run.vm00.stdout:(62/148): libpmemobj-1.12.1-1.el9.x86_64.rpm 5.3 MB/s | 160 kB 00:00 2026-03-31T22:53:29.057 INFO:teuthology.orchestra.run.vm00.stdout:(63/148): librabbitmq-0.11.0-7.el9.x86_64.rpm 1.6 MB/s | 45 kB 00:00 2026-03-31T22:53:29.080 INFO:teuthology.orchestra.run.vm05.stdout:(27/148): rbd-nbd-20.2.0-721.g5bb32787.el9.x86_ 1.5 MB/s | 179 kB 00:00 2026-03-31T22:53:29.083 INFO:teuthology.orchestra.run.vm00.stdout:(64/148): librdkafka-1.6.1-102.el9.x86_64.rpm 17 MB/s | 662 kB 00:00 2026-03-31T22:53:29.088 INFO:teuthology.orchestra.run.vm00.stdout:(65/148): libstoragemgmt-1.10.1-1.el9.x86_64.rp 7.9 MB/s | 246 kB 00:00 2026-03-31T22:53:29.126 INFO:teuthology.orchestra.run.vm00.stdout:(66/148): libxslt-1.1.34-12.el9.x86_64.rpm 5.3 MB/s | 233 kB 00:00 2026-03-31T22:53:29.135 INFO:teuthology.orchestra.run.vm00.stdout:(67/148): lttng-ust-2.12.0-6.el9.x86_64.rpm 6.1 MB/s | 292 kB 00:00 2026-03-31T22:53:29.156 INFO:teuthology.orchestra.run.vm00.stdout:(68/148): lua-5.4.4-4.el9.x86_64.rpm 6.2 MB/s | 188 kB 00:00 2026-03-31T22:53:29.162 INFO:teuthology.orchestra.run.vm00.stdout:(69/148): openblas-0.3.29-1.el9.x86_64.rpm 1.5 MB/s | 42 kB 00:00 2026-03-31T22:53:29.194 INFO:teuthology.orchestra.run.vm05.stdout:(28/148): ceph-grafana-dashboards-20.2.0-721.g5 379 kB/s | 43 kB 00:00 2026-03-31T22:53:29.297 INFO:teuthology.orchestra.run.vm00.stdout:(70/148): perl-Benchmark-1.23-483.el9.noarch.rp 195 kB/s | 26 kB 00:00 2026-03-31T22:53:29.313 INFO:teuthology.orchestra.run.vm05.stdout:(29/148): ceph-mgr-cephadm-20.2.0-721.g5bb32787 1.4 MB/s | 173 kB 00:00 2026-03-31T22:53:29.331 INFO:teuthology.orchestra.run.vm00.stdout:(71/148): openblas-openmp-0.3.29-1.el9.x86_64.r 30 MB/s | 5.3 MB 00:00 2026-03-31T22:53:29.333 INFO:teuthology.orchestra.run.vm00.stdout:(72/148): perl-Test-Harness-3.42-461.el9.noarch 8.1 MB/s | 295 kB 00:00 2026-03-31T22:53:29.455 INFO:teuthology.orchestra.run.vm00.stdout:(73/148): protobuf-3.14.0-17.el9.x86_64.rpm 8.2 MB/s | 1.0 MB 00:00 2026-03-31T22:53:29.475 INFO:teuthology.orchestra.run.vm05.stdout:(30/148): librgw2-20.2.0-721.g5bb32787.el9.x86_ 3.9 MB/s | 6.4 MB 00:01 2026-03-31T22:53:29.500 INFO:teuthology.orchestra.run.vm00.stdout:(74/148): python3-babel-2.9.1-2.el9.noarch.rpm 36 MB/s | 6.0 MB 00:00 2026-03-31T22:53:29.531 INFO:teuthology.orchestra.run.vm00.stdout:(75/148): python3-jinja2-2.11.3-8.el9.noarch.rp 7.8 MB/s | 249 kB 00:00 2026-03-31T22:53:29.539 INFO:teuthology.orchestra.run.vm00.stdout:(76/148): python3-devel-3.9.25-3.el9.x86_64.rpm 2.8 MB/s | 244 kB 00:00 2026-03-31T22:53:29.573 INFO:teuthology.orchestra.run.vm00.stdout:(77/148): python3-jmespath-1.0.1-1.el9.noarch.r 1.1 MB/s | 48 kB 00:00 2026-03-31T22:53:29.586 INFO:teuthology.orchestra.run.vm00.stdout:(78/148): python3-libstoragemgmt-1.10.1-1.el9.x 3.7 MB/s | 177 kB 00:00 2026-03-31T22:53:29.632 INFO:teuthology.orchestra.run.vm00.stdout:(79/148): python3-markupsafe-1.1.1-12.el9.x86_6 757 kB/s | 35 kB 00:00 2026-03-31T22:53:29.633 
INFO:teuthology.orchestra.run.vm09.stdout:(8/148): ceph-common-20.2.0-721.g5bb32787.el9.x 3.3 MB/s | 24 MB 00:07 2026-03-31T22:53:29.681 INFO:teuthology.orchestra.run.vm00.stdout:(80/148): python3-lxml-4.6.5-3.el9.x86_64.rpm 11 MB/s | 1.2 MB 00:00 2026-03-31T22:53:29.810 INFO:teuthology.orchestra.run.vm00.stdout:(81/148): python3-numpy-f2py-1.23.5-2.el9.x86_6 3.4 MB/s | 442 kB 00:00 2026-03-31T22:53:29.839 INFO:teuthology.orchestra.run.vm00.stdout:(82/148): python3-packaging-20.9-5.el9.noarch.r 2.6 MB/s | 77 kB 00:00 2026-03-31T22:53:29.914 INFO:teuthology.orchestra.run.vm00.stdout:(83/148): python3-numpy-1.23.5-2.el9.x86_64.rpm 22 MB/s | 6.1 MB 00:00 2026-03-31T22:53:29.918 INFO:teuthology.orchestra.run.vm00.stdout:(84/148): python3-protobuf-3.14.0-17.el9.noarch 3.3 MB/s | 267 kB 00:00 2026-03-31T22:53:29.962 INFO:teuthology.orchestra.run.vm00.stdout:(85/148): python3-pyasn1-modules-0.4.8-7.el9.no 6.1 MB/s | 277 kB 00:00 2026-03-31T22:53:29.973 INFO:teuthology.orchestra.run.vm00.stdout:(86/148): python3-pyasn1-0.4.8-7.el9.noarch.rpm 2.6 MB/s | 157 kB 00:00 2026-03-31T22:53:29.989 INFO:teuthology.orchestra.run.vm00.stdout:(87/148): python3-requests-oauthlib-1.3.0-12.el 2.0 MB/s | 54 kB 00:00 2026-03-31T22:53:30.017 INFO:teuthology.orchestra.run.vm00.stdout:(88/148): python3-toml-0.10.2-6.el9.noarch.rpm 1.5 MB/s | 42 kB 00:00 2026-03-31T22:53:30.052 INFO:teuthology.orchestra.run.vm00.stdout:(89/148): qatlib-25.08.0-2.el9.x86_64.rpm 6.7 MB/s | 240 kB 00:00 2026-03-31T22:53:30.090 INFO:teuthology.orchestra.run.vm00.stdout:(90/148): qatlib-service-25.08.0-2.el9.x86_64.r 974 kB/s | 37 kB 00:00 2026-03-31T22:53:30.119 INFO:teuthology.orchestra.run.vm00.stdout:(91/148): qatzip-libs-1.3.1-1.el9.x86_64.rpm 2.3 MB/s | 66 kB 00:00 2026-03-31T22:53:30.151 INFO:teuthology.orchestra.run.vm00.stdout:(92/148): socat-1.7.4.1-8.el9.x86_64.rpm 9.2 MB/s | 303 kB 00:00 2026-03-31T22:53:30.207 INFO:teuthology.orchestra.run.vm00.stdout:(93/148): xmlsec1-1.2.29-13.el9.x86_64.rpm 3.4 MB/s | 189 kB 00:00 2026-03-31T22:53:30.251 INFO:teuthology.orchestra.run.vm00.stdout:(94/148): xmlsec1-openssl-1.2.29-13.el9.x86_64. 2.0 MB/s | 90 kB 00:00 2026-03-31T22:53:30.265 INFO:teuthology.orchestra.run.vm09.stdout:(9/148): ceph-selinux-20.2.0-721.g5bb32787.el9. 
40 kB/s | 25 kB 00:00 2026-03-31T22:53:30.278 INFO:teuthology.orchestra.run.vm00.stdout:(95/148): xmlstarlet-1.6.1-20.el9.x86_64.rpm 2.3 MB/s | 64 kB 00:00 2026-03-31T22:53:30.352 INFO:teuthology.orchestra.run.vm00.stdout:(96/148): lua-devel-5.4.4-4.el9.x86_64.rpm 303 kB/s | 22 kB 00:00 2026-03-31T22:53:30.478 INFO:teuthology.orchestra.run.vm00.stdout:(97/148): python3-scipy-1.9.3-2.el9.x86_64.rpm 38 MB/s | 19 MB 00:00 2026-03-31T22:53:30.494 INFO:teuthology.orchestra.run.vm00.stdout:(98/148): abseil-cpp-20211102.0-4.el9.x86_64.rp 34 MB/s | 551 kB 00:00 2026-03-31T22:53:30.502 INFO:teuthology.orchestra.run.vm00.stdout:(99/148): gperftools-libs-2.9.1-3.el9.x86_64.rp 40 MB/s | 308 kB 00:00 2026-03-31T22:53:30.504 INFO:teuthology.orchestra.run.vm00.stdout:(100/148): protobuf-compiler-3.14.0-17.el9.x86_ 5.6 MB/s | 862 kB 00:00 2026-03-31T22:53:30.505 INFO:teuthology.orchestra.run.vm00.stdout:(101/148): grpc-data-1.46.7-10.el9.noarch.rpm 5.9 MB/s | 19 kB 00:00 2026-03-31T22:53:30.509 INFO:teuthology.orchestra.run.vm00.stdout:(102/148): libarrow-doc-9.0.0-15.el9.noarch.rpm 6.3 MB/s | 25 kB 00:00 2026-03-31T22:53:30.513 INFO:teuthology.orchestra.run.vm00.stdout:(103/148): liboath-2.6.12-1.el9.x86_64.rpm 12 MB/s | 49 kB 00:00 2026-03-31T22:53:30.518 INFO:teuthology.orchestra.run.vm00.stdout:(104/148): libunwind-1.6.2-1.el9.x86_64.rpm 16 MB/s | 67 kB 00:00 2026-03-31T22:53:30.523 INFO:teuthology.orchestra.run.vm00.stdout:(105/148): luarocks-3.9.2-5.el9.noarch.rpm 26 MB/s | 151 kB 00:00 2026-03-31T22:53:30.544 INFO:teuthology.orchestra.run.vm00.stdout:(106/148): parquet-libs-9.0.0-15.el9.x86_64.rpm 41 MB/s | 838 kB 00:00 2026-03-31T22:53:30.560 INFO:teuthology.orchestra.run.vm00.stdout:(107/148): python3-asyncssh-2.13.2-5.el9.noarch 34 MB/s | 548 kB 00:00 2026-03-31T22:53:30.565 INFO:teuthology.orchestra.run.vm00.stdout:(108/148): python3-autocommand-2.2.2-8.el9.noar 6.0 MB/s | 29 kB 00:00 2026-03-31T22:53:30.572 INFO:teuthology.orchestra.run.vm00.stdout:(109/148): python3-backports-tarfile-1.2.0-1.el 9.2 MB/s | 60 kB 00:00 2026-03-31T22:53:30.585 INFO:teuthology.orchestra.run.vm00.stdout:(110/148): libarrow-9.0.0-15.el9.x86_64.rpm 54 MB/s | 4.4 MB 00:00 2026-03-31T22:53:30.586 INFO:teuthology.orchestra.run.vm00.stdout:(111/148): python3-bcrypt-3.2.2-1.el9.x86_64.rp 2.9 MB/s | 43 kB 00:00 2026-03-31T22:53:30.760 INFO:teuthology.orchestra.run.vm00.stdout:(112/148): ceph-test-20.2.0-721.g5bb32787.el9.x 13 MB/s | 84 MB 00:06 2026-03-31T22:53:30.761 INFO:teuthology.orchestra.run.vm00.stdout:(113/148): python3-certifi-2023.05.07-4.el9.noa 81 kB/s | 14 kB 00:00 2026-03-31T22:53:30.762 INFO:teuthology.orchestra.run.vm00.stdout:(114/148): python3-cachetools-4.2.4-1.el9.noarc 182 kB/s | 32 kB 00:00 2026-03-31T22:53:30.766 INFO:teuthology.orchestra.run.vm00.stdout:(115/148): python3-cheroot-10.0.1-5.el9.noarch. 32 MB/s | 173 kB 00:00 2026-03-31T22:53:30.768 INFO:teuthology.orchestra.run.vm00.stdout:(116/148): python3-google-auth-2.45.0-1.el9.noa 45 MB/s | 254 kB 00:00 2026-03-31T22:53:30.774 INFO:teuthology.orchestra.run.vm00.stdout:(117/148): python3-grpcio-tools-1.46.7-10.el9.x 24 MB/s | 144 kB 00:00 2026-03-31T22:53:30.781 INFO:teuthology.orchestra.run.vm00.stdout:(118/148): python3-influxdb-5.3.1-1.el9.noarch. 
19 MB/s | 139 kB 00:00 2026-03-31T22:53:30.783 INFO:teuthology.orchestra.run.vm00.stdout:(119/148): python3-cherrypy-18.10.0-5.el9.noarc 13 MB/s | 290 kB 00:00 2026-03-31T22:53:30.786 INFO:teuthology.orchestra.run.vm00.stdout:(120/148): python3-isodate-0.6.1-3.el9.noarch.r 10 MB/s | 56 kB 00:00 2026-03-31T22:53:30.788 INFO:teuthology.orchestra.run.vm00.stdout:(121/148): python3-jaraco-8.2.1-3.el9.noarch.rp 2.0 MB/s | 11 kB 00:00 2026-03-31T22:53:30.792 INFO:teuthology.orchestra.run.vm00.stdout:(122/148): python3-jaraco-classes-3.2.1-5.el9.n 3.4 MB/s | 18 kB 00:00 2026-03-31T22:53:30.794 INFO:teuthology.orchestra.run.vm00.stdout:(123/148): python3-jaraco-collections-3.0.0-8.e 3.8 MB/s | 23 kB 00:00 2026-03-31T22:53:30.800 INFO:teuthology.orchestra.run.vm00.stdout:(124/148): python3-grpcio-1.46.7-10.el9.x86_64. 60 MB/s | 2.0 MB 00:00 2026-03-31T22:53:30.801 INFO:teuthology.orchestra.run.vm00.stdout:(125/148): python3-jaraco-context-6.0.1-3.el9.n 2.0 MB/s | 20 kB 00:00 2026-03-31T22:53:30.802 INFO:teuthology.orchestra.run.vm00.stdout:(126/148): python3-jaraco-functools-3.5.0-2.el9 2.4 MB/s | 19 kB 00:00 2026-03-31T22:53:30.803 INFO:teuthology.orchestra.run.vm00.stdout:(127/148): python3-jaraco-text-4.0.0-2.el9.noar 10 MB/s | 26 kB 00:00 2026-03-31T22:53:30.810 INFO:teuthology.orchestra.run.vm00.stdout:(128/148): python3-more-itertools-8.12.0-2.el9. 10 MB/s | 79 kB 00:00 2026-03-31T22:53:30.811 INFO:teuthology.orchestra.run.vm00.stdout:(129/148): python3-msgpack-1.0.3-2.el9.x86_64.r 10 MB/s | 86 kB 00:00 2026-03-31T22:53:30.815 INFO:teuthology.orchestra.run.vm00.stdout:(130/148): python3-natsort-7.1.1-5.el9.noarch.r 10 MB/s | 58 kB 00:00 2026-03-31T22:53:30.818 INFO:teuthology.orchestra.run.vm00.stdout:(131/148): python3-kubernetes-26.1.0-3.el9.noar 64 MB/s | 1.0 MB 00:00 2026-03-31T22:53:30.819 INFO:teuthology.orchestra.run.vm00.stdout:(132/148): python3-portend-3.1.0-2.el9.noarch.r 2.1 MB/s | 16 kB 00:00 2026-03-31T22:53:30.823 INFO:teuthology.orchestra.run.vm00.stdout:(133/148): python3-pyOpenSSL-21.0.0-1.el9.noarc 12 MB/s | 90 kB 00:00 2026-03-31T22:53:30.827 INFO:teuthology.orchestra.run.vm00.stdout:(134/148): python3-repoze-lru-0.7-16.el9.noarch 3.5 MB/s | 31 kB 00:00 2026-03-31T22:53:30.834 INFO:teuthology.orchestra.run.vm00.stdout:(135/148): python3-saml-1.16.0-1.el9.noarch.rpm 17 MB/s | 125 kB 00:00 2026-03-31T22:53:30.837 INFO:teuthology.orchestra.run.vm00.stdout:(136/148): python3-tempora-5.0.0-2.el9.noarch.r 15 MB/s | 36 kB 00:00 2026-03-31T22:53:30.840 INFO:teuthology.orchestra.run.vm00.stdout:(137/148): python3-typing-extensions-4.15.0-1.e 25 MB/s | 86 kB 00:00 2026-03-31T22:53:30.842 INFO:teuthology.orchestra.run.vm00.stdout:(138/148): python3-rsa-4.9-2.el9.noarch.rpm 3.1 MB/s | 59 kB 00:00 2026-03-31T22:53:30.844 INFO:teuthology.orchestra.run.vm00.stdout:(139/148): python3-websocket-client-1.2.3-2.el9 25 MB/s | 90 kB 00:00 2026-03-31T22:53:30.846 INFO:teuthology.orchestra.run.vm00.stdout:(140/148): python3-routes-2.5.1-5.el9.noarch.rp 6.9 MB/s | 188 kB 00:00 2026-03-31T22:53:30.847 INFO:teuthology.orchestra.run.vm00.stdout:(141/148): python3-xmlsec-1.3.13-1.el9.x86_64.r 10 MB/s | 48 kB 00:00 2026-03-31T22:53:30.847 INFO:teuthology.orchestra.run.vm00.stdout:(142/148): python3-xmltodict-0.12.0-15.el9.noar 7.7 MB/s | 22 kB 00:00 2026-03-31T22:53:30.849 INFO:teuthology.orchestra.run.vm00.stdout:(143/148): python3-zc-lockfile-2.0-10.el9.noarc 6.6 MB/s | 20 kB 00:00 2026-03-31T22:53:30.852 INFO:teuthology.orchestra.run.vm00.stdout:(144/148): s3cmd-2.4.0-1.el9.noarch.rpm 40 MB/s | 206 
kB 00:00 2026-03-31T22:53:30.854 INFO:teuthology.orchestra.run.vm00.stdout:(145/148): re2-20211101-20.el9.x86_64.rpm 26 MB/s | 191 kB 00:00 2026-03-31T22:53:30.878 INFO:teuthology.orchestra.run.vm00.stdout:(146/148): thrift-0.15.0-4.el9.x86_64.rpm 55 MB/s | 1.6 MB 00:00 2026-03-31T22:53:31.069 INFO:teuthology.orchestra.run.vm05.stdout:(31/148): ceph-mgr-dashboard-20.2.0-721.g5bb327 6.0 MB/s | 11 MB 00:01 2026-03-31T22:53:31.177 INFO:teuthology.orchestra.run.vm05.stdout:(32/148): ceph-mgr-k8sevents-20.2.0-721.g5bb327 204 kB/s | 22 kB 00:00 2026-03-31T22:53:31.321 INFO:teuthology.orchestra.run.vm05.stdout:(33/148): ceph-mgr-modules-core-20.2.0-721.g5bb 2.0 MB/s | 290 kB 00:00 2026-03-31T22:53:31.430 INFO:teuthology.orchestra.run.vm05.stdout:(34/148): ceph-mgr-rook-20.2.0-721.g5bb32787.el 459 kB/s | 50 kB 00:00 2026-03-31T22:53:31.515 INFO:teuthology.orchestra.run.vm05.stdout:(35/148): ceph-mgr-diskprediction-local-20.2.0- 3.6 MB/s | 7.4 MB 00:02 2026-03-31T22:53:31.538 INFO:teuthology.orchestra.run.vm05.stdout:(36/148): ceph-prometheus-alerts-20.2.0-721.g5b 161 kB/s | 17 kB 00:00 2026-03-31T22:53:31.640 INFO:teuthology.orchestra.run.vm05.stdout:(37/148): ceph-volume-20.2.0-721.g5bb32787.el9. 2.3 MB/s | 298 kB 00:00 2026-03-31T22:53:31.801 INFO:teuthology.orchestra.run.vm00.stdout:(147/148): librbd1-20.2.0-721.g5bb32787.el9.x86 3.0 MB/s | 2.8 MB 00:00 2026-03-31T22:53:31.851 INFO:teuthology.orchestra.run.vm05.stdout:(38/148): bzip2-1.0.8-11.el9.x86_64.rpm 260 kB/s | 55 kB 00:00 2026-03-31T22:53:31.866 INFO:teuthology.orchestra.run.vm05.stdout:(39/148): cephadm-20.2.0-721.g5bb32787.el9.noar 3.0 MB/s | 1.0 MB 00:00 2026-03-31T22:53:31.891 INFO:teuthology.orchestra.run.vm00.stdout:(148/148): librados2-20.2.0-721.g5bb32787.el9.x 3.4 MB/s | 3.5 MB 00:01 2026-03-31T22:53:31.894 INFO:teuthology.orchestra.run.vm00.stdout:-------------------------------------------------------------------------------- 2026-03-31T22:53:31.894 INFO:teuthology.orchestra.run.vm00.stdout:Total 24 MB/s | 269 MB 00:11 2026-03-31T22:53:31.987 INFO:teuthology.orchestra.run.vm05.stdout:(40/148): cryptsetup-2.8.1-3.el9.x86_64.rpm 2.5 MB/s | 351 kB 00:00 2026-03-31T22:53:31.995 INFO:teuthology.orchestra.run.vm05.stdout:(41/148): fuse-2.9.9-17.el9.x86_64.rpm 619 kB/s | 80 kB 00:00 2026-03-31T22:53:32.038 INFO:teuthology.orchestra.run.vm05.stdout:(42/148): libconfig-1.7.2-9.el9.x86_64.rpm 1.6 MB/s | 72 kB 00:00 2026-03-31T22:53:32.043 INFO:teuthology.orchestra.run.vm05.stdout:(43/148): ledmon-libs-1.1.0-3.el9.x86_64.rpm 716 kB/s | 40 kB 00:00 2026-03-31T22:53:32.098 INFO:teuthology.orchestra.run.vm05.stdout:(44/148): libquadmath-11.5.0-14.el9.x86_64.rpm 3.3 MB/s | 184 kB 00:00 2026-03-31T22:53:32.132 INFO:teuthology.orchestra.run.vm05.stdout:(45/148): libgfortran-11.5.0-14.el9.x86_64.rpm 8.3 MB/s | 794 kB 00:00 2026-03-31T22:53:32.132 INFO:teuthology.orchestra.run.vm05.stdout:(46/148): mailcap-2.1.49-5.el9.noarch.rpm 965 kB/s | 33 kB 00:00 2026-03-31T22:53:32.185 INFO:teuthology.orchestra.run.vm05.stdout:(47/148): pciutils-3.7.0-7.el9.x86_64.rpm 1.7 MB/s | 93 kB 00:00 2026-03-31T22:53:32.203 INFO:teuthology.orchestra.run.vm05.stdout:(48/148): python3-cffi-1.14.5-5.el9.x86_64.rpm 3.5 MB/s | 253 kB 00:00 2026-03-31T22:53:32.250 INFO:teuthology.orchestra.run.vm05.stdout:(49/148): python3-cryptography-36.0.1-5.el9.x86 19 MB/s | 1.2 MB 00:00 2026-03-31T22:53:32.263 INFO:teuthology.orchestra.run.vm05.stdout:(50/148): python3-ply-3.11-14.el9.noarch.rpm 1.8 MB/s | 106 kB 00:00 2026-03-31T22:53:32.296 
INFO:teuthology.orchestra.run.vm05.stdout:(51/148): python3-pycparser-2.20-6.el9.noarch.r 2.9 MB/s | 135 kB 00:00 2026-03-31T22:53:32.324 INFO:teuthology.orchestra.run.vm05.stdout:(52/148): python3-requests-2.25.1-10.el9.noarch 2.0 MB/s | 126 kB 00:00 2026-03-31T22:53:32.365 INFO:teuthology.orchestra.run.vm05.stdout:(53/148): python3-urllib3-1.26.5-7.el9.noarch.r 3.1 MB/s | 218 kB 00:00 2026-03-31T22:53:32.386 INFO:teuthology.orchestra.run.vm05.stdout:(54/148): smartmontools-7.2-10.el9.x86_64.rpm 8.7 MB/s | 556 kB 00:00 2026-03-31T22:53:32.414 INFO:teuthology.orchestra.run.vm05.stdout:(55/148): unzip-6.0-59.el9.x86_64.rpm 3.6 MB/s | 182 kB 00:00 2026-03-31T22:53:32.415 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check 2026-03-31T22:53:32.432 INFO:teuthology.orchestra.run.vm05.stdout:(56/148): zip-3.0-35.el9.x86_64.rpm 5.7 MB/s | 266 kB 00:00 2026-03-31T22:53:32.479 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded. 2026-03-31T22:53:32.479 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test 2026-03-31T22:53:32.782 INFO:teuthology.orchestra.run.vm05.stdout:(57/148): flexiblas-3.0.4-9.el9.x86_64.rpm 85 kB/s | 30 kB 00:00 2026-03-31T22:53:32.832 INFO:teuthology.orchestra.run.vm05.stdout:(58/148): boost-program-options-1.75.0-13.el9.x 249 kB/s | 104 kB 00:00 2026-03-31T22:53:32.966 INFO:teuthology.orchestra.run.vm05.stdout:(59/148): flexiblas-openblas-openmp-3.0.4-9.el9 110 kB/s | 15 kB 00:00 2026-03-31T22:53:33.166 INFO:teuthology.orchestra.run.vm05.stdout:(60/148): flexiblas-netlib-3.0.4-9.el9.x86_64.r 7.8 MB/s | 3.0 MB 00:00 2026-03-31T22:53:33.167 INFO:teuthology.orchestra.run.vm05.stdout:(61/148): libnbd-1.20.3-4.el9.x86_64.rpm 815 kB/s | 164 kB 00:00 2026-03-31T22:53:33.225 INFO:teuthology.orchestra.run.vm05.stdout:(62/148): libpmemobj-1.12.1-1.el9.x86_64.rpm 2.7 MB/s | 160 kB 00:00 2026-03-31T22:53:33.227 INFO:teuthology.orchestra.run.vm05.stdout:(63/148): librabbitmq-0.11.0-7.el9.x86_64.rpm 765 kB/s | 45 kB 00:00 2026-03-31T22:53:33.287 INFO:teuthology.orchestra.run.vm05.stdout:(64/148): libstoragemgmt-1.10.1-1.el9.x86_64.rp 4.0 MB/s | 246 kB 00:00 2026-03-31T22:53:33.333 INFO:teuthology.orchestra.run.vm05.stdout:(65/148): librdkafka-1.6.1-102.el9.x86_64.rpm 6.0 MB/s | 662 kB 00:00 2026-03-31T22:53:33.336 INFO:teuthology.orchestra.run.vm05.stdout:(66/148): libxslt-1.1.34-12.el9.x86_64.rpm 4.7 MB/s | 233 kB 00:00 2026-03-31T22:53:33.398 INFO:teuthology.orchestra.run.vm05.stdout:(67/148): lua-5.4.4-4.el9.x86_64.rpm 3.0 MB/s | 188 kB 00:00 2026-03-31T22:53:33.398 INFO:teuthology.orchestra.run.vm09.stdout:(10/148): ceph-osd-20.2.0-721.g5bb32787.el9.x86 2.5 MB/s | 17 MB 00:06 2026-03-31T22:53:33.425 INFO:teuthology.orchestra.run.vm05.stdout:(68/148): lttng-ust-2.12.0-6.el9.x86_64.rpm 3.1 MB/s | 292 kB 00:00 2026-03-31T22:53:33.444 INFO:teuthology.orchestra.run.vm05.stdout:(69/148): openblas-0.3.29-1.el9.x86_64.rpm 915 kB/s | 42 kB 00:00 2026-03-31T22:53:33.507 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded. 2026-03-31T22:53:33.507 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction 2026-03-31T22:53:33.511 INFO:teuthology.orchestra.run.vm09.stdout:(11/148): libcephfs-daemon-20.2.0-721.g5bb32787 316 kB/s | 35 kB 00:00 2026-03-31T22:53:33.529 INFO:teuthology.orchestra.run.vm05.stdout:(70/148): perl-Benchmark-1.23-483.el9.noarch.rp 314 kB/s | 26 kB 00:00 2026-03-31T22:53:33.623 INFO:teuthology.orchestra.run.vm09.stdout:(12/148): libcephfs-devel-20.2.0-721.g5bb32787. 
306 kB/s | 34 kB 00:00 2026-03-31T22:53:33.736 INFO:teuthology.orchestra.run.vm09.stdout:(13/148): libcephfs-proxy2-20.2.0-721.g5bb32787 215 kB/s | 24 kB 00:00 2026-03-31T22:53:33.770 INFO:teuthology.orchestra.run.vm05.stdout:(71/148): perl-Test-Harness-3.42-461.el9.noarch 1.2 MB/s | 295 kB 00:00 2026-03-31T22:53:33.812 INFO:teuthology.orchestra.run.vm05.stdout:(72/148): openblas-openmp-0.3.29-1.el9.x86_64.r 14 MB/s | 5.3 MB 00:00 2026-03-31T22:53:33.885 INFO:teuthology.orchestra.run.vm05.stdout:(73/148): protobuf-3.14.0-17.el9.x86_64.rpm 8.8 MB/s | 1.0 MB 00:00 2026-03-31T22:53:33.954 INFO:teuthology.orchestra.run.vm05.stdout:(74/148): python3-devel-3.9.25-3.el9.x86_64.rpm 3.5 MB/s | 244 kB 00:00 2026-03-31T22:53:34.151 INFO:teuthology.orchestra.run.vm05.stdout:(75/148): python3-babel-2.9.1-2.el9.noarch.rpm 18 MB/s | 6.0 MB 00:00 2026-03-31T22:53:34.152 INFO:teuthology.orchestra.run.vm05.stdout:(76/148): python3-jinja2-2.11.3-8.el9.noarch.rp 1.2 MB/s | 249 kB 00:00 2026-03-31T22:53:34.183 INFO:teuthology.orchestra.run.vm09.stdout:(14/148): libcephfs2-20.2.0-721.g5bb32787.el9.x 1.9 MB/s | 867 kB 00:00 2026-03-31T22:53:34.198 INFO:teuthology.orchestra.run.vm05.stdout:(77/148): python3-jmespath-1.0.1-1.el9.noarch.r 1.0 MB/s | 48 kB 00:00 2026-03-31T22:53:34.214 INFO:teuthology.orchestra.run.vm05.stdout:(78/148): python3-libstoragemgmt-1.10.1-1.el9.x 2.8 MB/s | 177 kB 00:00 2026-03-31T22:53:34.261 INFO:teuthology.orchestra.run.vm05.stdout:(79/148): python3-markupsafe-1.1.1-12.el9.x86_6 753 kB/s | 35 kB 00:00 2026-03-31T22:53:34.296 INFO:teuthology.orchestra.run.vm09.stdout:(15/148): libcephsqlite-20.2.0-721.g5bb32787.el 1.4 MB/s | 164 kB 00:00 2026-03-31T22:53:34.312 INFO:teuthology.orchestra.run.vm05.stdout:(80/148): python3-lxml-4.6.5-3.el9.x86_64.rpm 11 MB/s | 1.2 MB 00:00 2026-03-31T22:53:34.409 INFO:teuthology.orchestra.run.vm05.stdout:(81/148): python3-numpy-f2py-1.23.5-2.el9.x86_6 4.5 MB/s | 442 kB 00:00 2026-03-31T22:53:34.410 INFO:teuthology.orchestra.run.vm09.stdout:(16/148): librados-devel-20.2.0-721.g5bb32787.e 1.1 MB/s | 126 kB 00:00 2026-03-31T22:53:34.483 INFO:teuthology.orchestra.run.vm05.stdout:(82/148): python3-packaging-20.9-5.el9.noarch.r 1.0 MB/s | 77 kB 00:00 2026-03-31T22:53:34.525 INFO:teuthology.orchestra.run.vm09.stdout:(17/148): libradosstriper1-20.2.0-721.g5bb32787 2.1 MB/s | 250 kB 00:00 2026-03-31T22:53:34.555 INFO:teuthology.orchestra.run.vm05.stdout:(83/148): python3-numpy-1.23.5-2.el9.x86_64.rpm 21 MB/s | 6.1 MB 00:00 2026-03-31T22:53:34.556 INFO:teuthology.orchestra.run.vm05.stdout:(84/148): python3-protobuf-3.14.0-17.el9.noarch 3.6 MB/s | 267 kB 00:00 2026-03-31T22:53:34.593 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1 2026-03-31T22:53:34.600 INFO:teuthology.orchestra.run.vm00.stdout: Installing : thrift-0.15.0-4.el9.x86_64 1/150 2026-03-31T22:53:34.602 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-more-itertools-8.12.0-2.el9.noarch 2/150 2026-03-31T22:53:34.614 INFO:teuthology.orchestra.run.vm00.stdout: Installing : liboath-2.6.12-1.el9.x86_64 3/150 2026-03-31T22:53:34.615 INFO:teuthology.orchestra.run.vm05.stdout:(85/148): python3-pyasn1-0.4.8-7.el9.noarch.rpm 2.6 MB/s | 157 kB 00:00 2026-03-31T22:53:34.623 INFO:teuthology.orchestra.run.vm05.stdout:(86/148): python3-pyasn1-modules-0.4.8-7.el9.no 4.1 MB/s | 277 kB 00:00 2026-03-31T22:53:34.671 INFO:teuthology.orchestra.run.vm05.stdout:(87/148): python3-requests-oauthlib-1.3.0-12.el 952 kB/s | 54 kB 00:00 2026-03-31T22:53:34.731 
INFO:teuthology.orchestra.run.vm05.stdout:(88/148): python3-toml-0.10.2-6.el9.noarch.rpm 700 kB/s | 42 kB 00:00
2026-03-31T22:53:34.762 INFO:teuthology.orchestra.run.vm09.stdout:(18/148): ceph-radosgw-20.2.0-721.g5bb32787.el9 3.6 MB/s | 24 MB 00:06
2026-03-31T22:53:34.782 INFO:teuthology.orchestra.run.vm00.stdout: Installing : lttng-ust-2.12.0-6.el9.x86_64 4/150
2026-03-31T22:53:34.784 INFO:teuthology.orchestra.run.vm00.stdout: Upgrading : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 5/150
2026-03-31T22:53:34.794 INFO:teuthology.orchestra.run.vm05.stdout:(89/148): qatlib-25.08.0-2.el9.x86_64.rpm 3.8 MB/s | 240 kB 00:00
2026-03-31T22:53:34.838 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librados2-2:20.2.0-721.g5bb32787.el9.x86_64 5/150
2026-03-31T22:53:34.840 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 6/150
2026-03-31T22:53:34.853 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 6/150
2026-03-31T22:53:34.856 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_ 7/150
2026-03-31T22:53:34.857 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 8/150
2026-03-31T22:53:34.879 INFO:teuthology.orchestra.run.vm09.stdout:(19/148): python3-ceph-argparse-20.2.0-721.g5bb 385 kB/s | 45 kB 00:00
2026-03-31T22:53:34.886 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 8/150
2026-03-31T22:53:34.892 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 9/150
2026-03-31T22:53:34.901 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libxslt-1.1.34-12.el9.x86_64 10/150
2026-03-31T22:53:34.904 INFO:teuthology.orchestra.run.vm00.stdout: Installing : librdkafka-1.6.1-102.el9.x86_64 11/150
2026-03-31T22:53:34.908 INFO:teuthology.orchestra.run.vm00.stdout: Installing : librabbitmq-0.11.0-7.el9.x86_64 12/150
2026-03-31T22:53:34.911 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libpmemobj-1.12.1-1.el9.x86_64 13/150
2026-03-31T22:53:34.915 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-jaraco-8.2.1-3.el9.noarch 14/150
2026-03-31T22:53:35.048 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libnbd-1.20.3-4.el9.x86_64 15/150
2026-03-31T22:53:35.111 INFO:teuthology.orchestra.run.vm09.stdout:(20/148): python3-ceph-common-20.2.0-721.g5bb32 757 kB/s | 175 kB 00:00
2026-03-31T22:53:35.122 INFO:teuthology.orchestra.run.vm00.stdout: Upgrading : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 16/150
2026-03-31T22:53:35.178 INFO:teuthology.orchestra.run.vm05.stdout:(90/148): ceph-test-20.2.0-721.g5bb32787.el9.x8 9.8 MB/s | 84 MB 00:08
2026-03-31T22:53:35.211 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 16/150
2026-03-31T22:53:35.221 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-lxml-4.6.5-3.el9.x86_64 17/150
2026-03-31T22:53:35.231 INFO:teuthology.orchestra.run.vm00.stdout: Installing : xmlsec1-1.2.29-13.el9.x86_64 18/150
2026-03-31T22:53:35.233 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 19/150
2026-03-31T22:53:35.258 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 19/150
2026-03-31T22:53:35.259 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 20/150
2026-03-31T22:53:35.274 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 20/150
2026-03-31T22:53:35.309 INFO:teuthology.orchestra.run.vm00.stdout: Installing : re2-1:20211101-20.el9.x86_64 21/150
2026-03-31T22:53:35.336 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libarrow-9.0.0-15.el9.x86_64 22/150
2026-03-31T22:53:35.350 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-pyasn1-0.4.8-7.el9.noarch 23/150
2026-03-31T22:53:35.361 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-packaging-20.9-5.el9.noarch 24/150
2026-03-31T22:53:35.370 INFO:teuthology.orchestra.run.vm00.stdout: Installing : protobuf-3.14.0-17.el9.x86_64 25/150
2026-03-31T22:53:35.375 INFO:teuthology.orchestra.run.vm00.stdout: Installing : lua-5.4.4-4.el9.x86_64 26/150
2026-03-31T22:53:35.381 INFO:teuthology.orchestra.run.vm00.stdout: Installing : flexiblas-3.0.4-9.el9.x86_64 27/150
2026-03-31T22:53:35.408 INFO:teuthology.orchestra.run.vm00.stdout: Installing : unzip-6.0-59.el9.x86_64 28/150
2026-03-31T22:53:35.424 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-urllib3-1.26.5-7.el9.noarch 29/150
2026-03-31T22:53:35.429 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-requests-2.25.1-10.el9.noarch 30/150
2026-03-31T22:53:35.436 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libquadmath-11.5.0-14.el9.x86_64 31/150
2026-03-31T22:53:35.439 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libgfortran-11.5.0-14.el9.x86_64 32/150
2026-03-31T22:53:35.454 INFO:teuthology.orchestra.run.vm05.stdout:(91/148): qatlib-service-25.08.0-2.el9.x86_64.r 56 kB/s | 37 kB 00:00
2026-03-31T22:53:35.477 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ledmon-libs-1.1.0-3.el9.x86_64 33/150
2026-03-31T22:53:35.484 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 34/150
2026-03-31T22:53:35.494 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9 35/150
2026-03-31T22:53:35.508 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 36/150
2026-03-31T22:53:35.516 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-requests-oauthlib-1.3.0-12.el9.noarch 37/150
2026-03-31T22:53:35.545 INFO:teuthology.orchestra.run.vm00.stdout: Installing : zip-3.0-35.el9.x86_64 38/150
2026-03-31T22:53:35.550 INFO:teuthology.orchestra.run.vm00.stdout: Installing : luarocks-3.9.2-5.el9.noarch 39/150
2026-03-31T22:53:35.558 INFO:teuthology.orchestra.run.vm00.stdout: Installing : lua-devel-5.4.4-4.el9.x86_64 40/150
2026-03-31T22:53:35.617 INFO:teuthology.orchestra.run.vm00.stdout: Installing : protobuf-compiler-3.14.0-17.el9.x86_64 41/150
2026-03-31T22:53:35.634 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-pyasn1-modules-0.4.8-7.el9.noarch 42/150
2026-03-31T22:53:35.639 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-rsa-4.9-2.el9.noarch 43/150
2026-03-31T22:53:35.644 INFO:teuthology.orchestra.run.vm00.stdout: Installing : xmlsec1-openssl-1.2.29-13.el9.x86_64 44/150
2026-03-31T22:53:35.662 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-xmlsec-1.3.13-1.el9.x86_64 45/150
2026-03-31T22:53:35.669 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 46/150
2026-03-31T22:53:35.674 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-jaraco-classes-3.2.1-5.el9.noarch 47/150
2026-03-31T22:53:35.679 INFO:teuthology.orchestra.run.vm05.stdout:(92/148): qatzip-libs-1.3.1-1.el9.x86_64.rpm 133 kB/s | 66 kB 00:00
2026-03-31T22:53:35.684 INFO:teuthology.orchestra.run.vm00.stdout: Installing : xmlstarlet-1.6.1-20.el9.x86_64 48/150
2026-03-31T22:53:35.690 INFO:teuthology.orchestra.run.vm00.stdout: Installing : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 49/150
2026-03-31T22:53:35.695 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-zc-lockfile-2.0-10.el9.noarch 50/150
2026-03-31T22:53:35.712 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-xmltodict-0.12.0-15.el9.noarch 51/150
2026-03-31T22:53:35.719 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-websocket-client-1.2.3-2.el9.noarch 52/150
2026-03-31T22:53:35.725 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-typing-extensions-4.15.0-1.el9.noarch 53/150
2026-03-31T22:53:35.739 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-repoze-lru-0.7-16.el9.noarch 54/150
2026-03-31T22:53:35.751 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-routes-2.5.1-5.el9.noarch 55/150
2026-03-31T22:53:35.760 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-natsort-7.1.1-5.el9.noarch 56/150
2026-03-31T22:53:35.787 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-msgpack-1.0.3-2.el9.x86_64 57/150
2026-03-31T22:53:35.801 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-influxdb-5.3.1-1.el9.noarch 58/150
2026-03-31T22:53:35.822 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-isodate-0.6.1-3.el9.noarch 59/150
2026-03-31T22:53:35.828 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-saml-1.16.0-1.el9.noarch 60/150
2026-03-31T22:53:35.839 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-certifi-2023.05.07-4.el9.noarch 61/150
2026-03-31T22:53:35.866 INFO:teuthology.orchestra.run.vm05.stdout:(93/148): python3-scipy-1.9.3-2.el9.x86_64.rpm 15 MB/s | 19 MB 00:01
2026-03-31T22:53:35.868 INFO:teuthology.orchestra.run.vm05.stdout:(94/148): socat-1.7.4.1-8.el9.x86_64.rpm 733 kB/s | 303 kB 00:00
2026-03-31T22:53:35.869 INFO:teuthology.orchestra.run.vm05.stdout:(95/148): xmlsec1-1.2.29-13.el9.x86_64.rpm 992 kB/s | 189 kB 00:00
2026-03-31T22:53:35.887 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-cachetools-4.2.4-1.el9.noarch 62/150
2026-03-31T22:53:35.914 INFO:teuthology.orchestra.run.vm05.stdout:(96/148): xmlsec1-openssl-1.2.29-13.el9.x86_64. 1.9 MB/s | 90 kB 00:00
2026-03-31T22:53:35.925 INFO:teuthology.orchestra.run.vm05.stdout:(97/148): xmlstarlet-1.6.1-20.el9.x86_64.rpm 1.1 MB/s | 64 kB 00:00
2026-03-31T22:53:35.945 INFO:teuthology.orchestra.run.vm05.stdout:(98/148): abseil-cpp-20211102.0-4.el9.x86_64.rp 27 MB/s | 551 kB 00:00
2026-03-31T22:53:35.956 INFO:teuthology.orchestra.run.vm05.stdout:(99/148): gperftools-libs-2.9.1-3.el9.x86_64.rp 28 MB/s | 308 kB 00:00
2026-03-31T22:53:35.959 INFO:teuthology.orchestra.run.vm05.stdout:(100/148): grpc-data-1.46.7-10.el9.noarch.rpm 8.1 MB/s | 19 kB 00:00
2026-03-31T22:53:36.018 INFO:teuthology.orchestra.run.vm05.stdout:(101/148): libarrow-9.0.0-15.el9.x86_64.rpm 75 MB/s | 4.4 MB 00:00
2026-03-31T22:53:36.021 INFO:teuthology.orchestra.run.vm05.stdout:(102/148): libarrow-doc-9.0.0-15.el9.noarch.rpm 9.4 MB/s | 25 kB 00:00
2026-03-31T22:53:36.024 INFO:teuthology.orchestra.run.vm05.stdout:(103/148): liboath-2.6.12-1.el9.x86_64.rpm 16 MB/s | 49 kB 00:00
2026-03-31T22:53:36.027 INFO:teuthology.orchestra.run.vm05.stdout:(104/148): libunwind-1.6.2-1.el9.x86_64.rpm 21 MB/s | 67 kB 00:00
2026-03-31T22:53:36.031 INFO:teuthology.orchestra.run.vm05.stdout:(105/148): luarocks-3.9.2-5.el9.noarch.rpm 36 MB/s | 151 kB 00:00
2026-03-31T22:53:36.043 INFO:teuthology.orchestra.run.vm05.stdout:(106/148): parquet-libs-9.0.0-15.el9.x86_64.rpm 70 MB/s | 838 kB 00:00
2026-03-31T22:53:36.054 INFO:teuthology.orchestra.run.vm05.stdout:(107/148): python3-asyncssh-2.13.2-5.el9.noarch 48 MB/s | 548 kB 00:00
2026-03-31T22:53:36.057 INFO:teuthology.orchestra.run.vm05.stdout:(108/148): python3-autocommand-2.2.2-8.el9.noar 13 MB/s | 29 kB 00:00
2026-03-31T22:53:36.060 INFO:teuthology.orchestra.run.vm05.stdout:(109/148): python3-backports-tarfile-1.2.0-1.el 17 MB/s | 60 kB 00:00
2026-03-31T22:53:36.063 INFO:teuthology.orchestra.run.vm05.stdout:(110/148): python3-bcrypt-3.2.2-1.el9.x86_64.rp 18 MB/s | 43 kB 00:00
2026-03-31T22:53:36.065 INFO:teuthology.orchestra.run.vm05.stdout:(111/148): python3-cachetools-4.2.4-1.el9.noarc 16 MB/s | 32 kB 00:00
2026-03-31T22:53:36.067 INFO:teuthology.orchestra.run.vm05.stdout:(112/148): python3-certifi-2023.05.07-4.el9.noa 5.4 MB/s | 14 kB 00:00
2026-03-31T22:53:36.072 INFO:teuthology.orchestra.run.vm05.stdout:(113/148): python3-cheroot-10.0.1-5.el9.noarch. 37 MB/s | 173 kB 00:00
2026-03-31T22:53:36.078 INFO:teuthology.orchestra.run.vm05.stdout:(114/148): python3-cherrypy-18.10.0-5.el9.noarc 50 MB/s | 290 kB 00:00
2026-03-31T22:53:36.078 INFO:teuthology.orchestra.run.vm09.stdout:(21/148): python3-cephfs-20.2.0-721.g5bb32787.e 168 kB/s | 163 kB 00:00
2026-03-31T22:53:36.083 INFO:teuthology.orchestra.run.vm05.stdout:(115/148): python3-google-auth-2.45.0-1.el9.noa 47 MB/s | 254 kB 00:00
2026-03-31T22:53:36.110 INFO:teuthology.orchestra.run.vm05.stdout:(116/148): python3-grpcio-1.46.7-10.el9.x86_64. 77 MB/s | 2.0 MB 00:00
2026-03-31T22:53:36.114 INFO:teuthology.orchestra.run.vm05.stdout:(117/148): python3-grpcio-tools-1.46.7-10.el9.x 39 MB/s | 144 kB 00:00
2026-03-31T22:53:36.118 INFO:teuthology.orchestra.run.vm05.stdout:(118/148): python3-influxdb-5.3.1-1.el9.noarch. 36 MB/s | 139 kB 00:00
2026-03-31T22:53:36.120 INFO:teuthology.orchestra.run.vm05.stdout:(119/148): python3-isodate-0.6.1-3.el9.noarch.r 23 MB/s | 56 kB 00:00
2026-03-31T22:53:36.129 INFO:teuthology.orchestra.run.vm05.stdout:(120/148): python3-jaraco-8.2.1-3.el9.noarch.rp 1.2 MB/s | 11 kB 00:00
2026-03-31T22:53:36.132 INFO:teuthology.orchestra.run.vm05.stdout:(121/148): python3-jaraco-classes-3.2.1-5.el9.n 8.0 MB/s | 18 kB 00:00
2026-03-31T22:53:36.134 INFO:teuthology.orchestra.run.vm05.stdout:(122/148): python3-jaraco-collections-3.0.0-8.e 11 MB/s | 23 kB 00:00
2026-03-31T22:53:36.137 INFO:teuthology.orchestra.run.vm05.stdout:(123/148): python3-jaraco-context-6.0.1-3.el9.n 7.2 MB/s | 20 kB 00:00
2026-03-31T22:53:36.139 INFO:teuthology.orchestra.run.vm05.stdout:(124/148): python3-jaraco-functools-3.5.0-2.el9 9.1 MB/s | 19 kB 00:00
2026-03-31T22:53:36.141 INFO:teuthology.orchestra.run.vm05.stdout:(125/148): python3-jaraco-text-4.0.0-2.el9.noar 12 MB/s | 26 kB 00:00
2026-03-31T22:53:36.155 INFO:teuthology.orchestra.run.vm05.stdout:(126/148): python3-kubernetes-26.1.0-3.el9.noar 73 MB/s | 1.0 MB 00:00
2026-03-31T22:53:36.159 INFO:teuthology.orchestra.run.vm05.stdout:(127/148): python3-more-itertools-8.12.0-2.el9. 24 MB/s | 79 kB 00:00
2026-03-31T22:53:36.162 INFO:teuthology.orchestra.run.vm05.stdout:(128/148): python3-msgpack-1.0.3-2.el9.x86_64.r 25 MB/s | 86 kB 00:00
2026-03-31T22:53:36.168 INFO:teuthology.orchestra.run.vm05.stdout:(129/148): python3-natsort-7.1.1-5.el9.noarch.r 10 MB/s | 58 kB 00:00
2026-03-31T22:53:36.170 INFO:teuthology.orchestra.run.vm05.stdout:(130/148): python3-portend-3.1.0-2.el9.noarch.r 8.1 MB/s | 16 kB 00:00
2026-03-31T22:53:36.174 INFO:teuthology.orchestra.run.vm05.stdout:(131/148): python3-pyOpenSSL-21.0.0-1.el9.noarc 23 MB/s | 90 kB 00:00
2026-03-31T22:53:36.176 INFO:teuthology.orchestra.run.vm05.stdout:(132/148): python3-repoze-lru-0.7-16.el9.noarch 13 MB/s | 31 kB 00:00
2026-03-31T22:53:36.183 INFO:teuthology.orchestra.run.vm05.stdout:(133/148): python3-routes-2.5.1-5.el9.noarch.rp 26 MB/s | 188 kB 00:00
2026-03-31T22:53:36.187 INFO:teuthology.orchestra.run.vm05.stdout:(134/148): python3-rsa-4.9-2.el9.noarch.rpm 20 MB/s | 59 kB 00:00
2026-03-31T22:53:36.191 INFO:teuthology.orchestra.run.vm05.stdout:(135/148): python3-saml-1.16.0-1.el9.noarch.rpm 26 MB/s | 125 kB 00:00
2026-03-31T22:53:36.194 INFO:teuthology.orchestra.run.vm05.stdout:(136/148): python3-tempora-5.0.0-2.el9.noarch.r 13 MB/s | 36 kB 00:00
2026-03-31T22:53:36.199 INFO:teuthology.orchestra.run.vm05.stdout:(137/148): python3-typing-extensions-4.15.0-1.e 18 MB/s | 86 kB 00:00
2026-03-31T22:53:36.203 INFO:teuthology.orchestra.run.vm05.stdout:(138/148): python3-websocket-client-1.2.3-2.el9 23 MB/s | 90 kB 00:00
2026-03-31T22:53:36.206 INFO:teuthology.orchestra.run.vm05.stdout:(139/148): python3-xmlsec-1.3.13-1.el9.x86_64.r 15 MB/s | 48 kB 00:00
2026-03-31T22:53:36.209 INFO:teuthology.orchestra.run.vm05.stdout:(140/148): python3-xmltodict-0.12.0-15.el9.noar 8.6 MB/s | 22 kB 00:00
2026-03-31T22:53:36.211 INFO:teuthology.orchestra.run.vm05.stdout:(141/148): python3-zc-lockfile-2.0-10.el9.noarc 11 MB/s | 20 kB 00:00
2026-03-31T22:53:36.217 INFO:teuthology.orchestra.run.vm05.stdout:(142/148): re2-20211101-20.el9.x86_64.rpm 33 MB/s | 191 kB 00:00
2026-03-31T22:53:36.222 INFO:teuthology.orchestra.run.vm05.stdout:(143/148): s3cmd-2.4.0-1.el9.noarch.rpm 36 MB/s | 206 kB 00:00
2026-03-31T22:53:36.246 INFO:teuthology.orchestra.run.vm05.stdout:(144/148): thrift-0.15.0-4.el9.x86_64.rpm 69 MB/s | 1.6 MB 00:00
2026-03-31T22:53:36.271 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-google-auth-1:2.45.0-1.el9.noarch 63/150
2026-03-31T22:53:36.286 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-kubernetes-1:26.1.0-3.el9.noarch 64/150
2026-03-31T22:53:36.293 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-backports-tarfile-1.2.0-1.el9.noarch 65/150
2026-03-31T22:53:36.303 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-jaraco-context-6.0.1-3.el9.noarch 66/150
2026-03-31T22:53:36.307 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-autocommand-2.2.2-8.el9.noarch 67/150
2026-03-31T22:53:36.312 INFO:teuthology.orchestra.run.vm09.stdout:(22/148): python3-rados-20.2.0-721.g5bb32787.el 1.4 MB/s | 323 kB 00:00
2026-03-31T22:53:36.315 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libunwind-1.6.2-1.el9.x86_64 68/150
2026-03-31T22:53:36.318 INFO:teuthology.orchestra.run.vm00.stdout: Installing : gperftools-libs-2.9.1-3.el9.x86_64 69/150
2026-03-31T22:53:36.320 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libarrow-doc-9.0.0-15.el9.noarch 70/150
2026-03-31T22:53:36.354 INFO:teuthology.orchestra.run.vm00.stdout: Installing : grpc-data-1.46.7-10.el9.noarch 71/150
2026-03-31T22:53:36.408 INFO:teuthology.orchestra.run.vm00.stdout: Installing : abseil-cpp-20211102.0-4.el9.x86_64 72/150
2026-03-31T22:53:36.421 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-grpcio-1.46.7-10.el9.x86_64 73/150
2026-03-31T22:53:36.430 INFO:teuthology.orchestra.run.vm00.stdout: Installing : socat-1.7.4.1-8.el9.x86_64 74/150
2026-03-31T22:53:36.435 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-toml-0.10.2-6.el9.noarch 75/150
2026-03-31T22:53:36.440 INFO:teuthology.orchestra.run.vm05.stdout:(145/148): lua-devel-5.4.4-4.el9.x86_64.rpm 39 kB/s | 22 kB 00:00
2026-03-31T22:53:36.444 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-jaraco-functools-3.5.0-2.el9.noarch 76/150
2026-03-31T22:53:36.449 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-jaraco-text-4.0.0-2.el9.noarch 77/150
2026-03-31T22:53:36.458 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-jaraco-collections-3.0.0-8.el9.noarch 78/150
2026-03-31T22:53:36.463 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-tempora-5.0.0-2.el9.noarch 79/150
2026-03-31T22:53:36.496 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-portend-3.1.0-2.el9.noarch 80/150
2026-03-31T22:53:36.508 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-protobuf-3.14.0-17.el9.noarch 81/150
2026-03-31T22:53:36.520 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-grpcio-tools-1.46.7-10.el9.x86_64 82/150
2026-03-31T22:53:36.529 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-markupsafe-1.1.1-12.el9.x86_64 83/150
2026-03-31T22:53:36.546 INFO:teuthology.orchestra.run.vm09.stdout:(23/148): python3-rbd-20.2.0-721.g5bb32787.el9. 1.3 MB/s | 304 kB 00:00
2026-03-31T22:53:36.572 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-jmespath-1.0.1-1.el9.noarch 84/150
2026-03-31T22:53:36.727 INFO:teuthology.orchestra.run.vm09.stdout:(24/148): python3-rgw-20.2.0-721.g5bb32787.el9. 551 kB/s | 99 kB 00:00
2026-03-31T22:53:36.731 INFO:teuthology.orchestra.run.vm05.stdout:(146/148): protobuf-compiler-3.14.0-17.el9.x86_ 1.0 MB/s | 862 kB 00:00
2026-03-31T22:53:36.832 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-devel-3.9.25-3.el9.x86_64 85/150
2026-03-31T22:53:36.845 INFO:teuthology.orchestra.run.vm09.stdout:(25/148): rbd-fuse-20.2.0-721.g5bb32787.el9.x86 770 kB/s | 91 kB 00:00
2026-03-31T22:53:36.862 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-babel-2.9.1-2.el9.noarch 86/150
2026-03-31T22:53:36.866 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-jinja2-2.11.3-8.el9.noarch 87/150
2026-03-31T22:53:36.869 INFO:teuthology.orchestra.run.vm00.stdout: Installing : perl-Benchmark-1.23-483.el9.noarch 88/150
2026-03-31T22:53:36.933 INFO:teuthology.orchestra.run.vm00.stdout: Installing : openblas-0.3.29-1.el9.x86_64 89/150
2026-03-31T22:53:36.938 INFO:teuthology.orchestra.run.vm00.stdout: Installing : openblas-openmp-0.3.29-1.el9.x86_64 90/150
2026-03-31T22:53:36.966 INFO:teuthology.orchestra.run.vm00.stdout: Installing : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 91/150
2026-03-31T22:53:37.059 INFO:teuthology.orchestra.run.vm05.stdout:(147/148): librados2-20.2.0-721.g5bb32787.el9.x 4.3 MB/s | 3.5 MB 00:00
2026-03-31T22:53:37.228 INFO:teuthology.orchestra.run.vm09.stdout:(26/148): librgw2-20.2.0-721.g5bb32787.el9.x86_ 2.4 MB/s | 6.4 MB 00:02
2026-03-31T22:53:37.339 INFO:teuthology.orchestra.run.vm00.stdout: Installing : flexiblas-netlib-3.0.4-9.el9.x86_64 92/150
2026-03-31T22:53:37.342 INFO:teuthology.orchestra.run.vm09.stdout:(27/148): rbd-nbd-20.2.0-721.g5bb32787.el9.x86_ 1.5 MB/s | 179 kB 00:00
2026-03-31T22:53:37.425 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-numpy-1:1.23.5-2.el9.x86_64 93/150
2026-03-31T22:53:37.458 INFO:teuthology.orchestra.run.vm09.stdout:(28/148): ceph-grafana-dashboards-20.2.0-721.g5 374 kB/s | 43 kB 00:00
2026-03-31T22:53:37.571 INFO:teuthology.orchestra.run.vm09.stdout:(29/148): ceph-mgr-cephadm-20.2.0-721.g5bb32787 1.5 MB/s | 173 kB 00:00
2026-03-31T22:53:37.750 INFO:teuthology.orchestra.run.vm05.stdout:(148/148): librbd1-20.2.0-721.g5bb32787.el9.x86 2.2 MB/s | 2.8 MB 00:01
2026-03-31T22:53:37.754 INFO:teuthology.orchestra.run.vm05.stdout:--------------------------------------------------------------------------------
2026-03-31T22:53:37.754 INFO:teuthology.orchestra.run.vm05.stdout:Total 15 MB/s | 269 MB 00:17
2026-03-31T22:53:38.008 INFO:teuthology.orchestra.run.vm09.stdout:(30/148): rbd-mirror-20.2.0-721.g5bb32787.el9.x 2.5 MB/s | 2.9 MB 00:01
2026-03-31T22:53:38.192 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 94/150
2026-03-31T22:53:38.216 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-scipy-1.9.3-2.el9.x86_64 95/150
2026-03-31T22:53:38.224 INFO:teuthology.orchestra.run.vm00.stdout: Installing : boost-program-options-1.75.0-13.el9.x86_64 96/150
2026-03-31T22:53:38.327 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T22:53:38.382 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T22:53:38.382 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T22:53:38.527 INFO:teuthology.orchestra.run.vm00.stdout: Installing : parquet-libs-9.0.0-15.el9.x86_64 97/150
2026-03-31T22:53:38.530 INFO:teuthology.orchestra.run.vm00.stdout: Installing : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 98/150
2026-03-31T22:53:38.551 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 98/150
2026-03-31T22:53:38.554 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 99/150
2026-03-31T22:53:39.382 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T22:53:39.382 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T22:53:39.720 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:39.738 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:39.763 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:39.767 INFO:teuthology.orchestra.run.vm00.stdout: Installing : smartmontools-1:7.2-10.el9.x86_64 101/150
2026-03-31T22:53:39.778 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 101/150
2026-03-31T22:53:39.778 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/smartd.service → /usr/lib/systemd/system/smartd.service.
2026-03-31T22:53:39.778 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:39.801 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-ply-3.11-14.el9.noarch 102/150
2026-03-31T22:53:39.820 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-pycparser-2.20-6.el9.noarch 103/150
2026-03-31T22:53:39.907 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-cffi-1.14.5-5.el9.x86_64 104/150
2026-03-31T22:53:39.921 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-cryptography-36.0.1-5.el9.x86_64 105/150
2026-03-31T22:53:39.949 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-pyOpenSSL-21.0.0-1.el9.noarch 106/150
2026-03-31T22:53:39.990 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-cheroot-10.0.1-5.el9.noarch 107/150
2026-03-31T22:53:40.051 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-cherrypy-18.10.0-5.el9.noarch 108/150
2026-03-31T22:53:40.061 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-asyncssh-2.13.2-5.el9.noarch 109/150
2026-03-31T22:53:40.066 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-bcrypt-3.2.2-1.el9.x86_64 110/150
2026-03-31T22:53:40.073 INFO:teuthology.orchestra.run.vm00.stdout: Installing : pciutils-3.7.0-7.el9.x86_64 111/150
2026-03-31T22:53:40.077 INFO:teuthology.orchestra.run.vm00.stdout: Installing : qatlib-25.08.0-2.el9.x86_64 112/150
2026-03-31T22:53:40.079 INFO:teuthology.orchestra.run.vm00.stdout: Installing : qatlib-service-25.08.0-2.el9.x86_64 113/150
2026-03-31T22:53:40.096 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 113/150
2026-03-31T22:53:40.430 INFO:teuthology.orchestra.run.vm00.stdout: Installing : qatzip-libs-1.3.1-1.el9.x86_64 114/150
2026-03-31T22:53:40.436 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 115/150
2026-03-31T22:53:40.454 INFO:teuthology.orchestra.run.vm09.stdout:(31/148): ceph-mgr-diskprediction-local-20.2.0- 3.0 MB/s | 7.4 MB 00:02
2026-03-31T22:53:40.457 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T22:53:40.464 INFO:teuthology.orchestra.run.vm05.stdout: Installing : thrift-0.15.0-4.el9.x86_64 1/150
2026-03-31T22:53:40.466 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-more-itertools-8.12.0-2.el9.noarch 2/150
2026-03-31T22:53:40.477 INFO:teuthology.orchestra.run.vm05.stdout: Installing : liboath-2.6.12-1.el9.x86_64 3/150
2026-03-31T22:53:40.484 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 115/150
2026-03-31T22:53:40.484 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph.target → /usr/lib/systemd/system/ceph.target.
2026-03-31T22:53:40.484 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-crash.service → /usr/lib/systemd/system/ceph-crash.service.
2026-03-31T22:53:40.484 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:40.488 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 116/150
2026-03-31T22:53:40.572 INFO:teuthology.orchestra.run.vm09.stdout:(32/148): ceph-mgr-k8sevents-20.2.0-721.g5bb327 188 kB/s | 22 kB 00:00
2026-03-31T22:53:40.641 INFO:teuthology.orchestra.run.vm05.stdout: Installing : lttng-ust-2.12.0-6.el9.x86_64 4/150
2026-03-31T22:53:40.642 INFO:teuthology.orchestra.run.vm05.stdout: Upgrading : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 5/150
2026-03-31T22:53:40.692 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librados2-2:20.2.0-721.g5bb32787.el9.x86_64 5/150
2026-03-31T22:53:40.693 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 6/150
2026-03-31T22:53:40.696 INFO:teuthology.orchestra.run.vm09.stdout:(33/148): ceph-mgr-modules-core-20.2.0-721.g5bb 2.3 MB/s | 290 kB 00:00
2026-03-31T22:53:40.706 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 6/150
2026-03-31T22:53:40.708 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_ 7/150
2026-03-31T22:53:40.710 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 8/150
2026-03-31T22:53:40.736 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 8/150
2026-03-31T22:53:40.741 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 9/150
2026-03-31T22:53:40.751 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libxslt-1.1.34-12.el9.x86_64 10/150
2026-03-31T22:53:40.754 INFO:teuthology.orchestra.run.vm05.stdout: Installing : librdkafka-1.6.1-102.el9.x86_64 11/150
2026-03-31T22:53:40.758 INFO:teuthology.orchestra.run.vm05.stdout: Installing : librabbitmq-0.11.0-7.el9.x86_64 12/150
2026-03-31T22:53:40.760 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libpmemobj-1.12.1-1.el9.x86_64 13/150
2026-03-31T22:53:40.765 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-jaraco-8.2.1-3.el9.noarch 14/150
2026-03-31T22:53:40.813 INFO:teuthology.orchestra.run.vm09.stdout:(34/148): ceph-mgr-rook-20.2.0-721.g5bb32787.el 428 kB/s | 50 kB 00:00
2026-03-31T22:53:40.897 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libnbd-1.20.3-4.el9.x86_64 15/150
2026-03-31T22:53:40.899 INFO:teuthology.orchestra.run.vm05.stdout: Upgrading : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 16/150
2026-03-31T22:53:40.932 INFO:teuthology.orchestra.run.vm09.stdout:(35/148): ceph-prometheus-alerts-20.2.0-721.g5b 146 kB/s | 17 kB 00:00
2026-03-31T22:53:40.945 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 16/150
2026-03-31T22:53:40.953 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-lxml-4.6.5-3.el9.x86_64 17/150
2026-03-31T22:53:40.962 INFO:teuthology.orchestra.run.vm05.stdout: Installing : xmlsec1-1.2.29-13.el9.x86_64 18/150
2026-03-31T22:53:40.964 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 19/150
2026-03-31T22:53:40.985 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 19/150
2026-03-31T22:53:40.987 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 20/150
2026-03-31T22:53:40.999 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 20/150
2026-03-31T22:53:41.030 INFO:teuthology.orchestra.run.vm05.stdout: Installing : re2-1:20211101-20.el9.x86_64 21/150
2026-03-31T22:53:41.052 INFO:teuthology.orchestra.run.vm09.stdout:(36/148): ceph-volume-20.2.0-721.g5bb32787.el9. 2.4 MB/s | 298 kB 00:00
2026-03-31T22:53:41.054 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libarrow-9.0.0-15.el9.x86_64 22/150
2026-03-31T22:53:41.067 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-pyasn1-0.4.8-7.el9.noarch 23/150
2026-03-31T22:53:41.077 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-packaging-20.9-5.el9.noarch 24/150
2026-03-31T22:53:41.084 INFO:teuthology.orchestra.run.vm05.stdout: Installing : protobuf-3.14.0-17.el9.x86_64 25/150
2026-03-31T22:53:41.088 INFO:teuthology.orchestra.run.vm05.stdout: Installing : lua-5.4.4-4.el9.x86_64 26/150
2026-03-31T22:53:41.093 INFO:teuthology.orchestra.run.vm05.stdout: Installing : flexiblas-3.0.4-9.el9.x86_64 27/150
2026-03-31T22:53:41.119 INFO:teuthology.orchestra.run.vm05.stdout: Installing : unzip-6.0-59.el9.x86_64 28/150
2026-03-31T22:53:41.137 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-urllib3-1.26.5-7.el9.noarch 29/150
2026-03-31T22:53:41.141 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-requests-2.25.1-10.el9.noarch 30/150
2026-03-31T22:53:41.148 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libquadmath-11.5.0-14.el9.x86_64 31/150
2026-03-31T22:53:41.151 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libgfortran-11.5.0-14.el9.x86_64 32/150
2026-03-31T22:53:41.187 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ledmon-libs-1.1.0-3.el9.x86_64 33/150
2026-03-31T22:53:41.194 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 34/150
2026-03-31T22:53:41.203 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9 35/150
2026-03-31T22:53:41.216 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 36/150
2026-03-31T22:53:41.224 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-requests-oauthlib-1.3.0-12.el9.noarch 37/150
2026-03-31T22:53:41.251 INFO:teuthology.orchestra.run.vm05.stdout: Installing : zip-3.0-35.el9.x86_64 38/150
2026-03-31T22:53:41.255 INFO:teuthology.orchestra.run.vm05.stdout: Installing : luarocks-3.9.2-5.el9.noarch 39/150
2026-03-31T22:53:41.264 INFO:teuthology.orchestra.run.vm05.stdout: Installing : lua-devel-5.4.4-4.el9.x86_64 40/150
2026-03-31T22:53:41.321 INFO:teuthology.orchestra.run.vm05.stdout: Installing : protobuf-compiler-3.14.0-17.el9.x86_64 41/150
2026-03-31T22:53:41.342 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-pyasn1-modules-0.4.8-7.el9.noarch 42/150
2026-03-31T22:53:41.346 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-rsa-4.9-2.el9.noarch 43/150
2026-03-31T22:53:41.352 INFO:teuthology.orchestra.run.vm05.stdout: Installing : xmlsec1-openssl-1.2.29-13.el9.x86_64 44/150
2026-03-31T22:53:41.370 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-xmlsec-1.3.13-1.el9.x86_64 45/150
2026-03-31T22:53:41.376 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 46/150
2026-03-31T22:53:41.381 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-jaraco-classes-3.2.1-5.el9.noarch 47/150
2026-03-31T22:53:41.390 INFO:teuthology.orchestra.run.vm05.stdout: Installing : xmlstarlet-1.6.1-20.el9.x86_64 48/150
2026-03-31T22:53:41.396 INFO:teuthology.orchestra.run.vm05.stdout: Installing : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 49/150
2026-03-31T22:53:41.400 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-zc-lockfile-2.0-10.el9.noarch 50/150
2026-03-31T22:53:41.404 INFO:teuthology.orchestra.run.vm09.stdout:(37/148): cephadm-20.2.0-721.g5bb32787.el9.noar 2.8 MB/s | 1.0 MB 00:00
2026-03-31T22:53:41.416 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-xmltodict-0.12.0-15.el9.noarch 51/150
2026-03-31T22:53:41.422 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-websocket-client-1.2.3-2.el9.noarch 52/150
2026-03-31T22:53:41.429 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-typing-extensions-4.15.0-1.el9.noarch 53/150
2026-03-31T22:53:41.442 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-repoze-lru-0.7-16.el9.noarch 54/150
2026-03-31T22:53:41.453 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-routes-2.5.1-5.el9.noarch 55/150
2026-03-31T22:53:41.461 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-natsort-7.1.1-5.el9.noarch 56/150
2026-03-31T22:53:41.483 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-msgpack-1.0.3-2.el9.x86_64 57/150
2026-03-31T22:53:41.498 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-influxdb-5.3.1-1.el9.noarch 58/150
2026-03-31T22:53:41.515 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-isodate-0.6.1-3.el9.noarch 59/150
2026-03-31T22:53:41.522 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-saml-1.16.0-1.el9.noarch 60/150
2026-03-31T22:53:41.531 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-certifi-2023.05.07-4.el9.noarch 61/150
2026-03-31T22:53:41.576 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-cachetools-4.2.4-1.el9.noarch 62/150
2026-03-31T22:53:41.647 INFO:teuthology.orchestra.run.vm09.stdout:(38/148): bzip2-1.0.8-11.el9.x86_64.rpm 225 kB/s | 55 kB 00:00
2026-03-31T22:53:41.694 INFO:teuthology.orchestra.run.vm09.stdout:(39/148): ceph-mgr-dashboard-20.2.0-721.g5bb327 2.6 MB/s | 11 MB 00:04
2026-03-31T22:53:41.818 INFO:teuthology.orchestra.run.vm09.stdout:(40/148): cryptsetup-2.8.1-3.el9.x86_64.rpm 2.0 MB/s | 351 kB 00:00
2026-03-31T22:53:41.878 INFO:teuthology.orchestra.run.vm09.stdout:(41/148): ledmon-libs-1.1.0-3.el9.x86_64.rpm 680 kB/s | 40 kB 00:00
2026-03-31T22:53:41.905 INFO:teuthology.orchestra.run.vm09.stdout:(42/148): fuse-2.9.9-17.el9.x86_64.rpm 378 kB/s | 80 kB 00:00
2026-03-31T22:53:41.929 INFO:teuthology.orchestra.run.vm09.stdout:(43/148): libconfig-1.7.2-9.el9.x86_64.rpm 1.4 MB/s | 72 kB 00:00
2026-03-31T22:53:41.942 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-google-auth-1:2.45.0-1.el9.noarch 63/150
2026-03-31T22:53:41.958 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-kubernetes-1:26.1.0-3.el9.noarch 64/150
2026-03-31T22:53:41.963 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-backports-tarfile-1.2.0-1.el9.noarch 65/150
2026-03-31T22:53:41.971 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-jaraco-context-6.0.1-3.el9.noarch 66/150
2026-03-31T22:53:41.975 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-autocommand-2.2.2-8.el9.noarch 67/150
2026-03-31T22:53:41.982 INFO:teuthology.orchestra.run.vm09.stdout:(44/148): libquadmath-11.5.0-14.el9.x86_64.rpm 3.4 MB/s | 184 kB 00:00
2026-03-31T22:53:41.983 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libunwind-1.6.2-1.el9.x86_64 68/150
2026-03-31T22:53:41.986 INFO:teuthology.orchestra.run.vm05.stdout: Installing : gperftools-libs-2.9.1-3.el9.x86_64 69/150
2026-03-31T22:53:41.989 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libarrow-doc-9.0.0-15.el9.noarch 70/150
2026-03-31T22:53:42.018 INFO:teuthology.orchestra.run.vm05.stdout: Installing : grpc-data-1.46.7-10.el9.noarch 71/150
2026-03-31T22:53:42.034 INFO:teuthology.orchestra.run.vm09.stdout:(45/148): mailcap-2.1.49-5.el9.noarch.rpm 652 kB/s | 33 kB 00:00
2026-03-31T22:53:42.067 INFO:teuthology.orchestra.run.vm05.stdout: Installing : abseil-cpp-20211102.0-4.el9.x86_64 72/150
2026-03-31T22:53:42.079 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-grpcio-1.46.7-10.el9.x86_64 73/150
2026-03-31T22:53:42.086 INFO:teuthology.orchestra.run.vm09.stdout:(46/148): pciutils-3.7.0-7.el9.x86_64.rpm 1.7 MB/s | 93 kB 00:00
2026-03-31T22:53:42.087 INFO:teuthology.orchestra.run.vm05.stdout: Installing : socat-1.7.4.1-8.el9.x86_64 74/150
2026-03-31T22:53:42.091 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-toml-0.10.2-6.el9.noarch 75/150
2026-03-31T22:53:42.099 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-jaraco-functools-3.5.0-2.el9.noarch 76/150
2026-03-31T22:53:42.104 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-jaraco-text-4.0.0-2.el9.noarch 77/150
2026-03-31T22:53:42.113 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-jaraco-collections-3.0.0-8.el9.noarch 78/150
2026-03-31T22:53:42.118 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-tempora-5.0.0-2.el9.noarch 79/150
2026-03-31T22:53:42.119 INFO:teuthology.orchestra.run.vm09.stdout:(47/148): libgfortran-11.5.0-14.el9.x86_64.rpm 3.6 MB/s | 794 kB 00:00
2026-03-31T22:53:42.140 INFO:teuthology.orchestra.run.vm09.stdout:(48/148): python3-cffi-1.14.5-5.el9.x86_64.rpm 4.7 MB/s | 253 kB 00:00
2026-03-31T22:53:42.150 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-portend-3.1.0-2.el9.noarch 80/150
2026-03-31T22:53:42.162 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-protobuf-3.14.0-17.el9.noarch 81/150
2026-03-31T22:53:42.171 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-grpcio-tools-1.46.7-10.el9.x86_64 82/150
2026-03-31T22:53:42.179 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-markupsafe-1.1.1-12.el9.x86_64 83/150
2026-03-31T22:53:42.191 INFO:teuthology.orchestra.run.vm09.stdout:(49/148): python3-ply-3.11-14.el9.noarch.rpm 2.0 MB/s | 106 kB 00:00
2026-03-31T22:53:42.219 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-jmespath-1.0.1-1.el9.noarch 84/150
2026-03-31T22:53:42.244 INFO:teuthology.orchestra.run.vm09.stdout:(50/148): python3-pycparser-2.20-6.el9.noarch.r 2.5 MB/s | 135 kB 00:00
2026-03-31T22:53:42.283 INFO:teuthology.orchestra.run.vm09.stdout:(51/148): python3-cryptography-36.0.1-5.el9.x86 7.6 MB/s | 1.2 MB 00:00
2026-03-31T22:53:42.296 INFO:teuthology.orchestra.run.vm09.stdout:(52/148): python3-requests-2.25.1-10.el9.noarch 2.4 MB/s | 126 kB 00:00
2026-03-31T22:53:42.339 INFO:teuthology.orchestra.run.vm09.stdout:(53/148): python3-urllib3-1.26.5-7.el9.noarch.r 3.8 MB/s | 218 kB 00:00
2026-03-31T22:53:42.394 INFO:teuthology.orchestra.run.vm09.stdout:(54/148): unzip-6.0-59.el9.x86_64.rpm 3.2 MB/s | 182 kB 00:00
2026-03-31T22:53:42.397 INFO:teuthology.orchestra.run.vm09.stdout:(55/148): smartmontools-7.2-10.el9.x86_64.rpm 5.3 MB/s | 556 kB 00:00
2026-03-31T22:53:42.451 INFO:teuthology.orchestra.run.vm09.stdout:(56/148): zip-3.0-35.el9.x86_64.rpm 4.6 MB/s | 266 kB 00:00
2026-03-31T22:53:42.472 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-devel-3.9.25-3.el9.x86_64 85/150
2026-03-31T22:53:42.502 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-babel-2.9.1-2.el9.noarch 86/150
2026-03-31T22:53:42.507 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-jinja2-2.11.3-8.el9.noarch 87/150
2026-03-31T22:53:42.510 INFO:teuthology.orchestra.run.vm05.stdout: Installing : perl-Benchmark-1.23-483.el9.noarch 88/150
2026-03-31T22:53:42.573 INFO:teuthology.orchestra.run.vm05.stdout: Installing : openblas-0.3.29-1.el9.x86_64 89/150
2026-03-31T22:53:42.577 INFO:teuthology.orchestra.run.vm05.stdout: Installing : openblas-openmp-0.3.29-1.el9.x86_64 90/150
2026-03-31T22:53:42.600 INFO:teuthology.orchestra.run.vm05.stdout: Installing : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 91/150
2026-03-31T22:53:42.623 INFO:teuthology.orchestra.run.vm09.stdout:(57/148): flexiblas-3.0.4-9.el9.x86_64.rpm 172 kB/s | 30 kB 00:00
2026-03-31T22:53:42.632 INFO:teuthology.orchestra.run.vm09.stdout:(58/148): boost-program-options-1.75.0-13.el9.x 443 kB/s | 104 kB 00:00
2026-03-31T22:53:42.682 INFO:teuthology.orchestra.run.vm09.stdout:(59/148): flexiblas-openblas-openmp-3.0.4-9.el9 304 kB/s | 15 kB 00:00
2026-03-31T22:53:42.778 INFO:teuthology.orchestra.run.vm09.stdout:(60/148): libnbd-1.20.3-4.el9.x86_64.rpm 1.7 MB/s | 164 kB 00:00
2026-03-31T22:53:42.831 INFO:teuthology.orchestra.run.vm09.stdout:(61/148): libpmemobj-1.12.1-1.el9.x86_64.rpm 3.0 MB/s | 160 kB 00:00
2026-03-31T22:53:42.880 INFO:teuthology.orchestra.run.vm09.stdout:(62/148): librabbitmq-0.11.0-7.el9.x86_64.rpm 920 kB/s | 45 kB 00:00
2026-03-31T22:53:42.963 INFO:teuthology.orchestra.run.vm09.stdout:(63/148): flexiblas-netlib-3.0.4-9.el9.x86_64.r 8.8 MB/s | 3.0 MB 00:00
2026-03-31T22:53:42.979 INFO:teuthology.orchestra.run.vm09.stdout:(64/148): librdkafka-1.6.1-102.el9.x86_64.rpm 6.5 MB/s | 662 kB 00:00
2026-03-31T22:53:42.985 INFO:teuthology.orchestra.run.vm05.stdout: Installing : flexiblas-netlib-3.0.4-9.el9.x86_64 92/150
2026-03-31T22:53:43.015 INFO:teuthology.orchestra.run.vm09.stdout:(65/148): libstoragemgmt-1.10.1-1.el9.x86_64.rp 4.7 MB/s | 246 kB 00:00
2026-03-31T22:53:43.031 INFO:teuthology.orchestra.run.vm09.stdout:(66/148): libxslt-1.1.34-12.el9.x86_64.rpm 4.4 MB/s | 233 kB 00:00
2026-03-31T22:53:43.068 INFO:teuthology.orchestra.run.vm09.stdout:(67/148): lttng-ust-2.12.0-6.el9.x86_64.rpm 5.4 MB/s | 292 kB 00:00
2026-03-31T22:53:43.070 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-numpy-1:1.23.5-2.el9.x86_64 93/150
2026-03-31T22:53:43.083 INFO:teuthology.orchestra.run.vm09.stdout:(68/148): lua-5.4.4-4.el9.x86_64.rpm 3.6 MB/s | 188 kB 00:00
2026-03-31T22:53:43.118 INFO:teuthology.orchestra.run.vm09.stdout:(69/148): openblas-0.3.29-1.el9.x86_64.rpm 860 kB/s | 42 kB 00:00
2026-03-31T22:53:43.167 INFO:teuthology.orchestra.run.vm09.stdout:(70/148): perl-Benchmark-1.23-483.el9.noarch.rp 536 kB/s | 26 kB 00:00
2026-03-31T22:53:43.223 INFO:teuthology.orchestra.run.vm09.stdout:(71/148): perl-Test-Harness-3.42-461.el9.noarch 5.2 MB/s | 295 kB 00:00
2026-03-31T22:53:43.284 INFO:teuthology.orchestra.run.vm09.stdout:(72/148): protobuf-3.14.0-17.el9.x86_64.rpm 16 MB/s | 1.0 MB 00:00
2026-03-31T22:53:43.338 INFO:teuthology.orchestra.run.vm09.stdout:(73/148): openblas-openmp-0.3.29-1.el9.x86_64.r 21 MB/s | 5.3 MB 00:00
2026-03-31T22:53:43.404 INFO:teuthology.orchestra.run.vm09.stdout:(74/148): python3-devel-3.9.25-3.el9.x86_64.rpm 3.6 MB/s | 244 kB 00:00
2026-03-31T22:53:43.456 INFO:teuthology.orchestra.run.vm09.stdout:(75/148): python3-jinja2-2.11.3-8.el9.noarch.rp 4.7 MB/s | 249 kB 00:00
2026-03-31T22:53:43.506 INFO:teuthology.orchestra.run.vm09.stdout:(76/148): python3-jmespath-1.0.1-1.el9.noarch.r 964 kB/s | 48 kB 00:00
2026-03-31T22:53:43.557 INFO:teuthology.orchestra.run.vm09.stdout:(77/148): python3-libstoragemgmt-1.10.1-1.el9.x 3.4 MB/s | 177 kB 00:00
2026-03-31T22:53:43.831 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 94/150
2026-03-31T22:53:43.835 INFO:teuthology.orchestra.run.vm09.stdout:(78/148): python3-babel-2.9.1-2.el9.noarch.rpm 11 MB/s | 6.0 MB 00:00
2026-03-31T22:53:43.854 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-scipy-1.9.3-2.el9.x86_64 95/150
2026-03-31T22:53:43.861 INFO:teuthology.orchestra.run.vm05.stdout: Installing : boost-program-options-1.75.0-13.el9.x86_64 96/150
2026-03-31T22:53:43.884 INFO:teuthology.orchestra.run.vm09.stdout:(79/148): python3-markupsafe-1.1.1-12.el9.x86_6 702 kB/s | 35 kB 00:00
2026-03-31T22:53:44.009 INFO:teuthology.orchestra.run.vm09.stdout:(80/148): python3-lxml-4.6.5-3.el9.x86_64.rpm 2.7 MB/s | 1.2 MB 00:00
2026-03-31T22:53:44.063 INFO:teuthology.orchestra.run.vm09.stdout:(81/148): python3-numpy-f2py-1.23.5-2.el9.x86_6 8.1 MB/s | 442 kB 00:00
2026-03-31T22:53:44.113 INFO:teuthology.orchestra.run.vm09.stdout:(82/148): python3-packaging-20.9-5.el9.noarch.r 1.5 MB/s | 77 kB 00:00
2026-03-31T22:53:44.145 INFO:teuthology.orchestra.run.vm09.stdout:(83/148): python3-numpy-1.23.5-2.el9.x86_64.rpm 24 MB/s | 6.1 MB 00:00
2026-03-31T22:53:44.164 INFO:teuthology.orchestra.run.vm05.stdout: Installing : parquet-libs-9.0.0-15.el9.x86_64 97/150
2026-03-31T22:53:44.165 INFO:teuthology.orchestra.run.vm09.stdout:(84/148): python3-protobuf-3.14.0-17.el9.noarch 5.1 MB/s | 267 kB 00:00
2026-03-31T22:53:44.166 INFO:teuthology.orchestra.run.vm05.stdout: Installing : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 98/150
2026-03-31T22:53:44.189 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 98/150
2026-03-31T22:53:44.191 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 99/150
2026-03-31T22:53:44.196 INFO:teuthology.orchestra.run.vm09.stdout:(85/148): python3-pyasn1-0.4.8-7.el9.noarch.rpm 3.0 MB/s | 157 kB 00:00
2026-03-31T22:53:44.218 INFO:teuthology.orchestra.run.vm09.stdout:(86/148): python3-pyasn1-modules-0.4.8-7.el9.no 5.2 MB/s | 277 kB 00:00
2026-03-31T22:53:44.248 INFO:teuthology.orchestra.run.vm09.stdout:(87/148): python3-requests-oauthlib-1.3.0-12.el 1.0 MB/s | 54 kB 00:00
2026-03-31T22:53:44.298 INFO:teuthology.orchestra.run.vm09.stdout:(88/148): python3-toml-0.10.2-6.el9.noarch.rpm 852 kB/s | 42 kB 00:00
2026-03-31T22:53:44.350 INFO:teuthology.orchestra.run.vm09.stdout:(89/148): qatlib-25.08.0-2.el9.x86_64.rpm 4.5 MB/s | 240 kB 00:00
2026-03-31T22:53:44.399 INFO:teuthology.orchestra.run.vm09.stdout:(90/148): qatlib-service-25.08.0-2.el9.x86_64.r 758 kB/s | 37 kB 00:00
2026-03-31T22:53:44.449 INFO:teuthology.orchestra.run.vm09.stdout:(91/148): qatzip-libs-1.3.1-1.el9.x86_64.rpm 1.3 MB/s | 66 kB 00:00
2026-03-31T22:53:44.502 INFO:teuthology.orchestra.run.vm09.stdout:(92/148): socat-1.7.4.1-8.el9.x86_64.rpm 5.6 MB/s | 303 kB 00:00
2026-03-31T22:53:44.553 INFO:teuthology.orchestra.run.vm09.stdout:(93/148): xmlsec1-1.2.29-13.el9.x86_64.rpm 3.6 MB/s | 189 kB 00:00
2026-03-31T22:53:44.606 INFO:teuthology.orchestra.run.vm09.stdout:(94/148): xmlsec1-openssl-1.2.29-13.el9.x86_64. 1.7 MB/s | 90 kB 00:00
2026-03-31T22:53:44.656 INFO:teuthology.orchestra.run.vm09.stdout:(95/148): xmlstarlet-1.6.1-20.el9.x86_64.rpm 1.3 MB/s | 64 kB 00:00
2026-03-31T22:53:44.803 INFO:teuthology.orchestra.run.vm09.stdout:(96/148): lua-devel-5.4.4-4.el9.x86_64.rpm 152 kB/s | 22 kB 00:00
2026-03-31T22:53:45.050 INFO:teuthology.orchestra.run.vm09.stdout:(97/148): protobuf-compiler-3.14.0-17.el9.x86_6 3.4 MB/s | 862 kB 00:00
2026-03-31T22:53:45.064 INFO:teuthology.orchestra.run.vm09.stdout:(98/148): abseil-cpp-20211102.0-4.el9.x86_64.rp 39 MB/s | 551 kB 00:00
2026-03-31T22:53:45.072 INFO:teuthology.orchestra.run.vm09.stdout:(99/148): gperftools-libs-2.9.1-3.el9.x86_64.rp 41 MB/s | 308 kB 00:00
2026-03-31T22:53:45.074 INFO:teuthology.orchestra.run.vm09.stdout:(100/148): grpc-data-1.46.7-10.el9.noarch.rpm 9.4 MB/s | 19 kB 00:00
2026-03-31T22:53:45.139 INFO:teuthology.orchestra.run.vm09.stdout:(101/148): libarrow-9.0.0-15.el9.x86_64.rpm 68 MB/s | 4.4 MB 00:00
2026-03-31T22:53:45.142 INFO:teuthology.orchestra.run.vm09.stdout:(102/148): libarrow-doc-9.0.0-15.el9.noarch.rpm 8.4 MB/s | 25 kB 00:00
2026-03-31T22:53:45.174 INFO:teuthology.orchestra.run.vm09.stdout:(103/148): liboath-2.6.12-1.el9.x86_64.rpm 1.5 MB/s | 49 kB 00:00
2026-03-31T22:53:45.178 INFO:teuthology.orchestra.run.vm09.stdout:(104/148): libunwind-1.6.2-1.el9.x86_64.rpm 18 MB/s | 67 kB 00:00
2026-03-31T22:53:45.182 INFO:teuthology.orchestra.run.vm09.stdout:(105/148): luarocks-3.9.2-5.el9.noarch.rpm 34 MB/s | 151 kB 00:00
2026-03-31T22:53:45.195 INFO:teuthology.orchestra.run.vm09.stdout:(106/148): parquet-libs-9.0.0-15.el9.x86_64.rpm 67 MB/s | 838 kB 00:00
2026-03-31T22:53:45.205 INFO:teuthology.orchestra.run.vm09.stdout:(107/148): python3-asyncssh-2.13.2-5.el9.noarch 54 MB/s | 548 kB 00:00
2026-03-31T22:53:45.207 INFO:teuthology.orchestra.run.vm09.stdout:(108/148): python3-autocommand-2.2.2-8.el9.noar 12 MB/s | 29 kB 00:00
2026-03-31T22:53:45.211 INFO:teuthology.orchestra.run.vm09.stdout:(109/148): python3-backports-tarfile-1.2.0-1.el 18 MB/s | 60 kB 00:00
2026-03-31T22:53:45.214 INFO:teuthology.orchestra.run.vm09.stdout:(110/148): python3-bcrypt-3.2.2-1.el9.x86_64.rp 15 MB/s | 43 kB 00:00
2026-03-31T22:53:45.216 INFO:teuthology.orchestra.run.vm09.stdout:(111/148): python3-cachetools-4.2.4-1.el9.noarc 15 MB/s | 32 kB 00:00
2026-03-31T22:53:45.218 INFO:teuthology.orchestra.run.vm09.stdout:(112/148): python3-certifi-2023.05.07-4.el9.noa 7.1 MB/s | 14 kB 00:00
2026-03-31T22:53:45.223 INFO:teuthology.orchestra.run.vm09.stdout:(113/148): python3-cheroot-10.0.1-5.el9.noarch. 39 MB/s | 173 kB 00:00
2026-03-31T22:53:45.229 INFO:teuthology.orchestra.run.vm09.stdout:(114/148): python3-cherrypy-18.10.0-5.el9.noarc 48 MB/s | 290 kB 00:00
2026-03-31T22:53:45.235 INFO:teuthology.orchestra.run.vm09.stdout:(115/148): python3-google-auth-2.45.0-1.el9.noa 45 MB/s | 254 kB 00:00
2026-03-31T22:53:45.301 INFO:teuthology.orchestra.run.vm09.stdout:(116/148): python3-scipy-1.9.3-2.el9.x86_64.rpm 18 MB/s | 19 MB 00:01
2026-03-31T22:53:45.308 INFO:teuthology.orchestra.run.vm09.stdout:(117/148): python3-grpcio-1.46.7-10.el9.x86_64. 28 MB/s | 2.0 MB 00:00
2026-03-31T22:53:45.312 INFO:teuthology.orchestra.run.vm09.stdout:(118/148): python3-influxdb-5.3.1-1.el9.noarch. 32 MB/s | 139 kB 00:00
2026-03-31T22:53:45.315 INFO:teuthology.orchestra.run.vm09.stdout:(119/148): python3-isodate-0.6.1-3.el9.noarch.r 18 MB/s | 56 kB 00:00
2026-03-31T22:53:45.317 INFO:teuthology.orchestra.run.vm09.stdout:(120/148): python3-grpcio-tools-1.46.7-10.el9.x 9.1 MB/s | 144 kB 00:00
2026-03-31T22:53:45.318 INFO:teuthology.orchestra.run.vm09.stdout:(121/148): python3-jaraco-8.2.1-3.el9.noarch.rp 4.8 MB/s | 11 kB 00:00
2026-03-31T22:53:45.319 INFO:teuthology.orchestra.run.vm09.stdout:(122/148): python3-jaraco-classes-3.2.1-5.el9.n 7.7 MB/s | 18 kB 00:00
2026-03-31T22:53:45.320 INFO:teuthology.orchestra.run.vm09.stdout:(123/148): python3-jaraco-collections-3.0.0-8.e 11 MB/s | 23 kB 00:00
2026-03-31T22:53:45.321 INFO:teuthology.orchestra.run.vm09.stdout:(124/148): python3-jaraco-context-6.0.1-3.el9.n 9.6 MB/s | 20 kB 00:00
2026-03-31T22:53:45.322 INFO:teuthology.orchestra.run.vm09.stdout:(125/148): python3-jaraco-functools-3.5.0-2.el9 10 MB/s | 19 kB 00:00
2026-03-31T22:53:45.323 INFO:teuthology.orchestra.run.vm09.stdout:(126/148): python3-jaraco-text-4.0.0-2.el9.noar 13 MB/s | 26 kB 00:00
2026-03-31T22:53:45.331 INFO:teuthology.orchestra.run.vm09.stdout:(127/148): python3-more-itertools-8.12.0-2.el9. 9.8 MB/s | 79 kB 00:00
2026-03-31T22:53:45.337 INFO:teuthology.orchestra.run.vm09.stdout:(128/148): python3-kubernetes-26.1.0-3.el9.noar 66 MB/s | 1.0 MB 00:00
2026-03-31T22:53:45.340 INFO:teuthology.orchestra.run.vm09.stdout:(129/148): python3-msgpack-1.0.3-2.el9.x86_64.r 10 MB/s | 86 kB 00:00
2026-03-31T22:53:45.341 INFO:teuthology.orchestra.run.vm09.stdout:(130/148): python3-natsort-7.1.1-5.el9.noarch.r 16 MB/s | 58 kB 00:00
2026-03-31T22:53:45.342 INFO:teuthology.orchestra.run.vm09.stdout:(131/148): python3-portend-3.1.0-2.el9.noarch.r 7.0 MB/s | 16 kB 00:00
2026-03-31T22:53:45.345 INFO:teuthology.orchestra.run.vm09.stdout:(132/148): python3-pyOpenSSL-21.0.0-1.el9.noarc 27 MB/s | 90 kB 00:00
2026-03-31T22:53:45.345 INFO:teuthology.orchestra.run.vm09.stdout:(133/148): python3-repoze-lru-0.7-16.el9.noarch 10 MB/s | 31 kB 00:00
2026-03-31T22:53:45.349 INFO:teuthology.orchestra.run.vm09.stdout:(134/148): python3-routes-2.5.1-5.el9.noarch.rp 44 MB/s | 188 kB 00:00
2026-03-31T22:53:45.351 INFO:teuthology.orchestra.run.vm09.stdout:(135/148): python3-rsa-4.9-2.el9.noarch.rpm 10 MB/s | 59 kB 00:00
2026-03-31T22:53:45.354 INFO:teuthology.orchestra.run.vm09.stdout:(136/148): python3-saml-1.16.0-1.el9.noarch.rpm 28 MB/s | 125 kB 00:00
2026-03-31T22:53:45.354 INFO:teuthology.orchestra.run.vm09.stdout:(137/148): python3-tempora-5.0.0-2.el9.noarch.r 12 MB/s | 36 kB 00:00
2026-03-31T22:53:45.357 INFO:teuthology.orchestra.run.vm09.stdout:(138/148): python3-typing-extensions-4.15.0-1.e 25 MB/s | 86 kB 00:00
2026-03-31T22:53:45.360 INFO:teuthology.orchestra.run.vm09.stdout:(139/148): python3-websocket-client-1.2.3-2.el9 16 MB/s | 90 kB 00:00
2026-03-31T22:53:45.361 INFO:teuthology.orchestra.run.vm09.stdout:(140/148): python3-xmlsec-1.3.13-1.el9.x86_64.r 15 MB/s | 48 kB 00:00
2026-03-31T22:53:45.362 INFO:teuthology.orchestra.run.vm09.stdout:(141/148): python3-xmltodict-0.12.0-15.el9.noar 11 MB/s | 22 kB 00:00
2026-03-31T22:53:45.366 INFO:teuthology.orchestra.run.vm09.stdout:(142/148): python3-zc-lockfile-2.0-10.el9.noarc 3.6 MB/s | 20 kB 00:00
2026-03-31T22:53:45.369 INFO:teuthology.orchestra.run.vm09.stdout:(143/148): re2-20211101-20.el9.x86_64.rpm 29 MB/s | 191 kB 00:00
2026-03-31T22:53:45.379 INFO:teuthology.orchestra.run.vm09.stdout:(144/148): s3cmd-2.4.0-1.el9.noarch.rpm 16 MB/s | 206 kB 00:00
2026-03-31T22:53:45.383 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:45.388 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:45.401 INFO:teuthology.orchestra.run.vm09.stdout:(145/148): thrift-0.15.0-4.el9.x86_64.rpm 50 MB/s | 1.6 MB 00:00
2026-03-31T22:53:45.415 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:45.478 INFO:teuthology.orchestra.run.vm05.stdout: Installing : smartmontools-1:7.2-10.el9.x86_64 101/150
2026-03-31T22:53:45.491 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 101/150
2026-03-31T22:53:45.492 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/smartd.service → /usr/lib/systemd/system/smartd.service.
2026-03-31T22:53:45.492 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:45.631 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-ply-3.11-14.el9.noarch 102/150
2026-03-31T22:53:45.655 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-pycparser-2.20-6.el9.noarch 103/150
2026-03-31T22:53:45.765 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-cffi-1.14.5-5.el9.x86_64 104/150
2026-03-31T22:53:45.937 INFO:teuthology.orchestra.run.vm09.stdout:(146/148): ceph-test-20.2.0-721.g5bb32787.el9.x 5.4 MB/s | 84 MB 00:15
2026-03-31T22:53:45.944 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-cryptography-36.0.1-5.el9.x86_64 105/150
2026-03-31T22:53:46.027 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-pyOpenSSL-21.0.0-1.el9.noarch 106/150
2026-03-31T22:53:46.066 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-cheroot-10.0.1-5.el9.noarch 107/150
2026-03-31T22:53:46.123 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-cherrypy-18.10.0-5.el9.noarch 108/150
2026-03-31T22:53:46.157 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-asyncssh-2.13.2-5.el9.noarch 109/150
2026-03-31T22:53:46.164 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-bcrypt-3.2.2-1.el9.x86_64 110/150
2026-03-31T22:53:46.169 INFO:teuthology.orchestra.run.vm05.stdout: Installing : pciutils-3.7.0-7.el9.x86_64 111/150
2026-03-31T22:53:46.173 INFO:teuthology.orchestra.run.vm05.stdout: Installing : qatlib-25.08.0-2.el9.x86_64 112/150
2026-03-31T22:53:46.174 INFO:teuthology.orchestra.run.vm05.stdout: Installing : qatlib-service-25.08.0-2.el9.x86_64 113/150
2026-03-31T22:53:46.192 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 113/150
2026-03-31T22:53:46.508 INFO:teuthology.orchestra.run.vm05.stdout: Installing : qatzip-libs-1.3.1-1.el9.x86_64 114/150
2026-03-31T22:53:46.513 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 115/150
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 116/150
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /sys
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /proc
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /mnt
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /var/tmp
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /home
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /root
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /tmp
2026-03-31T22:53:46.545 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:46.558 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 115/150
2026-03-31T22:53:46.558 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph.target → /usr/lib/systemd/system/ceph.target.
2026-03-31T22:53:46.558 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-crash.service → /usr/lib/systemd/system/ceph-crash.service.
2026-03-31T22:53:46.558 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:46.558 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 116/150
2026-03-31T22:53:46.659 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 117/150
2026-03-31T22:53:46.680 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 117/150
2026-03-31T22:53:46.680 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:46.680 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service".
2026-03-31T22:53:46.680 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target.
2026-03-31T22:53:46.680 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target.
2026-03-31T22:53:46.680 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:46.815 INFO:teuthology.orchestra.run.vm09.stdout:(147/148): librbd1-20.2.0-721.g5bb32787.el9.x86 2.0 MB/s | 2.8 MB 00:01
2026-03-31T22:53:46.977 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 118/150
2026-03-31T22:53:46.997 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 118/150
2026-03-31T22:53:46.997 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:46.997 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service".
2026-03-31T22:53:46.997 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target.
2026-03-31T22:53:46.997 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target.
2026-03-31T22:53:46.997 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:47.042 INFO:teuthology.orchestra.run.vm00.stdout: Installing : mailcap-2.1.49-5.el9.noarch 119/150
2026-03-31T22:53:47.046 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libconfig-1.7.2-9.el9.x86_64 120/150
2026-03-31T22:53:47.055 INFO:teuthology.orchestra.run.vm09.stdout:(148/148): librados2-20.2.0-721.g5bb32787.el9.x 2.1 MB/s | 3.5 MB 00:01
2026-03-31T22:53:47.057 INFO:teuthology.orchestra.run.vm09.stdout:--------------------------------------------------------------------------------
2026-03-31T22:53:47.057 INFO:teuthology.orchestra.run.vm09.stdout:Total 10 MB/s | 269 MB 00:25
2026-03-31T22:53:47.063 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:53:47.063 INFO:teuthology.orchestra.run.vm00.stdout:Creating group 'qat' with GID 994.
2026-03-31T22:53:47.063 INFO:teuthology.orchestra.run.vm00.stdout:Creating group 'libstoragemgmt' with GID 993.
2026-03-31T22:53:47.063 INFO:teuthology.orchestra.run.vm00.stdout:Creating user 'libstoragemgmt' (daemon account for libstoragemgmt) with UID 993 and GID 993.
2026-03-31T22:53:47.063 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:47.073 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:53:47.098 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:53:47.098 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/libstoragemgmt.service → /usr/lib/systemd/system/libstoragemgmt.service.
2026-03-31T22:53:47.098 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:47.118 INFO:teuthology.orchestra.run.vm00.stdout: Installing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 122/150
2026-03-31T22:53:47.147 INFO:teuthology.orchestra.run.vm00.stdout: Installing : fuse-2.9.9-17.el9.x86_64 123/150
2026-03-31T22:53:47.224 INFO:teuthology.orchestra.run.vm00.stdout: Installing : cryptsetup-2.8.1-3.el9.x86_64 124/150
2026-03-31T22:53:47.228 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 125/150
2026-03-31T22:53:47.242 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 125/150
2026-03-31T22:53:47.242 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:47.242 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service".
2026-03-31T22:53:47.242 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:47.551 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T22:53:47.606 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T22:53:47.606 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T22:53:47.993 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 126/150
2026-03-31T22:53:48.016 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 126/150
2026-03-31T22:53:48.016 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:48.016 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service".
2026-03-31T22:53:48.016 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target.
2026-03-31T22:53:48.016 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target.
2026-03-31T22:53:48.016 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:48.084 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: cephadm-2:20.2.0-721.g5bb32787.el9.noarch 127/150
2026-03-31T22:53:48.087 INFO:teuthology.orchestra.run.vm00.stdout: Installing : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 127/150
2026-03-31T22:53:48.096 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 128/150
2026-03-31T22:53:48.123 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 129/150
2026-03-31T22:53:48.125 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 130/150
2026-03-31T22:53:48.600 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T22:53:48.600 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T22:53:49.410 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 130/150
2026-03-31T22:53:49.423 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 131/150
2026-03-31T22:53:49.710 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T22:53:49.718 INFO:teuthology.orchestra.run.vm09.stdout: Installing : thrift-0.15.0-4.el9.x86_64 1/150
2026-03-31T22:53:49.720 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-more-itertools-8.12.0-2.el9.noarch 2/150
2026-03-31T22:53:49.731 INFO:teuthology.orchestra.run.vm09.stdout: Installing : liboath-2.6.12-1.el9.x86_64 3/150
2026-03-31T22:53:49.896 INFO:teuthology.orchestra.run.vm09.stdout: Installing : lttng-ust-2.12.0-6.el9.x86_64 4/150
2026-03-31T22:53:49.897 INFO:teuthology.orchestra.run.vm09.stdout: Upgrading : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 5/150
2026-03-31T22:53:49.948 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librados2-2:20.2.0-721.g5bb32787.el9.x86_64 5/150
2026-03-31T22:53:49.949 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 6/150
2026-03-31T22:53:49.962 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 6/150
2026-03-31T22:53:49.965 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_ 7/150
2026-03-31T22:53:49.965 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 8/150
2026-03-31T22:53:49.992 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 8/150
2026-03-31T22:53:49.997 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 9/150
2026-03-31T22:53:50.007 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libxslt-1.1.34-12.el9.x86_64 10/150
2026-03-31T22:53:50.011 INFO:teuthology.orchestra.run.vm09.stdout: Installing : librdkafka-1.6.1-102.el9.x86_64 11/150
2026-03-31T22:53:50.015 INFO:teuthology.orchestra.run.vm09.stdout: Installing : librabbitmq-0.11.0-7.el9.x86_64 12/150
2026-03-31T22:53:50.017 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libpmemobj-1.12.1-1.el9.x86_64 13/150
2026-03-31T22:53:50.022 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-jaraco-8.2.1-3.el9.noarch 14/150
2026-03-31T22:53:50.024 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 131/150
2026-03-31T22:53:50.026 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 132/150
2026-03-31T22:53:50.039 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 132/150
2026-03-31T22:53:50.040 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 133/150
2026-03-31T22:53:50.102 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 133/150
2026-03-31T22:53:50.150 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 134/150
2026-03-31T22:53:50.152 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 135/150
2026-03-31T22:53:50.159 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libnbd-1.20.3-4.el9.x86_64 15/150
2026-03-31T22:53:50.162 INFO:teuthology.orchestra.run.vm09.stdout: Upgrading : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 16/150
2026-03-31T22:53:50.180 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 135/150
2026-03-31T22:53:50.180 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:50.180 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service".
2026-03-31T22:53:50.180 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target.
2026-03-31T22:53:50.180 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target.
2026-03-31T22:53:50.180 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:50.194 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 136/150
2026-03-31T22:53:50.205 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 136/150
2026-03-31T22:53:50.211 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 16/150
2026-03-31T22:53:50.219 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-lxml-4.6.5-3.el9.x86_64 17/150
2026-03-31T22:53:50.228 INFO:teuthology.orchestra.run.vm09.stdout: Installing : xmlsec1-1.2.29-13.el9.x86_64 18/150
2026-03-31T22:53:50.230 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 19/150
2026-03-31T22:53:50.249 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 137/150
2026-03-31T22:53:50.252 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 19/150
2026-03-31T22:53:50.253 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 20/150
2026-03-31T22:53:50.268 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 20/150
2026-03-31T22:53:50.299 INFO:teuthology.orchestra.run.vm09.stdout: Installing : re2-1:20211101-20.el9.x86_64 21/150
2026-03-31T22:53:50.324 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libarrow-9.0.0-15.el9.x86_64 22/150
2026-03-31T22:53:50.338 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-pyasn1-0.4.8-7.el9.noarch 23/150
2026-03-31T22:53:50.347 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-packaging-20.9-5.el9.noarch 24/150
2026-03-31T22:53:50.354 INFO:teuthology.orchestra.run.vm09.stdout: Installing : protobuf-3.14.0-17.el9.x86_64 25/150
2026-03-31T22:53:50.358 INFO:teuthology.orchestra.run.vm09.stdout: Installing : lua-5.4.4-4.el9.x86_64 26/150
2026-03-31T22:53:50.363 INFO:teuthology.orchestra.run.vm09.stdout: Installing : flexiblas-3.0.4-9.el9.x86_64 27/150
2026-03-31T22:53:50.390 INFO:teuthology.orchestra.run.vm09.stdout: Installing : unzip-6.0-59.el9.x86_64 28/150
2026-03-31T22:53:50.405 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-urllib3-1.26.5-7.el9.noarch 29/150
2026-03-31T22:53:50.410 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-requests-2.25.1-10.el9.noarch 30/150
2026-03-31T22:53:50.417 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libquadmath-11.5.0-14.el9.x86_64 31/150
2026-03-31T22:53:50.419 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libgfortran-11.5.0-14.el9.x86_64 32/150
2026-03-31T22:53:50.456 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ledmon-libs-1.1.0-3.el9.x86_64 33/150
2026-03-31T22:53:50.463 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 34/150
2026-03-31T22:53:50.473 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9 35/150
2026-03-31T22:53:50.486 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 36/150
2026-03-31T22:53:50.494 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-requests-oauthlib-1.3.0-12.el9.noarch 37/150
2026-03-31T22:53:50.522 INFO:teuthology.orchestra.run.vm09.stdout: Installing : zip-3.0-35.el9.x86_64 38/150
2026-03-31T22:53:50.527 INFO:teuthology.orchestra.run.vm09.stdout: Installing : luarocks-3.9.2-5.el9.noarch 39/150
2026-03-31T22:53:50.536 INFO:teuthology.orchestra.run.vm09.stdout: Installing : lua-devel-5.4.4-4.el9.x86_64 40/150
2026-03-31T22:53:50.592 INFO:teuthology.orchestra.run.vm09.stdout: Installing : protobuf-compiler-3.14.0-17.el9.x86_64 41/150
2026-03-31T22:53:50.609 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-pyasn1-modules-0.4.8-7.el9.noarch 42/150
2026-03-31T22:53:50.614 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-rsa-4.9-2.el9.noarch 43/150
2026-03-31T22:53:50.619 INFO:teuthology.orchestra.run.vm09.stdout: Installing : xmlsec1-openssl-1.2.29-13.el9.x86_64 44/150
2026-03-31T22:53:50.636 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-xmlsec-1.3.13-1.el9.x86_64 45/150
2026-03-31T22:53:50.644 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 46/150
2026-03-31T22:53:50.649 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-jaraco-classes-3.2.1-5.el9.noarch 47/150
2026-03-31T22:53:50.659 INFO:teuthology.orchestra.run.vm09.stdout: Installing : xmlstarlet-1.6.1-20.el9.x86_64 48/150
2026-03-31T22:53:50.665 INFO:teuthology.orchestra.run.vm09.stdout: Installing : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 49/150
2026-03-31T22:53:50.670 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-zc-lockfile-2.0-10.el9.noarch 50/150
2026-03-31T22:53:50.688 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-xmltodict-0.12.0-15.el9.noarch 51/150
2026-03-31T22:53:50.694 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-websocket-client-1.2.3-2.el9.noarch 52/150
2026-03-31T22:53:50.701 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-typing-extensions-4.15.0-1.el9.noarch 53/150
2026-03-31T22:53:50.715 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-repoze-lru-0.7-16.el9.noarch 54/150
2026-03-31T22:53:50.727 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-routes-2.5.1-5.el9.noarch 55/150
2026-03-31T22:53:50.736 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-natsort-7.1.1-5.el9.noarch 56/150
2026-03-31T22:53:50.760 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-msgpack-1.0.3-2.el9.x86_64 57/150
2026-03-31T22:53:50.775 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-influxdb-5.3.1-1.el9.noarch 58/150
2026-03-31T22:53:50.794 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-isodate-0.6.1-3.el9.noarch 59/150
2026-03-31T22:53:50.802 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-saml-1.16.0-1.el9.noarch 60/150
2026-03-31T22:53:50.811 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-certifi-2023.05.07-4.el9.noarch 61/150
2026-03-31T22:53:50.858 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-cachetools-4.2.4-1.el9.noarch 62/150
2026-03-31T22:53:51.220 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-google-auth-1:2.45.0-1.el9.noarch 63/150
2026-03-31T22:53:51.235 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-kubernetes-1:26.1.0-3.el9.noarch 64/150
2026-03-31T22:53:51.241 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-backports-tarfile-1.2.0-1.el9.noarch 65/150
2026-03-31T22:53:51.247 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-jaraco-context-6.0.1-3.el9.noarch 66/150
2026-03-31T22:53:51.252 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-autocommand-2.2.2-8.el9.noarch 67/150
2026-03-31T22:53:51.258 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libunwind-1.6.2-1.el9.x86_64 68/150
2026-03-31T22:53:51.262 INFO:teuthology.orchestra.run.vm09.stdout: Installing : gperftools-libs-2.9.1-3.el9.x86_64 69/150
2026-03-31T22:53:51.264 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libarrow-doc-9.0.0-15.el9.noarch 70/150
2026-03-31T22:53:51.296 INFO:teuthology.orchestra.run.vm09.stdout: Installing : grpc-data-1.46.7-10.el9.noarch 71/150
2026-03-31T22:53:51.345 INFO:teuthology.orchestra.run.vm09.stdout: Installing : abseil-cpp-20211102.0-4.el9.x86_64 72/150
2026-03-31T22:53:51.358 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-grpcio-1.46.7-10.el9.x86_64 73/150
2026-03-31T22:53:51.365 INFO:teuthology.orchestra.run.vm09.stdout: Installing : socat-1.7.4.1-8.el9.x86_64 74/150
2026-03-31T22:53:51.370 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-toml-0.10.2-6.el9.noarch 75/150
2026-03-31T22:53:51.377 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-jaraco-functools-3.5.0-2.el9.noarch 76/150
2026-03-31T22:53:51.382 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-jaraco-text-4.0.0-2.el9.noarch 77/150
2026-03-31T22:53:51.391 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-jaraco-collections-3.0.0-8.el9.noarch 78/150
2026-03-31T22:53:51.396 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-tempora-5.0.0-2.el9.noarch 79/150
2026-03-31T22:53:51.431 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-portend-3.1.0-2.el9.noarch 80/150
2026-03-31T22:53:51.444 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-protobuf-3.14.0-17.el9.noarch 81/150
2026-03-31T22:53:51.452 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-grpcio-tools-1.46.7-10.el9.x86_64 82/150
2026-03-31T22:53:51.460 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-markupsafe-1.1.1-12.el9.x86_64 83/150
2026-03-31T22:53:51.504 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-jmespath-1.0.1-1.el9.noarch 84/150
2026-03-31T22:53:51.537 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 138/150
2026-03-31T22:53:51.540 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 139/150
2026-03-31T22:53:51.563 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 139/150
2026-03-31T22:53:51.563 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:51.563 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service".
2026-03-31T22:53:51.563 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target.
2026-03-31T22:53:51.563 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target.
2026-03-31T22:53:51.563 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:51.575 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 140/150
2026-03-31T22:53:51.596 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 140/150
2026-03-31T22:53:51.596 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:51.596 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service".
2026-03-31T22:53:51.596 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:51.780 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-devel-3.9.25-3.el9.x86_64 85/150
2026-03-31T22:53:51.807 INFO:teuthology.orchestra.run.vm00.stdout: Installing : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 141/150
2026-03-31T22:53:51.869 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 141/150
2026-03-31T22:53:51.869 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:51.869 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service".
2026-03-31T22:53:51.869 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target.
2026-03-31T22:53:51.869 INFO:teuthology.orchestra.run.vm00.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target.
2026-03-31T22:53:51.869 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:51.914 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-babel-2.9.1-2.el9.noarch 86/150
2026-03-31T22:53:52.064 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-jinja2-2.11.3-8.el9.noarch 87/150
2026-03-31T22:53:52.093 INFO:teuthology.orchestra.run.vm09.stdout: Installing : perl-Benchmark-1.23-483.el9.noarch 88/150
2026-03-31T22:53:52.194 INFO:teuthology.orchestra.run.vm09.stdout: Installing : openblas-0.3.29-1.el9.x86_64 89/150
2026-03-31T22:53:52.232 INFO:teuthology.orchestra.run.vm09.stdout: Installing : openblas-openmp-0.3.29-1.el9.x86_64 90/150
2026-03-31T22:53:52.265 INFO:teuthology.orchestra.run.vm09.stdout: Installing : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 91/150
2026-03-31T22:53:52.721 INFO:teuthology.orchestra.run.vm09.stdout: Installing : flexiblas-netlib-3.0.4-9.el9.x86_64 92/150
2026-03-31T22:53:52.808 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-numpy-1:1.23.5-2.el9.x86_64 93/150
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 116/150
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /sys
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /proc
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /mnt
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /var/tmp
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /home
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /root
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /tmp
2026-03-31T22:53:52.891 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:53.008 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 117/150
2026-03-31T22:53:53.030 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 117/150
2026-03-31T22:53:53.030 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:53.030 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service".
2026-03-31T22:53:53.030 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target.
2026-03-31T22:53:53.030 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target.
2026-03-31T22:53:53.030 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:53.264 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 118/150
2026-03-31T22:53:53.284 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 118/150
2026-03-31T22:53:53.284 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:53.284 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service".
2026-03-31T22:53:53.284 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target.
2026-03-31T22:53:53.284 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target.
2026-03-31T22:53:53.284 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:53.292 INFO:teuthology.orchestra.run.vm05.stdout: Installing : mailcap-2.1.49-5.el9.noarch 119/150
2026-03-31T22:53:53.296 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libconfig-1.7.2-9.el9.x86_64 120/150
2026-03-31T22:53:53.313 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:53:53.313 INFO:teuthology.orchestra.run.vm05.stdout:Creating group 'qat' with GID 994.
2026-03-31T22:53:53.313 INFO:teuthology.orchestra.run.vm05.stdout:Creating group 'libstoragemgmt' with GID 993.
2026-03-31T22:53:53.313 INFO:teuthology.orchestra.run.vm05.stdout:Creating user 'libstoragemgmt' (daemon account for libstoragemgmt) with UID 993 and GID 993.
2026-03-31T22:53:53.313 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:53.323 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:53:53.348 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:53:53.348 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/libstoragemgmt.service → /usr/lib/systemd/system/libstoragemgmt.service.
2026-03-31T22:53:53.348 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:53.368 INFO:teuthology.orchestra.run.vm05.stdout: Installing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 122/150
2026-03-31T22:53:53.395 INFO:teuthology.orchestra.run.vm05.stdout: Installing : fuse-2.9.9-17.el9.x86_64 123/150
2026-03-31T22:53:53.465 INFO:teuthology.orchestra.run.vm05.stdout: Installing : cryptsetup-2.8.1-3.el9.x86_64 124/150
2026-03-31T22:53:53.470 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 125/150
2026-03-31T22:53:53.482 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 125/150
2026-03-31T22:53:53.482 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:53.482 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service".
2026-03-31T22:53:53.483 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:53.555 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 94/150
2026-03-31T22:53:53.578 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-scipy-1.9.3-2.el9.x86_64 95/150
2026-03-31T22:53:53.586 INFO:teuthology.orchestra.run.vm09.stdout: Installing : boost-program-options-1.75.0-13.el9.x86_64 96/150
2026-03-31T22:53:53.885 INFO:teuthology.orchestra.run.vm09.stdout: Installing : parquet-libs-9.0.0-15.el9.x86_64 97/150
2026-03-31T22:53:53.888 INFO:teuthology.orchestra.run.vm09.stdout: Installing : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 98/150
2026-03-31T22:53:53.909 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 98/150
2026-03-31T22:53:53.911 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 99/150
2026-03-31T22:53:54.231 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 126/150
2026-03-31T22:53:54.252 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 126/150
2026-03-31T22:53:54.252 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:54.252 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service".
2026-03-31T22:53:54.252 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target.
2026-03-31T22:53:54.253 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target.
2026-03-31T22:53:54.253 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:54.313 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: cephadm-2:20.2.0-721.g5bb32787.el9.noarch 127/150
2026-03-31T22:53:54.316 INFO:teuthology.orchestra.run.vm05.stdout: Installing : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 127/150
2026-03-31T22:53:54.325 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 128/150
2026-03-31T22:53:54.351 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 129/150
2026-03-31T22:53:54.354 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 130/150
2026-03-31T22:53:55.083 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:55.088 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:55.113 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 100/150
2026-03-31T22:53:55.116 INFO:teuthology.orchestra.run.vm09.stdout: Installing : smartmontools-1:7.2-10.el9.x86_64 101/150
2026-03-31T22:53:55.129 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 101/150
2026-03-31T22:53:55.129 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/smartd.service → /usr/lib/systemd/system/smartd.service.
2026-03-31T22:53:55.129 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:53:55.154 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-ply-3.11-14.el9.noarch 102/150
2026-03-31T22:53:55.195 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-pycparser-2.20-6.el9.noarch 103/150
2026-03-31T22:53:55.282 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-cffi-1.14.5-5.el9.x86_64 104/150
2026-03-31T22:53:55.294 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-cryptography-36.0.1-5.el9.x86_64 105/150
2026-03-31T22:53:55.322 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-pyOpenSSL-21.0.0-1.el9.noarch 106/150
2026-03-31T22:53:55.405 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-cheroot-10.0.1-5.el9.noarch 107/150
2026-03-31T22:53:55.467 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-cherrypy-18.10.0-5.el9.noarch 108/150
2026-03-31T22:53:55.477 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-asyncssh-2.13.2-5.el9.noarch 109/150
2026-03-31T22:53:55.482 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-bcrypt-3.2.2-1.el9.x86_64 110/150
2026-03-31T22:53:55.489 INFO:teuthology.orchestra.run.vm09.stdout: Installing : pciutils-3.7.0-7.el9.x86_64 111/150
2026-03-31T22:53:55.493 INFO:teuthology.orchestra.run.vm09.stdout: Installing : qatlib-25.08.0-2.el9.x86_64 112/150
2026-03-31T22:53:55.495 INFO:teuthology.orchestra.run.vm09.stdout: Installing : qatlib-service-25.08.0-2.el9.x86_64 113/150
2026-03-31T22:53:55.511 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 113/150
2026-03-31T22:53:55.636 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 130/150
2026-03-31T22:53:55.646 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 131/150
2026-03-31T22:53:55.854 INFO:teuthology.orchestra.run.vm09.stdout: Installing : qatzip-libs-1.3.1-1.el9.x86_64 114/150
2026-03-31T22:53:55.898 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 115/150
2026-03-31T22:53:55.942 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 115/150
2026-03-31T22:53:55.942 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph.target → /usr/lib/systemd/system/ceph.target.
2026-03-31T22:53:55.942 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-crash.service → /usr/lib/systemd/system/ceph-crash.service.
2026-03-31T22:53:55.942 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:53:55.946 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 116/150
2026-03-31T22:53:56.296 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 131/150
2026-03-31T22:53:56.298 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 132/150
2026-03-31T22:53:56.311 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 132/150
2026-03-31T22:53:56.313 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 133/150
2026-03-31T22:53:56.373 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 133/150
2026-03-31T22:53:56.424 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 134/150
2026-03-31T22:53:56.427 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 135/150
2026-03-31T22:53:56.447 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 135/150
2026-03-31T22:53:56.447 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:56.447 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service".
2026-03-31T22:53:56.447 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target.
2026-03-31T22:53:56.447 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target.
2026-03-31T22:53:56.447 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:56.479 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 136/150
2026-03-31T22:53:56.490 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 136/150
2026-03-31T22:53:56.536 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 137/150
2026-03-31T22:53:56.623 INFO:teuthology.orchestra.run.vm00.stdout: Installing : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 142/150
2026-03-31T22:53:56.631 INFO:teuthology.orchestra.run.vm00.stdout: Installing : perl-Test-Harness-1:3.42-461.el9.noarch 143/150
2026-03-31T22:53:56.638 INFO:teuthology.orchestra.run.vm00.stdout: Installing : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_6 144/150
2026-03-31T22:53:56.649 INFO:teuthology.orchestra.run.vm00.stdout: Installing : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 145/150
2026-03-31T22:53:56.669 INFO:teuthology.orchestra.run.vm00.stdout: Installing : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 146/150
2026-03-31T22:53:56.676 INFO:teuthology.orchestra.run.vm00.stdout: Installing : s3cmd-2.4.0-1.el9.noarch 147/150
2026-03-31T22:53:56.680 INFO:teuthology.orchestra.run.vm00.stdout: Installing : bzip2-1.0.8-11.el9.x86_64 148/150
2026-03-31T22:53:56.680 INFO:teuthology.orchestra.run.vm00.stdout: Cleanup : librbd1-2:16.2.4-5.el9.x86_64 149/150
2026-03-31T22:53:56.697 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librbd1-2:16.2.4-5.el9.x86_64 149/150
2026-03-31T22:53:56.697 INFO:teuthology.orchestra.run.vm00.stdout: Cleanup : librados2-2:16.2.4-5.el9.x86_64 150/150
2026-03-31T22:53:58.051 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 138/150
2026-03-31T22:53:58.076 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 139/150
2026-03-31T22:53:58.097 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 139/150
2026-03-31T22:53:58.097 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:58.097 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service".
2026-03-31T22:53:58.097 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target.
2026-03-31T22:53:58.097 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target.
2026-03-31T22:53:58.097 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:58.138 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 140/150
2026-03-31T22:53:58.158 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 140/150
2026-03-31T22:53:58.158 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:58.158 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service".
2026-03-31T22:53:58.158 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:58.220 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librados2-2:16.2.4-5.el9.x86_64 150/150
2026-03-31T22:53:58.220 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 2/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 3/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 4/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 5/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 6/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 7/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 9/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 10/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 11/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 12/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_ 13/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_6 14/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 15/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 16/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 17/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 18/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 19/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 20/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9 21/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 22/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 23/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 24/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 25/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 26/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 27/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 28/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 29/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 30/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 31/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 32/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 33/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 34/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 35/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 36/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 37/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 38/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 39/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : bzip2-1.0.8-11.el9.x86_64 40/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : cryptsetup-2.8.1-3.el9.x86_64 41/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 42/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 43/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 44/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libgfortran-11.5.0-14.el9.x86_64 45/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libquadmath-11.5.0-14.el9.x86_64 46/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : mailcap-2.1.49-5.el9.noarch 47/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 48/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 49/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cryptography-36.0.1-5.el9.x86_64 50/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-ply-3.11-14.el9.noarch 51/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 52/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-requests-2.25.1-10.el9.noarch 53/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-urllib3-1.26.5-7.el9.noarch 54/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : smartmontools-1:7.2-10.el9.x86_64 55/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : unzip-6.0-59.el9.x86_64 56/150
2026-03-31T22:53:58.221 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : zip-3.0-35.el9.x86_64 57/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : boost-program-options-1.75.0-13.el9.x86_64 58/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : flexiblas-3.0.4-9.el9.x86_64 59/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : flexiblas-netlib-3.0.4-9.el9.x86_64 60/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 61/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 62/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 63/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 64/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 65/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libstoragemgmt-1.10.1-1.el9.x86_64 66/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libxslt-1.1.34-12.el9.x86_64 67/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 68/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : lua-5.4.4-4.el9.x86_64 69/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 70/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 71/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : perl-Benchmark-1.23-483.el9.noarch 72/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : perl-Test-Harness-1:3.42-461.el9.noarch 73/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : protobuf-3.14.0-17.el9.x86_64 74/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 75/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-devel-3.9.25-3.el9.x86_64 76/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jinja2-2.11.3-8.el9.noarch 77/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jmespath-1.0.1-1.el9.noarch 78/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 79/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 80/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 81/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-numpy-1:1.23.5-2.el9.x86_64 82/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 83/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 84/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-protobuf-3.14.0-17.el9.noarch 85/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pyasn1-0.4.8-7.el9.noarch 86/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9.noarch 87/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-requests-oauthlib-1.3.0-12.el9.noarch 88/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 89/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-toml-0.10.2-6.el9.noarch 90/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : qatlib-25.08.0-2.el9.x86_64 91/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : qatlib-service-25.08.0-2.el9.x86_64 92/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 93/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 94/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : xmlsec1-1.2.29-13.el9.x86_64 95/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 96/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 97/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 98/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : protobuf-compiler-3.14.0-17.el9.x86_64 99/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 100/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 101/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 102/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 103/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 104/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 105/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 106/150
2026-03-31T22:53:58.222 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 107/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 108/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 109/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 110/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 111/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-bcrypt-3.2.2-1.el9.x86_64 112/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 113/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 114/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 115/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 116/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 117/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 118/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 119/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 120/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 121/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 122/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 123/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 124/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 125/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 126/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 127/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 128/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 129/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 130/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-natsort-7.1.1-5.el9.noarch 131/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 132/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 133/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 134/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 135/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 136/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 137/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 138/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 139/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 140/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 141/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-xmltodict-0.12.0-15.el9.noarch 142/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 143/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : re2-1:20211101-20.el9.x86_64 144/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : s3cmd-2.4.0-1.el9.noarch 145/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 146/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 147/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librados2-2:16.2.4-5.el9.x86_64 148/150
2026-03-31T22:53:58.223 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 149/150
2026-03-31T22:53:58.316 INFO:teuthology.orchestra.run.vm05.stdout: Installing : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 141/150
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librbd1-2:16.2.4-5.el9.x86_64 150/150
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout:Upgraded:
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: librados2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout:Installed:
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: abseil-cpp-20211102.0-4.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: boost-program-options-1.75.0-13.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: bzip2-1.0.8-11.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-immutable-object-cache-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: cryptsetup-2.8.1-3.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-3.0.4-9.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-netlib-3.0.4-9.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-openblas-openmp-3.0.4-9.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: fuse-2.9.9-17.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: gperftools-libs-2.9.1-3.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: grpc-data-1.46.7-10.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: ledmon-libs-1.1.0-3.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libarrow-9.0.0-15.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libarrow-doc-9.0.0-15.el9.noarch
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libconfig-1.7.2-9.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libgfortran-11.5.0-14.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libnbd-1.20.3-4.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: liboath-2.6.12-1.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libpmemobj-1.12.1-1.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: libquadmath-11.5.0-14.el9.x86_64
2026-03-31T22:53:58.320 INFO:teuthology.orchestra.run.vm00.stdout: librabbitmq-0.11.0-7.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: librdkafka-1.6.1-102.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: libunwind-1.6.2-1.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: libxslt-1.1.34-12.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: lttng-ust-2.12.0-6.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: lua-5.4.4-4.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: lua-devel-5.4.4-4.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: luarocks-3.9.2-5.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: mailcap-2.1.49-5.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: openblas-0.3.29-1.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: openblas-openmp-0.3.29-1.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: parquet-libs-9.0.0-15.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: pciutils-3.7.0-7.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: perl-Benchmark-1.23-483.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: perl-Test-Harness-1:3.42-461.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: protobuf-3.14.0-17.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: protobuf-compiler-3.14.0-17.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-asyncssh-2.13.2-5.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-autocommand-2.2.2-8.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-babel-2.9.1-2.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-bcrypt-3.2.2-1.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-cachetools-4.2.4-1.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-certifi-2023.05.07-4.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-cffi-1.14.5-5.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-cheroot-10.0.1-5.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-cherrypy-18.10.0-5.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-cryptography-36.0.1-5.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-devel-3.9.25-3.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-google-auth-1:2.45.0-1.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-grpcio-1.46.7-10.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-influxdb-5.3.1-1.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-isodate-0.6.1-3.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-8.2.1-3.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-context-6.0.1-3.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-text-4.0.0-2.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-jinja2-2.11.3-8.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-jmespath-1.0.1-1.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-lxml-4.6.5-3.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-markupsafe-1.1.1-12.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-more-itertools-8.12.0-2.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-msgpack-1.0.3-2.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-natsort-7.1.1-5.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-numpy-1:1.23.5-2.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-numpy-f2py-1:1.23.5-2.el9.x86_64
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-packaging-20.9-5.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-ply-3.11-14.el9.noarch
2026-03-31T22:53:58.321 INFO:teuthology.orchestra.run.vm00.stdout: python3-portend-3.1.0-2.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-protobuf-3.14.0-17.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyasn1-0.4.8-7.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyasn1-modules-0.4.8-7.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-pycparser-2.20-6.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-repoze-lru-0.7-16.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-requests-2.25.1-10.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-routes-2.5.1-5.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-rsa-4.9-2.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-saml-1.16.0-1.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-scipy-1.9.3-2.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-tempora-5.0.0-2.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-toml-0.10.2-6.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-typing-extensions-4.15.0-1.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-urllib3-1.26.5-7.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-websocket-client-1.2.3-2.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-xmlsec-1.3.13-1.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-xmltodict-0.12.0-15.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: python3-zc-lockfile-2.0-10.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: qatlib-25.08.0-2.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: qatlib-service-25.08.0-2.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: qatzip-libs-1.3.1-1.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: re2-1:20211101-20.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: s3cmd-2.4.0-1.el9.noarch
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: smartmontools-1:7.2-10.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: socat-1.7.4.1-8.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: thrift-0.15.0-4.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: unzip-6.0-59.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: xmlsec1-1.2.29-13.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64
2026-03-31T22:53:58.322 INFO:teuthology.orchestra.run.vm00.stdout: xmlstarlet-1.6.1-20.el9.x86_64
2026-03-31T22:53:58.323 INFO:teuthology.orchestra.run.vm00.stdout: zip-3.0-35.el9.x86_64
2026-03-31T22:53:58.323 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T22:53:58.323 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T22:53:58.339 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 141/150
2026-03-31T22:53:58.339 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:53:58.339 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service".
2026-03-31T22:53:58.339 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target.
2026-03-31T22:53:58.339 INFO:teuthology.orchestra.run.vm05.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target.
2026-03-31T22:53:58.339 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:53:58.412 DEBUG:teuthology.parallel:result is None
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 116/150
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /sys
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /proc
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /mnt
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /var/tmp
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /home
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /root
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /tmp
2026-03-31T22:54:02.281 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:02.397 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 117/150
2026-03-31T22:54:02.417 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 117/150
2026-03-31T22:54:02.417 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:54:02.417 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service".
2026-03-31T22:54:02.417 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target.
2026-03-31T22:54:02.417 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mds.target → /usr/lib/systemd/system/ceph-mds.target.
2026-03-31T22:54:02.417 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:02.649 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 118/150
2026-03-31T22:54:02.668 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 118/150
2026-03-31T22:54:02.668 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:54:02.668 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service".
2026-03-31T22:54:02.668 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target.
2026-03-31T22:54:02.668 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mon.target → /usr/lib/systemd/system/ceph-mon.target.
2026-03-31T22:54:02.668 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:02.676 INFO:teuthology.orchestra.run.vm09.stdout: Installing : mailcap-2.1.49-5.el9.noarch 119/150
2026-03-31T22:54:02.679 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libconfig-1.7.2-9.el9.x86_64 120/150
2026-03-31T22:54:02.695 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:54:02.695 INFO:teuthology.orchestra.run.vm09.stdout:Creating group 'qat' with GID 994.
2026-03-31T22:54:02.696 INFO:teuthology.orchestra.run.vm09.stdout:Creating group 'libstoragemgmt' with GID 993.
2026-03-31T22:54:02.696 INFO:teuthology.orchestra.run.vm09.stdout:Creating user 'libstoragemgmt' (daemon account for libstoragemgmt) with UID 993 and GID 993.
2026-03-31T22:54:02.696 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:02.705 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:54:02.729 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 121/150
2026-03-31T22:54:02.729 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/libstoragemgmt.service → /usr/lib/systemd/system/libstoragemgmt.service.
2026-03-31T22:54:02.729 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:02.749 INFO:teuthology.orchestra.run.vm09.stdout: Installing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 122/150
2026-03-31T22:54:02.775 INFO:teuthology.orchestra.run.vm09.stdout: Installing : fuse-2.9.9-17.el9.x86_64 123/150
2026-03-31T22:54:02.847 INFO:teuthology.orchestra.run.vm09.stdout: Installing : cryptsetup-2.8.1-3.el9.x86_64 124/150
2026-03-31T22:54:02.851 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 125/150
2026-03-31T22:54:02.864 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 125/150
2026-03-31T22:54:02.864 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:54:02.864 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service".
2026-03-31T22:54:02.864 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:03.357 INFO:teuthology.orchestra.run.vm05.stdout: Installing : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 142/150
2026-03-31T22:54:03.363 INFO:teuthology.orchestra.run.vm05.stdout: Installing : perl-Test-Harness-1:3.42-461.el9.noarch 143/150
2026-03-31T22:54:03.368 INFO:teuthology.orchestra.run.vm05.stdout: Installing : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_6 144/150
2026-03-31T22:54:03.379 INFO:teuthology.orchestra.run.vm05.stdout: Installing : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 145/150
2026-03-31T22:54:03.397 INFO:teuthology.orchestra.run.vm05.stdout: Installing : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 146/150
2026-03-31T22:54:03.404 INFO:teuthology.orchestra.run.vm05.stdout: Installing : s3cmd-2.4.0-1.el9.noarch 147/150
2026-03-31T22:54:03.407 INFO:teuthology.orchestra.run.vm05.stdout: Installing : bzip2-1.0.8-11.el9.x86_64 148/150
2026-03-31T22:54:03.407 INFO:teuthology.orchestra.run.vm05.stdout: Cleanup : librbd1-2:16.2.4-5.el9.x86_64 149/150
2026-03-31T22:54:03.423 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librbd1-2:16.2.4-5.el9.x86_64 149/150
2026-03-31T22:54:03.423 INFO:teuthology.orchestra.run.vm05.stdout: Cleanup : librados2-2:16.2.4-5.el9.x86_64 150/150
2026-03-31T22:54:03.608 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 126/150
2026-03-31T22:54:03.633 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 126/150
2026-03-31T22:54:03.633 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:54:03.633 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service".
2026-03-31T22:54:03.633 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target.
2026-03-31T22:54:03.633 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-osd.target → /usr/lib/systemd/system/ceph-osd.target.
2026-03-31T22:54:03.634 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:03.701 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: cephadm-2:20.2.0-721.g5bb32787.el9.noarch 127/150
2026-03-31T22:54:03.704 INFO:teuthology.orchestra.run.vm09.stdout: Installing : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 127/150
2026-03-31T22:54:03.712 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 128/150
2026-03-31T22:54:03.739 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 129/150
2026-03-31T22:54:03.744 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 130/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librados2-2:16.2.4-5.el9.x86_64 150/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 2/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 3/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 4/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 5/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 6/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 7/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 9/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 10/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 11/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 12/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_ 13/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_6 14/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 15/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 16/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 17/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 18/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 19/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 20/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9 21/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 22/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 23/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 24/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 25/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 26/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 27/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 28/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 29/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 30/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 31/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 32/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 33/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 34/150
2026-03-31T22:54:04.782 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 35/150
2026-03-31T22:54:04.784 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 36/150
2026-03-31T22:54:04.784 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 37/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 38/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 39/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : bzip2-1.0.8-11.el9.x86_64 40/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : cryptsetup-2.8.1-3.el9.x86_64 41/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 42/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 43/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 44/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libgfortran-11.5.0-14.el9.x86_64 45/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libquadmath-11.5.0-14.el9.x86_64 46/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : mailcap-2.1.49-5.el9.noarch 47/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 48/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 49/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cryptography-36.0.1-5.el9.x86_64 50/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-ply-3.11-14.el9.noarch 51/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 52/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-requests-2.25.1-10.el9.noarch 53/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-urllib3-1.26.5-7.el9.noarch 54/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : smartmontools-1:7.2-10.el9.x86_64 55/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : unzip-6.0-59.el9.x86_64 56/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : zip-3.0-35.el9.x86_64 57/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : boost-program-options-1.75.0-13.el9.x86_64 58/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : flexiblas-3.0.4-9.el9.x86_64 59/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : flexiblas-netlib-3.0.4-9.el9.x86_64 60/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 61/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 62/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 63/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 64/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 65/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libstoragemgmt-1.10.1-1.el9.x86_64 66/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libxslt-1.1.34-12.el9.x86_64 67/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 68/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : lua-5.4.4-4.el9.x86_64 69/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 70/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 71/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : perl-Benchmark-1.23-483.el9.noarch 72/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : perl-Test-Harness-1:3.42-461.el9.noarch 73/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : protobuf-3.14.0-17.el9.x86_64 74/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 75/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-devel-3.9.25-3.el9.x86_64 76/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jinja2-2.11.3-8.el9.noarch 77/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jmespath-1.0.1-1.el9.noarch 78/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 79/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 80/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 81/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-numpy-1:1.23.5-2.el9.x86_64 82/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 83/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 84/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-protobuf-3.14.0-17.el9.noarch 85/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pyasn1-0.4.8-7.el9.noarch 86/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9.noarch 87/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-requests-oauthlib-1.3.0-12.el9.noarch 88/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 89/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-toml-0.10.2-6.el9.noarch 90/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : qatlib-25.08.0-2.el9.x86_64 91/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : qatlib-service-25.08.0-2.el9.x86_64 92/150
2026-03-31T22:54:04.785 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 93/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 94/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : xmlsec1-1.2.29-13.el9.x86_64 95/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 96/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 97/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 98/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : protobuf-compiler-3.14.0-17.el9.x86_64 99/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 100/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 101/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 102/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 103/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 104/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 105/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 106/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 107/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 108/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 109/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 110/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 111/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-bcrypt-3.2.2-1.el9.x86_64 112/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 113/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 114/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 115/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 116/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 117/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 118/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 119/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 120/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 121/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 122/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 123/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 124/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 125/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 126/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 127/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 128/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 129/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 130/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-natsort-7.1.1-5.el9.noarch 131/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 132/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 133/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 134/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 135/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 136/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 137/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 138/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 139/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 140/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 141/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-xmltodict-0.12.0-15.el9.noarch 142/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 143/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : re2-1:20211101-20.el9.x86_64 144/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : s3cmd-2.4.0-1.el9.noarch 145/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 146/150
2026-03-31T22:54:04.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 147/150
2026-03-31T22:54:04.787 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librados2-2:16.2.4-5.el9.x86_64 148/150
2026-03-31T22:54:04.787 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 149/150
2026-03-31T22:54:04.880 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librbd1-2:16.2.4-5.el9.x86_64 150/150
2026-03-31T22:54:04.880 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:54:04.880 INFO:teuthology.orchestra.run.vm05.stdout:Upgraded:
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: librados2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout:Installed:
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: abseil-cpp-20211102.0-4.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: boost-program-options-1.75.0-13.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: bzip2-1.0.8-11.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-immutable-object-cache-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: cryptsetup-2.8.1-3.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-3.0.4-9.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-netlib-3.0.4-9.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-openblas-openmp-3.0.4-9.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: fuse-2.9.9-17.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: gperftools-libs-2.9.1-3.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: grpc-data-1.46.7-10.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: ledmon-libs-1.1.0-3.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libarrow-9.0.0-15.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libarrow-doc-9.0.0-15.el9.noarch
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libconfig-1.7.2-9.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libgfortran-11.5.0-14.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libnbd-1.20.3-4.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: liboath-2.6.12-1.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libpmemobj-1.12.1-1.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libquadmath-11.5.0-14.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: librabbitmq-0.11.0-7.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: librdkafka-1.6.1-102.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libunwind-1.6.2-1.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: libxslt-1.1.34-12.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: lttng-ust-2.12.0-6.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: lua-5.4.4-4.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: lua-devel-5.4.4-4.el9.x86_64
2026-03-31T22:54:04.881 INFO:teuthology.orchestra.run.vm05.stdout: luarocks-3.9.2-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: mailcap-2.1.49-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: openblas-0.3.29-1.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: openblas-openmp-0.3.29-1.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: parquet-libs-9.0.0-15.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: pciutils-3.7.0-7.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: perl-Benchmark-1.23-483.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: perl-Test-Harness-1:3.42-461.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: protobuf-3.14.0-17.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: protobuf-compiler-3.14.0-17.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-asyncssh-2.13.2-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-autocommand-2.2.2-8.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-babel-2.9.1-2.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-bcrypt-3.2.2-1.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-cachetools-4.2.4-1.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-certifi-2023.05.07-4.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-cffi-1.14.5-5.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-cheroot-10.0.1-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-cherrypy-18.10.0-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-cryptography-36.0.1-5.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-devel-3.9.25-3.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-google-auth-1:2.45.0-1.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-grpcio-1.46.7-10.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-influxdb-5.3.1-1.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-isodate-0.6.1-3.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-8.2.1-3.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-context-6.0.1-3.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-text-4.0.0-2.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-jinja2-2.11.3-8.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-jmespath-1.0.1-1.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-lxml-4.6.5-3.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-markupsafe-1.1.1-12.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-more-itertools-8.12.0-2.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-msgpack-1.0.3-2.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-natsort-7.1.1-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-numpy-1:1.23.5-2.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-numpy-f2py-1:1.23.5-2.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-packaging-20.9-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-ply-3.11-14.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-portend-3.1.0-2.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-protobuf-3.14.0-17.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyasn1-0.4.8-7.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyasn1-modules-0.4.8-7.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-pycparser-2.20-6.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-repoze-lru-0.7-16.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-requests-2.25.1-10.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-routes-2.5.1-5.el9.noarch
2026-03-31T22:54:04.882 INFO:teuthology.orchestra.run.vm05.stdout: python3-rsa-4.9-2.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-saml-1.16.0-1.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-scipy-1.9.3-2.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-tempora-5.0.0-2.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-toml-0.10.2-6.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-typing-extensions-4.15.0-1.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-urllib3-1.26.5-7.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-websocket-client-1.2.3-2.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-xmlsec-1.3.13-1.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-xmltodict-0.12.0-15.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: python3-zc-lockfile-2.0-10.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: qatlib-25.08.0-2.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: qatlib-service-25.08.0-2.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: qatzip-libs-1.3.1-1.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: re2-1:20211101-20.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: s3cmd-2.4.0-1.el9.noarch
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: smartmontools-1:7.2-10.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: socat-1.7.4.1-8.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: thrift-0.15.0-4.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: unzip-6.0-59.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: xmlsec1-1.2.29-13.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: xmlstarlet-1.6.1-20.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout: zip-3.0-35.el9.x86_64
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T22:54:04.883 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T22:54:04.978 DEBUG:teuthology.parallel:result is None
2026-03-31T22:54:05.154 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 130/150
2026-03-31T22:54:05.182 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 131/150
2026-03-31T22:54:05.865 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 131/150
2026-03-31T22:54:05.868 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 132/150
2026-03-31T22:54:05.879 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 132/150
2026-03-31T22:54:05.881 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 133/150
2026-03-31T22:54:05.940 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 133/150
2026-03-31T22:54:05.991 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 134/150
2026-03-31T22:54:05.993 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 135/150
2026-03-31T22:54:06.013 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 135/150
2026-03-31T22:54:06.013 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:54:06.013 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service".
2026-03-31T22:54:06.013 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target.
2026-03-31T22:54:06.013 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-mgr.target → /usr/lib/systemd/system/ceph-mgr.target.
2026-03-31T22:54:06.013 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:06.027 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 136/150
2026-03-31T22:54:06.035 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 136/150
2026-03-31T22:54:06.082 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 137/150
2026-03-31T22:54:07.446 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 138/150
2026-03-31T22:54:07.451 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 139/150
2026-03-31T22:54:07.470 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 139/150
2026-03-31T22:54:07.470 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T22:54:07.470 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service".
2026-03-31T22:54:07.470 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target.
2026-03-31T22:54:07.470 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-radosgw.target → /usr/lib/systemd/system/ceph-radosgw.target. 2026-03-31T22:54:07.470 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:54:07.482 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 140/150 2026-03-31T22:54:07.499 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 140/150 2026-03-31T22:54:07.499 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-03-31T22:54:07.499 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 2026-03-31T22:54:07.499 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:54:07.682 INFO:teuthology.orchestra.run.vm09.stdout: Installing : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 141/150 2026-03-31T22:54:07.701 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 141/150 2026-03-31T22:54:07.701 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-03-31T22:54:07.701 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service". 2026-03-31T22:54:07.701 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target. 2026-03-31T22:54:07.701 INFO:teuthology.orchestra.run.vm09.stdout:Created symlink /etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target → /usr/lib/systemd/system/ceph-rbd-mirror.target. 
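[editorial note] The "Glob pattern passed to enable" warnings above are harmless: systemctl enable takes concrete unit names, not globs, so the package scriptlets fall back to enabling the per-daemon target (the "Created symlink ... ceph-mgr.target" lines), while instantiated units must be enabled by name. A sketch for illustration (the instance name "a" is an assumption):

    # what the scriptlet effectively achieves, and how one instance is enabled
    sudo systemctl enable ceph-mgr.target      # target, created via symlink above
    sudo systemctl enable ceph-mgr@a.service   # a concrete instance; no glob needed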
2026-03-31T22:54:07.701 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:54:12.419 INFO:teuthology.orchestra.run.vm09.stdout: Installing : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 142/150 2026-03-31T22:54:12.426 INFO:teuthology.orchestra.run.vm09.stdout: Installing : perl-Test-Harness-1:3.42-461.el9.noarch 143/150 2026-03-31T22:54:12.432 INFO:teuthology.orchestra.run.vm09.stdout: Installing : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_6 144/150 2026-03-31T22:54:12.445 INFO:teuthology.orchestra.run.vm09.stdout: Installing : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 145/150 2026-03-31T22:54:12.463 INFO:teuthology.orchestra.run.vm09.stdout: Installing : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 146/150 2026-03-31T22:54:12.471 INFO:teuthology.orchestra.run.vm09.stdout: Installing : s3cmd-2.4.0-1.el9.noarch 147/150 2026-03-31T22:54:12.475 INFO:teuthology.orchestra.run.vm09.stdout: Installing : bzip2-1.0.8-11.el9.x86_64 148/150 2026-03-31T22:54:12.475 INFO:teuthology.orchestra.run.vm09.stdout: Cleanup : librbd1-2:16.2.4-5.el9.x86_64 149/150 2026-03-31T22:54:12.489 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librbd1-2:16.2.4-5.el9.x86_64 149/150 2026-03-31T22:54:12.489 INFO:teuthology.orchestra.run.vm09.stdout: Cleanup : librados2-2:16.2.4-5.el9.x86_64 150/150 2026-03-31T22:54:13.867 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librados2-2:16.2.4-5.el9.x86_64 150/150 2026-03-31T22:54:13.867 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/150 2026-03-31T22:54:13.867 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 2/150 2026-03-31T22:54:13.867 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 3/150 2026-03-31T22:54:13.867 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 4/150 2026-03-31T22:54:13.867 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 5/150 2026-03-31T22:54:13.867 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 6/150 2026-03-31T22:54:13.867 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 7/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 9/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 10/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 11/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 12/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_ 13/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_6 14/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_ 15/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 16/150 2026-03-31T22:54:13.868 
INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 17/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 18/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 19/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 20/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9 21/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 22/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 23/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 24/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 25/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 26/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 27/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 28/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 29/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 30/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 31/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 32/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 33/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 34/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 35/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 36/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 37/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 38/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 39/150 2026-03-31T22:54:13.868 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : bzip2-1.0.8-11.el9.x86_64 40/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : cryptsetup-2.8.1-3.el9.x86_64 41/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 42/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 43/150 2026-03-31T22:54:13.870 
INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 44/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libgfortran-11.5.0-14.el9.x86_64 45/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libquadmath-11.5.0-14.el9.x86_64 46/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : mailcap-2.1.49-5.el9.noarch 47/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 48/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 49/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cryptography-36.0.1-5.el9.x86_64 50/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-ply-3.11-14.el9.noarch 51/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 52/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-requests-2.25.1-10.el9.noarch 53/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-urllib3-1.26.5-7.el9.noarch 54/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : smartmontools-1:7.2-10.el9.x86_64 55/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : unzip-6.0-59.el9.x86_64 56/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : zip-3.0-35.el9.x86_64 57/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : boost-program-options-1.75.0-13.el9.x86_64 58/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : flexiblas-3.0.4-9.el9.x86_64 59/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : flexiblas-netlib-3.0.4-9.el9.x86_64 60/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 61/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 62/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 63/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 64/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 65/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libstoragemgmt-1.10.1-1.el9.x86_64 66/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libxslt-1.1.34-12.el9.x86_64 67/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 68/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : lua-5.4.4-4.el9.x86_64 69/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 70/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 71/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : perl-Benchmark-1.23-483.el9.noarch 72/150 2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : perl-Test-Harness-1:3.42-461.el9.noarch 73/150 
2026-03-31T22:54:13.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : protobuf-3.14.0-17.el9.x86_64 74/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 75/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-devel-3.9.25-3.el9.x86_64 76/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jinja2-2.11.3-8.el9.noarch 77/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jmespath-1.0.1-1.el9.noarch 78/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 79/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 80/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 81/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-numpy-1:1.23.5-2.el9.x86_64 82/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 83/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 84/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-protobuf-3.14.0-17.el9.noarch 85/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pyasn1-0.4.8-7.el9.noarch 86/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9.noarch 87/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-requests-oauthlib-1.3.0-12.el9.noarch 88/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 89/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-toml-0.10.2-6.el9.noarch 90/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : qatlib-25.08.0-2.el9.x86_64 91/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : qatlib-service-25.08.0-2.el9.x86_64 92/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 93/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 94/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : xmlsec1-1.2.29-13.el9.x86_64 95/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 96/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 97/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 98/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : protobuf-compiler-3.14.0-17.el9.x86_64 99/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 100/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 101/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 102/150 2026-03-31T22:54:13.871 
INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 103/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 104/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 105/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 106/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 107/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 108/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 109/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 110/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 111/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-bcrypt-3.2.2-1.el9.x86_64 112/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 113/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 114/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 115/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 116/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 117/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 118/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 119/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 120/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 121/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 122/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 123/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 124/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 125/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 126/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 127/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 128/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 129/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 130/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : 
python3-natsort-7.1.1-5.el9.noarch 131/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 132/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 133/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 134/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 135/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 136/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 137/150 2026-03-31T22:54:13.871 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 138/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 139/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 140/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 141/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-xmltodict-0.12.0-15.el9.noarch 142/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 143/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : re2-1:20211101-20.el9.x86_64 144/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : s3cmd-2.4.0-1.el9.noarch 145/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 146/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 147/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librados2-2:16.2.4-5.el9.x86_64 148/150 2026-03-31T22:54:13.872 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 149/150 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librbd1-2:16.2.4-5.el9.x86_64 150/150 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout:Upgraded: 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: librados2-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout:Installed: 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: abseil-cpp-20211102.0-4.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: boost-program-options-1.75.0-13.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: bzip2-1.0.8-11.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 
2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-immutable-object-cache-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.965 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: cephadm-2:20.2.0-721.g5bb32787.el9.noarch 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: cryptsetup-2.8.1-3.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-3.0.4-9.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-netlib-3.0.4-9.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: fuse-2.9.9-17.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: gperftools-libs-2.9.1-3.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: grpc-data-1.46.7-10.el9.noarch 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: ledmon-libs-1.1.0-3.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libarrow-9.0.0-15.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libarrow-doc-9.0.0-15.el9.noarch 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: 
libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libconfig-1.7.2-9.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libgfortran-11.5.0-14.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libnbd-1.20.3-4.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: liboath-2.6.12-1.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libpmemobj-1.12.1-1.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libquadmath-11.5.0-14.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: librabbitmq-0.11.0-7.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: librdkafka-1.6.1-102.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libstoragemgmt-1.10.1-1.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libunwind-1.6.2-1.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: libxslt-1.1.34-12.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: lttng-ust-2.12.0-6.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: lua-5.4.4-4.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: lua-devel-5.4.4-4.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: luarocks-3.9.2-5.el9.noarch 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: mailcap-2.1.49-5.el9.noarch 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: openblas-0.3.29-1.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: openblas-openmp-0.3.29-1.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: parquet-libs-9.0.0-15.el9.x86_64 2026-03-31T22:54:13.966 INFO:teuthology.orchestra.run.vm09.stdout: pciutils-3.7.0-7.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: perl-Benchmark-1.23-483.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: perl-Test-Harness-1:3.42-461.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: protobuf-3.14.0-17.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: protobuf-compiler-3.14.0-17.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-asyncssh-2.13.2-5.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-autocommand-2.2.2-8.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-babel-2.9.1-2.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-bcrypt-3.2.2-1.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-cachetools-4.2.4-1.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: 
python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-certifi-2023.05.07-4.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-cffi-1.14.5-5.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-cheroot-10.0.1-5.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-cherrypy-18.10.0-5.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-cryptography-36.0.1-5.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-devel-3.9.25-3.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-google-auth-1:2.45.0-1.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-grpcio-1.46.7-10.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-influxdb-5.3.1-1.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-isodate-0.6.1-3.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-8.2.1-3.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-context-6.0.1-3.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-text-4.0.0-2.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-jinja2-2.11.3-8.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-jmespath-1.0.1-1.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-lxml-4.6.5-3.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-markupsafe-1.1.1-12.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-more-itertools-8.12.0-2.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-msgpack-1.0.3-2.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-natsort-7.1.1-5.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-numpy-1:1.23.5-2.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-numpy-f2py-1:1.23.5-2.el9.x86_64 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-packaging-20.9-5.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-ply-3.11-14.el9.noarch 2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-portend-3.1.0-2.el9.noarch 
2026-03-31T22:54:13.967 INFO:teuthology.orchestra.run.vm09.stdout: python3-protobuf-3.14.0-17.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyasn1-0.4.8-7.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyasn1-modules-0.4.8-7.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-pycparser-2.20-6.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-repoze-lru-0.7-16.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-requests-2.25.1-10.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-routes-2.5.1-5.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-rsa-4.9-2.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-saml-1.16.0-1.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-scipy-1.9.3-2.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-tempora-5.0.0-2.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-toml-0.10.2-6.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-typing-extensions-4.15.0-1.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-urllib3-1.26.5-7.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-websocket-client-1.2.3-2.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-xmlsec-1.3.13-1.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-xmltodict-0.12.0-15.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: python3-zc-lockfile-2.0-10.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: qatlib-25.08.0-2.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: qatlib-service-25.08.0-2.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: qatzip-libs-1.3.1-1.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: re2-1:20211101-20.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: s3cmd-2.4.0-1.el9.noarch 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: smartmontools-1:7.2-10.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: socat-1.7.4.1-8.el9.x86_64 2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: thrift-0.15.0-4.el9.x86_64 2026-03-31T22:54:13.968 
INFO:teuthology.orchestra.run.vm09.stdout: unzip-6.0-59.el9.x86_64
2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: xmlsec1-1.2.29-13.el9.x86_64
2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64
2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: xmlstarlet-1.6.1-20.el9.x86_64
2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout: zip-3.0-35.el9.x86_64
2026-03-31T22:54:13.968 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T22:54:13.969 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T22:54:14.049 DEBUG:teuthology.parallel:result is None
2026-03-31T22:54:14.049 DEBUG:teuthology.packaging:Querying https://shaman.ceph.com/api/search?status=ready&project=ceph&flavor=default&distros=centos%2F9%2Fx86_64&sha1=5bb3278730741031382ca9c3dc9d221a942e06a2
2026-03-31T22:54:14.769 DEBUG:teuthology.orchestra.run.vm00:> rpm -q ceph --qf '%{VERSION}-%{RELEASE}'
2026-03-31T22:54:14.789 INFO:teuthology.orchestra.run.vm00.stdout:20.2.0-721.g5bb32787.el9
2026-03-31T22:54:14.789 INFO:teuthology.packaging:The installed version of ceph is 20.2.0-721.g5bb32787.el9
2026-03-31T22:54:14.789 INFO:teuthology.task.install:The correct ceph version 20.2.0-721.g5bb32787 is installed.
2026-03-31T22:54:14.789 DEBUG:teuthology.packaging:Querying https://shaman.ceph.com/api/search?status=ready&project=ceph&flavor=default&distros=centos%2F9%2Fx86_64&sha1=5bb3278730741031382ca9c3dc9d221a942e06a2
2026-03-31T22:54:15.455 DEBUG:teuthology.orchestra.run.vm05:> rpm -q ceph --qf '%{VERSION}-%{RELEASE}'
2026-03-31T22:54:15.472 INFO:teuthology.orchestra.run.vm05.stdout:20.2.0-721.g5bb32787.el9
2026-03-31T22:54:15.472 INFO:teuthology.packaging:The installed version of ceph is 20.2.0-721.g5bb32787.el9
2026-03-31T22:54:15.473 INFO:teuthology.task.install:The correct ceph version 20.2.0-721.g5bb32787 is installed.
2026-03-31T22:54:15.473 DEBUG:teuthology.packaging:Querying https://shaman.ceph.com/api/search?status=ready&project=ceph&flavor=default&distros=centos%2F9%2Fx86_64&sha1=5bb3278730741031382ca9c3dc9d221a942e06a2
2026-03-31T22:54:16.039 DEBUG:teuthology.orchestra.run.vm09:> rpm -q ceph --qf '%{VERSION}-%{RELEASE}'
2026-03-31T22:54:16.059 INFO:teuthology.orchestra.run.vm09.stdout:20.2.0-721.g5bb32787.el9
2026-03-31T22:54:16.059 INFO:teuthology.packaging:The installed version of ceph is 20.2.0-721.g5bb32787.el9
2026-03-31T22:54:16.059 INFO:teuthology.task.install:The correct ceph version 20.2.0-721.g5bb32787 is installed.
2026-03-31T22:54:16.059 INFO:teuthology.task.install.util:Shipping valgrind.supp...
2026-03-31T22:54:16.059 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:54:16.059 DEBUG:teuthology.orchestra.run.vm00:> sudo dd of=/home/ubuntu/cephtest/valgrind.supp
2026-03-31T22:54:16.082 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:54:16.082 DEBUG:teuthology.orchestra.run.vm05:> sudo dd of=/home/ubuntu/cephtest/valgrind.supp
2026-03-31T22:54:16.106 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:54:16.106 DEBUG:teuthology.orchestra.run.vm09:> sudo dd of=/home/ubuntu/cephtest/valgrind.supp
2026-03-31T22:54:16.129 INFO:teuthology.task.install.util:Shipping 'daemon-helper'...
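[editorial note] The version check logged above is easy to reproduce by hand on a target node. A minimal bash sketch; EXPECTED is hard-coded here from the log, whereas the install task derives it from the shaman build metadata for the job's sha1:

    # compare the installed ceph version against the expected build prefix
    EXPECTED="20.2.0-721.g5bb32787"                       # assumed, taken from the log above
    INSTALLED=$(rpm -q ceph --qf '%{VERSION}-%{RELEASE}') # e.g. 20.2.0-721.g5bb32787.el9
    case "$INSTALLED" in
        "$EXPECTED"*) echo "correct ceph version $EXPECTED is installed" ;;
        *) echo "version mismatch: got $INSTALLED, wanted $EXPECTED" >&2; exit 1 ;;
    esac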
2026-03-31T22:54:16.130 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:54:16.130 DEBUG:teuthology.orchestra.run.vm00:> sudo dd of=/usr/bin/daemon-helper
2026-03-31T22:54:16.151 DEBUG:teuthology.orchestra.run.vm00:> sudo chmod a=rx -- /usr/bin/daemon-helper
2026-03-31T22:54:16.210 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:54:16.210 DEBUG:teuthology.orchestra.run.vm05:> sudo dd of=/usr/bin/daemon-helper
2026-03-31T22:54:16.232 DEBUG:teuthology.orchestra.run.vm05:> sudo chmod a=rx -- /usr/bin/daemon-helper
2026-03-31T22:54:16.292 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:54:16.292 DEBUG:teuthology.orchestra.run.vm09:> sudo dd of=/usr/bin/daemon-helper
2026-03-31T22:54:16.313 DEBUG:teuthology.orchestra.run.vm09:> sudo chmod a=rx -- /usr/bin/daemon-helper
2026-03-31T22:54:16.374 INFO:teuthology.task.install.util:Shipping 'adjust-ulimits'...
2026-03-31T22:54:16.374 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:54:16.374 DEBUG:teuthology.orchestra.run.vm00:> sudo dd of=/usr/bin/adjust-ulimits
2026-03-31T22:54:16.395 DEBUG:teuthology.orchestra.run.vm00:> sudo chmod a=rx -- /usr/bin/adjust-ulimits
2026-03-31T22:54:16.457 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:54:16.457 DEBUG:teuthology.orchestra.run.vm05:> sudo dd of=/usr/bin/adjust-ulimits
2026-03-31T22:54:16.479 DEBUG:teuthology.orchestra.run.vm05:> sudo chmod a=rx -- /usr/bin/adjust-ulimits
2026-03-31T22:54:16.542 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:54:16.542 DEBUG:teuthology.orchestra.run.vm09:> sudo dd of=/usr/bin/adjust-ulimits
2026-03-31T22:54:16.565 DEBUG:teuthology.orchestra.run.vm09:> sudo chmod a=rx -- /usr/bin/adjust-ulimits
2026-03-31T22:54:16.628 INFO:teuthology.task.install.util:Shipping 'stdin-killer'...
2026-03-31T22:54:16.628 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:54:16.628 DEBUG:teuthology.orchestra.run.vm00:> sudo dd of=/usr/bin/stdin-killer
2026-03-31T22:54:16.655 DEBUG:teuthology.orchestra.run.vm00:> sudo chmod a=rx -- /usr/bin/stdin-killer
2026-03-31T22:54:16.718 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:54:16.718 DEBUG:teuthology.orchestra.run.vm05:> sudo dd of=/usr/bin/stdin-killer
2026-03-31T22:54:16.741 DEBUG:teuthology.orchestra.run.vm05:> sudo chmod a=rx -- /usr/bin/stdin-killer
2026-03-31T22:54:16.804 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:54:16.804 DEBUG:teuthology.orchestra.run.vm09:> sudo dd of=/usr/bin/stdin-killer
2026-03-31T22:54:16.826 DEBUG:teuthology.orchestra.run.vm09:> sudo chmod a=rx -- /usr/bin/stdin-killer
2026-03-31T22:54:16.887 INFO:teuthology.run_tasks:Running task cephadm...
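[editorial note] The shipping pattern above is the same for each helper: the file content is streamed over the connection into `sudo dd of=...`, then marked world-executable. A rough standalone sketch under assumed names (HOST and the local source paths are illustrations; teuthology drives this through its own SSH connection objects):

    # ship each helper to a remote host the way the install task does
    HOST=vm00                                   # assumed target
    for helper in daemon-helper adjust-ulimits stdin-killer; do
        ssh "$HOST" "set -ex; sudo dd of=/usr/bin/$helper" < "./$helper"
        ssh "$HOST" "sudo chmod a=rx -- /usr/bin/$helper"
    done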
2026-03-31T22:54:16.934 INFO:tasks.cephadm:Config: {'conf': {'global': {'mon election default strategy': 3}, 'mgr': {'debug mgr': 20, 'debug ms': 1, 'mgr/cephadm/use_agent': True}, 'mon': {'debug mon': 20, 'debug ms': 1, 'debug paxos': 20}, 'osd': {'debug ms': 1, 'debug osd': 20, 'osd mclock iops capacity threshold hdd': 49000}}, 'flavor': 'default', 'log-ignorelist': ['\\(MDS_ALL_DOWN\\)', '\\(MDS_UP_LESS_THAN_MAX\\)', 'MON_DOWN', 'mons down', 'mon down', 'out of quorum', 'CEPHADM_STRAY_DAEMON', 'CEPHADM_FAILED_DAEMON'], 'log-only-match': ['CEPHADM_'], 'sha1': '5bb3278730741031382ca9c3dc9d221a942e06a2', 'cephadm_binary_url': 'https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm'}
2026-03-31T22:54:16.934 INFO:tasks.cephadm:Cluster image is quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2
2026-03-31T22:54:16.935 INFO:tasks.cephadm:Cluster fsid is 8bb14950-2d54-11f1-a348-07063966e06c
2026-03-31T22:54:16.935 INFO:tasks.cephadm:Choosing monitor IPs and ports...
2026-03-31T22:54:16.935 INFO:tasks.cephadm:Monitor IPs: {'mon.a': '192.168.123.100', 'mon.b': '192.168.123.105', 'mon.c': '192.168.123.109'}
2026-03-31T22:54:16.935 INFO:tasks.cephadm:First mon is mon.a on vm00
2026-03-31T22:54:16.935 INFO:tasks.cephadm:First mgr is a
2026-03-31T22:54:16.935 INFO:tasks.cephadm:Normalizing hostnames...
2026-03-31T22:54:16.935 DEBUG:teuthology.orchestra.run.vm00:> sudo hostname $(hostname -s)
2026-03-31T22:54:16.956 DEBUG:teuthology.orchestra.run.vm05:> sudo hostname $(hostname -s)
2026-03-31T22:54:16.979 DEBUG:teuthology.orchestra.run.vm09:> sudo hostname $(hostname -s)
2026-03-31T22:54:17.002 INFO:tasks.cephadm:Downloading cephadm from url: https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm
2026-03-31T22:54:17.002 DEBUG:teuthology.orchestra.run.vm00:> curl --silent -L https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm > /home/ubuntu/cephtest/cephadm && ls -l /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:18.100 INFO:teuthology.orchestra.run.vm00.stdout:-rw-r--r--. 1 ubuntu ubuntu 1036391 Mar 31 22:54 /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:18.100 DEBUG:teuthology.orchestra.run.vm05:> curl --silent -L https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm > /home/ubuntu/cephtest/cephadm && ls -l /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:19.188 INFO:teuthology.orchestra.run.vm05.stdout:-rw-r--r--. 1 ubuntu ubuntu 1036391 Mar 31 22:54 /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:19.188 DEBUG:teuthology.orchestra.run.vm09:> curl --silent -L https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm > /home/ubuntu/cephtest/cephadm && ls -l /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:20.353 INFO:teuthology.orchestra.run.vm09.stdout:-rw-r--r--. 1 ubuntu ubuntu 1036391 Mar 31 22:54 /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:20.353 DEBUG:teuthology.orchestra.run.vm00:> test -s /home/ubuntu/cephtest/cephadm && test $(stat -c%s /home/ubuntu/cephtest/cephadm) -gt 1000 && chmod +x /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:20.367 DEBUG:teuthology.orchestra.run.vm05:> test -s /home/ubuntu/cephtest/cephadm && test $(stat -c%s /home/ubuntu/cephtest/cephadm) -gt 1000 && chmod +x /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:20.381 DEBUG:teuthology.orchestra.run.vm09:> test -s /home/ubuntu/cephtest/cephadm && test $(stat -c%s /home/ubuntu/cephtest/cephadm) -gt 1000 && chmod +x /home/ubuntu/cephtest/cephadm
2026-03-31T22:54:20.398 INFO:tasks.cephadm:Pulling image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 on all hosts...
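[editorial note] The download step above pairs the fetch with a size sanity check before marking the file executable; the `-gt 1000` guard catches the case where a short error page rather than the real binary was saved. The same sequence, standalone:

    # fetch cephadm and sanity-check it before chmod +x, as the task does
    URL=https://download.ceph.com/rpm-20.2.0/el9/noarch/cephadm
    DEST=/home/ubuntu/cephtest/cephadm
    curl --silent -L "$URL" > "$DEST" && ls -l "$DEST"
    test -s "$DEST" && test "$(stat -c%s "$DEST")" -gt 1000 && chmod +x "$DEST"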
2026-03-31T22:54:20.398 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 pull
2026-03-31T22:54:20.409 DEBUG:teuthology.orchestra.run.vm05:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 pull
2026-03-31T22:54:20.423 DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 pull
2026-03-31T22:54:20.570 INFO:teuthology.orchestra.run.vm00.stderr:Pulling container image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2...
2026-03-31T22:54:20.583 INFO:teuthology.orchestra.run.vm05.stderr:Pulling container image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2...
2026-03-31T22:54:20.600 INFO:teuthology.orchestra.run.vm09.stderr:Pulling container image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2...
2026-03-31T22:54:49.823 INFO:teuthology.orchestra.run.vm00.stdout:{
2026-03-31T22:54:49.823 INFO:teuthology.orchestra.run.vm00.stdout: "ceph_version": "ceph version 20.2.0-721-g5bb32787 (5bb3278730741031382ca9c3dc9d221a942e06a2) tentacle (stable)",
2026-03-31T22:54:49.823 INFO:teuthology.orchestra.run.vm00.stdout: "image_id": "1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0",
2026-03-31T22:54:49.823 INFO:teuthology.orchestra.run.vm00.stdout: "repo_digests": [
2026-03-31T22:54:49.824 INFO:teuthology.orchestra.run.vm00.stdout: "quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072"
2026-03-31T22:54:49.824 INFO:teuthology.orchestra.run.vm00.stdout: ]
2026-03-31T22:54:49.824 INFO:teuthology.orchestra.run.vm00.stdout:}
2026-03-31T22:55:02.884 INFO:teuthology.orchestra.run.vm05.stdout:{
2026-03-31T22:55:02.885 INFO:teuthology.orchestra.run.vm05.stdout: "ceph_version": "ceph version 20.2.0-721-g5bb32787 (5bb3278730741031382ca9c3dc9d221a942e06a2) tentacle (stable)",
2026-03-31T22:55:02.885 INFO:teuthology.orchestra.run.vm05.stdout: "image_id": "1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0",
2026-03-31T22:55:02.885 INFO:teuthology.orchestra.run.vm05.stdout: "repo_digests": [
2026-03-31T22:55:02.885 INFO:teuthology.orchestra.run.vm05.stdout: "quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072"
2026-03-31T22:55:02.885 INFO:teuthology.orchestra.run.vm05.stdout: ]
2026-03-31T22:55:02.885 INFO:teuthology.orchestra.run.vm05.stdout:}
2026-03-31T22:55:06.962 INFO:teuthology.orchestra.run.vm09.stdout:{
2026-03-31T22:55:06.962 INFO:teuthology.orchestra.run.vm09.stdout: "ceph_version": "ceph version 20.2.0-721-g5bb32787 (5bb3278730741031382ca9c3dc9d221a942e06a2) tentacle (stable)",
2026-03-31T22:55:06.962 INFO:teuthology.orchestra.run.vm09.stdout: "image_id": "1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0",
2026-03-31T22:55:06.962 INFO:teuthology.orchestra.run.vm09.stdout: "repo_digests": [
2026-03-31T22:55:06.962 INFO:teuthology.orchestra.run.vm09.stdout: "quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072"
2026-03-31T22:55:06.962 INFO:teuthology.orchestra.run.vm09.stdout: ]
2026-03-31T22:55:06.962 INFO:teuthology.orchestra.run.vm09.stdout:}
2026-03-31T22:55:06.979 DEBUG:teuthology.orchestra.run.vm00:> sudo mkdir -p /etc/ceph
2026-03-31T22:55:07.004 DEBUG:teuthology.orchestra.run.vm05:> sudo mkdir -p /etc/ceph
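[editorial note] Each pull reports the resolved version and digest as JSON, which makes it straightforward to cross-check that every host ended up with the same image. A small sketch, assuming jq is available on the host:

    # confirm a host resolved the tag to the expected image (jq assumed present)
    IMAGE=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2
    OUT=$(sudo /home/ubuntu/cephtest/cephadm --image "$IMAGE" pull)
    echo "$OUT" | jq -r '.image_id'          # 1e58a3cbf9ab...
    echo "$OUT" | jq -r '.repo_digests[0]'   # quay.ceph.io/ceph-ci/ceph@sha256:02c8...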
2026-03-31T22:55:07.031 DEBUG:teuthology.orchestra.run.vm09:> sudo mkdir -p /etc/ceph
2026-03-31T22:55:07.056 DEBUG:teuthology.orchestra.run.vm00:> sudo chmod 777 /etc/ceph
2026-03-31T22:55:07.077 DEBUG:teuthology.orchestra.run.vm05:> sudo chmod 777 /etc/ceph
2026-03-31T22:55:07.101 DEBUG:teuthology.orchestra.run.vm09:> sudo chmod 777 /etc/ceph
2026-03-31T22:55:07.123 INFO:tasks.cephadm:Writing seed config...
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [global] mon election default strategy = 3
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [mgr] debug mgr = 20
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [mgr] debug ms = 1
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [mgr] mgr/cephadm/use_agent = True
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [mon] debug mon = 20
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [mon] debug ms = 1
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [mon] debug paxos = 20
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [osd] debug ms = 1
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [osd] debug osd = 20
2026-03-31T22:55:07.124 INFO:tasks.cephadm: override: [osd] osd mclock iops capacity threshold hdd = 49000
2026-03-31T22:55:07.124 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:55:07.124 DEBUG:teuthology.orchestra.run.vm00:> dd of=/home/ubuntu/cephtest/seed.ceph.conf
2026-03-31T22:55:07.138 DEBUG:tasks.cephadm:Final config:
[global]
# make logging friendly to teuthology
log_to_file = true
log_to_stderr = false
log to journald = false
mon cluster log to file = true
mon cluster log file level = debug
mon clock drift allowed = 1.000
# replicate across OSDs, not hosts
osd crush chooseleaf type = 0
#osd pool default size = 2
osd pool default erasure code profile = plugin=isa technique=reed_sol_van k=2 m=1 crush-failure-domain=osd
# enable some debugging
auth debug = true
ms die on old message = true
ms die on bug = true
debug asserts on shutdown = true
# adjust warnings
mon max pg per osd = 10000  # >= luminous
mon pg warn max object skew = 0
mon osd allow primary affinity = true
mon osd allow pg remap = true
mon warn on legacy crush tunables = false
mon warn on crush straw calc version zero = false
mon warn on no sortbitwise = false
mon warn on osd down out interval zero = false
mon warn on too few osds = false
mon_warn_on_pool_pg_num_not_power_of_two = false
# disable pg_autoscaler by default for new pools
osd_pool_default_pg_autoscale_mode = off
# tests delete pools
mon allow pool delete = true
fsid = 8bb14950-2d54-11f1-a348-07063966e06c
mon election default strategy = 3
[osd]
osd scrub load threshold = 5.0
osd scrub max interval = 600
osd mclock profile = high_recovery_ops
osd mclock skip benchmark = true
osd recover clone overlap = true
osd recovery max chunk = 1048576
osd deep scrub update digest min age = 30
osd map max advance = 10
osd memory target autotune = true
# debugging
osd debug shutdown = true
osd debug op order = true
osd debug verify stray on activate = true
osd debug pg log writeout = true
osd debug verify cached snaps = true
osd debug verify missing on start = true
osd debug misdirected ops = true
osd op queue = debug_random
osd op queue cut off = debug_random
osd shutdown pgref assert = true
bdev debug aio = true
osd sloppy crc = true
debug ms = 1
debug osd = 20
osd mclock iops capacity threshold hdd = 49000
[mgr]
mon reweight min pgs per osd = 4
mon reweight min bytes per osd = 10
mgr/telemetry/nag = false
debug mgr = 20
debug ms = 1
mgr/cephadm/use_agent = True
[mon]
mon data avail warn = 5
mon mgr mkfs grace = 240
mon reweight min pgs per osd = 4
mon osd reporter subtree level = osd
mon osd prime pg temp = true
mon reweight min bytes per osd = 10
# rotate auth tickets quickly to exercise renewal paths
auth mon ticket ttl = 660  # 11m
auth service ticket ttl = 240  # 4m
# don't complain about global id reclaim
mon_warn_on_insecure_global_id_reclaim = false
mon_warn_on_insecure_global_id_reclaim_allowed = false
debug mon = 20
debug ms = 1
debug paxos = 20
[client.rgw]
rgw cache enabled = true
rgw enable ops log = true
rgw enable usage log = true
2026-03-31T22:55:07.138 DEBUG:teuthology.orchestra.run.vm00:mon.a> sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a.service
2026-03-31T22:55:07.180 DEBUG:teuthology.orchestra.run.vm00:mgr.a> sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.a.service
2026-03-31T22:55:07.222 INFO:tasks.cephadm:Bootstrapping...
2026-03-31T22:55:07.222 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 -v bootstrap --fsid 8bb14950-2d54-11f1-a348-07063966e06c --config /home/ubuntu/cephtest/seed.ceph.conf --output-config /etc/ceph/ceph.conf --output-keyring /etc/ceph/ceph.client.admin.keyring --output-pub-ssh-key /home/ubuntu/cephtest/ceph.pub --mon-id a --mgr-id a --orphan-initial-daemons --skip-monitoring-stack --mon-ip 192.168.123.100 --skip-admin-label && sudo chmod +r /etc/ceph/ceph.client.admin.keyring
2026-03-31T22:55:07.361 INFO:teuthology.orchestra.run.vm00.stdout:--------------------------------------------------------------------------------
2026-03-31T22:55:07.361 INFO:teuthology.orchestra.run.vm00.stdout:cephadm ['--image', 'quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2', '-v', 'bootstrap', '--fsid', '8bb14950-2d54-11f1-a348-07063966e06c', '--config', '/home/ubuntu/cephtest/seed.ceph.conf', '--output-config', '/etc/ceph/ceph.conf', '--output-keyring', '/etc/ceph/ceph.client.admin.keyring', '--output-pub-ssh-key', '/home/ubuntu/cephtest/ceph.pub', '--mon-id', 'a', '--mgr-id', 'a', '--orphan-initial-daemons', '--skip-monitoring-stack', '--mon-ip', '192.168.123.100', '--skip-admin-label']
2026-03-31T22:55:07.361 INFO:teuthology.orchestra.run.vm00.stderr:Specifying an fsid for your cluster offers no advantages and may increase the likelihood of fsid conflicts.
2026-03-31T22:55:07.361 INFO:teuthology.orchestra.run.vm00.stdout:Verifying podman|docker is present...
2026-03-31T22:55:07.380 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stdout 5.8.0
2026-03-31T22:55:07.380 INFO:teuthology.orchestra.run.vm00.stdout:Verifying lvm2 is present...
2026-03-31T22:55:07.380 INFO:teuthology.orchestra.run.vm00.stdout:Verifying time synchronization is in place...
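[editorial note] The bootstrap command above is easier to read wrapped; all flags are taken verbatim from the log. The seed config carries the section overrides written just before, --orphan-initial-daemons and --skip-monitoring-stack skip deploying extra daemons at bootstrap time, and the keyring is made world-readable so the unprivileged test user can run ceph commands afterwards:

    sudo /home/ubuntu/cephtest/cephadm \
        --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 \
        -v bootstrap \
        --fsid 8bb14950-2d54-11f1-a348-07063966e06c \
        --config /home/ubuntu/cephtest/seed.ceph.conf \
        --output-config /etc/ceph/ceph.conf \
        --output-keyring /etc/ceph/ceph.client.admin.keyring \
        --output-pub-ssh-key /home/ubuntu/cephtest/ceph.pub \
        --mon-id a --mgr-id a \
        --orphan-initial-daemons --skip-monitoring-stack \
        --mon-ip 192.168.123.100 --skip-admin-label \
      && sudo chmod +r /etc/ceph/ceph.client.admin.keyring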
2026-03-31T22:55:07.386 INFO:teuthology.orchestra.run.vm00.stdout:Non-zero exit code 1 from systemctl is-enabled chrony.service
2026-03-31T22:55:07.386 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Failed to get unit file state for chrony.service: No such file or directory
2026-03-31T22:55:07.390 INFO:teuthology.orchestra.run.vm00.stdout:Non-zero exit code 3 from systemctl is-active chrony.service
2026-03-31T22:55:07.390 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stdout inactive
2026-03-31T22:55:07.395 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stdout enabled
2026-03-31T22:55:07.399 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stdout active
2026-03-31T22:55:07.399 INFO:teuthology.orchestra.run.vm00.stdout:Unit chronyd.service is enabled and running
2026-03-31T22:55:07.399 INFO:teuthology.orchestra.run.vm00.stdout:Repeating the final host check...
2026-03-31T22:55:07.415 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stdout 5.8.0
2026-03-31T22:55:07.415 INFO:teuthology.orchestra.run.vm00.stdout:podman (/bin/podman) version 5.8.0 is present
2026-03-31T22:55:07.415 INFO:teuthology.orchestra.run.vm00.stdout:systemctl is present
2026-03-31T22:55:07.415 INFO:teuthology.orchestra.run.vm00.stdout:lvcreate is present
2026-03-31T22:55:07.420 INFO:teuthology.orchestra.run.vm00.stdout:Non-zero exit code 1 from systemctl is-enabled chrony.service
2026-03-31T22:55:07.420 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Failed to get unit file state for chrony.service: No such file or directory
2026-03-31T22:55:07.425 INFO:teuthology.orchestra.run.vm00.stdout:Non-zero exit code 3 from systemctl is-active chrony.service
2026-03-31T22:55:07.425 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stdout inactive
2026-03-31T22:55:07.430 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stdout enabled
2026-03-31T22:55:07.434 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stdout active
2026-03-31T22:55:07.434 INFO:teuthology.orchestra.run.vm00.stdout:Unit chronyd.service is enabled and running
2026-03-31T22:55:07.434 INFO:teuthology.orchestra.run.vm00.stdout:Host looks OK
2026-03-31T22:55:07.434 INFO:teuthology.orchestra.run.vm00.stdout:Cluster fsid: 8bb14950-2d54-11f1-a348-07063966e06c
2026-03-31T22:55:07.434 INFO:teuthology.orchestra.run.vm00.stdout:Acquiring lock 139825100587792 on /run/cephadm/8bb14950-2d54-11f1-a348-07063966e06c.lock
2026-03-31T22:55:07.434 INFO:teuthology.orchestra.run.vm00.stdout:Lock 139825100587792 acquired on /run/cephadm/8bb14950-2d54-11f1-a348-07063966e06c.lock
2026-03-31T22:55:07.434 INFO:teuthology.orchestra.run.vm00.stdout:Verifying IP 192.168.123.100 port 3300 ...
2026-03-31T22:55:07.435 INFO:teuthology.orchestra.run.vm00.stdout:Verifying IP 192.168.123.100 port 6789 ...
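[editorial note] The non-zero exit codes above are expected: the host check probes candidate time-sync units in turn, and on this host chrony.service does not exist while chronyd.service is enabled and active. A sketch of the same probe (the unit list is an assumption; cephadm considers more candidates than these two):

    # accept the host once some time-sync unit is both enabled and active
    for unit in chrony.service chronyd.service; do
        if systemctl is-enabled "$unit" >/dev/null 2>&1 \
           && [ "$(systemctl is-active "$unit")" = active ]; then
            echo "Unit $unit is enabled and running"
            break
        fi
    done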
2026-03-31T22:55:07.435 INFO:teuthology.orchestra.run.vm00.stdout:Base mon IP(s) is [192.168.123.100:3300, 192.168.123.100:6789], mon addrv is [v2:192.168.123.100:3300,v1:192.168.123.100:6789] 2026-03-31T22:55:07.437 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout default via 192.168.123.1 dev eth0 proto dhcp src 192.168.123.100 metric 100 2026-03-31T22:55:07.437 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout 192.168.123.0/24 dev eth0 proto kernel scope link src 192.168.123.100 metric 100 2026-03-31T22:55:07.439 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout ::1 dev lo proto kernel metric 256 pref medium 2026-03-31T22:55:07.440 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout fe80::/64 dev eth0 proto kernel metric 1024 pref medium 2026-03-31T22:55:07.441 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout 1: lo: mtu 65536 state UNKNOWN qlen 1000 2026-03-31T22:55:07.441 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout inet6 ::1/128 scope host 2026-03-31T22:55:07.441 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout valid_lft forever preferred_lft forever 2026-03-31T22:55:07.441 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout 2: eth0: mtu 1500 state UP qlen 1000 2026-03-31T22:55:07.442 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout inet6 fe80::5055:ff:fe00:0/64 scope link noprefixroute 2026-03-31T22:55:07.442 INFO:teuthology.orchestra.run.vm00.stdout:/sbin/ip: stdout valid_lft forever preferred_lft forever 2026-03-31T22:55:07.442 INFO:teuthology.orchestra.run.vm00.stdout:Mon IP `192.168.123.100` is in CIDR network `192.168.123.0/24` 2026-03-31T22:55:07.442 INFO:teuthology.orchestra.run.vm00.stdout:Mon IP `192.168.123.100` is in CIDR network `192.168.123.0/24` 2026-03-31T22:55:07.442 INFO:teuthology.orchestra.run.vm00.stdout:Inferred mon public CIDR from local network configuration ['192.168.123.0/24', '192.168.123.0/24'] 2026-03-31T22:55:07.442 INFO:teuthology.orchestra.run.vm00.stdout:Internal network (--cluster-network) has not been provided, OSD replication will default to the public_network 2026-03-31T22:55:07.443 INFO:teuthology.orchestra.run.vm00.stdout:Pulling container image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2... 2026-03-31T22:55:08.675 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stdout 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 2026-03-31T22:55:08.675 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stderr Trying to pull quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2... 
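[editor's note] The /sbin/ip output above is what drives the "Inferred mon public CIDR" line: cephadm scans the routing table for a network containing the --mon-ip, apparently once per mon address (v2:3300 and v1:6789), hence the duplicated 192.168.123.0/24 match. The same lookup by hand:

    # the kernel route covering the mon IP becomes public_network
    ip route list | grep 192.168.123.
    # -> 192.168.123.0/24 dev eth0 proto kernel scope link src 192.168.123.100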
2026-03-31T22:55:08.675 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stderr Getting image source signatures 2026-03-31T22:55:08.675 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stderr Copying blob sha256:c5e2fec2b14297976ed6be660ee65b05385cd8280d29bf3bfae1c02e4f087d96 2026-03-31T22:55:08.675 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stderr Copying blob sha256:25df8c9a4e544adeec432a985d98aea47c269790b5bbd65f1ad4abb5621c4c30 2026-03-31T22:55:08.675 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stderr Copying config sha256:1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 2026-03-31T22:55:08.675 INFO:teuthology.orchestra.run.vm00.stdout:/bin/podman: stderr Writing manifest to image destination 2026-03-31T22:55:08.817 INFO:teuthology.orchestra.run.vm00.stdout:ceph: stdout ceph version 20.2.0-721-g5bb32787 (5bb3278730741031382ca9c3dc9d221a942e06a2) tentacle (stable) 2026-03-31T22:55:08.817 INFO:teuthology.orchestra.run.vm00.stdout:Ceph version: ceph version 20.2.0-721-g5bb32787 (5bb3278730741031382ca9c3dc9d221a942e06a2) tentacle (stable) 2026-03-31T22:55:08.817 INFO:teuthology.orchestra.run.vm00.stdout:Extracting ceph user uid/gid from container image... 2026-03-31T22:55:08.958 INFO:teuthology.orchestra.run.vm00.stdout:stat: stdout 167 167 2026-03-31T22:55:08.958 INFO:teuthology.orchestra.run.vm00.stdout:Creating initial keys... 2026-03-31T22:55:09.063 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph-authtool: stdout AQDNUMxpamliAhAA+CuDmRH0Iel1ORGnnxon5Q== 2026-03-31T22:55:09.177 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph-authtool: stdout AQDNUMxpbg6XCRAA6p2p0ZB/c/D/mxGB0+RlQA== 2026-03-31T22:55:09.287 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph-authtool: stdout AQDNUMxpyFkxEBAAiocePyoSwKXku9ZkUyLiVQ== 2026-03-31T22:55:09.288 INFO:teuthology.orchestra.run.vm00.stdout:Creating initial monmap... 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/monmaptool: stdout /usr/bin/monmaptool: monmap file /tmp/monmap 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/monmaptool: stdout setting min_mon_release = tentacle 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/monmaptool: stdout /usr/bin/monmaptool: set fsid to 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/monmaptool: stdout /usr/bin/monmaptool: writing epoch 0 to /tmp/monmap (1 monitors) 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:monmaptool for a [v2:192.168.123.100:3300,v1:192.168.123.100:6789] on /usr/bin/monmaptool: monmap file /tmp/monmap 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:setting min_mon_release = tentacle 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/monmaptool: set fsid to 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/monmaptool: writing epoch 0 to /tmp/monmap (1 monitors) 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:09.409 INFO:teuthology.orchestra.run.vm00.stdout:Creating mon... 2026-03-31T22:55:09.548 INFO:teuthology.orchestra.run.vm00.stdout:create mon.a on 2026-03-31T22:55:09.712 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Removed "/etc/systemd/system/multi-user.target.wants/ceph.target". 
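[editor's note] The monmaptool run above seeds a single-monitor, epoch-0 monmap carrying the cluster fsid with min_mon_release pinned to tentacle. To double-check what was written, monmaptool can print a monmap back (a sketch; run wherever the ceph tools from this image are available):

    # dump the freshly created monmap: fsid, epoch 0, one monitor at v2/v1 addrs
    monmaptool --print /tmp/monmap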
2026-03-31T22:55:09.831 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Created symlink /etc/systemd/system/multi-user.target.wants/ceph.target → /etc/systemd/system/ceph.target. 2026-03-31T22:55:09.953 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Created symlink /etc/systemd/system/multi-user.target.wants/ceph-8bb14950-2d54-11f1-a348-07063966e06c.target → /etc/systemd/system/ceph-8bb14950-2d54-11f1-a348-07063966e06c.target. 2026-03-31T22:55:09.953 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Created symlink /etc/systemd/system/ceph.target.wants/ceph-8bb14950-2d54-11f1-a348-07063966e06c.target → /etc/systemd/system/ceph-8bb14950-2d54-11f1-a348-07063966e06c.target. 2026-03-31T22:55:10.091 INFO:teuthology.orchestra.run.vm00.stdout:Non-zero exit code 1 from systemctl reset-failed ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a 2026-03-31T22:55:10.091 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Failed to reset failed state of unit ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a.service: Unit ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a.service not loaded. 2026-03-31T22:55:10.224 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Created symlink /etc/systemd/system/ceph-8bb14950-2d54-11f1-a348-07063966e06c.target.wants/ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a.service → /etc/systemd/system/ceph-8bb14950-2d54-11f1-a348-07063966e06c@.service. 2026-03-31T22:55:10.380 INFO:teuthology.orchestra.run.vm00.stdout:firewalld does not appear to be present 2026-03-31T22:55:10.380 INFO:teuthology.orchestra.run.vm00.stdout:Not possible to enable service . firewalld.service is not available 2026-03-31T22:55:10.380 INFO:teuthology.orchestra.run.vm00.stdout:Waiting for mon to start... 2026-03-31T22:55:10.380 INFO:teuthology.orchestra.run.vm00.stdout:Waiting for mon... 
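[editor's note] The symlink messages above show the unit chain cephadm sets up: multi-user.target wants ceph.target, which wants the per-cluster ceph-<fsid>.target, which in turn wants the per-daemon ceph-<fsid>@mon.a.service; the "reset-failed" error is harmless since that unit had never been loaded. Useful by-hand checks against the same units:

    systemctl status ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a.service
    systemctl list-dependencies ceph-8bb14950-2d54-11f1-a348-07063966e06c.target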
2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout cluster: 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout id: 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout health: HEALTH_OK 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout services: 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon: 1 daemons, quorum a (age 0.123904s) [leader: a] 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mgr: no daemons active 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout osd: 0 osds: 0 up, 0 in 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout data: 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout pools: 0 pools, 0 pgs 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout objects: 0 objects, 0 B 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout usage: 0 B used, 0 B / 0 B avail 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout pgs: 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:mon is available 2026-03-31T22:55:10.554 INFO:teuthology.orchestra.run.vm00.stdout:Assimilating anything we can from ceph.conf... 
2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout [global] 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout fsid = 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_cluster_log_file_level = debug 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_host = [v2:192.168.123.100:3300,v1:192.168.123.100:6789] 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_osd_allow_pg_remap = true 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_osd_allow_primary_affinity = true 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_warn_on_no_sortbitwise = false 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout osd_crush_chooseleaf_type = 0 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout [mgr] 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mgr/cephadm/use_agent = True 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mgr/telemetry/nag = false 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout [osd] 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout osd_map_max_advance = 10 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout osd_sloppy_crc = true 2026-03-31T22:55:10.750 INFO:teuthology.orchestra.run.vm00.stdout:Generating new minimal ceph.conf... 2026-03-31T22:55:10.921 INFO:teuthology.orchestra.run.vm00.stdout:Restarting the monitor... 
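[editor's note] "Assimilating" and "Generating new minimal ceph.conf" above correspond to two mon config commands: options from the seed conf are moved into the monitor's central config database (the dump printed above is the leftover that could not be assimilated), and the on-disk ceph.conf is rewritten to the minimal form, essentially just fsid and mon_host. The same two steps by hand:

    # move conf-file options into the mon config database, printing leftovers
    sudo ceph config assimilate-conf -i /etc/ceph/ceph.conf
    # emit the minimal conf a client needs to find the cluster
    sudo ceph config generate-minimal-conf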
2026-03-31T22:55:11.119 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 podman[61870]: 2026-03-31 22:55:11.111545155 +0000 UTC m=+0.124357413 container died 237f79a04017551e48c3bab6a0f5a7b0a23ae8eaa6879820075061ec03704838 (image=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.name=CentOS Stream 9 Base Image, org.opencontainers.image.documentation=https://docs.ceph.com/, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, OSD_FLAVOR=default, ceph=True, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.license=GPLv2, io.buildah.version=1.43.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release, FROM_IMAGE=quay.io/centos/centos:stream9) 2026-03-31T22:55:11.333 INFO:teuthology.orchestra.run.vm00.stdout:Setting public_network to 192.168.123.0/24 in global config section 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 podman[61870]: 2026-03-31 22:55:11.123910084 +0000 UTC m=+0.136722342 container remove 237f79a04017551e48c3bab6a0f5a7b0a23ae8eaa6879820075061ec03704838 (image=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a, org.label-schema.license=GPLv2, OSD_FLAVOR=default, org.label-schema.schema-version=1.0, org.label-schema.build-date=20260316, org.label-schema.vendor=CentOS, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release, org.opencontainers.image.documentation=https://docs.ceph.com/, ceph=True, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.name=CentOS Stream 9 Base Image, org.opencontainers.image.authors=Ceph Release Team , GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, FROM_IMAGE=quay.io/centos/centos:stream9, io.buildah.version=1.43.0) 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 bash[61870]: ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 systemd[1]: ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a.service: Deactivated successfully. 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 systemd[1]: Stopped Ceph mon.a for 8bb14950-2d54-11f1-a348-07063966e06c. 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 systemd[1]: Starting Ceph mon.a for 8bb14950-2d54-11f1-a348-07063966e06c... 
2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 podman[61951]: 2026-03-31 22:55:11.276722025 +0000 UTC m=+0.016558962 container create ccaba7e50d34f86eea5c2ddad21e7aa26b517bc56df8aba713a3ca611adaa61e (image=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, org.opencontainers.image.documentation=https://docs.ceph.com/, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.build-date=20260316, OSD_FLAVOR=default, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, ceph=True, io.buildah.version=1.43.0, org.label-schema.name=CentOS Stream 9 Base Image, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2) 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 podman[61951]: 2026-03-31 22:55:11.310632983 +0000 UTC m=+0.050469930 container init ccaba7e50d34f86eea5c2ddad21e7aa26b517bc56df8aba713a3ca611adaa61e (image=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.opencontainers.image.documentation=https://docs.ceph.com/, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.vendor=CentOS, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, OSD_FLAVOR=default, ceph=True, org.label-schema.name=CentOS Stream 9 Base Image, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.build-date=20260316, io.buildah.version=1.43.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release) 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 podman[61951]: 2026-03-31 22:55:11.31353737 +0000 UTC m=+0.053374307 container start ccaba7e50d34f86eea5c2ddad21e7aa26b517bc56df8aba713a3ca611adaa61e (image=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a, org.label-schema.name=CentOS Stream 9 Base Image, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.opencontainers.image.documentation=https://docs.ceph.com/, org.label-schema.build-date=20260316, io.buildah.version=1.43.0, org.label-schema.vendor=CentOS, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.schema-version=1.0, OSD_FLAVOR=default, ceph=True, FROM_IMAGE=quay.io/centos/centos:stream9, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.license=GPLv2) 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 bash[61951]: ccaba7e50d34f86eea5c2ddad21e7aa26b517bc56df8aba713a3ca611adaa61e 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 podman[61951]: 2026-03-31 22:55:11.27032548 +0000 UTC m=+0.010162427 image pull 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 
22:55:11 vm00 systemd[1]: Started Ceph mon.a for 8bb14950-2d54-11f1-a348-07063966e06c. 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: set uid:gid to 167:167 (ceph:ceph) 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: ceph version 20.2.0-721-g5bb32787 (5bb3278730741031382ca9c3dc9d221a942e06a2) tentacle (stable - RelWithDebInfo), process ceph-mon, pid 2 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: pidfile_write: ignore empty --pid-file 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: load: jerasure load: lrc load: isa 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: RocksDB version: 7.9.2 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Git sha 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Compile date 2026-03-19 20:43:26 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: DB SUMMARY 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: DB Session ID: IPNC9Z78OY9BPZBKA44S 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: CURRENT file: CURRENT 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: IDENTITY file: IDENTITY 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: MANIFEST file: MANIFEST-000010 size: 179 Bytes 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: SST files in /var/lib/ceph/mon/ceph-a/store.db dir, Total Num: 1, files: 000008.sst 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Write Ahead Log file in /var/lib/ceph/mon/ceph-a/store.db: 000009.log size: 77055 ; 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.error_if_exists: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.create_if_missing: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.paranoid_checks: 1 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.flush_verify_memtable_count: 1 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.track_and_verify_wals_in_manifest: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.verify_sst_unique_id_in_manifest: 1 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.env: 0x555d479c54c0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.fs: PosixFileSystem 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.info_log: 0x555d48cf8000 2026-03-31T22:55:11.384 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_file_opening_threads: 16 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.statistics: (nil) 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.use_fsync: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_log_file_size: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_manifest_file_size: 1073741824 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.log_file_time_to_roll: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.keep_log_file_num: 1000 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.recycle_log_file_num: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.allow_fallocate: 1 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.allow_mmap_reads: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.allow_mmap_writes: 0 2026-03-31T22:55:11.384 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.use_direct_reads: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.use_direct_io_for_flush_and_compaction: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.create_missing_column_families: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.db_log_dir: 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.wal_dir: 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.table_cache_numshardbits: 6 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.WAL_ttl_seconds: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.WAL_size_limit_MB: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_write_batch_group_size_bytes: 1048576 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.manifest_preallocation_size: 4194304 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.is_fd_close_on_exec: 1 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.advise_random_on_open: 1 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.db_write_buffer_size: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.write_buffer_manager: 0x555d48cec140 
2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.access_hint_on_compaction_start: 1 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.random_access_max_buffer_size: 1048576 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.use_adaptive_mutex: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.rate_limiter: (nil) 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.sst_file_manager.rate_bytes_per_sec: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.wal_recovery_mode: 2 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.enable_thread_tracking: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.enable_pipelined_write: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.unordered_write: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.allow_concurrent_memtable_write: 1 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.enable_write_thread_adaptive_yield: 1 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.write_thread_max_yield_usec: 100 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.write_thread_slow_yield_usec: 3 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.row_cache: None 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.wal_filter: None 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.avoid_flush_during_recovery: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.allow_ingest_behind: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.two_write_queues: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.manual_wal_flush: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.wal_compression: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.atomic_flush: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.avoid_unnecessary_blocking_io: 0 2026-03-31T22:55:11.385 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.persist_stats_to_disk: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.write_dbid_to_manifest: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 
ceph-mon[61968]: rocksdb: Options.log_readahead_size: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.file_checksum_gen_factory: Unknown 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.best_efforts_recovery: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bgerror_resume_count: 2147483647 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bgerror_resume_retry_interval: 1000000 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.allow_data_in_errors: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.db_host_id: __hostname__ 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.enforce_single_del_contracts: true 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_background_jobs: 2 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_background_compactions: -1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_subcompactions: 1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.avoid_flush_during_shutdown: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.writable_file_max_buffer_size: 1048576 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.delayed_write_rate : 16777216 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_total_wal_size: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.delete_obsolete_files_period_micros: 21600000000 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.stats_dump_period_sec: 600 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.stats_persist_period_sec: 600 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.stats_history_buffer_size: 1048576 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_open_files: -1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bytes_per_sync: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.wal_bytes_per_sync: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.strict_bytes_per_sync: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_readahead_size: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: 
Options.max_background_flushes: -1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Compression algorithms supported: 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: kZSTD supported: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: kXpressCompression supported: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: kBZip2Compression supported: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: kZSTDNotFinalCompression supported: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: kLZ4Compression supported: 1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: kZlibCompression supported: 1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: kLZ4HCCompression supported: 1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: kSnappyCompression supported: 1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Fast CRC32 supported: Supported on x86 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: DMutex implementation: pthread_mutex_t 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/version_set.cc:5527] Recovering from manifest file: /var/lib/ceph/mon/ceph-a/store.db/MANIFEST-000010 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/column_family.cc:630] --------------- Options for column family [default]: 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.comparator: leveldb.BytewiseComparator 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.merge_operator: 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_filter: None 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_filter_factory: None 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.sst_partitioner_factory: None 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.memtable_factory: SkipListFactory 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.table_factory: BlockBasedTable 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: table_factory options: flush_block_policy_factory: FlushBlockBySizePolicyFactory (0x555d48ce8c00) 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: cache_index_and_filter_blocks: 1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: cache_index_and_filter_blocks_with_high_priority: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: 
pin_l0_filter_and_index_blocks_in_cache: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: pin_top_level_index_and_filter: 1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: index_type: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: data_block_index_type: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: index_shortening: 1 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: data_block_hash_table_util_ratio: 0.750000 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: checksum: 4 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: no_block_cache: 0 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: block_cache: 0x555d48d0bb90 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: block_cache_name: BinnedLRUCache 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: block_cache_options: 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: capacity : 536870912 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: num_shard_bits : 4 2026-03-31T22:55:11.386 INFO:journalctl@ceph.mon.a.vm00.stdout: strict_capacity_limit : 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: high_pri_pool_ratio: 0.000 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: block_cache_compressed: (nil) 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: persistent_cache: (nil) 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: block_size: 4096 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: block_size_deviation: 10 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: block_restart_interval: 16 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: index_block_restart_interval: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: metadata_block_size: 4096 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: partition_filters: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: use_delta_encoding: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: filter_policy: bloomfilter 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: whole_key_filtering: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: verify_compression: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: read_amp_bytes_per_bit: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: format_version: 5 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: enable_index_compression: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: block_align: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: max_auto_readahead_size: 262144 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: prepopulate_block_cache: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: initial_auto_readahead_size: 8192 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout: num_file_reads_for_auto_readahead: 2 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.write_buffer_size: 33554432 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_write_buffer_number: 2 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression: NoCompression 2026-03-31T22:55:11.387 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression: Disabled 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.prefix_extractor: nullptr 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.memtable_insert_with_hint_prefix_extractor: nullptr 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.num_levels: 7 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.min_write_buffer_number_to_merge: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_write_buffer_number_to_maintain: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_write_buffer_size_to_maintain: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.window_bits: -14 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.level: 32767 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.strategy: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.max_dict_bytes: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.zstd_max_train_bytes: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.parallel_threads: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.enabled: false 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.max_dict_buffer_bytes: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bottommost_compression_opts.use_zstd_dict_trainer: true 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression_opts.window_bits: -14 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression_opts.level: 32767 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression_opts.strategy: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression_opts.max_dict_bytes: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression_opts.zstd_max_train_bytes: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression_opts.use_zstd_dict_trainer: true 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: 
rocksdb: Options.compression_opts.parallel_threads: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression_opts.enabled: false 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compression_opts.max_dict_buffer_bytes: 0 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.level0_file_num_compaction_trigger: 4 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.level0_slowdown_writes_trigger: 20 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.level0_stop_writes_trigger: 36 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.target_file_size_base: 67108864 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.target_file_size_multiplier: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_base: 268435456 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.level_compaction_dynamic_level_bytes: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_multiplier: 10.000000 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_multiplier_addtl[0]: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_multiplier_addtl[1]: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_multiplier_addtl[2]: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_multiplier_addtl[3]: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_multiplier_addtl[4]: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_multiplier_addtl[5]: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_bytes_for_level_multiplier_addtl[6]: 1 2026-03-31T22:55:11.387 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_sequential_skip_in_iterations: 8 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_compaction_bytes: 1677721600 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.ignore_max_compaction_bytes_for_input: true 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.arena_block_size: 1048576 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.soft_pending_compaction_bytes_limit: 68719476736 2026-03-31T22:55:11.388 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.hard_pending_compaction_bytes_limit: 274877906944 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.disable_auto_compactions: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_style: kCompactionStyleLevel 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_pri: kMinOverlappingRatio 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_options_universal.size_ratio: 1 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_options_universal.min_merge_width: 2 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_options_universal.max_merge_width: 4294967295 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_options_universal.max_size_amplification_percent: 200 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_options_universal.compression_size_percent: -1 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_options_universal.stop_style: kCompactionStopStyleTotalSize 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_options_fifo.max_table_files_size: 1073741824 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.compaction_options_fifo.allow_compaction: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.table_properties_collectors: CompactOnDeletionCollector (Sliding window size = 32768 Deletion trigger = 16384 Deletion ratio = 0); 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.inplace_update_support: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.inplace_update_num_locks: 10000 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.memtable_prefix_bloom_size_ratio: 0.000000 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.memtable_whole_key_filtering: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.memtable_huge_page_size: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.bloom_locality: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.max_successive_merges: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.optimize_filters_for_hits: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.paranoid_file_checks: 
0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.force_consistency_checks: 1 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.report_bg_io_stats: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.ttl: 2592000 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.periodic_compaction_seconds: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.preclude_last_level_data_seconds: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.preserve_internal_time_seconds: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.enable_blob_files: false 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.min_blob_size: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.blob_file_size: 268435456 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.blob_compression_type: NoCompression 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.enable_blob_garbage_collection: false 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.blob_garbage_collection_age_cutoff: 0.250000 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.blob_garbage_collection_force_threshold: 1.000000 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.blob_compaction_readahead_size: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.blob_file_starting_level: 0 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: Options.experimental_mempurge_threshold: 0.000000 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/version_set.cc:5566] Recovered from manifest file:/var/lib/ceph/mon/ceph-a/store.db/MANIFEST-000010 succeeded,manifest_file_number is 10, next_file_number is 12, last_sequence is 5, log_number is 5,prev_log_number is 0,max_column_family is 0,min_log_number_to_keep is 5 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/version_set.cc:5581] Column family [default] (ID 0), log number is 5 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/db_impl/db_impl_open.cc:539] DB ID: 7a1a3244-ba6c-4841-944e-bd19a79994ec 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: EVENT_LOG_v1 {"time_micros": 1774997711339197, "job": 1, "event": "recovery_started", "wal_files": [9]} 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/db_impl/db_impl_open.cc:1043] Recovering log #9 mode 2 
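[editor's note] The long Options dump above is rocksdb echoing its effective settings as the mon store opens; they derive from the mon's mon_rocksdb_options setting plus rocksdb defaults (note write_buffer_size = 33554432 matching that option's default), and the recovery lines show the store replaying WAL 000009.log over a single SST file. A sketch for reading the option string back from a running cluster, assuming the default admin environment:

    sudo ceph config get mon mon_rocksdb_options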
2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: EVENT_LOG_v1 {"time_micros": 1774997711343098, "cf_name": "default", "job": 1, "event": "table_file_creation", "file_number": 13, "file_size": 74067, "file_checksum": "", "file_checksum_func_name": "Unknown", "smallest_seqno": 8, "largest_seqno": 231, "table_properties": {"data_size": 72346, "index_size": 174, "index_partitions": 0, "top_level_index_size": 0, "index_key_is_user_key": 1, "index_value_is_delta_encoded": 1, "filter_size": 517, "raw_key_size": 9938, "raw_average_key_size": 49, "raw_value_size": 66691, "raw_average_value_size": 330, "num_data_blocks": 8, "num_entries": 202, "num_filter_entries": 202, "num_deletions": 3, "num_merge_operands": 0, "num_range_deletions": 0, "format_version": 0, "fixed_key_len": 0, "filter_policy": "bloomfilter", "column_family_name": "default", "column_family_id": 0, "comparator": "leveldb.BytewiseComparator", "merge_operator": "", "prefix_extractor_name": "nullptr", "property_collectors": "[CompactOnDeletionCollector]", "compression": "NoCompression", "compression_options": "window_bits=-14; level=32767; strategy=0; max_dict_bytes=0; zstd_max_train_bytes=0; enabled=0; max_dict_buffer_bytes=0; use_zstd_dict_trainer=1; ", "creation_time": 1774997711, "oldest_key_time": 0, "file_creation_time": 0, "slow_compression_estimated_data_size": 0, "fast_compression_estimated_data_size": 0, "db_id": "7a1a3244-ba6c-4841-944e-bd19a79994ec", "db_session_id": "IPNC9Z78OY9BPZBKA44S", "orig_file_number": 13, "seqno_to_time_mapping": "N/A"}} 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: EVENT_LOG_v1 {"time_micros": 1774997711343189, "job": 1, "event": "recovery_finished"} 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/version_set.cc:5047] Creating manifest 15 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [file/delete_scheduler.cc:74] Deleted file /var/lib/ceph/mon/ceph-a/store.db/000009.log immediately, rate_bytes_per_sec 0, total_trash_size 0 max_trash_db_ratio 0.250000 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/db_impl/db_impl_open.cc:1987] SstFileManager instance 0x555d48d0ce00 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: DB pointer 0x555d48e58000 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/db_impl/db_impl.cc:1109] ------- DUMPING STATS ------- 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: rocksdb: [db/db_impl/db_impl.cc:1111] 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout: ** DB Stats ** 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout: Uptime(secs): 0.0 total, 0.0 interval 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout: Cumulative writes: 0 writes, 0 keys, 0 commit groups, 0.0 writes per commit group, ingest: 0.00 GB, 0.00 MB/s 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout: Cumulative WAL: 0 writes, 0 syncs, 0.00 writes per sync, written: 0.00 GB, 0.00 MB/s 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout: Cumulative stall: 00:00:0.000 H:M:S, 0.0 percent 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout: Interval 
writes: 0 writes, 0 keys, 0 commit groups, 0.0 writes per commit group, ingest: 0.00 MB, 0.00 MB/s 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout: Interval WAL: 0 writes, 0 syncs, 0.00 writes per sync, written: 0.00 GB, 0.00 MB/s 2026-03-31T22:55:11.388 INFO:journalctl@ceph.mon.a.vm00.stdout: Interval stall: 00:00:0.000 H:M:S, 0.0 percent 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: ** Compaction Stats [default] ** 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Level Files Size Score Read(GB) Rn(GB) Rnp1(GB) Write(GB) Wnew(GB) Moved(GB) W-Amp Rd(MB/s) Wr(MB/s) Comp(sec) CompMergeCPU(sec) Comp(cnt) Avg(sec) KeyIn KeyDrop Rblob(GB) Wblob(GB) 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: L0 2/0 74.19 KB 0.5 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 18.9 0.00 0.00 1 0.004 0 0 0.0 0.0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Sum 2/0 74.19 KB 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 18.9 0.00 0.00 1 0.004 0 0 0.0 0.0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Int 0/0 0.00 KB 0.0 0.0 0.0 0.0 0.0 0.0 0.0 1.0 0.0 18.9 0.00 0.00 1 0.004 0 0 0.0 0.0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: ** Compaction Stats [default] ** 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Priority Files Size Score Read(GB) Rn(GB) Rnp1(GB) Write(GB) Wnew(GB) Moved(GB) W-Amp Rd(MB/s) Wr(MB/s) Comp(sec) CompMergeCPU(sec) Comp(cnt) Avg(sec) KeyIn KeyDrop Rblob(GB) Wblob(GB) 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: User 0/0 0.00 KB 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 18.9 0.00 0.00 1 0.004 0 0 0.0 0.0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Blob file count: 0, total size: 0.0 GB, garbage size: 0.0 GB, space amp: 0.0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Uptime(secs): 0.0 total, 0.0 interval 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Flush(GB): cumulative 0.000, interval 0.000 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: AddFile(GB): cumulative 0.000, interval 0.000 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: AddFile(Total Files): cumulative 0, interval 0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: AddFile(L0 Files): cumulative 0, interval 0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: AddFile(Keys): cumulative 0, interval 0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Cumulative compaction: 0.00 GB write, 9.58 MB/s write, 0.00 GB read, 0.00 MB/s read, 0.0 seconds 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Interval compaction: 0.00 GB write, 9.58 MB/s write, 0.00 GB read, 0.00 MB/s read, 0.0 seconds 2026-03-31T22:55:11.389 
INFO:journalctl@ceph.mon.a.vm00.stdout: Stalls(count): 0 level0_slowdown, 0 level0_slowdown_with_compaction, 0 level0_numfiles, 0 level0_numfiles_with_compaction, 0 stop for pending_compaction_bytes, 0 slowdown for pending_compaction_bytes, 0 memtable_compaction, 0 memtable_slowdown, interval 0 total count 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Block cache BinnedLRUCache@0x555d48d0bb90#2 capacity: 512.00 MB usage: 1.06 KB table_size: 0 occupancy: 18446744073709551615 collections: 1 last_copies: 0 last_secs: 9e-06 secs_since: 0 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: Block cache entry stats(count,size,portion): FilterBlock(2,0.70 KB,0.00013411%) IndexBlock(2,0.36 KB,6.85453e-05%) Misc(1,0.00 KB,0%) 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: ** File Read Latency Histogram By Level [default] ** 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: starting mon.a rank 0 at public addrs [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] at bind addrs [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon_data /var/lib/ceph/mon/ceph-a fsid 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a@-1(???) e1 preinit fsid 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a@-1(???).mds e1 new map 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a@-1(???).mds e1 print_map 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: e1 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: btime 2026-03-31T22:55:10:407537+0000 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: enable_multiple, ever_enabled_multiple: 1,1 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: default compat: compat={},rocompat={},incompat={1=base v0.20,2=client writeable ranges,3=default file layouts on dirs,4=dir inode in separate object,5=mds uses versioned encoding,6=dirfrag is stored in omap,8=no anchor table,9=file layout v2,10=snaprealm v2,11=minor log segments,12=quiesce subvolumes} 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: legacy client fscid: -1 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout: No filesystems configured 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a@-1(???).osd e1 crush map has features 3314932999778484224, adjusting msgr requires 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a@-1(???).osd e1 crush map has features 288514050185494528, adjusting msgr requires 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a@-1(???).osd e1 crush map has features 288514050185494528, adjusting msgr requires 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a@-1(???).osd e1 crush map has features 288514050185494528, adjusting msgr requires 2026-03-31T22:55:11.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a@-1(???).paxosservice(auth 1..2) refresh upgraded, format 0 -> 3 2026-03-31T22:55:11.519 
INFO:teuthology.orchestra.run.vm00.stdout:Wrote config to /etc/ceph/ceph.conf 2026-03-31T22:55:11.520 INFO:teuthology.orchestra.run.vm00.stdout:Wrote keyring to /etc/ceph/ceph.client.admin.keyring 2026-03-31T22:55:11.520 INFO:teuthology.orchestra.run.vm00.stdout:Creating mgr... 2026-03-31T22:55:11.520 INFO:teuthology.orchestra.run.vm00.stdout:Verifying port 0.0.0.0:9283 ... 2026-03-31T22:55:11.521 INFO:teuthology.orchestra.run.vm00.stdout:Verifying port 0.0.0.0:8765 ... 2026-03-31T22:55:11.660 INFO:teuthology.orchestra.run.vm00.stdout:Non-zero exit code 1 from systemctl reset-failed ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.a 2026-03-31T22:55:11.660 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Failed to reset failed state of unit ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.a.service: Unit ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.a.service not loaded. 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mon.a is new leader, mons a in quorum (ranks 0) 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: monmap epoch 1 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: fsid 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: last_changed 2026-03-31T22:55:09.387964+0000 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: created 2026-03-31T22:55:09.387964+0000 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: min_mon_release 20 (tentacle) 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: election_strategy: 1 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: fsmap 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: osdmap e1: 0 total, 0 up, 0 in 2026-03-31T22:55:11.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:11 vm00 ceph-mon[61968]: mgrmap e1: no daemons active 2026-03-31T22:55:11.778 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stderr Created symlink /etc/systemd/system/ceph-8bb14950-2d54-11f1-a348-07063966e06c.target.wants/ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.a.service → /etc/systemd/system/ceph-8bb14950-2d54-11f1-a348-07063966e06c@.service. 
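At this point mon.a has formed a single-member quorum and bootstrap is waiting for mgr.a to show up in the mgrmap; the benign `systemctl reset-failed` error just means the mgr unit had never been loaded before. The status dumps that follow are ordinary `ceph status` JSON, so the fields bootstrap is watching can be pulled out directly; a sketch, assuming jq is available as it is elsewhere in this run:

    ceph status --format json | jq '{health: .health.status, quorum: .quorum_names, mgr_available: .mgrmap.available}'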
2026-03-31T22:55:11.920 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:11 vm00 podman[62192]: 2026-03-31 22:55:11.919639101 +0000 UTC m=+0.048211842 container start 581ccc516b3e7bdcb08f2967101951e083e3bdc499310843f7a9409f7c7737c3 (image=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a, ceph=True, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, io.buildah.version=1.43.0, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.build-date=20260316, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_REF=tentacle-release, org.opencontainers.image.authors=Ceph Release Team , OSD_FLAVOR=default, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2) 2026-03-31T22:55:11.927 INFO:teuthology.orchestra.run.vm00.stdout:firewalld does not appear to be present 2026-03-31T22:55:11.927 INFO:teuthology.orchestra.run.vm00.stdout:Not possible to enable service . firewalld.service is not available 2026-03-31T22:55:11.927 INFO:teuthology.orchestra.run.vm00.stdout:firewalld does not appear to be present 2026-03-31T22:55:11.927 INFO:teuthology.orchestra.run.vm00.stdout:Not possible to open ports <[9283, 8765]>. firewalld.service is not available 2026-03-31T22:55:11.927 INFO:teuthology.orchestra.run.vm00.stdout:Waiting for mgr to start... 2026-03-31T22:55:11.927 INFO:teuthology.orchestra.run.vm00.stdout:Waiting for mgr... 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout { 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "fsid": "8bb14950-2d54-11f1-a348-07063966e06c", 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "health": { 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "status": "HEALTH_OK", 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "checks": {}, 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "mutes": [] 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "election_epoch": 5, 2026-03-31T22:55:12.101 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum": [ 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 0 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum_names": [ 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "a" 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum_age": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "monmap": { 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:12.102 
INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "min_mon_release_name": "tentacle", 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_mons": 1 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osdmap": { 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_osds": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_up_osds": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osd_up_since": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_in_osds": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osd_in_since": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_remapped_pgs": 0 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "pgmap": { 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "pgs_by_state": [], 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_pgs": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_pools": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_objects": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "data_bytes": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_used": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_avail": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_total": 0 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "fsmap": { 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "btime": "2026-03-31T22:55:10:407537+0000", 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "by_rank": [], 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "up:standby": 0 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "mgrmap": { 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "available": false, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_standbys": 0, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "modules": [ 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "iostat", 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "nfs" 2026-03-31T22:55:12.102 
INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "services": {} 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "servicemap": { 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "modified": "2026-03-31T22:55:10.408123+0000", 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "services": {} 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "progress_events": {} 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout } 2026-03-31T22:55:12.102 INFO:teuthology.orchestra.run.vm00.stdout:mgr not available, waiting (1/15)... 2026-03-31T22:55:12.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:11 vm00 bash[62192]: 581ccc516b3e7bdcb08f2967101951e083e3bdc499310843f7a9409f7c7737c3 2026-03-31T22:55:12.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:11 vm00 podman[62192]: 2026-03-31 22:55:11.880162084 +0000 UTC m=+0.008734814 image pull 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 2026-03-31T22:55:12.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:11 vm00 systemd[1]: Started Ceph mgr.a for 8bb14950-2d54-11f1-a348-07063966e06c. 2026-03-31T22:55:12.878 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:12 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/4108912729' entity='client.admin' 2026-03-31T22:55:12.878 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:12 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3613355052' entity='client.admin' cmd={"prefix": "status", "format": "json-pretty"} : dispatch 2026-03-31T22:55:13.154 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:12 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: /lib64/python3.9/site-packages/scipy/__init__.py:73: UserWarning: NumPy was imported from a Python sub-interpreter but NumPy does not properly support sub-interpreters. This will likely work for most users but might cause hard to track down issues or subtle bugs. A common user of the rare sub-interpreter feature is wsgi which also allows single-interpreter mode. 2026-03-31T22:55:13.154 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:12 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: Improvements in the case of bugs are welcome, but is not on the NumPy roadmap, and full support may require significant effort to achieve. 
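The "(1/15)" counter shows bootstrap retrying until `mgrmap.available` flips to true. An equivalent hand-rolled poll looks like the sketch below; the 15-attempt limit is copied from the log, but the 2-second interval is illustrative, not bootstrap's actual constant:

    for i in $(seq 1 15); do
        # jq -e exits non-zero while .mgrmap.available is false, so the loop keeps going
        ceph status --format json | jq -e '.mgrmap.available' >/dev/null && break
        echo "mgr not available, waiting ($i/15)..."
        sleep 2
    done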
2026-03-31T22:55:13.154 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:12 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: from numpy import show_config as show_numpy_config 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout { 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "fsid": "8bb14950-2d54-11f1-a348-07063966e06c", 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "health": { 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "status": "HEALTH_OK", 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "checks": {}, 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "mutes": [] 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:14.281 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "election_epoch": 5, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum": [ 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 0 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum_names": [ 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "a" 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum_age": 2, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "monmap": { 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "min_mon_release_name": "tentacle", 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_mons": 1 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osdmap": { 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_osds": 0, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_up_osds": 0, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osd_up_since": 0, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_in_osds": 0, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osd_in_since": 0, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_remapped_pgs": 0 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "pgmap": { 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "pgs_by_state": [], 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_pgs": 0, 
2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_pools": 0, 2026-03-31T22:55:14.282 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_objects": 0, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "data_bytes": 0, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_used": 0, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_avail": 0, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_total": 0 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "fsmap": { 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "btime": "2026-03-31T22:55:10:407537+0000", 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "by_rank": [], 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "up:standby": 0 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "mgrmap": { 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "available": false, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_standbys": 0, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "modules": [ 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "iostat", 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "nfs" 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "services": {} 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "servicemap": { 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "modified": "2026-03-31T22:55:10.408123+0000", 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "services": {} 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "progress_events": {} 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout } 2026-03-31T22:55:14.283 INFO:teuthology.orchestra.run.vm00.stdout:mgr not available, waiting (2/15)... 2026-03-31T22:55:14.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:14 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/2725805953' entity='client.admin' cmd={"prefix": "status", "format": "json-pretty"} : dispatch 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: Activating manager daemon a 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: mgrmap e2: a(active, starting, since 0.204774s) 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' cmd={"prefix": "mds metadata"} : dispatch 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' cmd={"prefix": "osd metadata"} : dispatch 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' cmd={"prefix": "mon metadata"} : dispatch 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "a", "id": "a"} : dispatch 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: Manager daemon a is now available 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/mirror_snapshot_schedule"} : dispatch 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/trash_purge_schedule"} : dispatch 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' 2026-03-31T22:55:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:15 vm00 ceph-mon[61968]: from='mgr.14100 192.168.123.100:0/570519725' entity='mgr.a' 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout { 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "fsid": "8bb14950-2d54-11f1-a348-07063966e06c", 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "health": { 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "status": "HEALTH_OK", 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "checks": {}, 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "mutes": [] 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:16.584 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "election_epoch": 5, 2026-03-31T22:55:16.585 
INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum": [ 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 0 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum_names": [ 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "a" 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "quorum_age": 5, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "monmap": { 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "min_mon_release_name": "tentacle", 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_mons": 1 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osdmap": { 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_osds": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_up_osds": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osd_up_since": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_in_osds": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "osd_in_since": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_remapped_pgs": 0 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "pgmap": { 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "pgs_by_state": [], 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_pgs": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_pools": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_objects": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "data_bytes": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_used": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_avail": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "bytes_total": 0 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "fsmap": { 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "btime": "2026-03-31T22:55:10:407537+0000", 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 
"by_rank": [], 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "up:standby": 0 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "mgrmap": { 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "available": true, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_standbys": 0, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "modules": [ 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "iostat", 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "nfs" 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ], 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "services": {} 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "servicemap": { 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 1, 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "modified": "2026-03-31T22:55:10.408123+0000", 2026-03-31T22:55:16.585 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "services": {} 2026-03-31T22:55:16.586 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout }, 2026-03-31T22:55:16.586 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "progress_events": {} 2026-03-31T22:55:16.586 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout } 2026-03-31T22:55:16.586 INFO:teuthology.orchestra.run.vm00.stdout:mgr is available 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout [global] 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout fsid = 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_cluster_log_file_level = debug 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_host = [v2:192.168.123.100:3300,v1:192.168.123.100:6789] 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_osd_allow_pg_remap = true 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_osd_allow_primary_affinity = true 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mon_warn_on_no_sortbitwise = false 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout osd_crush_chooseleaf_type = 0 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout [mgr] 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout mgr/telemetry/nag = false 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout [osd] 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: 
stdout osd_map_max_advance = 10 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout osd_sloppy_crc = true 2026-03-31T22:55:16.836 INFO:teuthology.orchestra.run.vm00.stdout:Enabling cephadm module... 2026-03-31T22:55:17.145 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:17 vm00 ceph-mon[61968]: mgrmap e3: a(active, since 1.20967s) 2026-03-31T22:55:17.145 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:17 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3419499432' entity='client.admin' cmd={"prefix": "status", "format": "json-pretty"} : dispatch 2026-03-31T22:55:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:17 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/278680172' entity='client.admin' cmd={"prefix": "config assimilate-conf"} : dispatch 2026-03-31T22:55:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:17 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/278680172' entity='client.admin' cmd='[{"prefix": "config assimilate-conf"}]': finished 2026-03-31T22:55:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:17 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/194886' entity='client.admin' cmd={"prefix": "mgr module enable", "module": "cephadm"} : dispatch 2026-03-31T22:55:18.094 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:17 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ignoring --setuser ceph since I am not root 2026-03-31T22:55:18.094 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:17 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ignoring --setgroup ceph since I am not root 2026-03-31T22:55:18.118 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout { 2026-03-31T22:55:18.118 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 4, 2026-03-31T22:55:18.118 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "available": true, 2026-03-31T22:55:18.118 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "active_name": "a", 2026-03-31T22:55:18.118 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_standby": 0 2026-03-31T22:55:18.118 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout } 2026-03-31T22:55:18.118 INFO:teuthology.orchestra.run.vm00.stdout:Waiting for the mgr to restart... 2026-03-31T22:55:18.118 INFO:teuthology.orchestra.run.vm00.stdout:Waiting for mgr epoch 4... 2026-03-31T22:55:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:18 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/194886' entity='client.admin' cmd='[{"prefix": "mgr module enable", "module": "cephadm"}]': finished 2026-03-31T22:55:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:18 vm00 ceph-mon[61968]: mgrmap e4: a(active, since 2s) 2026-03-31T22:55:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:18 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/1712715665' entity='client.admin' cmd={"prefix": "mgr stat"} : dispatch 2026-03-31T22:55:19.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:18 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: /lib64/python3.9/site-packages/scipy/__init__.py:73: UserWarning: NumPy was imported from a Python sub-interpreter but NumPy does not properly support sub-interpreters. This will likely work for most users but might cause hard to track down issues or subtle bugs. A common user of the rare sub-interpreter feature is wsgi which also allows single-interpreter mode. 
2026-03-31T22:55:19.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:18 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: Improvements in the case of bugs are welcome, but is not on the NumPy roadmap, and full support may require significant effort to achieve. 2026-03-31T22:55:19.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:18 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: from numpy import show_config as show_numpy_config 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: Active manager daemon a restarted 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: Activating manager daemon a 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: osdmap e2: 0 total, 0 up, 0 in 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: mgrmap e5: a(active, starting, since 0.0602007s) 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "a", "id": "a"} : dispatch 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "mds metadata"} : dispatch 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "osd metadata"} : dispatch 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "mon metadata"} : dispatch 2026-03-31T22:55:21.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:20 vm00 ceph-mon[61968]: Manager daemon a is now available 2026-03-31T22:55:21.872 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout { 2026-03-31T22:55:21.872 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "mgrmap_epoch": 6, 2026-03-31T22:55:21.872 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "initialized": true 2026-03-31T22:55:21.872 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout } 2026-03-31T22:55:21.872 INFO:teuthology.orchestra.run.vm00.stdout:mgr epoch 4 is available 2026-03-31T22:55:21.872 INFO:teuthology.orchestra.run.vm00.stdout:Verifying orchestrator module is enabled... 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: Found migration_current of "None". Setting to last migration. 
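Enabling the cephadm mgr module forces the active mgr to respawn, which is why the log records the pre-enable epoch (4) and then waits for the mgrmap to move past it; the "Found migration_current of None" line is the freshly loaded module initializing its persisted state. The same two steps by hand, with the output shape taken verbatim from the `mgr stat` dump above:

    ceph mgr module enable cephadm
    ceph mgr stat   # JSON: {"epoch": ..., "available": ..., "active_name": ..., "num_standby": ...}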
2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/mirror_snapshot_schedule"} : dispatch 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/trash_purge_schedule"} : dispatch 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:21] ENGINE Bus STARTING 2026-03-31T22:55:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: mgrmap e6: a(active, since 1.06291s) 2026-03-31T22:55:22.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='client.14122 -' entity='client.admin' cmd=[{"prefix": "get_command_descriptions"}]: dispatch 2026-03-31T22:55:22.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='client.14122 -' entity='client.admin' cmd=[{"prefix": "mgr_status"}]: dispatch 2026-03-31T22:55:22.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:21] ENGINE Serving on http://192.168.123.100:8765 2026-03-31T22:55:22.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:22.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:22 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/725161177' entity='client.admin' cmd={"prefix": "mgr module enable", "module": "orchestrator"} : dispatch 2026-03-31T22:55:22.845 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stderr module 'orchestrator' is already enabled (always-on) 2026-03-31T22:55:22.845 INFO:teuthology.orchestra.run.vm00.stdout:Setting orchestrator backend to cephadm... 2026-03-31T22:55:23.352 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout value unchanged 2026-03-31T22:55:23.353 INFO:teuthology.orchestra.run.vm00.stdout:Generating ssh key... 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: Generating public/private ed25519 key pair. 
2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: Your identification has been saved in /tmp/tmp96hp4yw5/key 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: Your public key has been saved in /tmp/tmp96hp4yw5/key.pub 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: The key fingerprint is: 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: SHA256:44T5EoVxv47HiZ5HPvQhaIftLQNcUbv3zMD8+thx5Uk ceph-8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: The key's randomart image is: 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: +--[ED25519 256]--+ 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | . . .. | 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | + .. . | 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | . . ... | 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | + .. + | 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | +.S=. . =E.| 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | =***....Bo| 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | ..=B=+ . oB| 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | o oB o +o| 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: | o. + o.o| 2026-03-31T22:55:23.604 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: +----[SHA256]-----+ 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:21] ENGINE Serving on https://192.168.123.100:7150 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:21] ENGINE Bus STARTED 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:21] ENGINE Client ('192.168.123.100', 56522) lost — peer dropped the TLS connection suddenly, during handshake: (6, 'TLS/SSL connection has been closed (EOF) (_ssl.c:1147)') 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/725161177' entity='client.admin' cmd='[{"prefix": "mgr module enable", "module": "orchestrator"}]': finished 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: mgrmap e7: a(active, since 2s) 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:23.860 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:23 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:23.864 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDjyD5B1e2pFlWsCTrxzrbzLTjuMzcFWOcYorLg94tKc ceph-8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:23.864 INFO:teuthology.orchestra.run.vm00.stdout:Wrote public SSH key to /home/ubuntu/cephtest/ceph.pub 2026-03-31T22:55:23.864 INFO:teuthology.orchestra.run.vm00.stdout:Adding key to root@localhost authorized_keys... 2026-03-31T22:55:23.864 INFO:teuthology.orchestra.run.vm00.stdout:Adding host vm00... 2026-03-31T22:55:25.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:24 vm00 ceph-mon[61968]: from='client.14132 -' entity='client.admin' cmd=[{"prefix": "orch set backend", "module_name": "cephadm", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:25.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:24 vm00 ceph-mon[61968]: from='client.14134 -' entity='client.admin' cmd=[{"prefix": "cephadm set-user", "user": "root", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:25.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:24 vm00 ceph-mon[61968]: from='client.14136 -' entity='client.admin' cmd=[{"prefix": "cephadm generate-key", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:25.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:24 vm00 ceph-mon[61968]: Generating ssh key... 2026-03-31T22:55:25.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:24 vm00 ceph-mon[61968]: from='client.14138 -' entity='client.admin' cmd=[{"prefix": "cephadm get-pub-key", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:25.658 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout Added host 'vm00' with addr '192.168.123.100' 2026-03-31T22:55:25.658 INFO:teuthology.orchestra.run.vm00.stdout:Deploying unmanaged mon service... 
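The orchestrator wiring is now in place: backend set to cephadm, an ed25519 cluster key generated, the public half written to ceph.pub and installed for root, the first host added, and the bootstrap mon/mgr declared unmanaged so cephadm will not reshuffle them. The same sequence as plain commands, as a sketch; the hostname and address are the ones from this run, the ssh-copy-id step is one common way to install the key (the log only says the key was added to root@localhost authorized_keys), and `--unmanaged` mirrors the dispatched {"unmanaged": true}:

    ceph orch set backend cephadm
    ceph cephadm set-user root
    ceph cephadm generate-key
    ceph cephadm get-pub-key > ceph.pub
    ssh-copy-id -f -i ceph.pub root@vm00
    ceph orch host add vm00 192.168.123.100
    ceph orch apply mon --unmanaged
    ceph orch apply mgr --unmanaged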
2026-03-31T22:55:25.893 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:25 vm00 ceph-mon[61968]: from='client.14140 -' entity='client.admin' cmd=[{"prefix": "orch host add", "hostname": "vm00", "addr": "192.168.123.100", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:25.893 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:25 vm00 ceph-mon[61968]: Deploying cephadm binary to vm00 2026-03-31T22:55:25.893 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:25 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:25.893 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:25 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:25.918 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout Scheduled mon update... 2026-03-31T22:55:25.918 INFO:teuthology.orchestra.run.vm00.stdout:Deploying unmanaged mgr service... 2026-03-31T22:55:26.426 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout Scheduled mgr update... 2026-03-31T22:55:26.942 INFO:teuthology.orchestra.run.vm00.stdout:Enabling the dashboard module... 2026-03-31T22:55:26.965 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:26 vm00 ceph-mon[61968]: Added host vm00 2026-03-31T22:55:26.965 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:26 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:26.965 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:26 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:26.965 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:26 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/2591816257' entity='client.admin' 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='client.14142 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "mon", "unmanaged": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: Saving service mon spec with placement count:5 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='client.14144 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "mgr", "unmanaged": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: Saving service mgr spec with placement count:2 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3660190842' entity='client.admin' 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/722490489' entity='client.admin' cmd={"prefix": "mgr module enable", "module": "dashboard"} : dispatch 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm00", "name": "osd_memory_target"} : dispatch 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm00", "caps": []} : dispatch 2026-03-31T22:55:27.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-mon[61968]: from='mgr.14118 192.168.123.100:0/166800412' entity='mgr.a' cmd='[{"prefix": "auth get-or-create", "entity": "client.agent.vm00", "caps": []}]': finished 2026-03-31T22:55:28.223 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ignoring --setuser ceph since I am not root 2026-03-31T22:55:28.223 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:27 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ignoring --setgroup ceph since I am not root 2026-03-31T22:55:28.261 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout { 2026-03-31T22:55:28.261 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "epoch": 8, 2026-03-31T22:55:28.261 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "available": true, 2026-03-31T22:55:28.261 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "active_name": "a", 2026-03-31T22:55:28.261 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "num_standby": 0 2026-03-31T22:55:28.261 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout } 2026-03-31T22:55:28.261 INFO:teuthology.orchestra.run.vm00.stdout:Waiting for the mgr to restart... 2026-03-31T22:55:28.261 INFO:teuthology.orchestra.run.vm00.stdout:Waiting for mgr epoch 8... 2026-03-31T22:55:29.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:28 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/722490489' entity='client.admin' cmd='[{"prefix": "mgr module enable", "module": "dashboard"}]': finished 2026-03-31T22:55:29.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:28 vm00 ceph-mon[61968]: mgrmap e8: a(active, since 7s) 2026-03-31T22:55:29.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:28 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/3957834446' entity='client.admin' cmd={"prefix": "mgr stat"} : dispatch 2026-03-31T22:55:29.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:28 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: /lib64/python3.9/site-packages/scipy/__init__.py:73: UserWarning: NumPy was imported from a Python sub-interpreter but NumPy does not properly support sub-interpreters. This will likely work for most users but might cause hard to track down issues or subtle bugs. A common user of the rare sub-interpreter feature is wsgi which also allows single-interpreter mode. 2026-03-31T22:55:29.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:28 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: Improvements in the case of bugs are welcome, but is not on the NumPy roadmap, and full support may require significant effort to achieve. 2026-03-31T22:55:29.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:28 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: from numpy import show_config as show_numpy_config 2026-03-31T22:55:31.400 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: Active manager daemon a restarted 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: Activating manager daemon a 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: osdmap e3: 0 total, 0 up, 0 in 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: mgrmap e9: a(active, starting, since 0.00566902s) 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "a", "id": "a"} : dispatch 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mds metadata"} : dispatch 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata"} : dispatch 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata"} : dispatch 2026-03-31T22:55:31.401 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:30 vm00 ceph-mon[61968]: Manager daemon a is now available 2026-03-31T22:55:31.934 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout { 2026-03-31T22:55:31.934 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "mgrmap_epoch": 10, 2026-03-31T22:55:31.934 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout "initialized": true 2026-03-31T22:55:31.934 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout } 2026-03-31T22:55:31.934 INFO:teuthology.orchestra.run.vm00.stdout:mgr epoch 8 is available 2026-03-31T22:55:31.934 INFO:teuthology.orchestra.run.vm00.stdout:Using certmgr to generate dashboard self-signed certificate... 
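The wait above polls the mgr map until the daemon comes back after the dashboard module load: bootstrap records the epoch before the restart, then loops until a newer map reports an available mgr. The same check can be run by hand (a minimal sketch; ceph mgr stat already prints JSON, as shown in this log, and jq is assumed to be installed as elsewhere in this suite):

    # current mgr map epoch, active name, and availability
    ceph mgr stat
    # exit 0 only once an active mgr is available again
    ceph mgr stat | jq -e '.available == true'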
2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/mirror_snapshot_schedule"} : dispatch 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/trash_purge_schedule"} : dispatch 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm00", "name": "osd_memory_target"} : dispatch 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm00", "caps": []} : dispatch 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:31] ENGINE Bus STARTING 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:31] ENGINE Serving on http://192.168.123.100:8765 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: mgrmap e10: a(active, since 1.00929s) 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='client.14156 -' entity='client.admin' cmd=[{"prefix": "get_command_descriptions"}]: dispatch 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: from='client.14156 -' entity='client.admin' cmd=[{"prefix": "mgr_status"}]: dispatch 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:31] ENGINE Serving on https://192.168.123.100:7150 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:31] ENGINE Bus STARTED 2026-03-31T22:55:32.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:32 vm00 ceph-mon[61968]: [31/Mar/2026:22:55:31] ENGINE Client ('192.168.123.100', 56418) lost — peer dropped the TLS connection suddenly, during handshake: (6, 'TLS/SSL connection has been closed (EOF) (_ssl.c:1147)') 2026-03-31T22:55:33.151 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout {"cert": "-----BEGIN 
CERTIFICATE-----\nMIIE+TCCAuGgAwIBAgIUBSRVYFHgqjCEriG+lfdZiRbD3qMwDQYJKoZIhvcNAQEL\nBQAwFzEVMBMGA1UEAwwMY2VwaGFkbS1yb290MB4XDTI2MDMzMTIyNTUzM1oXDTI5\nMDMzMDIyNTUzM1owGjEYMBYGA1UEAwwPMTkyLjE2OC4xMjMuMTAwMIICIjANBgkq\nhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAya9oYoEbKcwwr/AfAUrU3nr3nkIq3rQn\nArq+WpdCPW/ezZq0ULFawU336EF40WyVyZa3mrYagjKw8RMm30NAfqBhx2BPdnTO\nd2RmkWhdQ60y67rbprM+z3nr2VZXeeN7IIfSnlDz7shwhyNnmp1kroa50DeZQZLp\nFwUBVdOWG5F1Zhz/BMXshStU/Iv6S8+9OJBh1AK8oaYv54dsulYFs0Fq+KdBj9N2\nR47hWINVAjQsEp/wecm3UTVPEu2iLgasRxDAhg/MyD7DstrPQ/vbV2XFVX1rblmF\n5jV+Re2RoskQe9sIG9YoKyAzgxt/HBqgjgersTNlt7zqGDKo4nu+ei/kp0XI47fq\nRZhsz93zTweyp3VjvpVKDvKQbp/ATCpg8XCnGXuSPRlND2BqAPaU4oqrjE3yapAE\nP1Dzkm3EE7bMEaW9tBB3asQNjU1xGKnzIeDoUMUL3zJVOf4FI2f1xdp/eeDxwJFv\n2jIQGS2Np+x/h0Kus0TEiKXveB5CSqEeSqG1+SEPRax8Q6/8/R2TknTmuAnvWVRZ\nhBMak0hqD1GjASURW1a876QILusB9wvyKNwkD9q+dibWg5c7GBoE1D2hhSxYxWEJ\ni7F4sCKJhXMHZovkq5/DyhgqgEi22N6NOpAccooP5MTtJBNO+8TzWNnVUP/Fcl2W\nR2nYLQKtbQkCAwEAAaM6MDgwKAYDVR0RBCEwH4IEdm0wMIIRZGFzaGJvYXJkX3Nl\ncnZlcnOHBMCoe2QwDAYDVR0TAQH/BAIwADANBgkqhkiG9w0BAQsFAAOCAgEAqfXm\nnzpBZwK+oPkbia+r5vLXjYSscrugJ5wdlPx/gwVIAfQgKEfZUrA6BK7qlgWGMj3G\ndR5SfvUkd/x0otif6V7KJ3itjn2ekKAS1d0Eh3ZBOR9xNou30rwiMt+8ydhuvTJk\nIw5wLy7Een9r0aae0KCQvJaDT6n5GqnSaW2z0k9No6tOhGH570i84IdKQaRgSLQy\n/hVU8Bf/R+4hHqJ/eTnMNnqIdTTWTUM3gF2WBIvDnxAyXb6zuQCvtO7aQsx0wvKw\nCieeJOlcQ+pwPXwqYQQTtoAlWUWBjaXjq54rbC2EotUkIpilqRytkSRwumgzXk6J\ni3ihTvNWyPqx9qV7poRnV+6/svaF1MwB2nbi9m7QKXyyYd60SPJ+TiKBWFLEDkwr\nhIeGM/XOGB4ieJk6RqhMKetrnUmTh1rQwIcT9aIjlfppg/e0zC0NQNCYVLx2j/WV\n4xnQXEvBsnB1Ow39A5PW8+2DpeS1zQyZXUaorWjJJOFHP1AYKAjq7Qr8QUEQ4sjz\nN6mBanWxwPMzBoYDhj8BhOa+SGF+5En0lY/O/2iaptSRIYXxJMDuChzxjesxtbOu\nRa29P8+ZCAiCcVsooLuq0iclOzfZrtf9uQtoIVc0C2/qfa2wlCqQOj/qE+tL9W5K\nJUibovpgqlNUZ3bdKEASj9q1KWoQ+ySCN7U14gg=\n-----END CERTIFICATE-----\n", "key": "-----BEGIN RSA PRIVATE 
KEY-----\nMIIJJwIBAAKCAgEAya9oYoEbKcwwr/AfAUrU3nr3nkIq3rQnArq+WpdCPW/ezZq0\nULFawU336EF40WyVyZa3mrYagjKw8RMm30NAfqBhx2BPdnTOd2RmkWhdQ60y67rb\nprM+z3nr2VZXeeN7IIfSnlDz7shwhyNnmp1kroa50DeZQZLpFwUBVdOWG5F1Zhz/\nBMXshStU/Iv6S8+9OJBh1AK8oaYv54dsulYFs0Fq+KdBj9N2R47hWINVAjQsEp/w\necm3UTVPEu2iLgasRxDAhg/MyD7DstrPQ/vbV2XFVX1rblmF5jV+Re2RoskQe9sI\nG9YoKyAzgxt/HBqgjgersTNlt7zqGDKo4nu+ei/kp0XI47fqRZhsz93zTweyp3Vj\nvpVKDvKQbp/ATCpg8XCnGXuSPRlND2BqAPaU4oqrjE3yapAEP1Dzkm3EE7bMEaW9\ntBB3asQNjU1xGKnzIeDoUMUL3zJVOf4FI2f1xdp/eeDxwJFv2jIQGS2Np+x/h0Ku\ns0TEiKXveB5CSqEeSqG1+SEPRax8Q6/8/R2TknTmuAnvWVRZhBMak0hqD1GjASUR\nW1a876QILusB9wvyKNwkD9q+dibWg5c7GBoE1D2hhSxYxWEJi7F4sCKJhXMHZovk\nq5/DyhgqgEi22N6NOpAccooP5MTtJBNO+8TzWNnVUP/Fcl2WR2nYLQKtbQkCAwEA\nAQKCAgAU05OsZFeZO9glOt8Aw6jypnEOexexMpo6qm0ArtDsxjIUOi73jDDqeDIq\n3iPrQFLBY9pm0/otyq1B36/qayfsS4R9UaEzAsHVS11aE5RZ6jcxZ3TKJIVOaHEl\n2sgiso5i4BOsPIDL/g+8rLfEnA8rBIeIjE07m7+NHHu0UKqWAzHDzye4ZvXTFJPs\nlknM7VFFV4d6nKO874vP1UixfzOMfKterOrUvsnefs8fVESBe0C31aM/+EgBT0dw\nlXBjX1YH6p9u2I/Vd8G+pOC1TPsr7brwZoii9wSgLpLxBqS80hVtLMOMGFPCjEs1\ndS/th4Rf1kPxAvU0gYIM6KJO7OdQyL2dd2b/g63Dpbiz2Ek49/FayMpxEyQP/8+H\nYq6Cv7WHj+u04VGqQ/Y1i3d78W6+kMdifJDCT5q1HEifGUfmv5VpTVG2jp9G73l1\nIo943qZaK+gMRg1YZUi79x7RLLK+5WkmoQAtitq5UQaJM4HXA4/j+itXPud7p5MX\nW6/qs861u7EXALmjWTvzA4CF983kTid4zzR64+R3dqWI0PgtDlxycZ5X68EL+y4a\ncLRRD9FftIXgyN1DJSEZrcOqjboAUz67YJofFeSKklYQHuYmirkhpuLhDWEl8H1N\nXqLWdQu5/QKUSeDndPosox83KLfIbNjq9+SS0yFzS7IApjYsmQKCAQEA8l9mDSzP\nQIG1mFgKGvc7kYA8PLVxi8Jbxjxwamt/MZdaNYjBYanXZQIcsgMlOPGo1X3GKBFg\nLybpBiltAJkMB8Q5soSp8O1D1G28zQ7exzLNYkR/FgpRxvLFYNSpATggO3Q0qXw1\niooddYIdFO19L+CFvtpMJEBrYauaWGyer5LYz637TOgAu09da9RRnz9TDPpByRF4\nk0TgY5NOzeID8WzwBr/FyRdBg7QSm7rdgU+BoBPgmMbynGmbyUntEfL4fbzTzeX+\n5uJjhLgynfIhk9pELks/Pe/vF8VFwBmYu/0qOnHSO2HjtY557UVzHbRLF7+D5fp0\n1rzhPmXhLTJaRQKCAQEA1QZfUAHvQYzKDDXtFYqaDAVWnl9b+eIQueqsPw6TbWZz\n3SAEH1WaZMeSU7oyUB5uSZ3/zKv0Lbh8EEezh8yEGZ+c/JWqbMpYAhab01YMVh4r\nNvd8y0v7DwEKE86MsMXe9l4rKprgpilZG4IUTextViFSRnElUPIsF87eBu0NrqZD\nVW7WjS7Z2zR+Vgf4BpcfiMRFbgqHtlfKDsdsyLjNtwt/vlarqPXKabYeNVZHD/Fy\no46c0R/lC85p/n/+eXARE9mfokrqo9p2Wcfn/GHFePCiIOdBqipubi5/QBJOJ5Y/\nU6i5N/TwBehpEs6ulAgt1MwGYRjLQ8UgS9DaUsX19QKCAQBjmBPlQg5/4Uzvpu2w\nGvudVGZsQ9q+pUT09ozzDe0cQadN7D4alaHu3alQ9L3s4lCQObidalZtyb1jTM5/\numQZJObYlm8Gk1iWB+nXEU8154hB4wed1Ou5prS4j2pLF/cWty+UnXea0Bo+2RJ+\nMQCKFeafgDBdjSMS5WLtBuVGHD9/R2zGM2dpkLlAv+QyXeEQqh0yND2ptlnGURfG\nviMEp9ER1Oe/NBE1v18OIwE2S6dmUgSugsHayAK9+eSyxXaapGccanAOC+T8+nvC\nL2bgzIXLyBvapadHy0ZYI6yBQwyBK5hJ0nWxezRa7XjaMeXRgl7+t6YfP0oHS/H1\nkoVJAoIBAE2AIv71pTjg4sUsfVO/+Cl1a7z0jn7LyzAIe5/5SpObmX2SX02TFDek\nD81cSV+yoocv71dkG9Licmht3ZgDRPAZjMCsJrOrrfcciJaLkLNoCGrYAVPpEa60\nvxjTpYXOhz50phm6/Q4Wru9+FKsA4Apc1xT/pwbbBEWpIke8yKSIoqibSX1V1TaV\n03yzK34iTvuwBF5zgEpLiBx2W1JvQZBdo5z45Tz0Jy7sbfMTr4jSW9W7VbwTcX4p\nG5DWuhsuNVhWbWha/yCbo0EopHLCk1hbm3CDEfW5uDFnwy3pOvvWArYKBVChUQ2l\nL00DWqpKi3oGvC9pGNJQDl7m3fcNRF0CggEAH9Y7VMaO8A0edQTXB9WWduUtdCSX\naW00OxrMCTbqnOV5ffsGViiTVBSgPw3PCTWL0OTi3BDVKXacLwmcJt4d5FCXDJ8e\nGuokzksmprxws4PFicwCecHQghPMHWCLOf+MeSy+ny7F4QXiH+yBC96ba7zHXKpW\n99l4ojYu5hziZndQCXp0u1I2qlPpjQA+hJUxIb7lZkauGhhmPgfW1/nYL6ipkn3N\nK6Q13L4GBJgznZtPIUWThBo7aCUAxgb/9401LSFUfgY7q/qMV2hUTyfdkQMRFnTv\nxcxID98GSsF/HpaVR7ATWOySGSiTrAL9DkiuGE6Ow1fZI5MYq1t/+RByXg==\n-----END RSA PRIVATE KEY-----\n"} 2026-03-31T22:55:33.383 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:33 vm00 ceph-mon[61968]: from='client.14164 -' entity='client.admin' cmd=[{"prefix": "orch certmgr generate-certificates", "module_name": "dashboard", "target": ["mon-mgr", ""]}]: dispatch 
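The JSON blob above is the self-signed pair produced by certmgr; bootstrap immediately installs it via the dashboard set-ssl-certificate commands dispatched below. Done by hand, the equivalent sequence would look roughly like this (a sketch: the CLI form of the certmgr call is inferred from the "orch certmgr generate-certificates" dispatch logged above, and certs.json/cert.pem/key.pem are hypothetical file names for the two JSON fields):

    ceph orch certmgr generate-certificates dashboard > certs.json
    jq -r '.cert' certs.json > cert.pem
    jq -r '.key'  certs.json > key.pem
    ceph dashboard set-ssl-certificate -i cert.pem
    ceph dashboard set-ssl-certificate-key -i key.pem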
2026-03-31T22:55:33.383 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:33 vm00 ceph-mon[61968]: Deploying daemon agent.vm00 on vm00 2026-03-31T22:55:33.408 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout SSL certificate updated 2026-03-31T22:55:33.701 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout SSL certificate key updated 2026-03-31T22:55:33.701 INFO:teuthology.orchestra.run.vm00.stdout:Creating initial admin user... 2026-03-31T22:55:34.190 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout {"username": "admin", "password": "$2b$12$iHkg2nSGOQaeYiyM/iX9Ueqsukh4L2zYhxeCdhEr0umig/s/yAh4W", "roles": ["administrator"], "name": null, "email": null, "lastUpdate": 1774997734, "enabled": true, "pwdExpirationDate": null, "pwdUpdateRequired": true} 2026-03-31T22:55:34.190 INFO:teuthology.orchestra.run.vm00.stdout:Fetching dashboard port number... 2026-03-31T22:55:34.407 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:34 vm00 ceph-mon[61968]: mgrmap e11: a(active, since 2s) 2026-03-31T22:55:34.407 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:34 vm00 ceph-mon[61968]: from='client.14166 -' entity='client.admin' cmd=[{"prefix": "dashboard set-ssl-certificate", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:34.407 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:34 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:34.407 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:34 vm00 ceph-mon[61968]: from='client.14168 -' entity='client.admin' cmd=[{"prefix": "dashboard set-ssl-certificate-key", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:34.407 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:34 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:34.407 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:34 vm00 ceph-mon[61968]: from='client.14170 -' entity='client.admin' cmd=[{"prefix": "dashboard ac-user-create", "username": "admin", "rolename": "administrator", "force_password": true, "pwd_update_required": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:34.430 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stdout 8443 2026-03-31T22:55:34.430 INFO:teuthology.orchestra.run.vm00.stdout:firewalld does not appear to be present 2026-03-31T22:55:34.430 INFO:teuthology.orchestra.run.vm00.stdout:Not possible to open ports <[8443]>. 
firewalld.service is not available 2026-03-31T22:55:34.432 INFO:teuthology.orchestra.run.vm00.stdout:Ceph Dashboard is now available at: 2026-03-31T22:55:34.432 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.432 INFO:teuthology.orchestra.run.vm00.stdout: URL: https://vm00.local:8443/ 2026-03-31T22:55:34.432 INFO:teuthology.orchestra.run.vm00.stdout: User: admin 2026-03-31T22:55:34.432 INFO:teuthology.orchestra.run.vm00.stdout: Password: g1ka0ee8u7 2026-03-31T22:55:34.432 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.432 INFO:teuthology.orchestra.run.vm00.stdout:Saving cluster configuration to /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config directory 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout:/usr/bin/ceph: stderr set mgr/dashboard/cluster/status 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout:You can access the Ceph CLI as following in case of multi-cluster or non-default config: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: sudo /home/ubuntu/cephtest/cephadm shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout:Or, if you are only running a single cluster on this host: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: sudo /home/ubuntu/cephtest/cephadm shell 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout:Please consider enabling telemetry to help improve Ceph: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: ceph telemetry on 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout:For more information see: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: https://docs.ceph.com/en/latest/mgr/telemetry/ 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:34.740 INFO:teuthology.orchestra.run.vm00.stdout:Bootstrap complete. 2026-03-31T22:55:34.749 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stdout static 2026-03-31T22:55:34.754 INFO:teuthology.orchestra.run.vm00.stdout:Non-zero exit code 3 from systemctl is-active logrotate 2026-03-31T22:55:34.754 INFO:teuthology.orchestra.run.vm00.stdout:systemctl: stdout inactive 2026-03-31T22:55:34.755 INFO:teuthology.orchestra.run.vm00.stdout:Enabling the logrotate.timer service to perform daily log rotation. 2026-03-31T22:55:34.898 INFO:tasks.cephadm:Fetching config... 2026-03-31T22:55:34.898 DEBUG:teuthology.orchestra.run.vm00:> set -ex 2026-03-31T22:55:34.898 DEBUG:teuthology.orchestra.run.vm00:> dd if=/etc/ceph/ceph.conf of=/dev/stdout 2026-03-31T22:55:34.914 INFO:tasks.cephadm:Fetching client.admin keyring... 2026-03-31T22:55:34.914 DEBUG:teuthology.orchestra.run.vm00:> set -ex 2026-03-31T22:55:34.914 DEBUG:teuthology.orchestra.run.vm00:> dd if=/etc/ceph/ceph.client.admin.keyring of=/dev/stdout 2026-03-31T22:55:34.969 INFO:tasks.cephadm:Fetching mon keyring... 
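Bootstrap is complete at this point, and the task is pulling the cluster artifacts off vm00: ceph.conf, the client.admin keyring, and next the mon keyring and the cluster SSH public key, which it will append to /root/.ssh/authorized_keys on every host so the orchestrator can reach them. The SSH material can also be fetched through the CLI instead of reading files directly (a sketch, assuming an admin keyring is available; these are standard cephadm mgr commands, not shown in this log):

    # the SSH public key the cephadm orchestrator installs on managed hosts
    ceph cephadm get-pub-key
    # the SSH client config cephadm uses when connecting to hosts
    ceph cephadm get-ssh-config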
2026-03-31T22:55:34.969 DEBUG:teuthology.orchestra.run.vm00:> set -ex 2026-03-31T22:55:34.969 DEBUG:teuthology.orchestra.run.vm00:> sudo dd if=/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/keyring of=/dev/stdout 2026-03-31T22:55:35.036 INFO:tasks.cephadm:Fetching pub ssh key... 2026-03-31T22:55:35.036 DEBUG:teuthology.orchestra.run.vm00:> set -ex 2026-03-31T22:55:35.036 DEBUG:teuthology.orchestra.run.vm00:> dd if=/home/ubuntu/cephtest/ceph.pub of=/dev/stdout 2026-03-31T22:55:35.095 INFO:tasks.cephadm:Installing pub ssh key for root users... 2026-03-31T22:55:35.095 DEBUG:teuthology.orchestra.run.vm00:> sudo install -d -m 0700 /root/.ssh && echo 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDjyD5B1e2pFlWsCTrxzrbzLTjuMzcFWOcYorLg94tKc ceph-8bb14950-2d54-11f1-a348-07063966e06c' | sudo tee -a /root/.ssh/authorized_keys && sudo chmod 0600 /root/.ssh/authorized_keys 2026-03-31T22:55:35.170 INFO:teuthology.orchestra.run.vm00.stdout:ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDjyD5B1e2pFlWsCTrxzrbzLTjuMzcFWOcYorLg94tKc ceph-8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:35.184 DEBUG:teuthology.orchestra.run.vm05:> sudo install -d -m 0700 /root/.ssh && echo 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDjyD5B1e2pFlWsCTrxzrbzLTjuMzcFWOcYorLg94tKc ceph-8bb14950-2d54-11f1-a348-07063966e06c' | sudo tee -a /root/.ssh/authorized_keys && sudo chmod 0600 /root/.ssh/authorized_keys 2026-03-31T22:55:35.215 INFO:teuthology.orchestra.run.vm05.stdout:ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDjyD5B1e2pFlWsCTrxzrbzLTjuMzcFWOcYorLg94tKc ceph-8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:35.224 DEBUG:teuthology.orchestra.run.vm09:> sudo install -d -m 0700 /root/.ssh && echo 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDjyD5B1e2pFlWsCTrxzrbzLTjuMzcFWOcYorLg94tKc ceph-8bb14950-2d54-11f1-a348-07063966e06c' | sudo tee -a /root/.ssh/authorized_keys && sudo chmod 0600 /root/.ssh/authorized_keys 2026-03-31T22:55:35.259 INFO:teuthology.orchestra.run.vm09.stdout:ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDjyD5B1e2pFlWsCTrxzrbzLTjuMzcFWOcYorLg94tKc ceph-8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:35.271 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph config set mgr mgr/cephadm/allow_ptrace true 2026-03-31T22:55:35.291 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:35.291 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:35 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3869946218' entity='client.admin' cmd={"prefix": "config get", "who": "mgr", "key": "mgr/dashboard/ssl_server_port"} : dispatch 2026-03-31T22:55:35.291 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:35 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/2083115067' entity='client.admin' 2026-03-31T22:55:35.409 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:35.786 INFO:tasks.cephadm:Distributing conf and client.admin keyring to all hosts + 0755 2026-03-31T22:55:35.786 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch client-keyring set client.admin '*' --mode 0755 2026-03-31T22:55:35.956 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:36.278 INFO:tasks.cephadm:Writing (initial) conf and keyring to vm05 2026-03-31T22:55:36.278 DEBUG:teuthology.orchestra.run.vm05:> set -ex 2026-03-31T22:55:36.278 DEBUG:teuthology.orchestra.run.vm05:> dd of=/etc/ceph/ceph.conf 2026-03-31T22:55:36.293 DEBUG:teuthology.orchestra.run.vm05:> set -ex 2026-03-31T22:55:36.293 DEBUG:teuthology.orchestra.run.vm05:> dd of=/etc/ceph/ceph.client.admin.keyring 2026-03-31T22:55:36.346 INFO:tasks.cephadm:Adding host vm05 to orchestrator... 2026-03-31T22:55:36.346 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch host add vm05 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/2272671641' entity='client.admin' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:36.480 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: from='client.14178 -' entity='client.admin' cmd=[{"prefix": "orch client-keyring set", "entity": "client.admin", "placement": "*", "mode": "0755", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: Updating vm00:/etc/ceph/ceph.conf 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: Updating vm00:/etc/ceph/ceph.client.admin.keyring 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: from='client.14180 -' entity='client.admin' cmd=[{"prefix": "orch host add", "hostname": "vm05", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: from='mgr.14152 
192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:38.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:37 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:38.275 INFO:teuthology.orchestra.run.vm00.stdout:Added host 'vm05' with addr '192.168.123.105' 2026-03-31T22:55:38.320 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch host ls --format=json 2026-03-31T22:55:38.444 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:38.693 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:38.693 INFO:teuthology.orchestra.run.vm00.stdout:[{"addr": "192.168.123.100", "hostname": "vm00", "labels": [], "status": ""}, {"addr": "192.168.123.105", "hostname": "vm05", "labels": [], "status": ""}] 2026-03-31T22:55:38.736 INFO:tasks.cephadm:Writing (initial) conf and keyring to vm09 2026-03-31T22:55:38.737 DEBUG:teuthology.orchestra.run.vm09:> set -ex 2026-03-31T22:55:38.737 DEBUG:teuthology.orchestra.run.vm09:> dd of=/etc/ceph/ceph.conf 2026-03-31T22:55:38.751 DEBUG:teuthology.orchestra.run.vm09:> set -ex 2026-03-31T22:55:38.751 DEBUG:teuthology.orchestra.run.vm09:> dd of=/etc/ceph/ceph.client.admin.keyring 2026-03-31T22:55:38.805 INFO:tasks.cephadm:Adding host vm09 to orchestrator... 2026-03-31T22:55:38.805 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch host add vm09 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: Deploying cephadm binary to vm05 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: mgrmap e12: a(active, since 6s) 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm05", "name": "osd_memory_target"} : dispatch 2026-03-31T22:55:38.814 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: from='mgr.14152 
192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:38.935 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: Added host vm05 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: Updating vm05:/etc/ceph/ceph.conf 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: from='client.14182 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: Updating vm05:/etc/ceph/ceph.client.admin.keyring 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm05", "caps": []} : dispatch 2026-03-31T22:55:40.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:39 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "auth get-or-create", "entity": "client.agent.vm05", "caps": []}]': finished 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='client.14184 -' entity='client.admin' cmd=[{"prefix": "orch host add", "hostname": "vm09", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: Deploying daemon agent.vm05 on vm05 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' 
entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:40 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:41.282 INFO:teuthology.orchestra.run.vm00.stdout:Added host 'vm09' with addr '192.168.123.109' 2026-03-31T22:55:41.333 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch host ls --format=json 2026-03-31T22:55:41.504 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:41.769 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:41.769 INFO:teuthology.orchestra.run.vm00.stdout:[{"addr": "192.168.123.100", "hostname": "vm00", "labels": [], "status": ""}, {"addr": "192.168.123.105", "hostname": "vm05", "labels": [], "status": ""}, {"addr": "192.168.123.109", "hostname": "vm09", "labels": [], "status": ""}] 2026-03-31T22:55:41.818 INFO:tasks.cephadm:Setting crush tunables to default 2026-03-31T22:55:41.818 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd crush tunables default 2026-03-31T22:55:41.950 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config 
/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:42.037 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: Deploying cephadm binary to vm09 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm09", "name": "osd_memory_target"} : dispatch 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:42.038 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:41 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:43.065 INFO:teuthology.orchestra.run.vm00.stderr:adjusted tunables profile to default 2026-03-31T22:55:43.110 INFO:tasks.cephadm:Adding mon.a on vm00 2026-03-31T22:55:43.110 INFO:tasks.cephadm:Adding mon.b on vm05 2026-03-31T22:55:43.110 INFO:tasks.cephadm:Adding mon.c on vm09 2026-03-31T22:55:43.111 DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch apply mon '3;vm00:192.168.123.100=a;vm05:192.168.123.105=b;vm09:192.168.123.109=c' 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: Added host vm09 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: Updating vm09:/etc/ceph/ceph.conf 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='client.14186 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: Updating vm09:/etc/ceph/ceph.client.admin.keyring 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: Updating 
vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm09", "caps": []} : dispatch 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "auth get-or-create", "entity": "client.agent.vm09", "caps": []}]': finished 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/15630842' entity='client.admin' cmd={"prefix": "osd crush tunables", "profile": "default"} : dispatch 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:42 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:43.247 INFO:teuthology.orchestra.run.vm09.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf 2026-03-31T22:55:43.505 INFO:teuthology.orchestra.run.vm09.stdout:Scheduled mon update... 2026-03-31T22:55:43.553 DEBUG:teuthology.orchestra.run.vm05:mon.b> sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.b.service 2026-03-31T22:55:43.555 DEBUG:teuthology.orchestra.run.vm09:mon.c> sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.c.service 2026-03-31T22:55:43.557 INFO:tasks.cephadm:Waiting for 3 mons in monmap... 
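The mon spec applied above uses the count;host:addr=name placement form, pinning each monitor to a specific IP and daemon id rather than letting the scheduler choose. The shape, taken verbatim from the invocation in this run:

    # <count>;<host>:<addr>=<daemon-id>, entries separated by semicolons
    ceph orch apply mon '3;vm00:192.168.123.100=a;vm05:192.168.123.105=b;vm09:192.168.123.109=c'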
2026-03-31T22:55:43.557 DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph mon dump -f json 2026-03-31T22:55:43.779 INFO:teuthology.orchestra.run.vm09.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf 2026-03-31T22:55:44.089 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:55:44.089 INFO:teuthology.orchestra.run.vm09.stdout:{"epoch":1,"fsid":"8bb14950-2d54-11f1-a348-07063966e06c","modified":"2026-03-31T22:55:09.387964Z","created":"2026-03-31T22:55:09.387964Z","min_mon_release":20,"min_mon_release_name":"tentacle","election_strategy":1,"disallowed_leaders":"","stretch_mode":false,"tiebreaker_mon":"","removed_ranks":"","features":{"persistent":["kraken","luminous","mimic","osdmap-prune","nautilus","octopus","pacific","elector-pinging","quincy","reef","squid","tentacle"],"optional":[]},"mons":[{"rank":0,"name":"a","public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:3300","nonce":0},{"type":"v1","addr":"192.168.123.100:6789","nonce":0}]},"addr":"192.168.123.100:6789/0","public_addr":"192.168.123.100:6789/0","priority":0,"weight":0,"crush_location":"{}"}],"quorum":[0]} 2026-03-31T22:55:44.089 INFO:teuthology.orchestra.run.vm09.stderr:dumped monmap epoch 1 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: Deploying daemon agent.vm09 on vm09 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/15630842' entity='client.admin' cmd='[{"prefix": "osd crush tunables", "profile": "default"}]': finished 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: osdmap e4: 0 total, 0 up, 0 in 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:44.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:44 vm09 ceph-mon[98050]: mon.c@-1(synchronizing).paxosservice(auth 1..6) refresh upgraded, format 0 -> 3 2026-03-31T22:55:45.231 INFO:tasks.cephadm:Waiting for 3 mons in monmap... 
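tasks.cephadm keeps re-running ceph mon dump until all three monitors appear in the map; the same wait can be scripted directly (a minimal sketch, assuming jq):

    until [ "$(ceph mon dump -f json 2>/dev/null | jq '.mons | length')" -eq 3 ]; do
        sleep 5
    done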
2026-03-31T22:55:45.231 DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph mon dump -f json 2026-03-31T22:55:45.355 INFO:teuthology.orchestra.run.vm09.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.c/config 2026-03-31T22:55:46.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:46 vm05 ceph-mon[69577]: mon.b@-1(synchronizing).paxosservice(auth 1..6) refresh upgraded, format 0 -> 3 2026-03-31T22:55:50.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: Deploying daemon mon.b on vm05 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: mon.a calling monitor election 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: mon.c calling monitor election 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: mon.a is new leader, mons a,c in quorum (ranks 0,1) 
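mon.b is still synchronizing at this point, so the election settles on a quorum of a and c only; b joins in a later epoch. Quorum membership can be checked directly from any node with an admin keyring (a sketch; quorum_status is a standard mon command not invoked in this log):

    ceph quorum_status -f json | jq -r '.quorum_names[]'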
2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: monmap epoch 2 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: fsid 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: last_changed 2026-03-31T22:55:44.800432+0000 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: created 2026-03-31T22:55:09.387964+0000 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: min_mon_release 20 (tentacle) 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: election_strategy: 1 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: 1: [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] mon.c 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: fsmap 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: osdmap e4: 0 total, 0 up, 0 in 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: mgrmap e12: a(active, since 18s) 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: overall HEALTH_OK 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.167 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:49 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: Deploying daemon mon.b on vm05 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: mon.a calling monitor election 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 
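The monmap summary printed by the leader (rank, name, v2/v1 address pairs) corresponds field-for-field to the JSON dumped by ceph mon dump earlier in this log; a compact tabular view can be pulled out with jq (a sketch matching the .mons structure shown above):

    ceph mon dump -f json | jq -r '.mons[] | "\(.rank) \(.name) \(.public_addrs.addrvec[0].addr)"'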
2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: mon.c calling monitor election 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: mon.a is new leader, mons a,c in quorum (ranks 0,1) 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: monmap epoch 2 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: fsid 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: last_changed 2026-03-31T22:55:44.800432+0000 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: created 2026-03-31T22:55:09.387964+0000 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: min_mon_release 20 (tentacle) 2026-03-31T22:55:50.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: election_strategy: 1 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: 1: [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] mon.c 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: fsmap 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: osdmap e4: 0 total, 0 up, 0 in 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: mgrmap e12: a(active, since 18s) 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: overall HEALTH_OK 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 
192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:50.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:49 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:55:51.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:55:50 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: 2026-03-31T22:55:50.799+0000 7f80c34e2640 -1 mgr.server handle_report got status from non-daemon mon.c 2026-03-31T22:55:55.053 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:55:55.054 INFO:teuthology.orchestra.run.vm09.stdout:{"epoch":3,"fsid":"8bb14950-2d54-11f1-a348-07063966e06c","modified":"2026-03-31T22:55:50.037853Z","created":"2026-03-31T22:55:09.387964Z","min_mon_release":20,"min_mon_release_name":"tentacle","election_strategy":1,"disallowed_leaders":"","stretch_mode":false,"tiebreaker_mon":"","removed_ranks":"","features":{"persistent":["kraken","luminous","mimic","osdmap-prune","nautilus","octopus","pacific","elector-pinging","quincy","reef","squid","tentacle"],"optional":[]},"mons":[{"rank":0,"name":"a","public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:3300","nonce":0},{"type":"v1","addr":"192.168.123.100:6789","nonce":0}]},"addr":"192.168.123.100:6789/0","public_addr":"192.168.123.100:6789/0","priority":0,"weight":0,"crush_location":"{}"},{"rank":1,"name":"c","public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:3300","nonce":0},{"type":"v1","addr":"192.168.123.109:6789","nonce":0}]},"addr":"192.168.123.109:6789/0","public_addr":"192.168.123.109:6789/0","priority":0,"weight":0,"crush_location":"{}"},{"rank":2,"name":"b","public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:3300","nonce":0},{"type":"v1","addr":"192.168.123.105:6789","nonce":0}]},"addr":"192.168.123.105:6789/0","public_addr":"192.168.123.105:6789/0","priority":0,"weight":0,"crush_location":"{}"}],"quorum":[0,1,2]} 2026-03-31T22:55:55.054 INFO:teuthology.orchestra.run.vm09.stderr:dumped monmap epoch 3 2026-03-31T22:55:55.108 INFO:tasks.cephadm:Generating final ceph.conf file... 
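The monmap JSON dumped above is what the cephadm task polls to decide that all three mons have joined: the "quorum" array lists the ranks currently in quorum. A minimal sketch of the same check, assuming jq is available on the host and that cephadm can infer the single cluster's fsid (the explicit --image/-c/-k flags used in this run are omitted for brevity):

    # Count the mons in quorum straight from the monmap JSON.
    quorum=$(sudo cephadm shell -- ceph mon dump -f json 2>/dev/null | jq '.quorum | length')
    [ "$quorum" -eq 3 ] && echo "all 3 mons in quorum" || echo "only $quorum in quorum"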
2026-03-31T22:55:55.108 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph config generate-minimal-conf
2026-03-31T22:55:55.251 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config
2026-03-31T22:55:55.366 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.366 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.366 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.366 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: mon.a calling monitor election
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: mon.c calling monitor election
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='client.? 192.168.123.109:0/1836314284' entity='client.admin' cmd={"prefix": "mon dump", "format": "json"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: pgmap v4: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: mon.b calling monitor election
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: pgmap v5: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: pgmap v6: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: mon.a is new leader, mons a,c,b in quorum (ranks 0,1,2)
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: monmap epoch 3
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: fsid 8bb14950-2d54-11f1-a348-07063966e06c
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: last_changed 2026-03-31T22:55:50.037853+0000
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: created 2026-03-31T22:55:09.387964+0000
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: min_mon_release 20 (tentacle)
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: election_strategy: 1
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: 1: [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] mon.c
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: 2: [v2:192.168.123.105:3300/0,v1:192.168.123.105:6789/0] mon.b
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: fsmap
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: osdmap e4: 0 total, 0 up, 0 in
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: mgrmap e12: a(active, since 24s)
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: overall HEALTH_OK
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.367 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: mon.a calling monitor election
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: mon.c calling monitor election
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='client.? 192.168.123.109:0/1836314284' entity='client.admin' cmd={"prefix": "mon dump", "format": "json"} : dispatch
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: pgmap v4: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: mon.b calling monitor election
2026-03-31T22:55:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: pgmap v5: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: pgmap v6: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: mon.a is new leader, mons a,c,b in quorum (ranks 0,1,2)
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: monmap epoch 3
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: fsid 8bb14950-2d54-11f1-a348-07063966e06c
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: last_changed 2026-03-31T22:55:50.037853+0000
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: created 2026-03-31T22:55:09.387964+0000
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: min_mon_release 20 (tentacle)
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: election_strategy: 1
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: 1: [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] mon.c
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: 2: [v2:192.168.123.105:3300/0,v1:192.168.123.105:6789/0] mon.b
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: fsmap
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: osdmap e4: 0 total, 0 up, 0 in
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: mgrmap e12: a(active, since 24s)
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: overall HEALTH_OK
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: Deploying daemon mon.b on vm05
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mon.a calling monitor election
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mon.c calling monitor election
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mon.a is new leader, mons a,c in quorum (ranks 0,1)
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: monmap epoch 2
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: fsid 8bb14950-2d54-11f1-a348-07063966e06c
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: last_changed 2026-03-31T22:55:44.800432+0000
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: created 2026-03-31T22:55:09.387964+0000
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: min_mon_release 20 (tentacle)
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: election_strategy: 1
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: 1: [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] mon.c
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: fsmap
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: osdmap e4: 0 total, 0 up, 0 in
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mgrmap e12: a(active, since 18s)
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: overall HEALTH_OK
2026-03-31T22:55:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mon.a calling monitor election
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mon.c calling monitor election
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='client.? 192.168.123.109:0/1836314284' entity='client.admin' cmd={"prefix": "mon dump", "format": "json"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: pgmap v4: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mon.b calling monitor election
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: pgmap v5: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: pgmap v6: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mon.a is new leader, mons a,c,b in quorum (ranks 0,1,2)
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: monmap epoch 3
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: fsid 8bb14950-2d54-11f1-a348-07063966e06c
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: last_changed 2026-03-31T22:55:50.037853+0000
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: created 2026-03-31T22:55:09.387964+0000
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: min_mon_release 20 (tentacle)
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: election_strategy: 1
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: 1: [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] mon.c
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: 2: [v2:192.168.123.105:3300/0,v1:192.168.123.105:6789/0] mon.b
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: fsmap
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: osdmap e4: 0 total, 0 up, 0 in
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: mgrmap e12: a(active, since 24s)
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: overall HEALTH_OK
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.495 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.495 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.495 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.495 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.495 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:55.516 INFO:teuthology.orchestra.run.vm00.stdout:# minimal ceph.conf for 8bb14950-2d54-11f1-a348-07063966e06c
2026-03-31T22:55:55.516 INFO:teuthology.orchestra.run.vm00.stdout:[global]
2026-03-31T22:55:55.516 INFO:teuthology.orchestra.run.vm00.stdout: fsid = 8bb14950-2d54-11f1-a348-07063966e06c
2026-03-31T22:55:55.516 INFO:teuthology.orchestra.run.vm00.stdout: mon_host = [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] [v2:192.168.123.105:3300/0,v1:192.168.123.105:6789/0] [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0]
2026-03-31T22:55:55.566 INFO:tasks.cephadm:Distributing (final) config and client.admin keyring...
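The four stdout lines above are the entire minimal config: a comment, [global], fsid, and mon_host. A sketch of producing and installing the same file by hand, mirroring the dd-based distribution step that follows (assumes a single cluster on the host so cephadm can infer the fsid):

    # Regenerate the minimal conf and install it where clients expect it.
    sudo cephadm shell -- ceph config generate-minimal-conf > /tmp/minimal.conf
    sudo install -m 0644 /tmp/minimal.conf /etc/ceph/ceph.conf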
2026-03-31T22:55:55.566 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:55:55.566 DEBUG:teuthology.orchestra.run.vm00:> sudo dd of=/etc/ceph/ceph.conf
2026-03-31T22:55:55.596 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:55:55.597 DEBUG:teuthology.orchestra.run.vm00:> sudo dd of=/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:55:55.663 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:55:55.663 DEBUG:teuthology.orchestra.run.vm05:> sudo dd of=/etc/ceph/ceph.conf
2026-03-31T22:55:55.693 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:55:55.693 DEBUG:teuthology.orchestra.run.vm05:> sudo dd of=/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:55:55.761 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:55:55.761 DEBUG:teuthology.orchestra.run.vm09:> sudo dd of=/etc/ceph/ceph.conf
2026-03-31T22:55:55.789 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:55:55.789 DEBUG:teuthology.orchestra.run.vm09:> sudo dd of=/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:55:55.854 INFO:tasks.cephadm:Adding mgr.a on vm00
2026-03-31T22:55:55.855 INFO:tasks.cephadm:Adding mgr.b on vm05
2026-03-31T22:55:55.855 DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch apply mgr '2;vm00=a;vm05=b'
2026-03-31T22:55:56.050 INFO:teuthology.orchestra.run.vm09.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.c/config
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: Reconfiguring mon.a (unknown last config time)...
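The placement argument to `ceph orch apply mgr` above is a count followed by semicolon-separated host entries; the `host=name` form (vm00=a, vm05=b) pins the mgr daemon ids so the test's role names stay stable. A hedged sketch of the unpinned variant and of verifying what was scheduled (cephadm shell flags omitted, assuming the fsid can be inferred):

    # Let cephadm pick mgr daemon names instead of pinning them.
    sudo cephadm shell -- ceph orch apply mgr '2;vm00;vm05'
    # Confirm the spec and the resulting daemons.
    sudo cephadm shell -- ceph orch ls mgr
    sudo cephadm shell -- ceph orch ps --daemon-type mgr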
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: Reconfiguring daemon mon.a on vm00
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: Reconfiguring mon.b (monmap changed)...
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: Reconfiguring daemon mon.b on vm05
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/2944877780' entity='client.admin' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: Reconfiguring mon.c (monmap changed)...
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: Reconfiguring daemon mon.c on vm09
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:56.208 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: Reconfiguring mon.a (unknown last config time)...
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: Reconfiguring daemon mon.a on vm00
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: Reconfiguring mon.b (monmap changed)...
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: Reconfiguring daemon mon.b on vm05
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/2944877780' entity='client.admin' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: Reconfiguring mon.c (monmap changed)...
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: Reconfiguring daemon mon.c on vm09
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.360 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.361 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.361 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.361 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.361 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:56.361 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.361 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:56.361 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:55:56.361 INFO:teuthology.orchestra.run.vm09.stdout:Scheduled mgr update...
2026-03-31T22:55:56.409 DEBUG:teuthology.orchestra.run.vm05:mgr.b> sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.b.service
2026-03-31T22:55:56.411 DEBUG:tasks.cephadm:set 0 configs
2026-03-31T22:55:56.411 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph config dump
2026-03-31T22:55:56.429 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: Reconfiguring mon.a (unknown last config time)...
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: Reconfiguring daemon mon.a on vm00
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: Reconfiguring mon.b (monmap changed)...
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.430 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: Reconfiguring daemon mon.b on vm05
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/2944877780' entity='client.admin' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: Reconfiguring mon.c (monmap changed)...
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "mon."} : dispatch
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config get", "who": "mon", "key": "public_network"} : dispatch
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: Reconfiguring daemon mon.c on vm09
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
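Each "Reconfiguring mon.* (monmap changed)" cycle above is cephadm pushing a freshly generated minimal config to a mon after the monmap gained mon.b. A small sketch for waiting until that cycle settles, assuming jq is available and that `status_desc` in the `ceph orch ps` JSON is the textual state column (e.g. "running"):

    # Poll until every mon daemon reports "running" again after reconfigure.
    until sudo cephadm shell -- ceph orch ps --daemon-type mon -f json 2>/dev/null \
        | jq -e 'all(.[]; .status_desc == "running")' >/dev/null; do
      sleep 10
    done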
generate-minimal-conf"} : dispatch 2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:55:56.431 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:55:56.550 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:56.812 INFO:teuthology.orchestra.run.vm00.stdout:WHO MASK LEVEL OPTION VALUE RO 2026-03-31T22:55:56.812 INFO:teuthology.orchestra.run.vm00.stdout:global dev auth_debug true 2026-03-31T22:55:56.812 INFO:teuthology.orchestra.run.vm00.stdout:global basic container_image quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072 * 2026-03-31T22:55:56.812 INFO:teuthology.orchestra.run.vm00.stdout:global dev debug_asserts_on_shutdown true 2026-03-31T22:55:56.812 INFO:teuthology.orchestra.run.vm00.stdout:global basic log_to_file true 2026-03-31T22:55:56.812 INFO:teuthology.orchestra.run.vm00.stdout:global basic log_to_journald false 2026-03-31T22:55:56.812 INFO:teuthology.orchestra.run.vm00.stdout:global basic log_to_stderr false 2026-03-31T22:55:56.812 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_allow_pool_delete true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_clock_drift_allowed 1.000000 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_cluster_log_to_file true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_election_default_strategy 3 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_max_pg_per_osd 10000 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_pg_warn_max_object_skew 0.000000 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_warn_on_crush_straw_calc_version_zero false 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_warn_on_legacy_crush_tunables false 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_warn_on_osd_down_out_interval_zero false 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global dev mon_warn_on_pool_pg_num_not_power_of_two false 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced mon_warn_on_too_few_osds false 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global dev ms_die_on_bug true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global dev ms_die_on_old_message true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced osd_pool_default_erasure_code_profile plugin=isa technique=reed_sol_van k=2 m=1 crush-failure-domain=osd 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced osd_pool_default_pg_autoscale_mode off 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:global advanced public_network 192.168.123.0/24 * 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced auth_allow_insecure_global_id_reclaim false 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced auth_mon_ticket_ttl 
660.000000 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced auth_service_ticket_ttl 240.000000 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced debug_mon 20/20 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced debug_ms 1/1 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced debug_paxos 20/20 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced mon_data_avail_warn 5 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced mon_mgr_mkfs_grace 240 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon dev mon_osd_prime_pg_temp true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced mon_osd_reporter_subtree_level osd 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced mon_reweight_min_bytes_per_osd 10 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced mon_reweight_min_pgs_per_osd 4 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced mon_warn_on_insecure_global_id_reclaim false 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mon advanced mon_warn_on_insecure_global_id_reclaim_allowed false 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced debug_mgr 20/20 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced debug_ms 1/1 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced mgr/cephadm/allow_ptrace true * 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced mgr/cephadm/container_init True * 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced mgr/cephadm/migration_current 7 * 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced mgr/cephadm/use_agent true * 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced mgr/dashboard/ssl_server_port 8443 * 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced mgr/orchestrator/orchestrator cephadm 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced mon_reweight_min_bytes_per_osd 10 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:mgr advanced mon_reweight_min_pgs_per_osd 4 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev bdev_debug_aio true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced debug_ms 1/1 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced debug_osd 20/20 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev osd_debug_misdirected_ops true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev osd_debug_op_order true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev osd_debug_pg_log_writeout true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev osd_debug_shutdown true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev osd_debug_verify_cached_snaps true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev osd_debug_verify_missing_on_start true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev osd_debug_verify_stray_on_activate true 2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_deep_scrub_update_digest_min_age 30 
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd basic osd_mclock_iops_capacity_threshold_hdd 49000.000000
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_mclock_profile high_recovery_ops
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd dev osd_mclock_skip_benchmark true
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_memory_target_autotune true
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_op_queue debug_random *
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_op_queue_cut_off debug_random *
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_recover_clone_overlap true
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_recovery_max_chunk 1048576
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_scrub_load_threshold 5.000000
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_scrub_max_interval 600.000000
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:osd advanced osd_shutdown_pgref_assert true
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:client.rgw advanced rgw_cache_enabled true
2026-03-31T22:55:56.813 INFO:teuthology.orchestra.run.vm00.stdout:client.rgw advanced rgw_enable_ops_log true
2026-03-31T22:55:56.814 INFO:teuthology.orchestra.run.vm00.stdout:client.rgw advanced rgw_enable_usage_log true
2026-03-31T22:55:56.876 INFO:tasks.cephadm:Deploying OSDs...
2026-03-31T22:55:56.876 DEBUG:teuthology.orchestra.run.vm00:> set -ex
2026-03-31T22:55:56.876 DEBUG:teuthology.orchestra.run.vm00:> dd if=/scratch_devs of=/dev/stdout
2026-03-31T22:55:56.892 DEBUG:teuthology.misc:devs=['/dev/vg_nvme/lv_1', '/dev/vg_nvme/lv_2', '/dev/vg_nvme/lv_3', '/dev/vg_nvme/lv_4']
2026-03-31T22:55:56.892 DEBUG:teuthology.orchestra.run.vm00:> stat /dev/vg_nvme/lv_1
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout: File: /dev/vg_nvme/lv_1 -> ../dm-0
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout:Device: 6h/6d Inode: 955 Links: 1
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout:Access: 2026-03-31 22:55:56.572909711 +0000
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout:Modify: 2026-03-31 22:52:59.689477766 +0000
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout:Change: 2026-03-31 22:52:59.689477766 +0000
2026-03-31T22:55:56.950 INFO:teuthology.orchestra.run.vm00.stdout: Birth: 2026-03-31 22:52:59.689477766 +0000
2026-03-31T22:55:56.950 DEBUG:teuthology.orchestra.run.vm00:> sudo dd if=/dev/vg_nvme/lv_1 of=/dev/null count=1
2026-03-31T22:55:56.993 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:56 vm05 systemd[1]: Starting Ceph mgr.b for 8bb14950-2d54-11f1-a348-07063966e06c...
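[editor's note: the table above is the output of `ceph config dump` as cephadm inferred the mon.a config. A minimal sketch, assuming only the standard Ceph CLI and the fsid from this run, of how one could inspect the same settings by hand from any cluster host; the `cephadm` path is whatever copy of the binary is on that host:

    # Dump the whole configuration database, as in the log above.
    sudo cephadm shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph config dump
    # Query one effective value the dump shows, e.g. the OSD mclock profile:
    sudo cephadm shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- \
        ceph config get osd osd_mclock_profile

The trailing `*` in the RO column marks values that cannot be changed at runtime for a running daemon.]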
2026-03-31T22:55:57.016 INFO:teuthology.orchestra.run.vm00.stderr:1+0 records in
2026-03-31T22:55:57.016 INFO:teuthology.orchestra.run.vm00.stderr:1+0 records out
2026-03-31T22:55:57.016 INFO:teuthology.orchestra.run.vm00.stderr:512 bytes copied, 0.000134242 s, 3.8 MB/s
2026-03-31T22:55:57.017 DEBUG:teuthology.orchestra.run.vm00:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_1
2026-03-31T22:55:57.073 DEBUG:teuthology.orchestra.run.vm00:> stat /dev/vg_nvme/lv_2
2026-03-31T22:55:57.128 INFO:teuthology.orchestra.run.vm00.stdout: File: /dev/vg_nvme/lv_2 -> ../dm-1
2026-03-31T22:55:57.128 INFO:teuthology.orchestra.run.vm00.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:57.128 INFO:teuthology.orchestra.run.vm00.stdout:Device: 6h/6d Inode: 960 Links: 1
2026-03-31T22:55:57.128 INFO:teuthology.orchestra.run.vm00.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:57.128 INFO:teuthology.orchestra.run.vm00.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:57.128 INFO:teuthology.orchestra.run.vm00.stdout:Access: 2026-03-31 22:55:56.572909711 +0000
2026-03-31T22:55:57.128 INFO:teuthology.orchestra.run.vm00.stdout:Modify: 2026-03-31 22:52:59.690477767 +0000
2026-03-31T22:55:57.128 INFO:teuthology.orchestra.run.vm00.stdout:Change: 2026-03-31 22:52:59.690477767 +0000
2026-03-31T22:55:57.129 INFO:teuthology.orchestra.run.vm00.stdout: Birth: 2026-03-31 22:52:59.690477767 +0000
2026-03-31T22:55:57.129 DEBUG:teuthology.orchestra.run.vm00:> sudo dd if=/dev/vg_nvme/lv_2 of=/dev/null count=1
2026-03-31T22:55:57.193 INFO:teuthology.orchestra.run.vm00.stderr:1+0 records in
2026-03-31T22:55:57.193 INFO:teuthology.orchestra.run.vm00.stderr:1+0 records out
2026-03-31T22:55:57.193 INFO:teuthology.orchestra.run.vm00.stderr:512 bytes copied, 0.000112241 s, 4.6 MB/s
2026-03-31T22:55:57.194 DEBUG:teuthology.orchestra.run.vm00:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_2
2026-03-31T22:55:57.250 DEBUG:teuthology.orchestra.run.vm00:> stat /dev/vg_nvme/lv_3
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout: File: /dev/vg_nvme/lv_3 -> ../dm-2
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout:Device: 6h/6d Inode: 968 Links: 1
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout:Access: 2026-03-31 22:55:56.572909711 +0000
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout:Modify: 2026-03-31 22:52:59.691477768 +0000
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout:Change: 2026-03-31 22:52:59.691477768 +0000
2026-03-31T22:55:57.307 INFO:teuthology.orchestra.run.vm00.stdout: Birth: 2026-03-31 22:52:59.691477768 +0000
2026-03-31T22:55:57.307 DEBUG:teuthology.orchestra.run.vm00:> sudo dd if=/dev/vg_nvme/lv_3 of=/dev/null count=1
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: Updating vm00:/etc/ceph/ceph.conf
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: Updating vm05:/etc/ceph/ceph.conf
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:57 vm05 podman[70957]: 2026-03-31 22:55:57.028859569 +0000 UTC m=+0.015426448 container create 51fbff09a17eb61b9f4e2eb9f25e54db47265653f87c79b8d3071b316047a2a2 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, FROM_IMAGE=quay.io/centos/centos:stream9, org.opencontainers.image.documentation=https://docs.ceph.com/, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, org.label-schema.build-date=20260316, ceph=True, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.license=GPLv2, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, io.buildah.version=1.43.0, CEPH_REF=tentacle-release, OSD_FLAVOR=default)
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:57 vm05 podman[70957]: 2026-03-31 22:55:57.055628185 +0000 UTC m=+0.042195054 container init 51fbff09a17eb61b9f4e2eb9f25e54db47265653f87c79b8d3071b316047a2a2 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b, OSD_FLAVOR=default, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, ceph=True, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.schema-version=1.0, org.label-schema.build-date=20260316, org.opencontainers.image.documentation=https://docs.ceph.com/, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.43.0, org.label-schema.license=GPLv2, FROM_IMAGE=quay.io/centos/centos:stream9)
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:57 vm05 podman[70957]: 2026-03-31 22:55:57.060243698 +0000 UTC m=+0.046810577 container start 51fbff09a17eb61b9f4e2eb9f25e54db47265653f87c79b8d3071b316047a2a2 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b, org.opencontainers.image.authors=Ceph Release Team , io.buildah.version=1.43.0, org.label-schema.name=CentOS Stream 9 Base Image, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, ceph=True, org.label-schema.license=GPLv2, OSD_FLAVOR=default, org.label-schema.schema-version=1.0, CEPH_REF=tentacle-release, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.vendor=CentOS, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.build-date=20260316, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.opencontainers.image.documentation=https://docs.ceph.com/)
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:57 vm05 bash[70957]: 51fbff09a17eb61b9f4e2eb9f25e54db47265653f87c79b8d3071b316047a2a2
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:57 vm05 podman[70957]: 2026-03-31 22:55:57.022591763 +0000 UTC m=+0.009158642 image pull 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:57 vm05 systemd[1]: Started Ceph mgr.b for 8bb14950-2d54-11f1-a348-07063966e06c.
2026-03-31T22:55:57.341 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mgr[70973]: -- 192.168.123.105:0/3461893176 <== mon.2 v2:192.168.123.105:3300/0 4 ==== auth_reply(proto 2 0 (0) Success) ==== 194+0+0 (secure 0 0 0) 0x55fe71828000 con 0x55fe71807400
2026-03-31T22:55:57.387 INFO:teuthology.orchestra.run.vm00.stderr:1+0 records in
2026-03-31T22:55:57.387 INFO:teuthology.orchestra.run.vm00.stderr:1+0 records out
2026-03-31T22:55:57.387 INFO:teuthology.orchestra.run.vm00.stderr:512 bytes copied, 0.000198681 s, 2.6 MB/s
2026-03-31T22:55:57.388 DEBUG:teuthology.orchestra.run.vm00:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_3
2026-03-31T22:55:57.448 DEBUG:teuthology.orchestra.run.vm00:> stat /dev/vg_nvme/lv_4
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout: File: /dev/vg_nvme/lv_4 -> ../dm-3
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout:Device: 6h/6d Inode: 971 Links: 1
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout:Access: 2026-03-31 22:55:56.572909711 +0000
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout:Modify: 2026-03-31 22:52:59.692477769 +0000
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout:Change: 2026-03-31 22:52:59.692477769 +0000
2026-03-31T22:55:57.508 INFO:teuthology.orchestra.run.vm00.stdout: Birth: 2026-03-31 22:52:59.692477769 +0000
2026-03-31T22:55:57.508 DEBUG:teuthology.orchestra.run.vm00:> sudo dd if=/dev/vg_nvme/lv_4 of=/dev/null count=1
2026-03-31T22:55:57.573 INFO:teuthology.orchestra.run.vm00.stderr:1+0 records in
2026-03-31T22:55:57.573 INFO:teuthology.orchestra.run.vm00.stderr:1+0 records out
2026-03-31T22:55:57.573 INFO:teuthology.orchestra.run.vm00.stderr:512 bytes copied, 0.000200836 s, 2.5 MB/s
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: Updating vm00:/etc/ceph/ceph.conf
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: Updating vm05:/etc/ceph/ceph.conf
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: Updating vm09:/etc/ceph/ceph.conf
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='client.24103 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "mgr", "placement": "2;vm00=a;vm05=b", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: Saving service mgr spec with placement vm00=a;vm05=b;count:2
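[editor's note: the `orch apply` entry above is teuthology pinning two named mgr daemons by host. A minimal sketch, assuming the standard orchestrator CLI, of issuing and then verifying the same placement spec recorded in the log (the quoted placement string "2;vm00=a;vm05=b" is copied from the entry above):

    # Apply an mgr spec with a count of 2 and named daemons on fixed hosts,
    # mirroring the spec the log shows being saved.
    ceph orch apply mgr '2;vm00=a;vm05=b'
    # Confirm the saved spec and the resulting daemons.
    ceph orch ls mgr
    ceph orch ps --daemon-type mgr
]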
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "mgr.b", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]} : dispatch
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "auth get-or-create", "entity": "mgr.b", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]}]': finished
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr services"} : dispatch
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: Deploying daemon mgr.b on vm05
2026-03-31T22:55:57.573 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3025451472' entity='client.admin' cmd={"prefix": "config dump"} : dispatch
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: pgmap v7: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "mgr.a", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]} : dispatch
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr services"} : dispatch
2026-03-31T22:55:57.574 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:57 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.574 DEBUG:teuthology.orchestra.run.vm00:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_4
2026-03-31T22:55:57.602 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: Updating vm09:/etc/ceph/ceph.conf
2026-03-31T22:55:57.602 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.602 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.602 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.602 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.602 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='client.24103 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "mgr", "placement": "2;vm00=a;vm05=b", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: Saving service mgr spec with placement vm00=a;vm05=b;count:2
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "mgr.b", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "auth get-or-create", "entity": "mgr.b", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]}]': finished
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr services"} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: Deploying daemon mgr.b on vm05
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/3025451472' entity='client.admin' cmd={"prefix": "config dump"} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: pgmap v7: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "mgr.a", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr services"} : dispatch
2026-03-31T22:55:57.603 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:57 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.633 DEBUG:teuthology.orchestra.run.vm05:> set -ex
2026-03-31T22:55:57.633 DEBUG:teuthology.orchestra.run.vm05:> dd if=/scratch_devs of=/dev/stdout
2026-03-31T22:55:57.652 DEBUG:teuthology.misc:devs=['/dev/vg_nvme/lv_1', '/dev/vg_nvme/lv_2', '/dev/vg_nvme/lv_3', '/dev/vg_nvme/lv_4']
2026-03-31T22:55:57.653 DEBUG:teuthology.orchestra.run.vm05:> stat /dev/vg_nvme/lv_1
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout: File: /dev/vg_nvme/lv_1 -> ../dm-0
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout:Device: 6h/6d Inode: 955 Links: 1
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout:Access: 2026-03-31 22:55:57.712924075 +0000
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout:Modify: 2026-03-31 22:52:57.388621554 +0000
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout:Change: 2026-03-31 22:52:57.388621554 +0000
2026-03-31T22:55:57.720 INFO:teuthology.orchestra.run.vm05.stdout: Birth: 2026-03-31 22:52:57.388621554 +0000
2026-03-31T22:55:57.720 DEBUG:teuthology.orchestra.run.vm05:> sudo dd if=/dev/vg_nvme/lv_1 of=/dev/null count=1
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: Updating vm00:/etc/ceph/ceph.conf
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: Updating vm05:/etc/ceph/ceph.conf
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: Updating vm09:/etc/ceph/ceph.conf
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='client.24103 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "mgr", "placement": "2;vm00=a;vm05=b", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: Saving service mgr spec with placement vm00=a;vm05=b;count:2
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "mgr.b", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]} : dispatch
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "auth get-or-create", "entity": "mgr.b", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]}]': finished
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr services"} : dispatch
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: Deploying daemon mgr.b on vm05
2026-03-31T22:55:57.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/3025451472' entity='client.admin' cmd={"prefix": "config dump"} : dispatch
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: pgmap v7: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "mgr.a", "caps": ["mon", "profile mgr", "osd", "allow *", "mds", "allow *"]} : dispatch
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr services"} : dispatch
2026-03-31T22:55:57.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:57 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:57.792 INFO:teuthology.orchestra.run.vm05.stderr:1+0 records in
2026-03-31T22:55:57.792 INFO:teuthology.orchestra.run.vm05.stderr:1+0 records out
2026-03-31T22:55:57.792 INFO:teuthology.orchestra.run.vm05.stderr:512 bytes copied, 0.000291936 s, 1.8 MB/s
2026-03-31T22:55:57.794 DEBUG:teuthology.orchestra.run.vm05:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_1
2026-03-31T22:55:57.854 DEBUG:teuthology.orchestra.run.vm05:> stat /dev/vg_nvme/lv_2
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout: File: /dev/vg_nvme/lv_2 -> ../dm-1
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout:Device: 6h/6d Inode: 960 Links: 1
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout:Access: 2026-03-31 22:55:57.894924412 +0000
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout:Modify: 2026-03-31 22:52:57.390621556 +0000
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout:Change: 2026-03-31 22:52:57.390621556 +0000
2026-03-31T22:55:57.915 INFO:teuthology.orchestra.run.vm05.stdout: Birth: 2026-03-31 22:52:57.390621556 +0000
2026-03-31T22:55:57.915 DEBUG:teuthology.orchestra.run.vm05:> sudo dd if=/dev/vg_nvme/lv_2 of=/dev/null count=1
2026-03-31T22:55:57.982 INFO:teuthology.orchestra.run.vm05.stderr:1+0 records in
2026-03-31T22:55:57.982 INFO:teuthology.orchestra.run.vm05.stderr:1+0 records out
2026-03-31T22:55:57.983 INFO:teuthology.orchestra.run.vm05.stderr:512 bytes copied, 0.000157526 s, 3.3 MB/s
2026-03-31T22:55:57.984 DEBUG:teuthology.orchestra.run.vm05:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_2
2026-03-31T22:55:58.040 DEBUG:teuthology.orchestra.run.vm05:> stat /dev/vg_nvme/lv_3
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout: File: /dev/vg_nvme/lv_3 -> ../dm-2
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout:Device: 6h/6d Inode: 950 Links: 1
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout:Access: 2026-03-31 22:55:57.894924412 +0000
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout:Modify: 2026-03-31 22:52:57.387621553 +0000
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout:Change: 2026-03-31 22:52:57.387621553 +0000
2026-03-31T22:55:58.099 INFO:teuthology.orchestra.run.vm05.stdout: Birth: 2026-03-31 22:52:57.387621553 +0000
2026-03-31T22:55:58.099 DEBUG:teuthology.orchestra.run.vm05:> sudo dd if=/dev/vg_nvme/lv_3 of=/dev/null count=1
2026-03-31T22:55:58.166 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: /lib64/python3.9/site-packages/scipy/__init__.py:73: UserWarning: NumPy was imported from a Python sub-interpreter but NumPy does not properly support sub-interpreters. This will likely work for most users but might cause hard to track down issues or subtle bugs. A common user of the rare sub-interpreter feature is wsgi which also allows single-interpreter mode.
2026-03-31T22:55:58.166 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: Improvements in the case of bugs are welcome, but is not on the NumPy roadmap, and full support may require significant effort to achieve.
2026-03-31T22:55:58.166 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: from numpy import show_config as show_numpy_config
2026-03-31T22:55:58.168 INFO:teuthology.orchestra.run.vm05.stderr:1+0 records in
2026-03-31T22:55:58.168 INFO:teuthology.orchestra.run.vm05.stderr:1+0 records out
2026-03-31T22:55:58.168 INFO:teuthology.orchestra.run.vm05.stderr:512 bytes copied, 0.000207287 s, 2.5 MB/s
2026-03-31T22:55:58.169 DEBUG:teuthology.orchestra.run.vm05:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_3
2026-03-31T22:55:58.227 DEBUG:teuthology.orchestra.run.vm05:> stat /dev/vg_nvme/lv_4
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout: File: /dev/vg_nvme/lv_4 -> ../dm-3
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout:Device: 6h/6d Inode: 967 Links: 1
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout:Access: 2026-03-31 22:55:57.894924412 +0000
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout:Modify: 2026-03-31 22:52:57.392621557 +0000
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout:Change: 2026-03-31 22:52:57.392621557 +0000
2026-03-31T22:55:58.282 INFO:teuthology.orchestra.run.vm05.stdout: Birth: 2026-03-31 22:52:57.392621557 +0000
2026-03-31T22:55:58.282 DEBUG:teuthology.orchestra.run.vm05:> sudo dd if=/dev/vg_nvme/lv_4 of=/dev/null count=1
2026-03-31T22:55:58.348 INFO:teuthology.orchestra.run.vm05.stderr:1+0 records in
2026-03-31T22:55:58.348 INFO:teuthology.orchestra.run.vm05.stderr:1+0 records out
2026-03-31T22:55:58.349 INFO:teuthology.orchestra.run.vm05.stderr:512 bytes copied, 0.000156583 s, 3.3 MB/s
2026-03-31T22:55:58.350 DEBUG:teuthology.orchestra.run.vm05:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_4
2026-03-31T22:55:58.407 DEBUG:teuthology.orchestra.run.vm09:> set -ex
2026-03-31T22:55:58.407 DEBUG:teuthology.orchestra.run.vm09:> dd if=/scratch_devs of=/dev/stdout
2026-03-31T22:55:58.422 DEBUG:teuthology.misc:devs=['/dev/vg_nvme/lv_1', '/dev/vg_nvme/lv_2', '/dev/vg_nvme/lv_3', '/dev/vg_nvme/lv_4']
2026-03-31T22:55:58.422 DEBUG:teuthology.orchestra.run.vm09:> stat /dev/vg_nvme/lv_1
2026-03-31T22:55:58.459 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:55:58.459 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: Reconfiguring mgr.a (unknown last config time)...
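[editor's note: the stat/dd/mount sequences above and below are teuthology's per-host scratch-device probe before any LV is handed to an OSD. A standalone sketch of the same three checks, using exactly the commands and devices from this run:

    # For every LV destined for an OSD, verify that the device node exists,
    # that its first 512-byte sector is readable, and that it is not mounted.
    for dev in /dev/vg_nvme/lv_1 /dev/vg_nvme/lv_2 /dev/vg_nvme/lv_3 /dev/vg_nvme/lv_4; do
        stat "$dev"                                   # device node present?
        sudo dd if="$dev" of=/dev/null count=1        # one sector readable?
        ! mount | grep -v devtmpfs | grep -q "$dev"   # fails the run if mounted
    done

Under `set -ex`, any failed check aborts the task immediately.]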
2026-03-31T22:55:58.459 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: Reconfiguring daemon mgr.a on vm00
2026-03-31T22:55:58.459 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.459 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.460 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout: File: /dev/vg_nvme/lv_1 -> ../dm-0
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout:Device: 6h/6d Inode: 955 Links: 1
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout:Access: 2026-03-31 22:55:50.701604099 +0000
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout:Modify: 2026-03-31 22:52:57.049145368 +0000
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout:Change: 2026-03-31 22:52:57.049145368 +0000
2026-03-31T22:55:58.476 INFO:teuthology.orchestra.run.vm09.stdout: Birth: 2026-03-31 22:52:57.049145368 +0000
2026-03-31T22:55:58.476 DEBUG:teuthology.orchestra.run.vm09:> sudo dd if=/dev/vg_nvme/lv_1 of=/dev/null count=1
2026-03-31T22:55:58.539 INFO:teuthology.orchestra.run.vm09.stderr:1+0 records in
2026-03-31T22:55:58.539 INFO:teuthology.orchestra.run.vm09.stderr:1+0 records out
2026-03-31T22:55:58.539 INFO:teuthology.orchestra.run.vm09.stderr:512 bytes copied, 0.000100838 s, 5.1 MB/s
2026-03-31T22:55:58.540 DEBUG:teuthology.orchestra.run.vm09:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_1
2026-03-31T22:55:58.594 DEBUG:teuthology.orchestra.run.vm09:> stat /dev/vg_nvme/lv_2
2026-03-31T22:55:58.650 INFO:teuthology.orchestra.run.vm09.stdout: File: /dev/vg_nvme/lv_2 -> ../dm-1
2026-03-31T22:55:58.651 INFO:teuthology.orchestra.run.vm09.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:58.651 INFO:teuthology.orchestra.run.vm09.stdout:Device: 6h/6d Inode: 966 Links: 1
2026-03-31T22:55:58.651 INFO:teuthology.orchestra.run.vm09.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:58.651 INFO:teuthology.orchestra.run.vm09.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:58.651 INFO:teuthology.orchestra.run.vm09.stdout:Access: 2026-03-31 22:55:50.701604099 +0000
2026-03-31T22:55:58.651 INFO:teuthology.orchestra.run.vm09.stdout:Modify: 2026-03-31 22:52:57.051145370 +0000
2026-03-31T22:55:58.651 INFO:teuthology.orchestra.run.vm09.stdout:Change: 2026-03-31 22:52:57.051145370 +0000
2026-03-31T22:55:58.651 INFO:teuthology.orchestra.run.vm09.stdout: Birth: 2026-03-31 22:52:57.051145370 +0000
2026-03-31T22:55:58.651 DEBUG:teuthology.orchestra.run.vm09:> sudo dd if=/dev/vg_nvme/lv_2 of=/dev/null count=1
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: Reconfiguring mgr.a (unknown last config time)...
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: Reconfiguring daemon mgr.a on vm00
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.711 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.713 INFO:teuthology.orchestra.run.vm09.stderr:1+0 records in
2026-03-31T22:55:58.713 INFO:teuthology.orchestra.run.vm09.stderr:1+0 records out
2026-03-31T22:55:58.713 INFO:teuthology.orchestra.run.vm09.stderr:512 bytes copied, 0.000120665 s, 4.2 MB/s
2026-03-31T22:55:58.714 DEBUG:teuthology.orchestra.run.vm09:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_2
2026-03-31T22:55:58.769 DEBUG:teuthology.orchestra.run.vm09:> stat /dev/vg_nvme/lv_3
2026-03-31T22:55:58.826 INFO:teuthology.orchestra.run.vm09.stdout: File: /dev/vg_nvme/lv_3 -> ../dm-2
2026-03-31T22:55:58.826 INFO:teuthology.orchestra.run.vm09.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:58.826 INFO:teuthology.orchestra.run.vm09.stdout:Device: 6h/6d Inode: 956 Links: 1
2026-03-31T22:55:58.826 INFO:teuthology.orchestra.run.vm09.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:58.826 INFO:teuthology.orchestra.run.vm09.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:58.827 INFO:teuthology.orchestra.run.vm09.stdout:Access: 2026-03-31 22:55:50.701604099 +0000
2026-03-31T22:55:58.827 INFO:teuthology.orchestra.run.vm09.stdout:Modify: 2026-03-31 22:52:57.049145368 +0000
2026-03-31T22:55:58.827 INFO:teuthology.orchestra.run.vm09.stdout:Change: 2026-03-31 22:52:57.049145368 +0000
2026-03-31T22:55:58.827 INFO:teuthology.orchestra.run.vm09.stdout: Birth: 2026-03-31 22:52:57.049145368 +0000
2026-03-31T22:55:58.827 DEBUG:teuthology.orchestra.run.vm09:> sudo dd if=/dev/vg_nvme/lv_3 of=/dev/null count=1
2026-03-31T22:55:58.889 INFO:teuthology.orchestra.run.vm09.stderr:1+0 records in
2026-03-31T22:55:58.889 INFO:teuthology.orchestra.run.vm09.stderr:1+0 records out
2026-03-31T22:55:58.889 INFO:teuthology.orchestra.run.vm09.stderr:512 bytes copied, 0.000140572 s, 3.6 MB/s
2026-03-31T22:55:58.890 DEBUG:teuthology.orchestra.run.vm09:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_3
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: Reconfiguring mgr.a (unknown last config time)...
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: Reconfiguring daemon mgr.a on vm00
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:55:58.945 DEBUG:teuthology.orchestra.run.vm09:> stat /dev/vg_nvme/lv_4
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout: File: /dev/vg_nvme/lv_4 -> ../dm-3
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout: Size: 7 Blocks: 0 IO Block: 4096 symbolic link
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout:Device: 6h/6d Inode: 972 Links: 1
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout:Access: (0777/lrwxrwxrwx) Uid: ( 0/ root) Gid: ( 0/ root)
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout:Context: system_u:object_r:device_t:s0
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout:Access: 2026-03-31 22:55:50.701604099 +0000
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout:Modify: 2026-03-31 22:52:57.052145371 +0000
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout:Change: 2026-03-31 22:52:57.052145371 +0000
2026-03-31T22:55:59.001 INFO:teuthology.orchestra.run.vm09.stdout: Birth: 2026-03-31 22:52:57.052145371 +0000
2026-03-31T22:55:59.001 DEBUG:teuthology.orchestra.run.vm09:> sudo dd if=/dev/vg_nvme/lv_4 of=/dev/null count=1
2026-03-31T22:55:59.063 INFO:teuthology.orchestra.run.vm09.stderr:1+0 records in
2026-03-31T22:55:59.064 INFO:teuthology.orchestra.run.vm09.stderr:1+0 records out
2026-03-31T22:55:59.064 INFO:teuthology.orchestra.run.vm09.stderr:512 bytes copied, 0.000129611 s, 4.0 MB/s
2026-03-31T22:55:59.065 DEBUG:teuthology.orchestra.run.vm09:> ! mount | grep -v devtmpfs | grep -q /dev/vg_nvme/lv_4
2026-03-31T22:55:59.119 INFO:tasks.cephadm:Deploying osd.0 on vm00 with /dev/vg_nvme/lv_4...
2026-03-31T22:55:59.119 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 ceph-volume -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- lvm zap /dev/vg_nvme/lv_4 2026-03-31T22:55:59.246 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:55:59.463 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:55:59 vm09 ceph-mon[98050]: pgmap v8: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:55:59.649 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:55:59 vm00 ceph-mon[61968]: pgmap v8: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:55:59.715 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:55:59.732 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch daemon add osd vm00:vg_nvme/lv_4 --skip-validation 2026-03-31T22:55:59.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:55:59 vm05 ceph-mon[69577]: pgmap v8: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:55:59.854 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:01.147 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.147 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='client.14223 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm00:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:01.147 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: osd.default does not exist. Creating it now. 2026-03-31T22:56:01.147 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: Creating OSDs with service ID: default on vm00:['vg_nvme/lv_4'] 2026-03-31T22:56:01.147 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: Marking host: vm00 for OSDSpec preview refresh. 
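The two vm00 commands above are the per-device deployment pattern this task repeats for every OSD: `ceph-volume lvm zap` first wipes any previous LVM/BlueStore signatures from the LV, then `ceph orch daemon add osd <host>:<vg>/<lv> --skip-validation` hands the clean device to the orchestrator (validation is skipped because the harness already probed the device itself). Stripped of the --image/-c/-k/--fsid plumbing the harness passes explicitly, the sequence against an admin host is roughly:

    # Wipe the LV, then register it with the orchestrator as a new OSD.
    sudo cephadm ceph-volume -- lvm zap /dev/vg_nvme/lv_4
    sudo cephadm shell -- ceph orch daemon add osd vm00:vg_nvme/lv_4 --skip-validation
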
2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: Saving service osd.default spec with placement vm00 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: Standby manager daemon b started 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/crt"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.? 
192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/crt"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/key"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/key"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3869235171' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "e8d1d763-2967-4800-93f0-dda88d3e9875"} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3869235171' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "e8d1d763-2967-4800-93f0-dda88d3e9875"}]': finished 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: osdmap e5: 1 total, 0 up, 1 in 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:01.148 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:01 vm00 ceph-mon[61968]: pgmap v10: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='client.14223 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm00:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: osd.default does not exist. Creating it now. 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: Creating OSDs with service ID: default on vm00:['vg_nvme/lv_4'] 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: Marking host: vm00 for OSDSpec preview refresh. 
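Worth noting in the mon records above: before allocating an id, the orchestrator dispatches `osd tree` filtered to the "destroyed" state so it can recycle the id of any previously destroyed OSD; none exists here, so `osd new` with uuid e8d1d763-2967-4800-93f0-dda88d3e9875 allocates osd.0 and the osdmap moves to "e5: 1 total, 0 up, 1 in". The same query can be run by hand; a sketch (the jq filter is an assumption about the JSON layout, which places OSDs in a "nodes" array):

    # List destroyed OSD ids that the orchestrator could recycle for new devices.
    ceph osd tree destroyed -f json | jq '.nodes[] | select(.type == "osd") | .id'
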
2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: Saving service osd.default spec with placement vm00 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: Standby manager daemon b started 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/crt"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.? 
192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/crt"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/key"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/key"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/3869235171' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "e8d1d763-2967-4800-93f0-dda88d3e9875"} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/3869235171' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "e8d1d763-2967-4800-93f0-dda88d3e9875"}]': finished 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: osdmap e5: 1 total, 0 up, 1 in 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:01 vm09 ceph-mon[98050]: pgmap v10: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='client.14223 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm00:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: osd.default does not exist. Creating it now. 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: Creating OSDs with service ID: default on vm00:['vg_nvme/lv_4'] 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: Marking host: vm00 for OSDSpec preview refresh. 
2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: Saving service osd.default spec with placement vm00 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: Standby manager daemon b started 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/crt"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.? 
192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/crt"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/key"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.? 192.168.123.105:0/1313023672' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/key"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/3869235171' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "e8d1d763-2967-4800-93f0-dda88d3e9875"} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/3869235171' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "e8d1d763-2967-4800-93f0-dda88d3e9875"}]': finished 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: osdmap e5: 1 total, 0 up, 1 in 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:01 vm05 ceph-mon[69577]: pgmap v10: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:02.152 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:02 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/2534949888' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:02.152 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:02 vm00 ceph-mon[61968]: mgrmap e13: a(active, since 30s), standbys: b 2026-03-31T22:56:02.152 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:02 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "b", "id": "b"} : dispatch 2026-03-31T22:56:02.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:02 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/2534949888' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:02.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:02 vm09 ceph-mon[98050]: mgrmap e13: a(active, since 30s), standbys: b 2026-03-31T22:56:02.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:02 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "b", "id": "b"} : dispatch 2026-03-31T22:56:02.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:02 vm05 ceph-mon[69577]: from='client.? 
192.168.123.100:0/2534949888' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:02.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:02 vm05 ceph-mon[69577]: mgrmap e13: a(active, since 30s), standbys: b 2026-03-31T22:56:02.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:02 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "b", "id": "b"} : dispatch 2026-03-31T22:56:03.244 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:03 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.0"} : dispatch 2026-03-31T22:56:03.245 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:03 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:03.245 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:03 vm00 ceph-mon[61968]: Deploying daemon osd.0 on vm00 2026-03-31T22:56:03.245 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:03 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.0"} : dispatch 2026-03-31T22:56:03.245 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:03 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:03.245 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:03 vm00 ceph-mon[61968]: Deploying daemon osd.0 on vm00 2026-03-31T22:56:03.245 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:03 vm00 ceph-mon[61968]: pgmap v11: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:03 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.0"} : dispatch 2026-03-31T22:56:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:03 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:03 vm09 ceph-mon[98050]: Deploying daemon osd.0 on vm00 2026-03-31T22:56:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:03 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.0"} : dispatch 2026-03-31T22:56:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:03 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:03 vm09 ceph-mon[98050]: Deploying daemon osd.0 on vm00 2026-03-31T22:56:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:03 vm09 ceph-mon[98050]: pgmap v11: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:03.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:03 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.0"} : dispatch 2026-03-31T22:56:03.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:03 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:03.493 
INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:03 vm05 ceph-mon[69577]: Deploying daemon osd.0 on vm00 2026-03-31T22:56:03.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:03 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.0"} : dispatch 2026-03-31T22:56:03.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:03 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:03.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:03 vm05 ceph-mon[69577]: Deploying daemon osd.0 on vm00 2026-03-31T22:56:03.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:03 vm05 ceph-mon[69577]: pgmap v11: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:05.612 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:05 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:05.612 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:05 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:05.612 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:05 vm00 ceph-mon[61968]: from='osd.0 [v2:192.168.123.100:6802/3093808026,v1:192.168.123.100:6803/3093808026]' entity='osd.0' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]} : dispatch 2026-03-31T22:56:05.612 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:05 vm00 ceph-mon[61968]: pgmap v12: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:05.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:05 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:05.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:05 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:05.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:05 vm09 ceph-mon[98050]: from='osd.0 [v2:192.168.123.100:6802/3093808026,v1:192.168.123.100:6803/3093808026]' entity='osd.0' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]} : dispatch 2026-03-31T22:56:05.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:05 vm09 ceph-mon[98050]: pgmap v12: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:05.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:05 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:05.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:05 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:05.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:05 vm05 ceph-mon[69577]: from='osd.0 [v2:192.168.123.100:6802/3093808026,v1:192.168.123.100:6803/3093808026]' entity='osd.0' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]} : dispatch 2026-03-31T22:56:05.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:05 vm05 ceph-mon[69577]: pgmap v12: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:06.762 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:06 vm00 ceph-mon[61968]: from='osd.0 [v2:192.168.123.100:6802/3093808026,v1:192.168.123.100:6803/3093808026]' entity='osd.0' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]}]': finished 2026-03-31T22:56:06.762 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:06 vm00 ceph-mon[61968]: osdmap e6: 1 total, 0 
up, 1 in 2026-03-31T22:56:06.762 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:06 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:06.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:06 vm09 ceph-mon[98050]: from='osd.0 [v2:192.168.123.100:6802/3093808026,v1:192.168.123.100:6803/3093808026]' entity='osd.0' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]}]': finished 2026-03-31T22:56:06.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:06 vm09 ceph-mon[98050]: osdmap e6: 1 total, 0 up, 1 in 2026-03-31T22:56:06.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:06 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:06.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:06 vm05 ceph-mon[69577]: from='osd.0 [v2:192.168.123.100:6802/3093808026,v1:192.168.123.100:6803/3093808026]' entity='osd.0' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]}]': finished 2026-03-31T22:56:06.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:06 vm05 ceph-mon[69577]: osdmap e6: 1 total, 0 up, 1 in 2026-03-31T22:56:06.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:06 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:07.572 INFO:teuthology.orchestra.run.vm00.stdout:Created osd(s) 0 on host 'vm00' 2026-03-31T22:56:07.619 DEBUG:teuthology.orchestra.run.vm00:osd.0> sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.0.service 2026-03-31T22:56:07.620 INFO:tasks.cephadm:Deploying osd.1 on vm05 with /dev/vg_nvme/lv_4... 
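With the mgr reporting the daemon placed, teuthology logs "Created osd(s) 0 on host 'vm00'" and immediately starts tailing the new daemon's systemd journal; that tail is the source of the journalctl@ceph.osd.0 lines that follow. cephadm names each unit ceph-<fsid>@<daemon>.service, so the equivalent manual tail for this cluster is:

    # Follow the freshly deployed OSD's journal from this point onward.
    sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.0.service
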
2026-03-31T22:56:07.620 DEBUG:teuthology.orchestra.run.vm05:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 ceph-volume -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- lvm zap /dev/vg_nvme/lv_4 2026-03-31T22:56:07.742 INFO:teuthology.orchestra.run.vm05.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.b/config 2026-03-31T22:56:07.806 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:07 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:07.806 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:07 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:07.806 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:07 vm00 ceph-mon[61968]: pgmap v14: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:07.806 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:07 vm00 ceph-mon[61968]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]} : dispatch 2026-03-31T22:56:07.934 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:07 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:07.934 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:07 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:07.934 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:07 vm05 ceph-mon[69577]: pgmap v14: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:07.934 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:07 vm05 ceph-mon[69577]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]} : dispatch 2026-03-31T22:56:07.934 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:07 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:07.934 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:07 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:07 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:07 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:07 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:07 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:07 vm09 ceph-mon[98050]: pgmap v14: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:08.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:07 vm09 ceph-mon[98050]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]} : dispatch 2026-03-31T22:56:08.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:07 vm09 ceph-mon[98050]: from='mgr.14152 
192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:07 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.288 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T22:56:08.304 DEBUG:teuthology.orchestra.run.vm05:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch daemon add osd vm05:vg_nvme/lv_4 --skip-validation 2026-03-31T22:56:08.435 INFO:teuthology.orchestra.run.vm05.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.b/config 2026-03-31T22:56:08.825 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]}]': finished 2026-03-31T22:56:08.825 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: osdmap e7: 1 total, 0 up, 1 in 2026-03-31T22:56:08.825 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd={"prefix": "osd crush create-or-move", "id": 0, "weight":0.0195, "args": ["host=vm00", "root=default"]} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.0", "name": "osd_memory_target"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", 
"entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:08.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:08 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:09.082 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 22:56:08 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0[72064]: 2026-03-31T22:56:08.822+0000 7facba78e640 -1 osd.0 0 waiting for initial osdmap 2026-03-31T22:56:09.082 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 22:56:08 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0[72064]: 2026-03-31T22:56:08.829+0000 7facb4d4b640 -1 osd.0 8 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-03-31T22:56:09.206 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]}]': finished 2026-03-31T22:56:09.206 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: osdmap e7: 1 total, 0 up, 1 in 2026-03-31T22:56:09.206 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd={"prefix": "osd crush create-or-move", "id": 0, "weight":0.0195, "args": ["host=vm00", "root=default"]} : dispatch 2026-03-31T22:56:09.206 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:09.206 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.206 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.0", "name": "osd_memory_target"} : dispatch 
2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:09.207 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:08 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["0"]}]': finished 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: osdmap e7: 1 total, 0 up, 1 in 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd={"prefix": "osd crush create-or-move", "id": 0, "weight":0.0195, "args": ["host=vm00", "root=default"]} : dispatch 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: 
from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.0", "name": "osd_memory_target"} : dispatch 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:09.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:09.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:09.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:09.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:09.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:09.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:08 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: Adjusting osd_memory_target on vm00 to 635.4M 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: Unable to set osd_memory_target on vm00 to 666352844: error parsing value: Value '666352844' is below minimum 939524096 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='client.24128 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm05:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:09.915 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: osd.default does not exist. Creating it now. 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: Creating OSDs with service ID: default on vm05:['vg_nvme/lv_4'] 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: Marking host: vm05 for OSDSpec preview refresh. 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: Saving service osd.default spec with placement vm05 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd='[{"prefix": "osd crush create-or-move", "id": 0, "weight":0.0195, "args": ["host=vm00", "root=default"]}]': finished 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: osdmap e8: 1 total, 0 up, 1 in 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: pgmap v17: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='client.? 192.168.123.105:0/656820510' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"} : dispatch 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='client.? ' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"} : dispatch 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='client.? ' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"}]': finished 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830] boot 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: osdmap e9: 2 total, 1 up, 2 in 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:09.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:09 vm00 ceph-mon[61968]: from='client.? 
192.168.123.105:0/682016972' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:10.180 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: Adjusting osd_memory_target on vm00 to 635.4M 2026-03-31T22:56:10.180 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: Unable to set osd_memory_target on vm00 to 666352844: error parsing value: Value '666352844' is below minimum 939524096 2026-03-31T22:56:10.180 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='client.24128 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm05:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:10.180 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: osd.default does not exist. Creating it now. 2026-03-31T22:56:10.180 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: Creating OSDs with service ID: default on vm05:['vg_nvme/lv_4'] 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: Marking host: vm05 for OSDSpec preview refresh. 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: Saving service osd.default spec with placement vm05 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd='[{"prefix": "osd crush create-or-move", "id": 0, "weight":0.0195, "args": ["host=vm00", "root=default"]}]': finished 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: osdmap e8: 1 total, 0 up, 1 in 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: pgmap v17: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='client.? 192.168.123.105:0/656820510' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"} : dispatch 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='client.? ' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"} : dispatch 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='client.? 
' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"}]': finished 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830] boot 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: osdmap e9: 2 total, 1 up, 2 in 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:10.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:09 vm05 ceph-mon[69577]: from='client.? 192.168.123.105:0/682016972' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:10.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: Adjusting osd_memory_target on vm00 to 635.4M 2026-03-31T22:56:10.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: Unable to set osd_memory_target on vm00 to 666352844: error parsing value: Value '666352844' is below minimum 939524096 2026-03-31T22:56:10.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='client.24128 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm05:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: osd.default does not exist. Creating it now. 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: Creating OSDs with service ID: default on vm05:['vg_nvme/lv_4'] 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: Marking host: vm05 for OSDSpec preview refresh. 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: Saving service osd.default spec with placement vm05 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830]' entity='osd.0' cmd='[{"prefix": "osd crush create-or-move", "id": 0, "weight":0.0195, "args": ["host=vm00", "root=default"]}]': finished 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: osdmap e8: 1 total, 0 up, 1 in 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: pgmap v17: 0 pgs: ; 0 B data, 0 B used, 0 B / 0 B avail 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='client.? 
192.168.123.105:0/656820510' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"} : dispatch 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='client.? ' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"} : dispatch 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='client.? ' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8"}]': finished 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: osd.0 [v2:192.168.123.100:6802/2458914830,v1:192.168.123.100:6803/2458914830] boot 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: osdmap e9: 2 total, 1 up, 2 in 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:10.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:09 vm09 ceph-mon[98050]: from='client.? 192.168.123.105:0/682016972' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: purged_snaps scrub starts 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: purged_snaps scrub ok 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 
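The repeated "Unable to set osd_memory_target ... below minimum 939524096" messages above appear to be cephadm's memory autotuner at work: it computes a per-OSD share of host RAM, and on these small VMs the share (666352844 bytes, the "635.4M" in the preceding line) lands under the hard floor Ceph enforces for osd_memory_target, so the config set is refused and the warning recurs on every tuning pass. The numbers check out with plain shell arithmetic (a sketch; the 896 MiB floor is the stock minimum for the option, not something this job sets):

    echo $(( 666352844 / 1024 / 1024 ))   # -> 635, i.e. the ~635.4 MiB target the autotuner computed
    echo $(( 896 * 1024 * 1024 ))         # -> 939524096, the minimum the mon is enforcing (896 MiB)

Because the value is rejected, the OSDs simply keep their existing osd_memory_target; on memory-constrained nodes this reads as noise rather than a deployment failure.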
2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:10.924 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:10 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.003 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: purged_snaps scrub starts 2026-03-31T22:56:11.003 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: purged_snaps scrub ok 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.004 
INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.004 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:10 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: purged_snaps scrub starts 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: purged_snaps scrub ok 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:10 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:12.105 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:11 vm00 ceph-mon[61968]: osdmap e10: 2 total, 1 up, 2 in 2026-03-31T22:56:12.105 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:11 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : 
dispatch 2026-03-31T22:56:12.105 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:11 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.1"} : dispatch 2026-03-31T22:56:12.105 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:11 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:12.105 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:11 vm00 ceph-mon[61968]: Deploying daemon osd.1 on vm05 2026-03-31T22:56:12.105 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:11 vm00 ceph-mon[61968]: pgmap v20: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:12.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:11 vm09 ceph-mon[98050]: osdmap e10: 2 total, 1 up, 2 in 2026-03-31T22:56:12.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:11 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:12.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:11 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.1"} : dispatch 2026-03-31T22:56:12.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:11 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:12.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:11 vm09 ceph-mon[98050]: Deploying daemon osd.1 on vm05 2026-03-31T22:56:12.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:11 vm09 ceph-mon[98050]: pgmap v20: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:12.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:11 vm05 ceph-mon[69577]: osdmap e10: 2 total, 1 up, 2 in 2026-03-31T22:56:12.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:11 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:12.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:11 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.1"} : dispatch 2026-03-31T22:56:12.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:11 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:12.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:11 vm05 ceph-mon[69577]: Deploying daemon osd.1 on vm05 2026-03-31T22:56:12.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:11 vm05 ceph-mon[69577]: pgmap v20: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:13.833 INFO:teuthology.orchestra.run.vm05.stdout:Created osd(s) 1 on host 'vm05' 2026-03-31T22:56:13.878 DEBUG:teuthology.orchestra.run.vm05:osd.1> sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.1.service 2026-03-31T22:56:13.880 INFO:tasks.cephadm:Deploying osd.2 on vm09 with /dev/vg_nvme/lv_4... 
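Each "Deploying osd.N on ..." line from tasks.cephadm kicks off the same two-command sequence, shown in full just below for osd.2: first zap the target logical volume, then hand it to the orchestrator. With the job-specific --image, -c/-k keyring, and --fsid plumbing reduced to placeholders, the flow is roughly:

    # minimal sketch of the per-OSD sequence (<fsid> stands in for the value in the log)
    sudo cephadm ceph-volume --fsid <fsid> -- lvm zap /dev/vg_nvme/lv_4
    sudo cephadm shell --fsid <fsid> -- ceph orch daemon add osd vm09:vg_nvme/lv_4 --skip-validation

The zap step is presumably there so reruns start from a clean device: it wipes any LVM metadata a previous attempt left behind before ceph-volume claims the volume again.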
2026-03-31T22:56:13.881 DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 ceph-volume -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- lvm zap /dev/vg_nvme/lv_4 2026-03-31T22:56:14.012 INFO:teuthology.orchestra.run.vm09.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.c/config 2026-03-31T22:56:14.069 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:13 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.069 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:13 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.069 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:13 vm09 ceph-mon[98050]: pgmap v21: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:14.069 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:13 vm09 ceph-mon[98050]: from='osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297]' entity='osd.1' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]} : dispatch 2026-03-31T22:56:14.069 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:13 vm09 ceph-mon[98050]: from='osd.1 ' entity='osd.1' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]} : dispatch 2026-03-31T22:56:14.150 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:13 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.150 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:13 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.150 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:13 vm00 ceph-mon[61968]: pgmap v21: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:14.150 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:13 vm00 ceph-mon[61968]: from='osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297]' entity='osd.1' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]} : dispatch 2026-03-31T22:56:14.150 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:13 vm00 ceph-mon[61968]: from='osd.1 ' entity='osd.1' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]} : dispatch 2026-03-31T22:56:14.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:13 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:13 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:13 vm05 ceph-mon[69577]: pgmap v21: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:14.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:13 vm05 ceph-mon[69577]: from='osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297]' entity='osd.1' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]} : dispatch 2026-03-31T22:56:14.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:13 vm05 ceph-mon[69577]: from='osd.1 ' entity='osd.1' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]} : dispatch 2026-03-31T22:56:14.488 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T22:56:14.507 
DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph orch daemon add osd vm09:vg_nvme/lv_4 --skip-validation 2026-03-31T22:56:14.638 INFO:teuthology.orchestra.run.vm09.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.c/config 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='osd.1 ' entity='osd.1' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]}]': finished 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297]' entity='osd.1' cmd={"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]} : dispatch 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: osdmap e11: 2 total, 1 up, 2 in 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='osd.1 ' entity='osd.1' cmd={"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]} : dispatch 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.1", "name": "osd_memory_target"} : dispatch 2026-03-31T22:56:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:14.991 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:14.991 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:14.991 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": 
"json"} : dispatch 2026-03-31T22:56:14.991 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:14.991 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:14 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:15.044 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='osd.1 ' entity='osd.1' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]}]': finished 2026-03-31T22:56:15.044 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297]' entity='osd.1' cmd={"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: osdmap e11: 2 total, 1 up, 2 in 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='osd.1 ' entity='osd.1' cmd={"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.1", "name": "osd_memory_target"} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": 
"json"} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:14 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:15.045 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 22:56:14 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1[74050]: 2026-03-31T22:56:14.821+0000 7fefa56bc640 -1 osd.1 0 waiting for initial osdmap 2026-03-31T22:56:15.045 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 22:56:14 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1[74050]: 2026-03-31T22:56:14.829+0000 7fefa048b640 -1 osd.1 12 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='osd.1 ' entity='osd.1' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["1"]}]': finished 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297]' entity='osd.1' cmd={"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: osdmap e11: 2 total, 1 up, 2 in 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='osd.1 ' entity='osd.1' cmd={"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.1", "name": "osd_memory_target"} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: 
from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:14 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: Adjusting osd_memory_target on vm05 to 635.4M 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: Unable to set osd_memory_target on vm05 to 666358579: error parsing value: Value '666358579' is below minimum 939524096 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='osd.1 ' entity='osd.1' cmd='[{"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]}]': finished 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: osdmap e12: 2 total, 1 up, 2 in 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='client.24163 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm09:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: osd.default does not exist. Creating it now. 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: Creating OSDs with service ID: default on vm09:['vg_nvme/lv_4'] 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: Marking host: vm09 for OSDSpec preview refresh. 
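A side note on the "weight":0.0195 in the osd crush create-or-move commands: CRUSH weights are capacity expressed in TiB, and the pgmap totals in this stretch (20 GiB of capacity per up OSD) show each OSD here sits on a 20 GiB volume, so:

    echo "scale=4; 20/1024" | bc   # -> .0195, the CRUSH weight of a 20 GiB OSD

which is exactly the weight each booting OSD registers for itself under its host bucket.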
2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: Saving service osd.default spec with placement vm09 2026-03-31T22:56:15.826 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: pgmap v24: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='client.? 192.168.123.109:0/770207455' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"} : dispatch 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='client.? ' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"} : dispatch 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='client.? 
' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"}]': finished 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297] boot 2026-03-31T22:56:15.827 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: osdmap e13: 3 total, 2 up, 3 in 2026-03-31T22:56:15.829 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.829 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:15 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: Adjusting osd_memory_target on vm05 to 635.4M 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: Unable to set osd_memory_target on vm05 to 666358579: error parsing value: Value '666358579' is below minimum 939524096 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='osd.1 ' entity='osd.1' cmd='[{"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]}]': finished 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: osdmap e12: 2 total, 1 up, 2 in 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='client.24163 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm09:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: osd.default does not exist. Creating it now. 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: Creating OSDs with service ID: default on vm09:['vg_nvme/lv_4'] 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: Marking host: vm09 for OSDSpec preview refresh. 
2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: Saving service osd.default spec with placement vm09 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: pgmap v24: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='client.? 192.168.123.109:0/770207455' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='client.? ' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='client.? 
' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"}]': finished 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297] boot 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: osdmap e13: 3 total, 2 up, 3 in 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.958 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:15 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: Adjusting osd_memory_target on vm05 to 635.4M 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: Unable to set osd_memory_target on vm05 to 666358579: error parsing value: Value '666358579' is below minimum 939524096 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='osd.1 ' entity='osd.1' cmd='[{"prefix": "osd crush create-or-move", "id": 1, "weight":0.0195, "args": ["host=vm05", "root=default"]}]': finished 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: osdmap e12: 2 total, 1 up, 2 in 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='client.24163 -' entity='client.admin' cmd=[{"prefix": "orch daemon add osd", "svc_arg": "vm09:vg_nvme/lv_4", "skip_validation": true, "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: osd.default does not exist. Creating it now. 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: Creating OSDs with service ID: default on vm09:['vg_nvme/lv_4'] 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: Marking host: vm09 for OSDSpec preview refresh. 
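The "osd.default does not exist. Creating it now." / "Saving service osd.default spec" pairs that every mon echoes show how the orchestrator records these one-shot `orch daemon add osd` calls: each call appears to be folded into a single osd.default service spec whose placement is updated to the target host. To inspect the accumulated spec afterwards (hypothetical; not run in this job), the standard export would be:

    ceph orch ls osd --export   # dump the saved osd.default spec as YAML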
2026-03-31T22:56:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: Saving service osd.default spec with placement vm09 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: pgmap v24: 0 pgs: ; 0 B data, 27 MiB used, 20 GiB / 20 GiB avail 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='client.? 192.168.123.109:0/770207455' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"} : dispatch 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='client.? ' entity='client.bootstrap-osd' cmd={"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"} : dispatch 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='client.? ' entity='client.bootstrap-osd' cmd='[{"prefix": "osd new", "uuid": "be821005-2b57-413b-92e7-ee956ad7fc35"}]': finished 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: osd.1 [v2:192.168.123.105:6800/503330297,v1:192.168.123.105:6801/503330297] boot 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: osdmap e13: 3 total, 2 up, 3 in 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch 2026-03-31T22:56:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:15 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: purged_snaps scrub starts 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: purged_snaps scrub ok 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='client.? 
192.168.123.109:0/2670882430' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:16.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:16.916 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:16.916 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:16.916 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:16.916 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:16 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: purged_snaps scrub starts 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: purged_snaps scrub ok 2026-03-31T22:56:17.078 
INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='client.? 192.168.123.109:0/2670882430' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.078 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:17.079 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:17.079 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.079 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:17.079 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:17.079 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:16 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: purged_snaps scrub starts 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 
ceph-mon[69577]: purged_snaps scrub ok 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='client.? 192.168.123.109:0/2670882430' entity='client.bootstrap-osd' cmd={"prefix": "mon getmap"} : dispatch 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:16 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:17.931 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:17 vm09 ceph-mon[98050]: pgmap v26: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 
40 GiB avail 2026-03-31T22:56:17.931 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:17 vm09 ceph-mon[98050]: osdmap e14: 3 total, 2 up, 3 in 2026-03-31T22:56:17.931 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:17 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:17.931 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:17 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:17.931 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:17 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:17.931 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:17 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:17.931 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:17 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:18.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:17 vm00 ceph-mon[61968]: pgmap v26: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:18.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:17 vm00 ceph-mon[61968]: osdmap e14: 3 total, 2 up, 3 in 2026-03-31T22:56:18.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:17 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:18.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:17 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:18.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:17 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:18.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:17 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:18.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:17 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:18.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:17 vm05 ceph-mon[69577]: pgmap v26: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:18.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:17 vm05 ceph-mon[69577]: osdmap e14: 3 total, 2 up, 3 in 2026-03-31T22:56:18.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:17 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:18.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:17 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:18.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:17 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config 
generate-minimal-conf"} : dispatch 2026-03-31T22:56:18.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:17 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:18.181 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:17 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:18 vm00 ceph-mon[61968]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:18 vm00 ceph-mon[61968]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:18 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:18 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:18 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:18 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:18 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:18 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:18 vm09 ceph-mon[98050]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:19.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:18 vm09 ceph-mon[98050]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:19.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:18 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:18 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:18 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:18 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:18 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:18 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:18 vm05 ceph-mon[69577]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:18 vm05 ceph-mon[69577]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:18 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:18 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 
2026-03-31T22:56:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:18 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:18 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:18 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:18 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:19 vm09 ceph-mon[98050]: Detected new or changed devices on vm05 2026-03-31T22:56:19.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:19 vm09 ceph-mon[98050]: pgmap v28: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:19.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:19 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:19 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:19.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:19 vm09 ceph-mon[98050]: from='osd.2 [v2:192.168.123.109:6800/2004690443,v1:192.168.123.109:6801/2004690443]' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:19.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:19 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:19 vm05 ceph-mon[69577]: Detected new or changed devices on vm05 2026-03-31T22:56:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:19 vm05 ceph-mon[69577]: pgmap v28: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:19 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:19 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:19 vm05 ceph-mon[69577]: from='osd.2 [v2:192.168.123.109:6800/2004690443,v1:192.168.123.109:6801/2004690443]' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:19 vm05 ceph-mon[69577]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:20.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:19 vm00 ceph-mon[61968]: Detected new or changed devices on vm05 2026-03-31T22:56:20.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:19 vm00 ceph-mon[61968]: pgmap v28: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:20.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:19 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:20.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:19 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 
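The osd crush set-device-class dispatches in this stretch are osd.2 registering its own device class on first start: ceph-osd derives hdd vs ssd from the kernel's rotational flag, and the virtio disks on these VMs present as hdd. The mons record the command once as a dispatch and again as finished when the map update commits. For reference, the class can be inspected or changed with the standard CRUSH commands; a minimal sketch, assuming an admin keyring (the ssd value is only an example, not something this job does):

    # show known device classes and which OSDs carry them
    ceph osd crush class ls
    ceph osd crush class ls-osd hdd

    # a class cannot be overwritten in place: remove it, then set the new one
    ceph osd crush rm-device-class osd.2
    ceph osd crush set-device-class ssd osd.2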
2026-03-31T22:56:20.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:19 vm00 ceph-mon[61968]: from='osd.2 [v2:192.168.123.109:6800/2004690443,v1:192.168.123.109:6801/2004690443]' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:20.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:19 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:21.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:21 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:21.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:21 vm09 ceph-mon[98050]: osdmap e15: 3 total, 2 up, 3 in 2026-03-31T22:56:21.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:21 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:21.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:21 vm09 ceph-mon[98050]: pgmap v30: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:21.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:21 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:21.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:21 vm00 ceph-mon[61968]: osdmap e15: 3 total, 2 up, 3 in 2026-03-31T22:56:21.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:21 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:21.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:21 vm00 ceph-mon[61968]: pgmap v30: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:21.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:21 vm05 ceph-mon[69577]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:21.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:21 vm05 ceph-mon[69577]: osdmap e15: 3 total, 2 up, 3 in 2026-03-31T22:56:21.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:21 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:21.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:21 vm05 ceph-mon[69577]: pgmap v30: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:22.255 INFO:teuthology.orchestra.run.vm09.stdout:Created osd(s) 2 on host 'vm09' 2026-03-31T22:56:22.305 DEBUG:teuthology.orchestra.run.vm09:osd.2> sudo journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.2.service 2026-03-31T22:56:22.306 INFO:tasks.cephadm:Waiting for 3 OSDs to come up... 
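"Waiting for 3 OSDs to come up..." is a poll: the task repeatedly runs ceph osd stat -f json through a cephadm shell (the DEBUG/stderr/stdout triplets that follow) and compares num_up_osds against the number of OSD roles in the job. An equivalent wait loop, as a minimal sketch assuming an admin keyring and jq on the host (WANT and the 60-second budget are illustrative values, not taken from the task):

    WANT=3
    for _ in $(seq 1 60); do
      up=$(ceph osd stat -f json | jq -r '.num_up_osds')
      [ "$up" -ge "$WANT" ] && break   # all OSDs reported up
      sleep 1
    done

In the output that follows, num_up_osds stays at 2 through osdmap epochs 16 and 17 and reaches 3 once osd.2 boots into e18.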
2026-03-31T22:56:22.306 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd stat -f json 2026-03-31T22:56:22.436 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:22 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:22 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:22 vm00 ceph-mon[61968]: from='osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800]' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:22 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:22 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:22 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.677 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:22.725 INFO:teuthology.orchestra.run.vm00.stdout:{"epoch":16,"num_osds":3,"num_up_osds":2,"osd_up_since":1774997775,"num_in_osds":3,"osd_in_since":1774997775,"num_remapped_pgs":0} 2026-03-31T22:56:22.951 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:22 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.951 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:22 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.951 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:22 vm09 ceph-mon[98050]: from='osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800]' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:22.951 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:22 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:22.951 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:22 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.951 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:22 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:22 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:22 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:22 vm05 ceph-mon[69577]: from='osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800]' entity='osd.2' 
cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:22.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:22 vm05 ceph-mon[69577]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:22.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:22 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:22.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:22 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.726 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd stat -f json 2026-03-31T22:56:23.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:23.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: osdmap e16: 3 total, 2 up, 3 in 2026-03-31T22:56:23.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:23.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800]' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:23.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:23.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='client.? 
192.168.123.100:0/2337858801' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: pgmap v32: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.2", "name": "osd_memory_target"} : dispatch 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:23.740 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:23 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:23.740 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:23 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[103572]: 2026-03-31T22:56:23.537+0000 7fdea07a0640 -1 osd.2 0 waiting for initial osdmap 2026-03-31T22:56:23.740 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:23 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[103572]: 2026-03-31T22:56:23.540+0000 7fde9b56f640 -1 osd.2 17 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-03-31T22:56:23.848 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:23.870 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:23.870 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: osdmap e16: 3 total, 2 up, 3 in 
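Two details in this stretch are worth decoding. First, the create-or-move weight of 0.0195 is the device capacity expressed in TiB, which is how ceph-osd computes its initial CRUSH weight: each OSD here sits on a roughly 20 GiB volume (the pgmap reads 60 GiB across three OSDs once all are up), and 20/1024 ≈ 0.0195. The equivalent manual command, as a sketch assuming an admin keyring:

    # initial CRUSH weight = capacity in TiB; 20 GiB -> 20/1024 ~= 0.0195
    ceph osd crush create-or-move osd.2 0.0195 host=vm09 root=default

Second, the config rm of osd_memory_target for osd.2 is cephadm's memory autotuner clearing any per-daemon value before it writes a host-scoped one; the "Adjusting osd_memory_target on vm09 to 4731M" entries shortly after (4961325875 bytes in the config log) are that new value landing. The set_numa_affinity warning from osd.2 is benign on these VMs: NUMA pinning is simply skipped when the public interface cannot be mapped to a NUMA node, and the OSD proceeds to boot.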
2026-03-31T22:56:23.870 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:23.870 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800]' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:23.870 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/2337858801' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: pgmap v32: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.2", "name": "osd_memory_target"} : dispatch 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:23.871 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:23 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='osd.2 ' 
entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: osdmap e16: 3 total, 2 up, 3 in 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800]' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/2337858801' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: pgmap v32: 0 pgs: ; 0 B data, 53 MiB used, 40 GiB / 40 GiB avail 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd.2", "name": "osd_memory_target"} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:23.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:23 vm05 ceph-mon[69577]: 
from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:24.092 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:24.135 INFO:teuthology.orchestra.run.vm00.stdout:{"epoch":17,"num_osds":3,"num_up_osds":2,"osd_up_since":1774997775,"num_in_osds":3,"osd_in_since":1774997775,"num_remapped_pgs":0} 2026-03-31T22:56:24.862 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:24 vm09 ceph-mon[98050]: Adjusting osd_memory_target on vm09 to 4731M 2026-03-31T22:56:24.862 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:24 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]}]': finished 2026-03-31T22:56:24.862 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:24 vm09 ceph-mon[98050]: osdmap e17: 3 total, 2 up, 3 in 2026-03-31T22:56:24.863 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:24 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:24.863 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:24 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:24.863 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:24 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/2340128331' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:24.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:24 vm00 ceph-mon[61968]: Adjusting osd_memory_target on vm09 to 4731M 2026-03-31T22:56:24.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:24 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]}]': finished 2026-03-31T22:56:24.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:24 vm00 ceph-mon[61968]: osdmap e17: 3 total, 2 up, 3 in 2026-03-31T22:56:24.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:24 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:24.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:24 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:24.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:24 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/2340128331' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:24.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:24 vm05 ceph-mon[69577]: Adjusting osd_memory_target on vm09 to 4731M 2026-03-31T22:56:24.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:24 vm05 ceph-mon[69577]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]}]': finished 2026-03-31T22:56:24.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:24 vm05 ceph-mon[69577]: osdmap e17: 3 total, 2 up, 3 in 2026-03-31T22:56:24.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:24 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:24.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:24 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:24.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:24 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/2340128331' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:25.135 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd stat -f json 2026-03-31T22:56:25.256 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:25.481 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:25.522 INFO:teuthology.orchestra.run.vm00.stdout:{"epoch":18,"num_osds":3,"num_up_osds":3,"osd_up_since":1774997784,"num_in_osds":3,"osd_in_since":1774997775,"num_remapped_pgs":0} 2026-03-31T22:56:25.523 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd dump --format=json 2026-03-31T22:56:25.644 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: purged_snaps scrub starts 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: purged_snaps scrub ok 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800] boot 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: osdmap e18: 3 total, 3 up, 3 in 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: pgmap v35: 0 pgs: ; 0 B data, 80 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:25.758 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd pool create", "format": "json", "pool": ".mgr", "pg_num": 1, "pg_num_min": 1, "pg_num_max": 32, "yes_i_really_mean_it": true} : dispatch 2026-03-31T22:56:25.758 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:25 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/2662274044' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:25.885 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:25.885 INFO:teuthology.orchestra.run.vm00.stdout:{"epoch":19,"fsid":"8bb14950-2d54-11f1-a348-07063966e06c","created":"2026-03-31T22:55:10.407867+0000","modified":"2026-03-31T22:56:25.541192+0000","last_up_change":"2026-03-31T22:56:24.532279+0000","last_in_change":"2026-03-31T22:56:15.608734+0000","flags":"sortbitwise,recovery_deletes,purged_snapdirs,pglog_hardlimit","flags_num":5799936,"flags_set":["pglog_hardlimit","purged_snapdirs","recovery_deletes","sortbitwise"],"crush_version":10,"full_ratio":0.94999998807907104,"backfillfull_ratio":0.89999997615814209,"nearfull_ratio":0.85000002384185791,"cluster_snapshot":"","pool_max":1,"max_osd":3,"require_min_compat_client":"luminous","min_compat_client":"jewel","require_osd_release":"tentacle","allow_crimson":false,"pools":[{"pool":1,"pool_name":".mgr","create_time":"2026-03-31T22:56:25.120853+0000","flags":32769,"flags_names":"hashpspool,creating","type":1,"size":3,"min_size":2,"crush_rule":0,"peering_crush_bucket_count":0,"peering_crush_bucket_target":0,"peering_crush_bucket_barrier":0,"peering_crush_bucket_mandatory_member":2147483647,"is_stretch_pool":false,"object_hash":2,"pg_autoscale_mode":"off","pg_num":1,"pg_placement_num":1,"pg_placement_num_target":1,"pg_num_target":1,"pg_num_pending":1,"last_pg_merge_meta":{"source_pgid":"0.0","ready_epoch":0,"last_epoch_started":0,"last_epoch_clean":0,"source_version":"0'0","target_version":"0'0"},"last_change":"19","last_force_op_resend":"0","last_force_op_resend_prenautilus":"0","last_force_op_resend_preluminous":"0","auid":0,"snap_mode":"selfmanaged","snap_seq":0,"snap_epoch":0,"pool_snaps":[],"removed_snaps":"[]","quota_max_bytes":0,"quota_max_objects":0,"tiers":[],"tier_of":-1,"read_tier":-1,"write_tier":-1,"cache_mode":"none","target_max_bytes":0,"target_max_objects":0,"cache_target_dirty_ratio_micro":400000,"cache_target_dirty_high_ratio_micro":600000,"cache_target_full_ratio_micro":800000,"cache_min_flush_age":0,"cache_min_evict_age":0,"erasure_code_profile":"","hit_set_params":{"type":"none"},"hit_set_period":0,"hit_set_count":0,"use_gmt_hitset":true,"min_read_recency_for_promote":0,"min_write_recency_for_promote":0,"hit_set_grade_decay_rate":0,"hit_set_search_last_n":0,"grade_table":[],"stripe_width":0,"expected_num_objects":0,"fast_read":false,"nonprimary_shards":"{}","options":{"pg_num_max":32,"pg_num_min":1},"application_metadata":{},"read_balance":{"score_type":"Fair 
distribution","score_acting":3,"score_stable":3,"optimal_score":1,"raw_score_acting":3,"raw_score_stable":3,"primary_affinity_weighted":1,"average_primary_affinity":1,"average_primary_affinity_weighted":1}}],"osds":[{"osd":0,"uuid":"e8d1d763-2967-4800-93f0-dda88d3e9875","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":9,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6802","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6803","nonce":2458914830}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6804","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6805","nonce":2458914830}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6808","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6809","nonce":2458914830}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6806","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6807","nonce":2458914830}]},"public_addr":"192.168.123.100:6803/2458914830","cluster_addr":"192.168.123.100:6805/2458914830","heartbeat_back_addr":"192.168.123.100:6809/2458914830","heartbeat_front_addr":"192.168.123.100:6807/2458914830","state":["exists","up"]},{"osd":1,"uuid":"75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6800","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6801","nonce":503330297}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6802","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6803","nonce":503330297}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6806","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6807","nonce":503330297}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6804","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6805","nonce":503330297}]},"public_addr":"192.168.123.105:6801/503330297","cluster_addr":"192.168.123.105:6803/503330297","heartbeat_back_addr":"192.168.123.105:6807/503330297","heartbeat_front_addr":"192.168.123.105:6805/503330297","state":["exists","up"]},{"osd":2,"uuid":"be821005-2b57-413b-92e7-ee956ad7fc35","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":18,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6800","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6801","nonce":2457921800}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6802","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6803","nonce":2457921800}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6806","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6807","nonce":2457921800}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6804","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6805","nonce":2457921800}]},"public_addr":"192.168.123.109:6801/2457921800","cluster_addr":"192.168.123.109:6803/2457921800","heartbeat_back_addr":"192.168.123.109:6807/2457921800","heartbeat_front_addr":"192.168.123.109:6805/2457921800","state":["exists","up"]}],"osd_xinfo":[{"osd":0,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"20
26-03-31T22:56:07.977821+0000","dead_epoch":0},{"osd":1,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-03-31T22:56:14.031818+0000","dead_epoch":0},{"osd":2,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-03-31T22:56:22.728605+0000","dead_epoch":0}],"pg_upmap":[],"pg_upmap_items":[],"pg_upmap_primaries":[],"pg_temp":[],"primary_temp":[],"blocklist":{"192.168.123.100:0/2735422203":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/2321427443":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/3145658098":"2026-04-01T22:55:30.890195+0000","192.168.123.100:6800/1629928501":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/3581991654":"2026-04-01T22:55:20.757463+0000","192.168.123.100:0/1523559636":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6801/1629928501":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/389484463":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6801/3751763863":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6800/3751763863":"2026-04-01T22:55:20.757463+0000"},"range_blocklist":{},"erasure_code_profiles":{"default":{"crush-failure-domain":"osd","k":"2","m":"1","plugin":"isa","technique":"reed_sol_van"}},"removed_snaps_queue":[],"new_removed_snaps":[],"new_purged_snaps":[],"crush_node_flags":{},"device_class_flags":{},"stretch_mode":{"stretch_mode_enabled":false,"stretch_bucket_count":0,"degraded_stretch_mode":0,"recovering_stretch_mode":0,"stretch_mode_bucket":0}} 2026-03-31T22:56:25.927 INFO:tasks.cephadm.ceph_manager.ceph:[{'pool': 1, 'pool_name': '.mgr', 'create_time': '2026-03-31T22:56:25.120853+0000', 'flags': 32769, 'flags_names': 'hashpspool,creating', 'type': 1, 'size': 3, 'min_size': 2, 'crush_rule': 0, 'peering_crush_bucket_count': 0, 'peering_crush_bucket_target': 0, 'peering_crush_bucket_barrier': 0, 'peering_crush_bucket_mandatory_member': 2147483647, 'is_stretch_pool': False, 'object_hash': 2, 'pg_autoscale_mode': 'off', 'pg_num': 1, 'pg_placement_num': 1, 'pg_placement_num_target': 1, 'pg_num_target': 1, 'pg_num_pending': 1, 'last_pg_merge_meta': {'source_pgid': '0.0', 'ready_epoch': 0, 'last_epoch_started': 0, 'last_epoch_clean': 0, 'source_version': "0'0", 'target_version': "0'0"}, 'last_change': '19', 'last_force_op_resend': '0', 'last_force_op_resend_prenautilus': '0', 'last_force_op_resend_preluminous': '0', 'auid': 0, 'snap_mode': 'selfmanaged', 'snap_seq': 0, 'snap_epoch': 0, 'pool_snaps': [], 'removed_snaps': '[]', 'quota_max_bytes': 0, 'quota_max_objects': 0, 'tiers': [], 'tier_of': -1, 'read_tier': -1, 'write_tier': -1, 'cache_mode': 'none', 'target_max_bytes': 0, 'target_max_objects': 0, 'cache_target_dirty_ratio_micro': 400000, 'cache_target_dirty_high_ratio_micro': 600000, 'cache_target_full_ratio_micro': 800000, 'cache_min_flush_age': 0, 'cache_min_evict_age': 0, 'erasure_code_profile': '', 'hit_set_params': {'type': 'none'}, 'hit_set_period': 0, 'hit_set_count': 0, 'use_gmt_hitset': True, 'min_read_recency_for_promote': 0, 'min_write_recency_for_promote': 0, 'hit_set_grade_decay_rate': 0, 'hit_set_search_last_n': 0, 'grade_table': [], 'stripe_width': 0, 'expected_num_objects': 0, 'fast_read': False, 'nonprimary_shards': '{}', 'options': {'pg_num_max': 32, 'pg_num_min': 1}, 'application_metadata': {}, 'read_balance': {'score_type': 'Fair distribution', 'score_acting': 3, 'score_stable': 3, 'optimal_score': 1, 'raw_score_acting': 3, 
'raw_score_stable': 3, 'primary_affinity_weighted': 1, 'average_primary_affinity': 1, 'average_primary_affinity_weighted': 1}}] 2026-03-31T22:56:25.927 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd pool get .mgr pg_num 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: purged_snaps scrub starts 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: purged_snaps scrub ok 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800] boot 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: osdmap e18: 3 total, 3 up, 3 in 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: pgmap v35: 0 pgs: ; 0 B data, 80 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd pool create", "format": "json", "pool": ".mgr", "pg_num": 1, "pg_num_min": 1, "pg_num_max": 32, "yes_i_really_mean_it": true} : dispatch 2026-03-31T22:56:25.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:25 vm09 ceph-mon[98050]: from='client.? 
192.168.123.100:0/2662274044' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: purged_snaps scrub starts 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: purged_snaps scrub ok 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: osd.2 [v2:192.168.123.109:6800/2457921800,v1:192.168.123.109:6801/2457921800] boot 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: osdmap e18: 3 total, 3 up, 3 in 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: pgmap v35: 0 pgs: ; 0 B data, 80 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd pool create", "format": "json", "pool": ".mgr", "pg_num": 1, "pg_num_min": 1, "pg_num_max": 32, "yes_i_really_mean_it": true} : dispatch 2026-03-31T22:56:25.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:25 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/2662274044' entity='client.admin' cmd={"prefix": "osd stat", "format": "json"} : dispatch 2026-03-31T22:56:26.043 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:26.272 INFO:teuthology.orchestra.run.vm00.stdout:pg_num: 1 2026-03-31T22:56:26.312 INFO:tasks.cephadm:Setting up client nodes... 2026-03-31T22:56:26.312 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph config log 1 --format=json 2026-03-31T22:56:26.428 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:26.598 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "osd pool create", "format": "json", "pool": ".mgr", "pg_num": 1, "pg_num_min": 1, "pg_num_max": 32, "yes_i_really_mean_it": true}]': finished 2026-03-31T22:56:26.598 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 ceph-mon[61968]: osdmap e19: 3 total, 3 up, 3 in 2026-03-31T22:56:26.598 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd pool application enable", "format": "json", "pool": ".mgr", "app": "mgr", "yes_i_really_mean_it": true} : dispatch 2026-03-31T22:56:26.598 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/458162033' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:26.598 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3766484666' entity='client.admin' cmd={"prefix": "osd pool get", "pool": ".mgr", "var": "pg_num"} : dispatch 2026-03-31T22:56:26.674 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:26.720 INFO:teuthology.orchestra.run.vm00.stdout:[{"version":11,"timestamp":"2026-03-31T22:56:23.058468+0000","name":"","changes":[{"name":"osd/host:vm09/osd_memory_target","new_value":"4961325875"}]}] 2026-03-31T22:56:26.721 INFO:tasks.ceph_manager:config epoch is 11 2026-03-31T22:56:26.721 INFO:tasks.ceph:Waiting until ceph daemons up and pgs clean... 2026-03-31T22:56:26.721 INFO:tasks.cephadm.ceph_manager.ceph:waiting for mgr available 2026-03-31T22:56:26.721 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph mgr dump --format=json 2026-03-31T22:56:26.842 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:26.865 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 sudo[74424]: ceph : PWD=/ ; USER=root ; COMMAND=/usr/sbin/smartctl -x --json=o /dev/vda 2026-03-31T22:56:26.865 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 sudo[74424]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory 2026-03-31T22:56:26.865 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 sudo[74424]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=167) 2026-03-31T22:56:26.865 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:26 vm00 sudo[74424]: pam_unix(sudo:session): session closed for user root 2026-03-31T22:56:26.865 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 22:56:26 vm00 sudo[74420]: ceph : PWD=/ ; USER=root ; COMMAND=/usr/sbin/smartctl -x --json=o /dev/vde 2026-03-31T22:56:26.865 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 22:56:26 vm00 sudo[74420]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory 2026-03-31T22:56:26.865 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 22:56:26 vm00 sudo[74420]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=167) 2026-03-31T22:56:26.865 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 22:56:26 vm00 sudo[74420]: pam_unix(sudo:session): session closed for user root 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "osd pool create", "format": "json", "pool": ".mgr", "pg_num": 1, "pg_num_min": 1, "pg_num_max": 32, "yes_i_really_mean_it": true}]': finished 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 ceph-mon[98050]: osdmap e19: 3 total, 3 up, 3 in 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd pool application enable", "format": "json", "pool": ".mgr", "app": "mgr", "yes_i_really_mean_it": true} : dispatch 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 ceph-mon[98050]: from='client.? 
192.168.123.100:0/458162033' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/3766484666' entity='client.admin' cmd={"prefix": "osd pool get", "pool": ".mgr", "var": "pg_num"} : dispatch 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 sudo[105036]: ceph : PWD=/ ; USER=root ; COMMAND=/usr/sbin/smartctl -x --json=o /dev/vda 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 sudo[105036]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 sudo[105036]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=167) 2026-03-31T22:56:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:26 vm09 sudo[105036]: pam_unix(sudo:session): session closed for user root 2026-03-31T22:56:26.989 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:26 vm09 sudo[105032]: ceph : PWD=/ ; USER=root ; COMMAND=/usr/sbin/smartctl -x --json=o /dev/vde 2026-03-31T22:56:26.990 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:26 vm09 sudo[105032]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory 2026-03-31T22:56:26.990 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:26 vm09 sudo[105032]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=167) 2026-03-31T22:56:26.990 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:26 vm09 sudo[105032]: pam_unix(sudo:session): session closed for user root 2026-03-31T22:56:26.993 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 22:56:26 vm05 sudo[75970]: ceph : PWD=/ ; USER=root ; COMMAND=/usr/sbin/smartctl -x --json=o /dev/vde 2026-03-31T22:56:26.993 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 22:56:26 vm05 sudo[75970]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory 2026-03-31T22:56:26.993 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 22:56:26 vm05 sudo[75970]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=167) 2026-03-31T22:56:26.993 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 22:56:26 vm05 sudo[75970]: pam_unix(sudo:session): session closed for user root 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "osd pool create", "format": "json", "pool": ".mgr", "pg_num": 1, "pg_num_min": 1, "pg_num_max": 32, "yes_i_really_mean_it": true}]': finished 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 ceph-mon[69577]: osdmap e19: 3 total, 3 up, 3 in 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd pool application enable", "format": "json", "pool": ".mgr", "app": "mgr", "yes_i_really_mean_it": true} : dispatch 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/458162033' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 ceph-mon[69577]: from='client.? 
192.168.123.100:0/3766484666' entity='client.admin' cmd={"prefix": "osd pool get", "pool": ".mgr", "var": "pg_num"} : dispatch 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 sudo[75974]: ceph : PWD=/ ; USER=root ; COMMAND=/usr/sbin/smartctl -x --json=o /dev/vda 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 sudo[75974]: pam_systemd(sudo:session): Failed to connect to system bus: No such file or directory 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 sudo[75974]: pam_unix(sudo:session): session opened for user root(uid=0) by (uid=167) 2026-03-31T22:56:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:26 vm05 sudo[75974]: pam_unix(sudo:session): session closed for user root 2026-03-31T22:56:27.102 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:27.145 INFO:teuthology.orchestra.run.vm00.stdout:{"epoch":13,"flags":0,"active_gid":14152,"active_name":"a","active_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6800","nonce":773697551},{"type":"v1","addr":"192.168.123.100:6801","nonce":773697551}]},"active_addr":"192.168.123.100:6801/773697551","active_change":"2026-03-31T22:55:30.890309+0000","active_mgr_features":4541880224203014143,"available":true,"standbys":[{"gid":24107,"name":"b","mgr_features":4541880224203014143,"available_modules":[{"name":"alerts","can_run":true,"error_string":"","module_options":{"interval":{"name":"interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"How frequently to reexamine health status","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"smtp_destination":{"name":"smtp_destination","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Email address to send alerts to, use commas to separate multiple","long_desc":"","tags":[],"see_also":[]},"smtp_from_name":{"name":"smtp_from_name","type":"str","level":"advanced","flags":1,"default_value":"Ceph","min":"","max":"","enum_allowed":[],"desc":"Email From: name","long_desc":"","tags":[],"see_also":[]},"smtp_host":{"name":"smtp_host","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"SMTP server","long_desc":"","tags":[],"see_also":[]},"smtp_password":{"name":"smtp_password","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Password to authenticate 
with","long_desc":"","tags":[],"see_also":[]},"smtp_port":{"name":"smtp_port","type":"int","level":"advanced","flags":1,"default_value":"465","min":"","max":"","enum_allowed":[],"desc":"SMTP port","long_desc":"","tags":[],"see_also":[]},"smtp_sender":{"name":"smtp_sender","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"SMTP envelope sender","long_desc":"","tags":[],"see_also":[]},"smtp_ssl":{"name":"smtp_ssl","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Use SSL to connect to SMTP server","long_desc":"","tags":[],"see_also":[]},"smtp_user":{"name":"smtp_user","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"User to authenticate as","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"balancer","can_run":true,"error_string":"","module_options":{"active":{"name":"active","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"automatically balance PGs across cluster","long_desc":"","tags":[],"see_also":[]},"begin_time":{"name":"begin_time","type":"str","level":"advanced","flags":1,"default_value":"0000","min":"","max":"","enum_allowed":[],"desc":"beginning time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"begin_weekday":{"name":"begin_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"6","enum_allowed":[],"desc":"Restrict automatic balancing to this day of the week or later","long_desc":"0 = Sunday, 1 = Monday, etc.","tags":[],"see_also":[]},"crush_compat_max_iterations":{"name":"crush_compat_max_iterations","type":"uint","level":"advanced","flags":1,"default_value":"25","min":"1","max":"250","enum_allowed":[],"desc":"maximum number of iterations to attempt optimization","long_desc":"","tags":[],"see_also":[]},"crush_compat_metrics":{"name":"crush_compat_metrics","type":"str","level":"advanced","flags":1,"default_value":"pgs,objects,bytes","min":"","max":"","enum_allowed":[],"desc":"metrics with which to calculate OSD utilization","long_desc":"Value is a list of one or more of \"pgs\", \"objects\", or \"bytes\", and indicates which metrics to use to balance utilization.","tags":[],"see_also":[]},"crush_compat_step":{"name":"crush_compat_step","type":"float","level":"advanced","flags":1,"default_value":"0.5","min":"0.001","max":"0.999","enum_allowed":[],"desc":"aggressiveness of optimization","long_desc":".99 is very aggressive, .01 is less aggressive","tags":[],"see_also":[]},"end_time":{"name":"end_time","type":"str","level":"advanced","flags":1,"default_value":"2359","min":"","max":"","enum_allowed":[],"desc":"ending time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"end_weekday":{"name":"end_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"6","enum_allowed":[],"desc":"Restrict automatic balancing to days of the week earlier than this","long_desc":"0 = Sunday, 1 = Monday, 
etc.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"min_score":{"name":"min_score","type":"float","level":"advanced","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"minimum score, below which no optimization is attempted","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":1,"default_value":"upmap","min":"","max":"","enum_allowed":["crush-compat","none","read","upmap","upmap-read"],"desc":"Balancer mode","long_desc":"","tags":[],"see_also":[]},"pool_ids":{"name":"pool_ids","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"pools which the automatic balancing will be limited to","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and attempt optimization","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"update_pg_upmap_activity":{"name":"update_pg_upmap_activity","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Updates pg_upmap activity stats to be used in `balancer status detail`","long_desc":"","tags":[],"see_also":[]},"upmap_max_deviation":{"name":"upmap_max_deviation","type":"int","level":"advanced","flags":1,"default_value":"5","min":"1","max":"","enum_allowed":[],"desc":"deviation below which no optimization is attempted","long_desc":"If the number of PGs are within this count then no optimization is attempted","tags":[],"see_also":[]},"upmap_max_optimizations":{"name":"upmap_max_optimizations","type":"uint","level":"advanced","flags":1,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"maximum upmap optimizations to make per attempt","long_desc":"","tags":[],"see_also":[]}}},{"name":"cephadm","can_run":true,"error_string":"","module_options":{"agent_down_multiplier":{"name":"agent_down_multiplier","type":"float","level":"advanced","flags":0,"default_value":"3.0","min":"","max":"","enum_allowed":[],"desc":"Multiplied by agent refresh rate to calculate how long agent must not report before being marked down","long_desc":"","tags":[],"see_also":[]},"agent_refresh_rate":{"name":"agent_refresh_rate","type":"secs","level":"advanced","flags":0,"default_value":"20","min":"","max":"","enum_allowed":[],"desc":"How often agent on each host will try to gather and send 
metadata","long_desc":"","tags":[],"see_also":[]},"agent_starting_port":{"name":"agent_starting_port","type":"int","level":"advanced","flags":0,"default_value":"4721","min":"","max":"","enum_allowed":[],"desc":"First port agent will try to bind to (will also try up to next 1000 subsequent ports if blocked)","long_desc":"","tags":[],"see_also":[]},"allow_ptrace":{"name":"allow_ptrace","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow SYS_PTRACE capability on ceph containers","long_desc":"The SYS_PTRACE capability is needed to attach to a process with gdb or strace. Enabling this options can allow debugging daemons that encounter problems at runtime.","tags":[],"see_also":[]},"autotune_interval":{"name":"autotune_interval","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to autotune daemon memory","long_desc":"","tags":[],"see_also":[]},"autotune_memory_target_ratio":{"name":"autotune_memory_target_ratio","type":"float","level":"advanced","flags":0,"default_value":"0.7","min":"","max":"","enum_allowed":[],"desc":"ratio of total system memory to divide amongst autotuned daemons","long_desc":"","tags":[],"see_also":[]},"cephadm_log_destination":{"name":"cephadm_log_destination","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":["file","file,syslog","syslog"],"desc":"Destination for cephadm command's persistent logging","long_desc":"","tags":[],"see_also":[]},"certificate_automated_rotation_enabled":{"name":"certificate_automated_rotation_enabled","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"This flag controls whether cephadm automatically rotates certificates upon expiration.","long_desc":"","tags":[],"see_also":[]},"certificate_check_debug_mode":{"name":"certificate_check_debug_mode","type":"bool","level":"dev","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"FOR TESTING ONLY: This flag forces the certificate check instead of waiting for certificate_check_period.","long_desc":"","tags":[],"see_also":[]},"certificate_check_period":{"name":"certificate_check_period","type":"int","level":"advanced","flags":0,"default_value":"1","min":"0","max":"30","enum_allowed":[],"desc":"Specifies how often (in days) the certificate should be checked for validity.","long_desc":"","tags":[],"see_also":[]},"certificate_duration_days":{"name":"certificate_duration_days","type":"int","level":"advanced","flags":0,"default_value":"1095","min":"90","max":"3650","enum_allowed":[],"desc":"Specifies the duration of self certificates generated and signed by cephadm root CA","long_desc":"","tags":[],"see_also":[]},"certificate_renewal_threshold_days":{"name":"certificate_renewal_threshold_days","type":"int","level":"advanced","flags":0,"default_value":"30","min":"10","max":"90","enum_allowed":[],"desc":"Specifies the lead time in days to initiate certificate renewal before expiration.","long_desc":"","tags":[],"see_also":[]},"cgroups_split":{"name":"cgroups_split","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Pass --cgroups=split when cephadm creates containers (currently podman 
only)","long_desc":"","tags":[],"see_also":[]},"config_checks_enabled":{"name":"config_checks_enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable or disable the cephadm configuration analysis","long_desc":"","tags":[],"see_also":[]},"config_dashboard":{"name":"config_dashboard","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"manage configs like API endpoints in Dashboard.","long_desc":"","tags":[],"see_also":[]},"container_image_alertmanager":{"name":"container_image_alertmanager","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/alertmanager:v0.28.1","min":"","max":"","enum_allowed":[],"desc":"Alertmanager container image","long_desc":"","tags":[],"see_also":[]},"container_image_base":{"name":"container_image_base","type":"str","level":"advanced","flags":1,"default_value":"quay.io/ceph/ceph","min":"","max":"","enum_allowed":[],"desc":"Container image name, without the tag","long_desc":"","tags":[],"see_also":[]},"container_image_elasticsearch":{"name":"container_image_elasticsearch","type":"str","level":"advanced","flags":0,"default_value":"quay.io/omrizeneva/elasticsearch:6.8.23","min":"","max":"","enum_allowed":[],"desc":"Elasticsearch container image","long_desc":"","tags":[],"see_also":[]},"container_image_grafana":{"name":"container_image_grafana","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/grafana:12.3.1","min":"","max":"","enum_allowed":[],"desc":"Grafana container image","long_desc":"","tags":[],"see_also":[]},"container_image_haproxy":{"name":"container_image_haproxy","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/haproxy:2.3","min":"","max":"","enum_allowed":[],"desc":"Haproxy container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_agent":{"name":"container_image_jaeger_agent","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-agent:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger agent container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_collector":{"name":"container_image_jaeger_collector","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-collector:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger collector container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_query":{"name":"container_image_jaeger_query","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-query:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger query container image","long_desc":"","tags":[],"see_also":[]},"container_image_keepalived":{"name":"container_image_keepalived","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/keepalived:2.2.4","min":"","max":"","enum_allowed":[],"desc":"Keepalived container image","long_desc":"","tags":[],"see_also":[]},"container_image_loki":{"name":"container_image_loki","type":"str","level":"advanced","flags":0,"default_value":"docker.io/grafana/loki:3.0.0","min":"","max":"","enum_allowed":[],"desc":"Loki container image","long_desc":"","tags":[],"see_also":[]},"container_image_nginx":{"name":"container_image_nginx","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/nginx:sclorg-nginx-126","min":"","max":"","enum_allowed":[],"desc":"Nginx container 
image","long_desc":"","tags":[],"see_also":[]},"container_image_node_exporter":{"name":"container_image_node_exporter","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/node-exporter:v1.9.1","min":"","max":"","enum_allowed":[],"desc":"Node exporter container image","long_desc":"","tags":[],"see_also":[]},"container_image_nvmeof":{"name":"container_image_nvmeof","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/nvmeof:1.5","min":"","max":"","enum_allowed":[],"desc":"Nvmeof container image","long_desc":"","tags":[],"see_also":[]},"container_image_oauth2_proxy":{"name":"container_image_oauth2_proxy","type":"str","level":"advanced","flags":0,"default_value":"quay.io/oauth2-proxy/oauth2-proxy:v7.6.0","min":"","max":"","enum_allowed":[],"desc":"Oauth2 proxy container image","long_desc":"","tags":[],"see_also":[]},"container_image_prometheus":{"name":"container_image_prometheus","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/prometheus:v3.6.0","min":"","max":"","enum_allowed":[],"desc":"Prometheus container image","long_desc":"","tags":[],"see_also":[]},"container_image_promtail":{"name":"container_image_promtail","type":"str","level":"advanced","flags":0,"default_value":"docker.io/grafana/promtail:3.0.0","min":"","max":"","enum_allowed":[],"desc":"Promtail container image","long_desc":"","tags":[],"see_also":[]},"container_image_samba":{"name":"container_image_samba","type":"str","level":"advanced","flags":0,"default_value":"quay.io/samba.org/samba-server:ceph20-centos-amd64","min":"","max":"","enum_allowed":[],"desc":"Samba container image","long_desc":"","tags":[],"see_also":[]},"container_image_samba_metrics":{"name":"container_image_samba_metrics","type":"str","level":"advanced","flags":0,"default_value":"quay.io/samba.org/samba-metrics:ceph20-centos-amd64","min":"","max":"","enum_allowed":[],"desc":"Samba metrics container image","long_desc":"","tags":[],"see_also":[]},"container_image_snmp_gateway":{"name":"container_image_snmp_gateway","type":"str","level":"advanced","flags":0,"default_value":"docker.io/maxwo/snmp-notifier:v1.2.1","min":"","max":"","enum_allowed":[],"desc":"Snmp gateway container image","long_desc":"","tags":[],"see_also":[]},"container_init":{"name":"container_init","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Run podman/docker with `--init`","long_desc":"","tags":[],"see_also":[]},"daemon_cache_timeout":{"name":"daemon_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"seconds to cache service (daemon) inventory","long_desc":"","tags":[],"see_also":[]},"default_cephadm_command_timeout":{"name":"default_cephadm_command_timeout","type":"int","level":"advanced","flags":0,"default_value":"900","min":"","max":"","enum_allowed":[],"desc":"Default timeout applied to cephadm commands run directly on the host (in seconds)","long_desc":"","tags":[],"see_also":[]},"default_registry":{"name":"default_registry","type":"str","level":"advanced","flags":0,"default_value":"quay.io","min":"","max":"","enum_allowed":[],"desc":"Search-registry to which we should normalize unqualified image names. 
This is not the default registry","long_desc":"","tags":[],"see_also":[]},"device_cache_timeout":{"name":"device_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"1800","min":"","max":"","enum_allowed":[],"desc":"seconds to cache device inventory","long_desc":"","tags":[],"see_also":[]},"device_enhanced_scan":{"name":"device_enhanced_scan","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Use libstoragemgmt during device scans","long_desc":"","tags":[],"see_also":[]},"facts_cache_timeout":{"name":"facts_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"seconds to cache host facts data","long_desc":"","tags":[],"see_also":[]},"grafana_dashboards_path":{"name":"grafana_dashboards_path","type":"str","level":"advanced","flags":0,"default_value":"/etc/grafana/dashboards/ceph-dashboard/","min":"","max":"","enum_allowed":[],"desc":"location of dashboards to include in grafana deployments","long_desc":"","tags":[],"see_also":[]},"host_check_interval":{"name":"host_check_interval","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to perform a host check","long_desc":"","tags":[],"see_also":[]},"hw_monitoring":{"name":"hw_monitoring","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Deploy hw monitoring daemon on every host.","long_desc":"","tags":[],"see_also":[]},"inventory_list_all":{"name":"inventory_list_all","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Whether ceph-volume inventory should report more devices (mostly mappers (LVs / mpaths), partitions...)","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_refresh_metadata":{"name":"log_refresh_metadata","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Log all refresh metadata. Includes daemon, device, and host info collected regularly. 
Only has effect if logging at debug level","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"log to the \"cephadm\" cluster log channel\"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"manage_etc_ceph_ceph_conf":{"name":"manage_etc_ceph_ceph_conf","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Manage and own /etc/ceph/ceph.conf on the hosts.","long_desc":"","tags":[],"see_also":[]},"manage_etc_ceph_ceph_conf_hosts":{"name":"manage_etc_ceph_ceph_conf_hosts","type":"str","level":"advanced","flags":0,"default_value":"*","min":"","max":"","enum_allowed":[],"desc":"PlacementSpec describing on which hosts to manage /etc/ceph/ceph.conf","long_desc":"","tags":[],"see_also":[]},"max_count_per_host":{"name":"max_count_per_host","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"max number of daemons per service per host","long_desc":"","tags":[],"see_also":[]},"max_osd_draining_count":{"name":"max_osd_draining_count","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"max number of osds that will be drained simultaneously when osds are removed","long_desc":"","tags":[],"see_also":[]},"migration_current":{"name":"migration_current","type":"int","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"internal - do not modify","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":0,"default_value":"root","min":"","max":"","enum_allowed":["cephadm-package","root"],"desc":"mode for remote execution of cephadm","long_desc":"","tags":[],"see_also":[]},"oob_default_addr":{"name":"oob_default_addr","type":"str","level":"advanced","flags":0,"default_value":"169.254.1.1","min":"","max":"","enum_allowed":[],"desc":"Default address for RedFish API (oob management).","long_desc":"","tags":[],"see_also":[]},"prometheus_alerts_path":{"name":"prometheus_alerts_path","type":"str","level":"advanced","flags":0,"default_value":"/etc/prometheus/ceph/ceph_default_alerts.yml","min":"","max":"","enum_allowed":[],"desc":"location of alerts to include in prometheus deployments","long_desc":"","tags":[],"see_also":[]},"registry_insecure":{"name":"registry_insecure","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Registry is to be considered insecure (no TLS available). Only for development purposes.","long_desc":"","tags":[],"see_also":[]},"registry_password":{"name":"registry_password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Custom repository password. 
Only used for logging into a registry.","long_desc":"","tags":[],"see_also":[]},"registry_url":{"name":"registry_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Registry url for login purposes. This is not the default registry","long_desc":"","tags":[],"see_also":[]},"registry_username":{"name":"registry_username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Custom repository username. Only used for logging into a registry.","long_desc":"","tags":[],"see_also":[]},"secure_monitoring_stack":{"name":"secure_monitoring_stack","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable TLS security for all the monitoring stack daemons","long_desc":"","tags":[],"see_also":[]},"service_discovery_port":{"name":"service_discovery_port","type":"int","level":"advanced","flags":0,"default_value":"8765","min":"","max":"","enum_allowed":[],"desc":"cephadm service discovery port","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssh_config_file":{"name":"ssh_config_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"customized SSH config file to connect to managed hosts","long_desc":"","tags":[],"see_also":[]},"ssh_keepalive_count_max":{"name":"ssh_keepalive_count_max","type":"int","level":"advanced","flags":0,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"How many times ssh connections can fail liveness checks before the host is marked offline","long_desc":"","tags":[],"see_also":[]},"ssh_keepalive_interval":{"name":"ssh_keepalive_interval","type":"int","level":"advanced","flags":0,"default_value":"7","min":"","max":"","enum_allowed":[],"desc":"How often ssh connections are checked for liveness","long_desc":"","tags":[],"see_also":[]},"stray_daemon_check_interval":{"name":"stray_daemon_check_interval","type":"secs","level":"advanced","flags":0,"default_value":"1800","min":"","max":"","enum_allowed":[],"desc":"how frequently cephadm should check for the presence of stray daemons","long_desc":"","tags":[],"see_also":[]},"use_agent":{"name":"use_agent","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Use cephadm agent on each host to gather and send metadata","long_desc":"","tags":[],"see_also":[]},"use_repo_digest":{"name":"use_repo_digest","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Automatically convert image tags to image digest. 
Make sure all daemons use the same image","long_desc":"","tags":[],"see_also":[]},"warn_on_failed_host_check":{"name":"warn_on_failed_host_check","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if the host check fails","long_desc":"","tags":[],"see_also":[]},"warn_on_stray_daemons":{"name":"warn_on_stray_daemons","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if daemons are detected that are not managed by cephadm","long_desc":"","tags":[],"see_also":[]},"warn_on_stray_hosts":{"name":"warn_on_stray_hosts","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if daemons are detected on a host that is not managed by cephadm","long_desc":"","tags":[],"see_also":[]}}},{"name":"crash","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"retain_interval":{"name":"retain_interval","type":"secs","level":"advanced","flags":1,"default_value":"31536000","min":"","max":"","enum_allowed":[],"desc":"how long to retain crashes before pruning them","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"warn_recent_interval":{"name":"warn_recent_interval","type":"secs","level":"advanced","flags":1,"default_value":"1209600","min":"","max":"","enum_allowed":[],"desc":"time interval in which to warn about recent 
crashes","long_desc":"","tags":[],"see_also":[]}}},{"name":"dashboard","can_run":true,"error_string":"","module_options":{"ACCOUNT_LOCKOUT_ATTEMPTS":{"name":"ACCOUNT_LOCKOUT_ATTEMPTS","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ALERTMANAGER_API_HOST":{"name":"ALERTMANAGER_API_HOST","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ALERTMANAGER_API_SSL_VERIFY":{"name":"ALERTMANAGER_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"AUDIT_API_ENABLED":{"name":"AUDIT_API_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"AUDIT_API_LOG_PAYLOAD":{"name":"AUDIT_API_LOG_PAYLOAD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ENABLE_BROWSABLE_API":{"name":"ENABLE_BROWSABLE_API","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_CEPHFS":{"name":"FEATURE_TOGGLE_CEPHFS","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_DASHBOARD":{"name":"FEATURE_TOGGLE_DASHBOARD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_ISCSI":{"name":"FEATURE_TOGGLE_ISCSI","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_MIRRORING":{"name":"FEATURE_TOGGLE_MIRRORING","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_NFS":{"name":"FEATURE_TOGGLE_NFS","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_RBD":{"name":"FEATURE_TOGGLE_RBD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_RGW":{"name":"FEATURE_TOGGLE_RGW","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE":{"name":"GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_PASSWORD":{"name":"GRAFANA_API_PASSWORD","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_SSL_VERIFY":{"name":"GRAFANA_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_URL":{"name":"GRAFANA_API_URL","type":"str","level":"advanced","flags":0
,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_USERNAME":{"name":"GRAFANA_API_USERNAME","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_FRONTEND_API_URL":{"name":"GRAFANA_FRONTEND_API_URL","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_UPDATE_DASHBOARDS":{"name":"GRAFANA_UPDATE_DASHBOARDS","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ISCSI_API_SSL_VERIFICATION":{"name":"ISCSI_API_SSL_VERIFICATION","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ISSUE_TRACKER_API_KEY":{"name":"ISSUE_TRACKER_API_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"MANAGED_BY_CLUSTERS":{"name":"MANAGED_BY_CLUSTERS","type":"str","level":"advanced","flags":0,"default_value":"[]","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"MULTICLUSTER_CONFIG":{"name":"MULTICLUSTER_CONFIG","type":"str","level":"advanced","flags":0,"default_value":"{}","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROMETHEUS_API_HOST":{"name":"PROMETHEUS_API_HOST","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROMETHEUS_API_SSL_VERIFY":{"name":"PROMETHEUS_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROM_ALERT_CREDENTIAL_CACHE_TTL":{"name":"PROM_ALERT_CREDENTIAL_CACHE_TTL","type":"int","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_COMPLEXITY_ENABLED":{"name":"PWD_POLICY_CHECK_COMPLEXITY_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED":{"name":"PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_LENGTH_ENABLED":{"name":"PWD_POLICY_CHECK_LENGTH_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_OLDPWD_ENABLED":{"name":"PWD_POLICY_CHECK_OLDPWD_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED":{"name":"PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED":{"name":"PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min
":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_USERNAME_ENABLED":{"name":"PWD_POLICY_CHECK_USERNAME_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_ENABLED":{"name":"PWD_POLICY_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_EXCLUSION_LIST":{"name":"PWD_POLICY_EXCLUSION_LIST","type":"str","level":"advanced","flags":0,"default_value":"osd,host,dashboard,pool,block,nfs,ceph,monitors,gateway,logs,crush,maps","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_MIN_COMPLEXITY":{"name":"PWD_POLICY_MIN_COMPLEXITY","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_MIN_LENGTH":{"name":"PWD_POLICY_MIN_LENGTH","type":"int","level":"advanced","flags":0,"default_value":"8","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"REST_REQUESTS_TIMEOUT":{"name":"REST_REQUESTS_TIMEOUT","type":"int","level":"advanced","flags":0,"default_value":"45","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_ACCESS_KEY":{"name":"RGW_API_ACCESS_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_ADMIN_RESOURCE":{"name":"RGW_API_ADMIN_RESOURCE","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_SECRET_KEY":{"name":"RGW_API_SECRET_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_SSL_VERIFY":{"name":"RGW_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_HOSTNAME_PER_DAEMON":{"name":"RGW_HOSTNAME_PER_DAEMON","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"UNSAFE_TLS_v1_2":{"name":"UNSAFE_TLS_v1_2","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_SPAN":{"name":"USER_PWD_EXPIRATION_SPAN","type":"int","level":"advanced","flags":0,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_WARNING_1":{"name":"USER_PWD_EXPIRATION_WARNING_1","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_WARNING_2":{"name":"USER_PWD_EXPIRATION_WARNING_2","type":"int","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"cross_origin_url":{"name":"cross_origin_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"crt_file":{"name":"crt_file","type":"str","level":"advanced","flags":0,"default
_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"crypto_caller":{"name":"crypto_caller","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"debug":{"name":"debug","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable/disable debug options","long_desc":"","tags":[],"see_also":[]},"jwt_token_ttl":{"name":"jwt_token_ttl","type":"int","level":"advanced","flags":0,"default_value":"28800","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"key_file":{"name":"key_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"motd":{"name":"motd","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"The message of the 
day","long_desc":"","tags":[],"see_also":[]},"redirect_resolve_ip_addr":{"name":"redirect_resolve_ip_addr","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"::","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"int","level":"advanced","flags":0,"default_value":"8080","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl_server_port":{"name":"ssl_server_port","type":"int","level":"advanced","flags":0,"default_value":"8443","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sso_oauth2":{"name":"sso_oauth2","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_behaviour":{"name":"standby_behaviour","type":"str","level":"advanced","flags":0,"default_value":"redirect","min":"","max":"","enum_allowed":["error","redirect"],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_error_status_code":{"name":"standby_error_status_code","type":"int","level":"advanced","flags":0,"default_value":"500","min":"400","max":"599","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url_prefix":{"name":"url_prefix","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"devicehealth","can_run":true,"error_string":"","module_options":{"enable_monitoring":{"name":"enable_monitoring","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"monitor device health metrics","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"mark_out_threshold":{"name":"mark_out_threshold","type":"secs","level":"advanced","flags":1,"default_value":"2419200","min":"","max":"","enum_allowed":[],"desc":"automatically mark OSD if it may fail before this 
long","long_desc":"","tags":[],"see_also":[]},"pool_name":{"name":"pool_name","type":"str","level":"advanced","flags":1,"default_value":"device_health_metrics","min":"","max":"","enum_allowed":[],"desc":"name of pool in which to store device health metrics","long_desc":"","tags":[],"see_also":[]},"retention_period":{"name":"retention_period","type":"secs","level":"advanced","flags":1,"default_value":"15552000","min":"","max":"","enum_allowed":[],"desc":"how long to retain device health metrics","long_desc":"","tags":[],"see_also":[]},"scrape_frequency":{"name":"scrape_frequency","type":"secs","level":"advanced","flags":1,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"how frequently to scrape device health metrics","long_desc":"","tags":[],"see_also":[]},"self_heal":{"name":"self_heal","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"preemptively heal cluster around devices that may fail","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and check device health","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"warn_threshold":{"name":"warn_threshold","type":"secs","level":"advanced","flags":1,"default_value":"7257600","min":"","max":"","enum_allowed":[],"desc":"raise health warning if OSD may fail before this long","long_desc":"","tags":[],"see_also":[]}}},{"name":"diskprediction_local","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"predict_interval":{"name":"predict_interval","type":"str","level":"advanced","flags":0,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"predictor_model":{"name":"predictor_model","type":"str","level":"advanced","flags":0,"default_value":"prophetstor","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"str","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"influx","can_run":false,"error_string":"influxdb python module not 
found","module_options":{"batch_size":{"name":"batch_size","type":"int","level":"advanced","flags":0,"default_value":"5000","min":"","max":"","enum_allowed":[],"desc":"How big batches of data points should be when sending to InfluxDB.","long_desc":"","tags":[],"see_also":[]},"database":{"name":"database","type":"str","level":"advanced","flags":0,"default_value":"ceph","min":"","max":"","enum_allowed":[],"desc":"InfluxDB database name. You will need to create this database and grant write privileges to the configured username or the username must have admin privileges to create it.","long_desc":"","tags":[],"see_also":[]},"hostname":{"name":"hostname","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"InfluxDB server hostname","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"30","min":"5","max":"","enum_allowed":[],"desc":"Time between reports to InfluxDB. Default 30 seconds.","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"password":{"name":"password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"password of InfluxDB server user","long_desc":"","tags":[],"see_also":[]},"port":{"name":"port","type":"int","level":"advanced","flags":0,"default_value":"8086","min":"","max":"","enum_allowed":[],"desc":"InfluxDB server port","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"str","level":"advanced","flags":0,"default_value":"false","min":"","max":"","enum_allowed":[],"desc":"Use https connection for InfluxDB server. Use \"true\" or \"false\".","long_desc":"","tags":[],"see_also":[]},"threads":{"name":"threads","type":"int","level":"advanced","flags":0,"default_value":"5","min":"1","max":"32","enum_allowed":[],"desc":"How many worker threads should be spawned for sending data to InfluxDB.","long_desc":"","tags":[],"see_also":[]},"username":{"name":"username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"username of InfluxDB server user","long_desc":"","tags":[],"see_also":[]},"verify_ssl":{"name":"verify_ssl","type":"str","level":"advanced","flags":0,"default_value":"true","min":"","max":"","enum_allowed":[],"desc":"Verify https cert for InfluxDB server. 
Use \"true\" or \"false\".","long_desc":"","tags":[],"see_also":[]}}},{"name":"insights","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"iostat","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"k8sevents","can_run":true,"error_string":"","module_options":{"ceph_event_retention_days":{"name":"ceph_event_retention_days","type":"int","level":"advanced","flags":0,"default_value":"7","min":"","max":"","enum_allowed":[],"desc":"Days to hold ceph event information within local cache","long_desc":"","tags":[],"see_also":[]},"config_check_secs":{"name":"config_check_secs","type":"int","level":"advanced","flags":0,"default_value":"10","min":"10","max":"","enum_allowed":[],"desc":"interval (secs) to check for cluster configuration 
changes","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"localpool","can_run":true,"error_string":"","module_options":{"failure_domain":{"name":"failure_domain","type":"str","level":"advanced","flags":1,"default_value":"host","min":"","max":"","enum_allowed":[],"desc":"failure domain for any created local pool","long_desc":"what failure domain we should separate data replicas across.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"min_size":{"name":"min_size","type":"int","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"default min_size for any created local pool","long_desc":"value to set min_size to (unchanged from Ceph's default if this option is not set)","tags":[],"see_also":[]},"num_rep":{"name":"num_rep","type":"int","level":"advanced","flags":1,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"default replica count for any created local pool","long_desc":"","tags":[],"see_also":[]},"pg_num":{"name":"pg_num","type":"int","level":"advanced","flags":1,"default_value":"128","min":"","max":"","enum_allowed":[],"desc":"default pg_num for any created local pool","long_desc":"","tags":[],"see_also":[]},"prefix":{"name":"prefix","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"name prefix for any created local 
pool","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"subtree":{"name":"subtree","type":"str","level":"advanced","flags":1,"default_value":"rack","min":"","max":"","enum_allowed":[],"desc":"CRUSH level for which to create a local pool","long_desc":"which CRUSH subtree type the module should create a pool for.","tags":[],"see_also":[]}}},{"name":"mds_autoscaler","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"mirroring","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"nfs","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":
"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"orchestrator","can_run":true,"error_string":"","module_options":{"fail_fs":{"name":"fail_fs","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Fail filesystem for rapid multi-rank mds upgrade","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"orchestrator":{"name":"orchestrator","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["cephadm","rook","test_orchestrator"],"desc":"Orchestrator backend","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"osd_perf_query","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"osd_support","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[
]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"pg_autoscaler","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"threshold":{"name":"threshold","type":"float","level":"advanced","flags":0,"default_value":"3.0","min":"1.0","max":"","enum_allowed":[],"desc":"scaling threshold","long_desc":"The factor by which the `NEW PG_NUM` must vary from the current`PG_NUM` before being accepted. 
Cannot be less than 1.0","tags":[],"see_also":[]}}},{"name":"progress","can_run":true,"error_string":"","module_options":{"allow_pg_recovery_event":{"name":"allow_pg_recovery_event","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow the module to show pg recovery progress","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_completed_events":{"name":"max_completed_events","type":"int","level":"advanced","flags":1,"default_value":"50","min":"","max":"","enum_allowed":[],"desc":"number of past completed events to remember","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"how long the module is going to sleep","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"prometheus","can_run":true,"error_string":"","module_options":{"cache":{"name":"cache","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"exclude_perf_counters":{"name":"exclude_perf_counters","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Do not include perf-counters in the metrics output","long_desc":"Gathering perf-counters from a single Prometheus exporter can degrade ceph-mgr performance, especially in large clusters. Instead, Ceph-exporter daemons are now used by default for perf-counter gathering. 
This should only be disabled when no ceph-exporters are deployed.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools":{"name":"rbd_stats_pools","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools_refresh_interval":{"name":"rbd_stats_pools_refresh_interval","type":"int","level":"advanced","flags":0,"default_value":"300","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"scrape_interval":{"name":"scrape_interval","type":"float","level":"advanced","flags":0,"default_value":"15.0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"::","min":"","max":"","enum_allowed":[],"desc":"the IPv4 or IPv6 address on which the module listens for HTTP requests","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"int","level":"advanced","flags":1,"default_value":"9283","min":"","max":"","enum_allowed":[],"desc":"the port on which the module listens for HTTP 
requests","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"stale_cache_strategy":{"name":"stale_cache_strategy","type":"str","level":"advanced","flags":0,"default_value":"log","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_behaviour":{"name":"standby_behaviour","type":"str","level":"advanced","flags":1,"default_value":"default","min":"","max":"","enum_allowed":["default","error"],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_error_status_code":{"name":"standby_error_status_code","type":"int","level":"advanced","flags":1,"default_value":"500","min":"400","max":"599","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rbd_support","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_concurrent_snap_create":{"name":"max_concurrent_snap_create","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"mirror_snapshot_schedule":{"name":"mirror_snapshot_schedule","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"trash_purge_schedule":{"name":"trash_purge_schedule","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rgw","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_als
o":[]},"secondary_zone_period_retry_limit":{"name":"secondary_zone_period_retry_limit","type":"int","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"RGW module period update retry limit for secondary site","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rook","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"prometheus_tls_secret_name":{"name":"prometheus_tls_secret_name","type":"str","level":"advanced","flags":0,"default_value":"rook-ceph-prometheus-server-tls","min":"","max":"","enum_allowed":[],"desc":"name of tls secret in k8s for prometheus","long_desc":"","tags":[],"see_also":[]},"secure_monitoring_stack":{"name":"secure_monitoring_stack","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable TLS security for all the monitoring stack daemons","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"storage_class":{"name":"storage_class","type":"str","level":"advanced","flags":0,"default_value":"local","min":"","max":"","enum_allowed":[],"desc":"storage class name for LSO-discovered 
PVs","long_desc":"","tags":[],"see_also":[]}}},{"name":"selftest","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption1":{"name":"roption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption2":{"name":"roption2","type":"str","level":"advanced","flags":0,"default_value":"xyz","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption1":{"name":"rwoption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption2":{"name":"rwoption2","type":"int","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption3":{"name":"rwoption3","type":"float","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption4":{"name":"rwoption4","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption5":{"name":"rwoption5","type":"bool","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption6":{"name":"rwoption6","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption7":{"name":"rwoption7","type":"int","level":"advanced","flags":0,"default_value":"","min":"1","max":"42","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testkey":{"name":"testkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testlkey":{"name":"testlkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testnewline":{"name":"testnewline","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"smb","can_run":true,"error_string":"","module_options":{"internal_store_backend":{"name":"internal_store_backend","type":"str","level":"dev","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"set 
internal store backend. for development and testing only","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"update_orchestration":{"name":"update_orchestration","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"automatically update orchestration when smb resources are changed","long_desc":"","tags":[],"see_also":[]}}},{"name":"snap_schedule","can_run":true,"error_string":"","module_options":{"allow_m_granularity":{"name":"allow_m_granularity","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow minute scheduled snapshots","long_desc":"","tags":[],"see_also":[]},"dump_on_update":{"name":"dump_on_update","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"dump database to debug log on 
update","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"stats","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"status","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telegraf","can_run":true,"error_string":"","module_options":{"address":{"name":"address","type":"str","level":"advanced","flags":0,"default_value":"unixgram:///tmp/telegraf.sock","min":"","max":"","enum_a
llowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"15","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telemetry","can_run":true,"error_string":"","module_options":{"channel_basic":{"name":"channel_basic","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share basic cluster information (size, version)","long_desc":"","tags":[],"see_also":[]},"channel_crash":{"name":"channel_crash","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share metadata about Ceph daemon crashes (version, stack straces, etc)","long_desc":"","tags":[],"see_also":[]},"channel_device":{"name":"channel_device","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share device health metrics (e.g., SMART data, minus potentially identifying info like serial numbers)","long_desc":"","tags":[],"see_also":[]},"channel_ident":{"name":"channel_ident","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Share a user-provided description and/or contact email for the cluster","long_desc":"","tags":[],"see_also":[]},"channel_perf":{"name":"channel_perf","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Share various performance metrics of a 
cluster","long_desc":"","tags":[],"see_also":[]},"contact":{"name":"contact","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"description":{"name":"description","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"device_url":{"name":"device_url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/device","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"int","level":"advanced","flags":0,"default_value":"24","min":"8","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"last_opt_revision":{"name":"last_opt_revision","type":"int","level":"advanced","flags":0,"default_value":"1","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard":{"name":"leaderboard","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard_description":{"name":"leaderboard_description","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"organization":{"name":"organization","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"proxy":{"name":"proxy","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url":{"name":"url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/report","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"test_orchestrator","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"adv
anced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"volumes","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_concurrent_clones":{"name":"max_concurrent_clones","type":"int","level":"advanced","flags":0,"default_value":"4","min":"","max":"","enum_allowed":[],"desc":"Number of asynchronous cloner threads","long_desc":"","tags":[],"see_also":[]},"pause_cloning":{"name":"pause_cloning","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Pause asynchronous cloner threads","long_desc":"","tags":[],"see_also":[]},"pause_purging":{"name":"pause_purging","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Pause asynchronous subvolume purge threads","long_desc":"","tags":[],"see_also":[]},"periodic_async_work":{"name":"periodic_async_work","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Periodically check for async work","long_desc":"","tags":[],"see_also":[]},"snapshot_clone_delay":{"name":"snapshot_clone_delay","type":"int","level":"advanced","flags":0,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"Delay clone begin operation by snapshot_clone_delay seconds","long_desc":"","tags":[],"see_also":[]},"snapshot_clone_no_wait":{"name":"snapshot_clone_no_wait","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Reject subvolume clone request when cloner threads are 
busy","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}}]}],"modules":["cephadm","dashboard","iostat","nfs"],"available_modules":[{"name":"alerts","can_run":true,"error_string":"","module_options":{"interval":{"name":"interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"How frequently to reexamine health status","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"smtp_destination":{"name":"smtp_destination","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Email address to send alerts to, use commas to separate multiple","long_desc":"","tags":[],"see_also":[]},"smtp_from_name":{"name":"smtp_from_name","type":"str","level":"advanced","flags":1,"default_value":"Ceph","min":"","max":"","enum_allowed":[],"desc":"Email From: name","long_desc":"","tags":[],"see_also":[]},"smtp_host":{"name":"smtp_host","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"SMTP server","long_desc":"","tags":[],"see_also":[]},"smtp_password":{"name":"smtp_password","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Password to authenticate with","long_desc":"","tags":[],"see_also":[]},"smtp_port":{"name":"smtp_port","type":"int","level":"advanced","flags":1,"default_value":"465","min":"","max":"","enum_allowed":[],"desc":"SMTP port","long_desc":"","tags":[],"see_also":[]},"smtp_sender":{"name":"smtp_sender","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"SMTP envelope sender","long_desc":"","tags":[],"see_also":[]},"smtp_ssl":{"name":"smtp_ssl","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Use SSL to connect to SMTP server","long_desc":"","tags":[],"see_also":[]},"smtp_user":{"name":"smtp_user","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"User to authenticate 
as","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"balancer","can_run":true,"error_string":"","module_options":{"active":{"name":"active","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"automatically balance PGs across cluster","long_desc":"","tags":[],"see_also":[]},"begin_time":{"name":"begin_time","type":"str","level":"advanced","flags":1,"default_value":"0000","min":"","max":"","enum_allowed":[],"desc":"beginning time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"begin_weekday":{"name":"begin_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"6","enum_allowed":[],"desc":"Restrict automatic balancing to this day of the week or later","long_desc":"0 = Sunday, 1 = Monday, etc.","tags":[],"see_also":[]},"crush_compat_max_iterations":{"name":"crush_compat_max_iterations","type":"uint","level":"advanced","flags":1,"default_value":"25","min":"1","max":"250","enum_allowed":[],"desc":"maximum number of iterations to attempt optimization","long_desc":"","tags":[],"see_also":[]},"crush_compat_metrics":{"name":"crush_compat_metrics","type":"str","level":"advanced","flags":1,"default_value":"pgs,objects,bytes","min":"","max":"","enum_allowed":[],"desc":"metrics with which to calculate OSD utilization","long_desc":"Value is a list of one or more of \"pgs\", \"objects\", or \"bytes\", and indicates which metrics to use to balance utilization.","tags":[],"see_also":[]},"crush_compat_step":{"name":"crush_compat_step","type":"float","level":"advanced","flags":1,"default_value":"0.5","min":"0.001","max":"0.999","enum_allowed":[],"desc":"aggressiveness of optimization","long_desc":".99 is very aggressive, .01 is less aggressive","tags":[],"see_also":[]},"end_time":{"name":"end_time","type":"str","level":"advanced","flags":1,"default_value":"2359","min":"","max":"","enum_allowed":[],"desc":"ending time of day to automatically balance","long_desc":"This is a time of day in the format HHMM.","tags":[],"see_also":[]},"end_weekday":{"name":"end_weekday","type":"uint","level":"advanced","flags":1,"default_value":"0","min":"0","max":"6","enum_allowed":[],"desc":"Restrict automatic balancing to days of the week earlier than this","long_desc":"0 = Sunday, 1 = Monday, 
etc.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"min_score":{"name":"min_score","type":"float","level":"advanced","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"minimum score, below which no optimization is attempted","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":1,"default_value":"upmap","min":"","max":"","enum_allowed":["crush-compat","none","read","upmap","upmap-read"],"desc":"Balancer mode","long_desc":"","tags":[],"see_also":[]},"pool_ids":{"name":"pool_ids","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"pools which the automatic balancing will be limited to","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and attempt optimization","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"update_pg_upmap_activity":{"name":"update_pg_upmap_activity","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Updates pg_upmap activity stats to be used in `balancer status detail`","long_desc":"","tags":[],"see_also":[]},"upmap_max_deviation":{"name":"upmap_max_deviation","type":"int","level":"advanced","flags":1,"default_value":"5","min":"1","max":"","enum_allowed":[],"desc":"deviation below which no optimization is attempted","long_desc":"If the number of PGs are within this count then no optimization is attempted","tags":[],"see_also":[]},"upmap_max_optimizations":{"name":"upmap_max_optimizations","type":"uint","level":"advanced","flags":1,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"maximum upmap optimizations to make per attempt","long_desc":"","tags":[],"see_also":[]}}},{"name":"cephadm","can_run":true,"error_string":"","module_options":{"agent_down_multiplier":{"name":"agent_down_multiplier","type":"float","level":"advanced","flags":0,"default_value":"3.0","min":"","max":"","enum_allowed":[],"desc":"Multiplied by agent refresh rate to calculate how long agent must not report before being marked down","long_desc":"","tags":[],"see_also":[]},"agent_refresh_rate":{"name":"agent_refresh_rate","type":"secs","level":"advanced","flags":0,"default_value":"20","min":"","max":"","enum_allowed":[],"desc":"How often agent on each host will try to gather and send 
metadata","long_desc":"","tags":[],"see_also":[]},"agent_starting_port":{"name":"agent_starting_port","type":"int","level":"advanced","flags":0,"default_value":"4721","min":"","max":"","enum_allowed":[],"desc":"First port agent will try to bind to (will also try up to next 1000 subsequent ports if blocked)","long_desc":"","tags":[],"see_also":[]},"allow_ptrace":{"name":"allow_ptrace","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow SYS_PTRACE capability on ceph containers","long_desc":"The SYS_PTRACE capability is needed to attach to a process with gdb or strace. Enabling this options can allow debugging daemons that encounter problems at runtime.","tags":[],"see_also":[]},"autotune_interval":{"name":"autotune_interval","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to autotune daemon memory","long_desc":"","tags":[],"see_also":[]},"autotune_memory_target_ratio":{"name":"autotune_memory_target_ratio","type":"float","level":"advanced","flags":0,"default_value":"0.7","min":"","max":"","enum_allowed":[],"desc":"ratio of total system memory to divide amongst autotuned daemons","long_desc":"","tags":[],"see_also":[]},"cephadm_log_destination":{"name":"cephadm_log_destination","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":["file","file,syslog","syslog"],"desc":"Destination for cephadm command's persistent logging","long_desc":"","tags":[],"see_also":[]},"certificate_automated_rotation_enabled":{"name":"certificate_automated_rotation_enabled","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"This flag controls whether cephadm automatically rotates certificates upon expiration.","long_desc":"","tags":[],"see_also":[]},"certificate_check_debug_mode":{"name":"certificate_check_debug_mode","type":"bool","level":"dev","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"FOR TESTING ONLY: This flag forces the certificate check instead of waiting for certificate_check_period.","long_desc":"","tags":[],"see_also":[]},"certificate_check_period":{"name":"certificate_check_period","type":"int","level":"advanced","flags":0,"default_value":"1","min":"0","max":"30","enum_allowed":[],"desc":"Specifies how often (in days) the certificate should be checked for validity.","long_desc":"","tags":[],"see_also":[]},"certificate_duration_days":{"name":"certificate_duration_days","type":"int","level":"advanced","flags":0,"default_value":"1095","min":"90","max":"3650","enum_allowed":[],"desc":"Specifies the duration of self certificates generated and signed by cephadm root CA","long_desc":"","tags":[],"see_also":[]},"certificate_renewal_threshold_days":{"name":"certificate_renewal_threshold_days","type":"int","level":"advanced","flags":0,"default_value":"30","min":"10","max":"90","enum_allowed":[],"desc":"Specifies the lead time in days to initiate certificate renewal before expiration.","long_desc":"","tags":[],"see_also":[]},"cgroups_split":{"name":"cgroups_split","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Pass --cgroups=split when cephadm creates containers (currently podman 
only)","long_desc":"","tags":[],"see_also":[]},"config_checks_enabled":{"name":"config_checks_enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable or disable the cephadm configuration analysis","long_desc":"","tags":[],"see_also":[]},"config_dashboard":{"name":"config_dashboard","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"manage configs like API endpoints in Dashboard.","long_desc":"","tags":[],"see_also":[]},"container_image_alertmanager":{"name":"container_image_alertmanager","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/alertmanager:v0.28.1","min":"","max":"","enum_allowed":[],"desc":"Alertmanager container image","long_desc":"","tags":[],"see_also":[]},"container_image_base":{"name":"container_image_base","type":"str","level":"advanced","flags":1,"default_value":"quay.io/ceph/ceph","min":"","max":"","enum_allowed":[],"desc":"Container image name, without the tag","long_desc":"","tags":[],"see_also":[]},"container_image_elasticsearch":{"name":"container_image_elasticsearch","type":"str","level":"advanced","flags":0,"default_value":"quay.io/omrizeneva/elasticsearch:6.8.23","min":"","max":"","enum_allowed":[],"desc":"Elasticsearch container image","long_desc":"","tags":[],"see_also":[]},"container_image_grafana":{"name":"container_image_grafana","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/grafana:12.3.1","min":"","max":"","enum_allowed":[],"desc":"Grafana container image","long_desc":"","tags":[],"see_also":[]},"container_image_haproxy":{"name":"container_image_haproxy","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/haproxy:2.3","min":"","max":"","enum_allowed":[],"desc":"Haproxy container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_agent":{"name":"container_image_jaeger_agent","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-agent:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger agent container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_collector":{"name":"container_image_jaeger_collector","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-collector:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger collector container image","long_desc":"","tags":[],"see_also":[]},"container_image_jaeger_query":{"name":"container_image_jaeger_query","type":"str","level":"advanced","flags":0,"default_value":"quay.io/jaegertracing/jaeger-query:1.29","min":"","max":"","enum_allowed":[],"desc":"Jaeger query container image","long_desc":"","tags":[],"see_also":[]},"container_image_keepalived":{"name":"container_image_keepalived","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/keepalived:2.2.4","min":"","max":"","enum_allowed":[],"desc":"Keepalived container image","long_desc":"","tags":[],"see_also":[]},"container_image_loki":{"name":"container_image_loki","type":"str","level":"advanced","flags":0,"default_value":"docker.io/grafana/loki:3.0.0","min":"","max":"","enum_allowed":[],"desc":"Loki container image","long_desc":"","tags":[],"see_also":[]},"container_image_nginx":{"name":"container_image_nginx","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/nginx:sclorg-nginx-126","min":"","max":"","enum_allowed":[],"desc":"Nginx container 
image","long_desc":"","tags":[],"see_also":[]},"container_image_node_exporter":{"name":"container_image_node_exporter","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/node-exporter:v1.9.1","min":"","max":"","enum_allowed":[],"desc":"Node exporter container image","long_desc":"","tags":[],"see_also":[]},"container_image_nvmeof":{"name":"container_image_nvmeof","type":"str","level":"advanced","flags":0,"default_value":"quay.io/ceph/nvmeof:1.5","min":"","max":"","enum_allowed":[],"desc":"Nvmeof container image","long_desc":"","tags":[],"see_also":[]},"container_image_oauth2_proxy":{"name":"container_image_oauth2_proxy","type":"str","level":"advanced","flags":0,"default_value":"quay.io/oauth2-proxy/oauth2-proxy:v7.6.0","min":"","max":"","enum_allowed":[],"desc":"Oauth2 proxy container image","long_desc":"","tags":[],"see_also":[]},"container_image_prometheus":{"name":"container_image_prometheus","type":"str","level":"advanced","flags":0,"default_value":"quay.io/prometheus/prometheus:v3.6.0","min":"","max":"","enum_allowed":[],"desc":"Prometheus container image","long_desc":"","tags":[],"see_also":[]},"container_image_promtail":{"name":"container_image_promtail","type":"str","level":"advanced","flags":0,"default_value":"docker.io/grafana/promtail:3.0.0","min":"","max":"","enum_allowed":[],"desc":"Promtail container image","long_desc":"","tags":[],"see_also":[]},"container_image_samba":{"name":"container_image_samba","type":"str","level":"advanced","flags":0,"default_value":"quay.io/samba.org/samba-server:ceph20-centos-amd64","min":"","max":"","enum_allowed":[],"desc":"Samba container image","long_desc":"","tags":[],"see_also":[]},"container_image_samba_metrics":{"name":"container_image_samba_metrics","type":"str","level":"advanced","flags":0,"default_value":"quay.io/samba.org/samba-metrics:ceph20-centos-amd64","min":"","max":"","enum_allowed":[],"desc":"Samba metrics container image","long_desc":"","tags":[],"see_also":[]},"container_image_snmp_gateway":{"name":"container_image_snmp_gateway","type":"str","level":"advanced","flags":0,"default_value":"docker.io/maxwo/snmp-notifier:v1.2.1","min":"","max":"","enum_allowed":[],"desc":"Snmp gateway container image","long_desc":"","tags":[],"see_also":[]},"container_init":{"name":"container_init","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Run podman/docker with `--init`","long_desc":"","tags":[],"see_also":[]},"daemon_cache_timeout":{"name":"daemon_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"seconds to cache service (daemon) inventory","long_desc":"","tags":[],"see_also":[]},"default_cephadm_command_timeout":{"name":"default_cephadm_command_timeout","type":"int","level":"advanced","flags":0,"default_value":"900","min":"","max":"","enum_allowed":[],"desc":"Default timeout applied to cephadm commands run directly on the host (in seconds)","long_desc":"","tags":[],"see_also":[]},"default_registry":{"name":"default_registry","type":"str","level":"advanced","flags":0,"default_value":"quay.io","min":"","max":"","enum_allowed":[],"desc":"Search-registry to which we should normalize unqualified image names. 
This is not the default registry","long_desc":"","tags":[],"see_also":[]},"device_cache_timeout":{"name":"device_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"1800","min":"","max":"","enum_allowed":[],"desc":"seconds to cache device inventory","long_desc":"","tags":[],"see_also":[]},"device_enhanced_scan":{"name":"device_enhanced_scan","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Use libstoragemgmt during device scans","long_desc":"","tags":[],"see_also":[]},"facts_cache_timeout":{"name":"facts_cache_timeout","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"seconds to cache host facts data","long_desc":"","tags":[],"see_also":[]},"grafana_dashboards_path":{"name":"grafana_dashboards_path","type":"str","level":"advanced","flags":0,"default_value":"/etc/grafana/dashboards/ceph-dashboard/","min":"","max":"","enum_allowed":[],"desc":"location of dashboards to include in grafana deployments","long_desc":"","tags":[],"see_also":[]},"host_check_interval":{"name":"host_check_interval","type":"secs","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to perform a host check","long_desc":"","tags":[],"see_also":[]},"hw_monitoring":{"name":"hw_monitoring","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Deploy hw monitoring daemon on every host.","long_desc":"","tags":[],"see_also":[]},"inventory_list_all":{"name":"inventory_list_all","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Whether ceph-volume inventory should report more devices (mostly mappers (LVs / mpaths), partitions...)","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_refresh_metadata":{"name":"log_refresh_metadata","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Log all refresh metadata. Includes daemon, device, and host info collected regularly. 
Only has effect if logging at debug level","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"log to the \"cephadm\" cluster log channel","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"manage_etc_ceph_ceph_conf":{"name":"manage_etc_ceph_ceph_conf","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Manage and own /etc/ceph/ceph.conf on the hosts.","long_desc":"","tags":[],"see_also":[]},"manage_etc_ceph_ceph_conf_hosts":{"name":"manage_etc_ceph_ceph_conf_hosts","type":"str","level":"advanced","flags":0,"default_value":"*","min":"","max":"","enum_allowed":[],"desc":"PlacementSpec describing on which hosts to manage /etc/ceph/ceph.conf","long_desc":"","tags":[],"see_also":[]},"max_count_per_host":{"name":"max_count_per_host","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"max number of daemons per service per host","long_desc":"","tags":[],"see_also":[]},"max_osd_draining_count":{"name":"max_osd_draining_count","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"max number of osds that will be drained simultaneously when osds are removed","long_desc":"","tags":[],"see_also":[]},"migration_current":{"name":"migration_current","type":"int","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"internal - do not modify","long_desc":"","tags":[],"see_also":[]},"mode":{"name":"mode","type":"str","level":"advanced","flags":0,"default_value":"root","min":"","max":"","enum_allowed":["cephadm-package","root"],"desc":"mode for remote execution of cephadm","long_desc":"","tags":[],"see_also":[]},"oob_default_addr":{"name":"oob_default_addr","type":"str","level":"advanced","flags":0,"default_value":"169.254.1.1","min":"","max":"","enum_allowed":[],"desc":"Default address for RedFish API (oob management).","long_desc":"","tags":[],"see_also":[]},"prometheus_alerts_path":{"name":"prometheus_alerts_path","type":"str","level":"advanced","flags":0,"default_value":"/etc/prometheus/ceph/ceph_default_alerts.yml","min":"","max":"","enum_allowed":[],"desc":"location of alerts to include in prometheus deployments","long_desc":"","tags":[],"see_also":[]},"registry_insecure":{"name":"registry_insecure","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Registry is to be considered insecure (no TLS available). Only for development purposes.","long_desc":"","tags":[],"see_also":[]},"registry_password":{"name":"registry_password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Custom repository password. 
Only used for logging into a registry.","long_desc":"","tags":[],"see_also":[]},"registry_url":{"name":"registry_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Registry url for login purposes. This is not the default registry","long_desc":"","tags":[],"see_also":[]},"registry_username":{"name":"registry_username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"Custom repository username. Only used for logging into a registry.","long_desc":"","tags":[],"see_also":[]},"secure_monitoring_stack":{"name":"secure_monitoring_stack","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable TLS security for all the monitoring stack daemons","long_desc":"","tags":[],"see_also":[]},"service_discovery_port":{"name":"service_discovery_port","type":"int","level":"advanced","flags":0,"default_value":"8765","min":"","max":"","enum_allowed":[],"desc":"cephadm service discovery port","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssh_config_file":{"name":"ssh_config_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"customized SSH config file to connect to managed hosts","long_desc":"","tags":[],"see_also":[]},"ssh_keepalive_count_max":{"name":"ssh_keepalive_count_max","type":"int","level":"advanced","flags":0,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"How many times ssh connections can fail liveness checks before the host is marked offline","long_desc":"","tags":[],"see_also":[]},"ssh_keepalive_interval":{"name":"ssh_keepalive_interval","type":"int","level":"advanced","flags":0,"default_value":"7","min":"","max":"","enum_allowed":[],"desc":"How often ssh connections are checked for liveness","long_desc":"","tags":[],"see_also":[]},"stray_daemon_check_interval":{"name":"stray_daemon_check_interval","type":"secs","level":"advanced","flags":0,"default_value":"1800","min":"","max":"","enum_allowed":[],"desc":"how frequently cephadm should check for the presence of stray daemons","long_desc":"","tags":[],"see_also":[]},"use_agent":{"name":"use_agent","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Use cephadm agent on each host to gather and send metadata","long_desc":"","tags":[],"see_also":[]},"use_repo_digest":{"name":"use_repo_digest","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Automatically convert image tags to image digest. 
Make sure all daemons use the same image","long_desc":"","tags":[],"see_also":[]},"warn_on_failed_host_check":{"name":"warn_on_failed_host_check","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if the host check fails","long_desc":"","tags":[],"see_also":[]},"warn_on_stray_daemons":{"name":"warn_on_stray_daemons","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if daemons are detected that are not managed by cephadm","long_desc":"","tags":[],"see_also":[]},"warn_on_stray_hosts":{"name":"warn_on_stray_hosts","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"raise a health warning if daemons are detected on a host that is not managed by cephadm","long_desc":"","tags":[],"see_also":[]}}},{"name":"crash","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"retain_interval":{"name":"retain_interval","type":"secs","level":"advanced","flags":1,"default_value":"31536000","min":"","max":"","enum_allowed":[],"desc":"how long to retain crashes before pruning them","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"warn_recent_interval":{"name":"warn_recent_interval","type":"secs","level":"advanced","flags":1,"default_value":"1209600","min":"","max":"","enum_allowed":[],"desc":"time interval in which to warn about recent 
crashes","long_desc":"","tags":[],"see_also":[]}}},{"name":"dashboard","can_run":true,"error_string":"","module_options":{"ACCOUNT_LOCKOUT_ATTEMPTS":{"name":"ACCOUNT_LOCKOUT_ATTEMPTS","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ALERTMANAGER_API_HOST":{"name":"ALERTMANAGER_API_HOST","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ALERTMANAGER_API_SSL_VERIFY":{"name":"ALERTMANAGER_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"AUDIT_API_ENABLED":{"name":"AUDIT_API_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"AUDIT_API_LOG_PAYLOAD":{"name":"AUDIT_API_LOG_PAYLOAD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ENABLE_BROWSABLE_API":{"name":"ENABLE_BROWSABLE_API","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_CEPHFS":{"name":"FEATURE_TOGGLE_CEPHFS","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_DASHBOARD":{"name":"FEATURE_TOGGLE_DASHBOARD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_ISCSI":{"name":"FEATURE_TOGGLE_ISCSI","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_MIRRORING":{"name":"FEATURE_TOGGLE_MIRRORING","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_NFS":{"name":"FEATURE_TOGGLE_NFS","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_RBD":{"name":"FEATURE_TOGGLE_RBD","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"FEATURE_TOGGLE_RGW":{"name":"FEATURE_TOGGLE_RGW","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE":{"name":"GANESHA_CLUSTERS_RADOS_POOL_NAMESPACE","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_PASSWORD":{"name":"GRAFANA_API_PASSWORD","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_SSL_VERIFY":{"name":"GRAFANA_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_URL":{"name":"GRAFANA_API_URL","type":"str","level":"advanced","flags":0
,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_API_USERNAME":{"name":"GRAFANA_API_USERNAME","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_FRONTEND_API_URL":{"name":"GRAFANA_FRONTEND_API_URL","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"GRAFANA_UPDATE_DASHBOARDS":{"name":"GRAFANA_UPDATE_DASHBOARDS","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ISCSI_API_SSL_VERIFICATION":{"name":"ISCSI_API_SSL_VERIFICATION","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ISSUE_TRACKER_API_KEY":{"name":"ISSUE_TRACKER_API_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"MANAGED_BY_CLUSTERS":{"name":"MANAGED_BY_CLUSTERS","type":"str","level":"advanced","flags":0,"default_value":"[]","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"MULTICLUSTER_CONFIG":{"name":"MULTICLUSTER_CONFIG","type":"str","level":"advanced","flags":0,"default_value":"{}","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROMETHEUS_API_HOST":{"name":"PROMETHEUS_API_HOST","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROMETHEUS_API_SSL_VERIFY":{"name":"PROMETHEUS_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PROM_ALERT_CREDENTIAL_CACHE_TTL":{"name":"PROM_ALERT_CREDENTIAL_CACHE_TTL","type":"int","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_COMPLEXITY_ENABLED":{"name":"PWD_POLICY_CHECK_COMPLEXITY_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED":{"name":"PWD_POLICY_CHECK_EXCLUSION_LIST_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_LENGTH_ENABLED":{"name":"PWD_POLICY_CHECK_LENGTH_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_OLDPWD_ENABLED":{"name":"PWD_POLICY_CHECK_OLDPWD_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED":{"name":"PWD_POLICY_CHECK_REPETITIVE_CHARS_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED":{"name":"PWD_POLICY_CHECK_SEQUENTIAL_CHARS_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min
":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_CHECK_USERNAME_ENABLED":{"name":"PWD_POLICY_CHECK_USERNAME_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_ENABLED":{"name":"PWD_POLICY_ENABLED","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_EXCLUSION_LIST":{"name":"PWD_POLICY_EXCLUSION_LIST","type":"str","level":"advanced","flags":0,"default_value":"osd,host,dashboard,pool,block,nfs,ceph,monitors,gateway,logs,crush,maps","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_MIN_COMPLEXITY":{"name":"PWD_POLICY_MIN_COMPLEXITY","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"PWD_POLICY_MIN_LENGTH":{"name":"PWD_POLICY_MIN_LENGTH","type":"int","level":"advanced","flags":0,"default_value":"8","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"REST_REQUESTS_TIMEOUT":{"name":"REST_REQUESTS_TIMEOUT","type":"int","level":"advanced","flags":0,"default_value":"45","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_ACCESS_KEY":{"name":"RGW_API_ACCESS_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_ADMIN_RESOURCE":{"name":"RGW_API_ADMIN_RESOURCE","type":"str","level":"advanced","flags":0,"default_value":"admin","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_SECRET_KEY":{"name":"RGW_API_SECRET_KEY","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_API_SSL_VERIFY":{"name":"RGW_API_SSL_VERIFY","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"RGW_HOSTNAME_PER_DAEMON":{"name":"RGW_HOSTNAME_PER_DAEMON","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"UNSAFE_TLS_v1_2":{"name":"UNSAFE_TLS_v1_2","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_SPAN":{"name":"USER_PWD_EXPIRATION_SPAN","type":"int","level":"advanced","flags":0,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_WARNING_1":{"name":"USER_PWD_EXPIRATION_WARNING_1","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"USER_PWD_EXPIRATION_WARNING_2":{"name":"USER_PWD_EXPIRATION_WARNING_2","type":"int","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"cross_origin_url":{"name":"cross_origin_url","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"crt_file":{"name":"crt_file","type":"str","level":"advanced","flags":0,"default
_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"crypto_caller":{"name":"crypto_caller","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"debug":{"name":"debug","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable/disable debug options","long_desc":"","tags":[],"see_also":[]},"jwt_token_ttl":{"name":"jwt_token_ttl","type":"int","level":"advanced","flags":0,"default_value":"28800","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"key_file":{"name":"key_file","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"motd":{"name":"motd","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"The message of the 
day","long_desc":"","tags":[],"see_also":[]},"redirect_resolve_ip_addr":{"name":"redirect_resolve_ip_addr","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"::","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"int","level":"advanced","flags":0,"default_value":"8080","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl_server_port":{"name":"ssl_server_port","type":"int","level":"advanced","flags":0,"default_value":"8443","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sso_oauth2":{"name":"sso_oauth2","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_behaviour":{"name":"standby_behaviour","type":"str","level":"advanced","flags":0,"default_value":"redirect","min":"","max":"","enum_allowed":["error","redirect"],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_error_status_code":{"name":"standby_error_status_code","type":"int","level":"advanced","flags":0,"default_value":"500","min":"400","max":"599","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url_prefix":{"name":"url_prefix","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"devicehealth","can_run":true,"error_string":"","module_options":{"enable_monitoring":{"name":"enable_monitoring","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"monitor device health metrics","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"mark_out_threshold":{"name":"mark_out_threshold","type":"secs","level":"advanced","flags":1,"default_value":"2419200","min":"","max":"","enum_allowed":[],"desc":"automatically mark OSD if it may fail before this 
long","long_desc":"","tags":[],"see_also":[]},"pool_name":{"name":"pool_name","type":"str","level":"advanced","flags":1,"default_value":"device_health_metrics","min":"","max":"","enum_allowed":[],"desc":"name of pool in which to store device health metrics","long_desc":"","tags":[],"see_also":[]},"retention_period":{"name":"retention_period","type":"secs","level":"advanced","flags":1,"default_value":"15552000","min":"","max":"","enum_allowed":[],"desc":"how long to retain device health metrics","long_desc":"","tags":[],"see_also":[]},"scrape_frequency":{"name":"scrape_frequency","type":"secs","level":"advanced","flags":1,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"how frequently to scrape device health metrics","long_desc":"","tags":[],"see_also":[]},"self_heal":{"name":"self_heal","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"preemptively heal cluster around devices that may fail","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"how frequently to wake up and check device health","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"warn_threshold":{"name":"warn_threshold","type":"secs","level":"advanced","flags":1,"default_value":"7257600","min":"","max":"","enum_allowed":[],"desc":"raise health warning if OSD may fail before this long","long_desc":"","tags":[],"see_also":[]}}},{"name":"diskprediction_local","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"predict_interval":{"name":"predict_interval","type":"str","level":"advanced","flags":0,"default_value":"86400","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"predictor_model":{"name":"predictor_model","type":"str","level":"advanced","flags":0,"default_value":"prophetstor","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"str","level":"advanced","flags":0,"default_value":"600","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"influx","can_run":false,"error_string":"influxdb python module not 
found","module_options":{"batch_size":{"name":"batch_size","type":"int","level":"advanced","flags":0,"default_value":"5000","min":"","max":"","enum_allowed":[],"desc":"How big batches of data points should be when sending to InfluxDB.","long_desc":"","tags":[],"see_also":[]},"database":{"name":"database","type":"str","level":"advanced","flags":0,"default_value":"ceph","min":"","max":"","enum_allowed":[],"desc":"InfluxDB database name. You will need to create this database and grant write privileges to the configured username or the username must have admin privileges to create it.","long_desc":"","tags":[],"see_also":[]},"hostname":{"name":"hostname","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"InfluxDB server hostname","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"30","min":"5","max":"","enum_allowed":[],"desc":"Time between reports to InfluxDB. Default 30 seconds.","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"password":{"name":"password","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"password of InfluxDB server user","long_desc":"","tags":[],"see_also":[]},"port":{"name":"port","type":"int","level":"advanced","flags":0,"default_value":"8086","min":"","max":"","enum_allowed":[],"desc":"InfluxDB server port","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"ssl":{"name":"ssl","type":"str","level":"advanced","flags":0,"default_value":"false","min":"","max":"","enum_allowed":[],"desc":"Use https connection for InfluxDB server. Use \"true\" or \"false\".","long_desc":"","tags":[],"see_also":[]},"threads":{"name":"threads","type":"int","level":"advanced","flags":0,"default_value":"5","min":"1","max":"32","enum_allowed":[],"desc":"How many worker threads should be spawned for sending data to InfluxDB.","long_desc":"","tags":[],"see_also":[]},"username":{"name":"username","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"username of InfluxDB server user","long_desc":"","tags":[],"see_also":[]},"verify_ssl":{"name":"verify_ssl","type":"str","level":"advanced","flags":0,"default_value":"true","min":"","max":"","enum_allowed":[],"desc":"Verify https cert for InfluxDB server. 
Use \"true\" or \"false\".","long_desc":"","tags":[],"see_also":[]}}},{"name":"insights","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"iostat","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"k8sevents","can_run":true,"error_string":"","module_options":{"ceph_event_retention_days":{"name":"ceph_event_retention_days","type":"int","level":"advanced","flags":0,"default_value":"7","min":"","max":"","enum_allowed":[],"desc":"Days to hold ceph event information within local cache","long_desc":"","tags":[],"see_also":[]},"config_check_secs":{"name":"config_check_secs","type":"int","level":"advanced","flags":0,"default_value":"10","min":"10","max":"","enum_allowed":[],"desc":"interval (secs) to check for cluster configuration 
changes","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"localpool","can_run":true,"error_string":"","module_options":{"failure_domain":{"name":"failure_domain","type":"str","level":"advanced","flags":1,"default_value":"host","min":"","max":"","enum_allowed":[],"desc":"failure domain for any created local pool","long_desc":"what failure domain we should separate data replicas across.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"min_size":{"name":"min_size","type":"int","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"default min_size for any created local pool","long_desc":"value to set min_size to (unchanged from Ceph's default if this option is not set)","tags":[],"see_also":[]},"num_rep":{"name":"num_rep","type":"int","level":"advanced","flags":1,"default_value":"3","min":"","max":"","enum_allowed":[],"desc":"default replica count for any created local pool","long_desc":"","tags":[],"see_also":[]},"pg_num":{"name":"pg_num","type":"int","level":"advanced","flags":1,"default_value":"128","min":"","max":"","enum_allowed":[],"desc":"default pg_num for any created local pool","long_desc":"","tags":[],"see_also":[]},"prefix":{"name":"prefix","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"name prefix for any created local 
pool","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"subtree":{"name":"subtree","type":"str","level":"advanced","flags":1,"default_value":"rack","min":"","max":"","enum_allowed":[],"desc":"CRUSH level for which to create a local pool","long_desc":"which CRUSH subtree type the module should create a pool for.","tags":[],"see_also":[]}}},{"name":"mds_autoscaler","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"mirroring","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"nfs","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":
"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"orchestrator","can_run":true,"error_string":"","module_options":{"fail_fs":{"name":"fail_fs","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Fail filesystem for rapid multi-rank mds upgrade","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"orchestrator":{"name":"orchestrator","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["cephadm","rook","test_orchestrator"],"desc":"Orchestrator backend","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"osd_perf_query","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"osd_support","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[
]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"pg_autoscaler","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":0,"default_value":"60","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"threshold":{"name":"threshold","type":"float","level":"advanced","flags":0,"default_value":"3.0","min":"1.0","max":"","enum_allowed":[],"desc":"scaling threshold","long_desc":"The factor by which the `NEW PG_NUM` must vary from the current`PG_NUM` before being accepted. 
Cannot be less than 1.0","tags":[],"see_also":[]}}},{"name":"progress","can_run":true,"error_string":"","module_options":{"allow_pg_recovery_event":{"name":"allow_pg_recovery_event","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow the module to show pg recovery progress","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_completed_events":{"name":"max_completed_events","type":"int","level":"advanced","flags":1,"default_value":"50","min":"","max":"","enum_allowed":[],"desc":"number of past completed events to remember","long_desc":"","tags":[],"see_also":[]},"sleep_interval":{"name":"sleep_interval","type":"secs","level":"advanced","flags":1,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"how long the module is going to sleep","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"prometheus","can_run":true,"error_string":"","module_options":{"cache":{"name":"cache","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"exclude_perf_counters":{"name":"exclude_perf_counters","type":"bool","level":"advanced","flags":1,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Do not include perf-counters in the metrics output","long_desc":"Gathering perf-counters from a single Prometheus exporter can degrade ceph-mgr performance, especially in large clusters. Instead, Ceph-exporter daemons are now used by default for perf-counter gathering. 
This should only be disabled when no ceph-exporters are deployed.","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools":{"name":"rbd_stats_pools","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rbd_stats_pools_refresh_interval":{"name":"rbd_stats_pools_refresh_interval","type":"int","level":"advanced","flags":0,"default_value":"300","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"scrape_interval":{"name":"scrape_interval","type":"float","level":"advanced","flags":0,"default_value":"15.0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"server_addr":{"name":"server_addr","type":"str","level":"advanced","flags":0,"default_value":"::","min":"","max":"","enum_allowed":[],"desc":"the IPv4 or IPv6 address on which the module listens for HTTP requests","long_desc":"","tags":[],"see_also":[]},"server_port":{"name":"server_port","type":"int","level":"advanced","flags":1,"default_value":"9283","min":"","max":"","enum_allowed":[],"desc":"the port on which the module listens for HTTP 
requests","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"stale_cache_strategy":{"name":"stale_cache_strategy","type":"str","level":"advanced","flags":0,"default_value":"log","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_behaviour":{"name":"standby_behaviour","type":"str","level":"advanced","flags":1,"default_value":"default","min":"","max":"","enum_allowed":["default","error"],"desc":"","long_desc":"","tags":[],"see_also":[]},"standby_error_status_code":{"name":"standby_error_status_code","type":"int","level":"advanced","flags":1,"default_value":"500","min":"400","max":"599","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rbd_support","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_concurrent_snap_create":{"name":"max_concurrent_snap_create","type":"int","level":"advanced","flags":0,"default_value":"10","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"mirror_snapshot_schedule":{"name":"mirror_snapshot_schedule","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"trash_purge_schedule":{"name":"trash_purge_schedule","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rgw","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_als
o":[]},"secondary_zone_period_retry_limit":{"name":"secondary_zone_period_retry_limit","type":"int","level":"advanced","flags":0,"default_value":"5","min":"","max":"","enum_allowed":[],"desc":"RGW module period update retry limit for secondary site","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"rook","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"prometheus_tls_secret_name":{"name":"prometheus_tls_secret_name","type":"str","level":"advanced","flags":0,"default_value":"rook-ceph-prometheus-server-tls","min":"","max":"","enum_allowed":[],"desc":"name of tls secret in k8s for prometheus","long_desc":"","tags":[],"see_also":[]},"secure_monitoring_stack":{"name":"secure_monitoring_stack","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Enable TLS security for all the monitoring stack daemons","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"storage_class":{"name":"storage_class","type":"str","level":"advanced","flags":0,"default_value":"local","min":"","max":"","enum_allowed":[],"desc":"storage class name for LSO-discovered 
PVs","long_desc":"","tags":[],"see_also":[]}}},{"name":"selftest","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption1":{"name":"roption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"roption2":{"name":"roption2","type":"str","level":"advanced","flags":0,"default_value":"xyz","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption1":{"name":"rwoption1","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption2":{"name":"rwoption2","type":"int","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption3":{"name":"rwoption3","type":"float","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption4":{"name":"rwoption4","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption5":{"name":"rwoption5","type":"bool","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption6":{"name":"rwoption6","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"rwoption7":{"name":"rwoption7","type":"int","level":"advanced","flags":0,"default_value":"","min":"1","max":"42","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testkey":{"name":"testkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testlkey":{"name":"testlkey","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"testnewline":{"name":"testnewline","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"smb","can_run":true,"error_string":"","module_options":{"internal_store_backend":{"name":"internal_store_backend","type":"str","level":"dev","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"set 
internal store backend. for development and testing only","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"update_orchestration":{"name":"update_orchestration","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"automatically update orchestration when smb resources are changed","long_desc":"","tags":[],"see_also":[]}}},{"name":"snap_schedule","can_run":true,"error_string":"","module_options":{"allow_m_granularity":{"name":"allow_m_granularity","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"allow minute scheduled snapshots","long_desc":"","tags":[],"see_also":[]},"dump_on_update":{"name":"dump_on_update","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"dump database to debug log on 
update","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"stats","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"status","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telegraf","can_run":true,"error_string":"","module_options":{"address":{"name":"address","type":"str","level":"advanced","flags":0,"default_value":"unixgram:///tmp/telegraf.sock","min":"","max":"","enum_a
llowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"secs","level":"advanced","flags":0,"default_value":"15","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"telemetry","can_run":true,"error_string":"","module_options":{"channel_basic":{"name":"channel_basic","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share basic cluster information (size, version)","long_desc":"","tags":[],"see_also":[]},"channel_crash":{"name":"channel_crash","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share metadata about Ceph daemon crashes (version, stack straces, etc)","long_desc":"","tags":[],"see_also":[]},"channel_device":{"name":"channel_device","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Share device health metrics (e.g., SMART data, minus potentially identifying info like serial numbers)","long_desc":"","tags":[],"see_also":[]},"channel_ident":{"name":"channel_ident","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Share a user-provided description and/or contact email for the cluster","long_desc":"","tags":[],"see_also":[]},"channel_perf":{"name":"channel_perf","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Share various performance metrics of a 
cluster","long_desc":"","tags":[],"see_also":[]},"contact":{"name":"contact","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"description":{"name":"description","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"device_url":{"name":"device_url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/device","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"enabled":{"name":"enabled","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"interval":{"name":"interval","type":"int","level":"advanced","flags":0,"default_value":"24","min":"8","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"last_opt_revision":{"name":"last_opt_revision","type":"int","level":"advanced","flags":0,"default_value":"1","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard":{"name":"leaderboard","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"leaderboard_description":{"name":"leaderboard_description","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"organization":{"name":"organization","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"proxy":{"name":"proxy","type":"str","level":"advanced","flags":0,"default_value":"","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"url":{"name":"url","type":"str","level":"advanced","flags":0,"default_value":"https://telemetry.ceph.com/report","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"test_orchestrator","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"adv
anced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}},{"name":"volumes","can_run":true,"error_string":"","module_options":{"log_level":{"name":"log_level","type":"str","level":"advanced","flags":1,"default_value":"","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster":{"name":"log_to_cluster","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_cluster_level":{"name":"log_to_cluster_level","type":"str","level":"advanced","flags":1,"default_value":"info","min":"","max":"","enum_allowed":["","critical","debug","error","info","warning"],"desc":"","long_desc":"","tags":[],"see_also":[]},"log_to_file":{"name":"log_to_file","type":"bool","level":"advanced","flags":1,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]},"max_concurrent_clones":{"name":"max_concurrent_clones","type":"int","level":"advanced","flags":0,"default_value":"4","min":"","max":"","enum_allowed":[],"desc":"Number of asynchronous cloner threads","long_desc":"","tags":[],"see_also":[]},"pause_cloning":{"name":"pause_cloning","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Pause asynchronous cloner threads","long_desc":"","tags":[],"see_also":[]},"pause_purging":{"name":"pause_purging","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Pause asynchronous subvolume purge threads","long_desc":"","tags":[],"see_also":[]},"periodic_async_work":{"name":"periodic_async_work","type":"bool","level":"advanced","flags":0,"default_value":"False","min":"","max":"","enum_allowed":[],"desc":"Periodically check for async work","long_desc":"","tags":[],"see_also":[]},"snapshot_clone_delay":{"name":"snapshot_clone_delay","type":"int","level":"advanced","flags":0,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"Delay clone begin operation by snapshot_clone_delay seconds","long_desc":"","tags":[],"see_also":[]},"snapshot_clone_no_wait":{"name":"snapshot_clone_no_wait","type":"bool","level":"advanced","flags":0,"default_value":"True","min":"","max":"","enum_allowed":[],"desc":"Reject subvolume clone request when cloner threads are 
busy","long_desc":"","tags":[],"see_also":[]},"sqlite3_killpoint":{"name":"sqlite3_killpoint","type":"int","level":"dev","flags":1,"default_value":"0","min":"","max":"","enum_allowed":[],"desc":"","long_desc":"","tags":[],"see_also":[]}}}],"services":{"dashboard":"https://192.168.123.100:8443/"},"always_on_modules":{"octopus":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"pacific":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"quincy":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"reef":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"squid":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"],"tentacle":["balancer","crash","devicehealth","orchestrator","pg_autoscaler","progress","rbd_support","status","telemetry","volumes"]},"force_disabled_modules":{},"last_failure_osd_epoch":3,"active_clients":[{"name":"libcephsqlite","addrvec":[{"type":"v2","addr":"192.168.123.100:0","nonce":241977597}]},{"name":"rbd_support","addrvec":[{"type":"v2","addr":"192.168.123.100:0","nonce":4085109041}]},{"name":"volumes","addrvec":[{"type":"v2","addr":"192.168.123.100:0","nonce":1724534020}]}]} 2026-03-31T22:56:27.147 INFO:tasks.cephadm.ceph_manager.ceph:mgr available! 2026-03-31T22:56:27.147 INFO:tasks.cephadm.ceph_manager.ceph:waiting for all up 2026-03-31T22:56:27.147 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd dump --format=json 2026-03-31T22:56:27.269 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:27.507 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:27.507 
INFO:teuthology.orchestra.run.vm00.stdout:{"epoch":20,"fsid":"8bb14950-2d54-11f1-a348-07063966e06c","created":"2026-03-31T22:55:10.407867+0000","modified":"2026-03-31T22:56:26.547277+0000","last_up_change":"2026-03-31T22:56:24.532279+0000","last_in_change":"2026-03-31T22:56:15.608734+0000","flags":"sortbitwise,recovery_deletes,purged_snapdirs,pglog_hardlimit","flags_num":5799936,"flags_set":["pglog_hardlimit","purged_snapdirs","recovery_deletes","sortbitwise"],"crush_version":10,"full_ratio":0.94999998807907104,"backfillfull_ratio":0.89999997615814209,"nearfull_ratio":0.85000002384185791,"cluster_snapshot":"","pool_max":1,"max_osd":3,"require_min_compat_client":"luminous","min_compat_client":"jewel","require_osd_release":"tentacle","allow_crimson":false,"pools":[{"pool":1,"pool_name":".mgr","create_time":"2026-03-31T22:56:25.120853+0000","flags":32769,"flags_names":"hashpspool,creating","type":1,"size":3,"min_size":2,"crush_rule":0,"peering_crush_bucket_count":0,"peering_crush_bucket_target":0,"peering_crush_bucket_barrier":0,"peering_crush_bucket_mandatory_member":2147483647,"is_stretch_pool":false,"object_hash":2,"pg_autoscale_mode":"off","pg_num":1,"pg_placement_num":1,"pg_placement_num_target":1,"pg_num_target":1,"pg_num_pending":1,"last_pg_merge_meta":{"source_pgid":"0.0","ready_epoch":0,"last_epoch_started":0,"last_epoch_clean":0,"source_version":"0'0","target_version":"0'0"},"last_change":"20","last_force_op_resend":"0","last_force_op_resend_prenautilus":"0","last_force_op_resend_preluminous":"0","auid":0,"snap_mode":"selfmanaged","snap_seq":0,"snap_epoch":0,"pool_snaps":[],"removed_snaps":"[]","quota_max_bytes":0,"quota_max_objects":0,"tiers":[],"tier_of":-1,"read_tier":-1,"write_tier":-1,"cache_mode":"none","target_max_bytes":0,"target_max_objects":0,"cache_target_dirty_ratio_micro":400000,"cache_target_dirty_high_ratio_micro":600000,"cache_target_full_ratio_micro":800000,"cache_min_flush_age":0,"cache_min_evict_age":0,"erasure_code_profile":"","hit_set_params":{"type":"none"},"hit_set_period":0,"hit_set_count":0,"use_gmt_hitset":true,"min_read_recency_for_promote":0,"min_write_recency_for_promote":0,"hit_set_grade_decay_rate":0,"hit_set_search_last_n":0,"grade_table":[],"stripe_width":0,"expected_num_objects":0,"fast_read":false,"nonprimary_shards":"{}","options":{"pg_num_max":32,"pg_num_min":1},"application_metadata":{"mgr":{}},"read_balance":{"score_type":"Fair 
distribution","score_acting":3,"score_stable":3,"optimal_score":1,"raw_score_acting":3,"raw_score_stable":3,"primary_affinity_weighted":1,"average_primary_affinity":1,"average_primary_affinity_weighted":1}}],"osds":[{"osd":0,"uuid":"e8d1d763-2967-4800-93f0-dda88d3e9875","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":9,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6802","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6803","nonce":2458914830}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6804","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6805","nonce":2458914830}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6808","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6809","nonce":2458914830}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6806","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6807","nonce":2458914830}]},"public_addr":"192.168.123.100:6803/2458914830","cluster_addr":"192.168.123.100:6805/2458914830","heartbeat_back_addr":"192.168.123.100:6809/2458914830","heartbeat_front_addr":"192.168.123.100:6807/2458914830","state":["exists","up"]},{"osd":1,"uuid":"75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":19,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6800","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6801","nonce":503330297}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6802","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6803","nonce":503330297}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6806","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6807","nonce":503330297}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6804","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6805","nonce":503330297}]},"public_addr":"192.168.123.105:6801/503330297","cluster_addr":"192.168.123.105:6803/503330297","heartbeat_back_addr":"192.168.123.105:6807/503330297","heartbeat_front_addr":"192.168.123.105:6805/503330297","state":["exists","up"]},{"osd":2,"uuid":"be821005-2b57-413b-92e7-ee956ad7fc35","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":18,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6800","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6801","nonce":2457921800}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6802","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6803","nonce":2457921800}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6806","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6807","nonce":2457921800}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6804","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6805","nonce":2457921800}]},"public_addr":"192.168.123.109:6801/2457921800","cluster_addr":"192.168.123.109:6803/2457921800","heartbeat_back_addr":"192.168.123.109:6807/2457921800","heartbeat_front_addr":"192.168.123.109:6805/2457921800","state":["exists","up"]}],"osd_xinfo":[{"osd":0,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2
026-03-31T22:56:07.977821+0000","dead_epoch":0},{"osd":1,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-03-31T22:56:14.031818+0000","dead_epoch":0},{"osd":2,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-03-31T22:56:22.728605+0000","dead_epoch":0}],"pg_upmap":[],"pg_upmap_items":[],"pg_upmap_primaries":[],"pg_temp":[],"primary_temp":[],"blocklist":{"192.168.123.100:0/2735422203":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/2321427443":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/3145658098":"2026-04-01T22:55:30.890195+0000","192.168.123.100:6800/1629928501":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/3581991654":"2026-04-01T22:55:20.757463+0000","192.168.123.100:0/1523559636":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6801/1629928501":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/389484463":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6801/3751763863":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6800/3751763863":"2026-04-01T22:55:20.757463+0000"},"range_blocklist":{},"erasure_code_profiles":{"default":{"crush-failure-domain":"osd","k":"2","m":"1","plugin":"isa","technique":"reed_sol_van"}},"removed_snaps_queue":[],"new_removed_snaps":[],"new_purged_snaps":[],"crush_node_flags":{},"device_class_flags":{},"stretch_mode":{"stretch_mode_enabled":false,"stretch_bucket_count":0,"degraded_stretch_mode":0,"recovering_stretch_mode":0,"stretch_mode_bucket":0}} 2026-03-31T22:56:27.547 INFO:tasks.cephadm.ceph_manager.ceph:all up! 2026-03-31T22:56:27.547 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd dump --format=json 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "osd pool application enable", "format": "json", "pool": ".mgr", "app": "mgr", "yes_i_really_mean_it": true}]': finished 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: osdmap e20: 3 total, 3 up, 3 in 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/114963791' entity='client.admin' cmd={"prefix": "config log", "num": 1, "format": "json"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: pgmap v38: 1 pgs: 1 creating+peering; 0 B data, 80 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3309386666' entity='client.admin' cmd={"prefix": "mgr dump", "format": "json"} : dispatch 2026-03-31T22:56:27.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:27 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/486926064' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:27.669 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:27.889 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:27.889 INFO:teuthology.orchestra.run.vm00.stdout:{"epoch":21,"fsid":"8bb14950-2d54-11f1-a348-07063966e06c","created":"2026-03-31T22:55:10.407867+0000","modified":"2026-03-31T22:56:27.556451+0000","last_up_change":"2026-03-31T22:56:24.532279+0000","last_in_change":"2026-03-31T22:56:15.608734+0000","flags":"sortbitwise,recovery_deletes,purged_snapdirs,pglog_hardlimit","flags_num":5799936,"flags_set":["pglog_hardlimit","purged_snapdirs","recovery_deletes","sortbitwise"],"crush_version":10,"full_ratio":0.94999998807907104,"backfillfull_ratio":0.89999997615814209,"nearfull_ratio":0.85000002384185791,"cluster_snapshot":"","pool_max":1,"max_osd":3,"require_min_compat_client":"luminous","min_compat_client":"jewel","require_osd_release":"tentacle","allow_crimson":false,"pools":[{"pool":1,"pool_name":".mgr","create_time":"2026-03-31T22:56:25.120853+0000","flags":1,"flags_names":"hashpspool","type":1,"size":3,"min_size":2,"crush_rule":0,"peering_crush_bucket_count":0,"peering_crush_bucket_target":0,"peering_crush_bucket_barrier":0,"peering_crush_bucket_mandatory_member":2147483647,"is_stretch_pool":false,"object_hash":2,"pg_autoscale_mode":"off","pg_num":1,"pg_placement_num":1,"pg_placement_num_target":1,"pg_num_target":1,"pg_num_pending":1,"last_pg_merge_meta":{"source_pgid":"0.0","ready_epoch":0,"last_epoch_started":0,"last_epoch_clean":0,"source_version":"0'0","target_version":"0'0"},"last_change":"21","last_force_op_resend":"0","last_force_op_resend_prenautilus":"0","last_force_op_resend_preluminous":"0","auid":0,"snap_mode":"selfmanaged","snap_seq":0,"snap_epoch":0,"pool_snaps":[],"removed_snaps":"[]","quota_max_bytes":0,"quota_max_objects":0,"tiers":[],"tier_of":-1,"read_tier":-1,"write_tier":-1,"cache_mode":"none","target_max_bytes":0,"target_max_objects":0,"cache_target_dirty_ratio_micro":400000,"cache_target_dirty_high_ratio_micro":600000,"cache_target_full_ratio_micro":800000,"cache_min_flush_age":0,"cache_min_evict_age":0,"erasure_code_profile":"","hit_set_params":{"type":"none"},"hit_set_period":0,"hit_set_count":0,"use_gmt_hitset":true,"min_read_recency_for_promote":0,"min_write_recency_for_promote":0,"hit_set_grade_decay_rate":0,"hit_set_search_last_n":0,"grade_table":[],"stripe_width":0,"expected_num_objects":0,"fast_read":false,"nonprimary_shards":"{}","options":{"pg_num_max":32,"pg_num_min":1},"application_metadata":{"mgr":{}},"read_balance":{"score_type":"Fair 
distribution","score_acting":3,"score_stable":3,"optimal_score":1,"raw_score_acting":3,"raw_score_stable":3,"primary_affinity_weighted":1,"average_primary_affinity":1,"average_primary_affinity_weighted":1}}],"osds":[{"osd":0,"uuid":"e8d1d763-2967-4800-93f0-dda88d3e9875","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":9,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6802","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6803","nonce":2458914830}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6804","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6805","nonce":2458914830}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6808","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6809","nonce":2458914830}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.100:6806","nonce":2458914830},{"type":"v1","addr":"192.168.123.100:6807","nonce":2458914830}]},"public_addr":"192.168.123.100:6803/2458914830","cluster_addr":"192.168.123.100:6805/2458914830","heartbeat_back_addr":"192.168.123.100:6809/2458914830","heartbeat_front_addr":"192.168.123.100:6807/2458914830","state":["exists","up"]},{"osd":1,"uuid":"75b2cb48-40c2-40a8-b8e7-3ceec7d79cf8","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":13,"up_thru":19,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6800","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6801","nonce":503330297}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6802","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6803","nonce":503330297}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6806","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6807","nonce":503330297}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.105:6804","nonce":503330297},{"type":"v1","addr":"192.168.123.105:6805","nonce":503330297}]},"public_addr":"192.168.123.105:6801/503330297","cluster_addr":"192.168.123.105:6803/503330297","heartbeat_back_addr":"192.168.123.105:6807/503330297","heartbeat_front_addr":"192.168.123.105:6805/503330297","state":["exists","up"]},{"osd":2,"uuid":"be821005-2b57-413b-92e7-ee956ad7fc35","up":1,"in":1,"weight":1,"primary_affinity":1,"last_clean_begin":0,"last_clean_end":0,"up_from":18,"up_thru":0,"down_at":0,"lost_at":0,"public_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6800","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6801","nonce":2457921800}]},"cluster_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6802","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6803","nonce":2457921800}]},"heartbeat_back_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6806","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6807","nonce":2457921800}]},"heartbeat_front_addrs":{"addrvec":[{"type":"v2","addr":"192.168.123.109:6804","nonce":2457921800},{"type":"v1","addr":"192.168.123.109:6805","nonce":2457921800}]},"public_addr":"192.168.123.109:6801/2457921800","cluster_addr":"192.168.123.109:6803/2457921800","heartbeat_back_addr":"192.168.123.109:6807/2457921800","heartbeat_front_addr":"192.168.123.109:6805/2457921800","state":["exists","up"]}],"osd_xinfo":[{"osd":0,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2
026-03-31T22:56:07.977821+0000","dead_epoch":0},{"osd":1,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-03-31T22:56:14.031818+0000","dead_epoch":0},{"osd":2,"down_stamp":"0.000000","laggy_probability":0,"laggy_interval":0,"features":4541880224203014143,"old_weight":0,"last_purged_snaps_scrub":"2026-03-31T22:56:22.728605+0000","dead_epoch":0}],"pg_upmap":[],"pg_upmap_items":[],"pg_upmap_primaries":[],"pg_temp":[],"primary_temp":[],"blocklist":{"192.168.123.100:0/2735422203":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/2321427443":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/3145658098":"2026-04-01T22:55:30.890195+0000","192.168.123.100:6800/1629928501":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/3581991654":"2026-04-01T22:55:20.757463+0000","192.168.123.100:0/1523559636":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6801/1629928501":"2026-04-01T22:55:30.890195+0000","192.168.123.100:0/389484463":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6801/3751763863":"2026-04-01T22:55:20.757463+0000","192.168.123.100:6800/3751763863":"2026-04-01T22:55:20.757463+0000"},"range_blocklist":{},"erasure_code_profiles":{"default":{"crush-failure-domain":"osd","k":"2","m":"1","plugin":"isa","technique":"reed_sol_van"}},"removed_snaps_queue":[],"new_removed_snaps":[],"new_purged_snaps":[],"crush_node_flags":{},"device_class_flags":{},"stretch_mode":{"stretch_mode_enabled":false,"stretch_bucket_count":0,"degraded_stretch_mode":0,"recovering_stretch_mode":0,"stretch_mode_bucket":0}} 2026-03-31T22:56:27.928 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph tell osd.0 flush_pg_stats 2026-03-31T22:56:27.928 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph tell osd.1 flush_pg_stats 2026-03-31T22:56:27.928 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph tell osd.2 flush_pg_stats 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "osd pool application enable", "format": "json", "pool": ".mgr", "app": "mgr", "yes_i_really_mean_it": true}]': finished 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: osdmap e20: 3 total, 3 up, 3 in 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='client.? 
192.168.123.100:0/114963791' entity='client.admin' cmd={"prefix": "config log", "num": 1, "format": "json"} : dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: pgmap v38: 1 pgs: 1 creating+peering; 0 B data, 80 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:27.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/3309386666' entity='client.admin' cmd={"prefix": "mgr dump", "format": "json"} : dispatch 2026-03-31T22:56:27.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:27 vm09 ceph-mon[98050]: from='client.? 
192.168.123.100:0/486926064' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "osd pool application enable", "format": "json", "pool": ".mgr", "app": "mgr", "yes_i_really_mean_it": true}]': finished 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: osdmap e20: 3 total, 3 up, 3 in 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/114963791' entity='client.admin' cmd={"prefix": "config log", "num": 1, "format": "json"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='admin socket' entity='admin socket' cmd='smart' args=[json]: dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": 
"mon metadata", "id": "c"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='admin socket' entity='admin socket' cmd=smart args=[json]: finished 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: pgmap v38: 1 pgs: 1 creating+peering; 0 B data, 80 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/3309386666' entity='client.admin' cmd={"prefix": "mgr dump", "format": "json"} : dispatch 2026-03-31T22:56:27.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:27 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/486926064' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:28.050 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:28.051 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:28.054 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:28.239 INFO:teuthology.orchestra.run.vm00.stdout:55834574852 2026-03-31T22:56:28.239 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd last-stat-seq osd.1 2026-03-31T22:56:28.261 INFO:teuthology.orchestra.run.vm00.stdout:38654705669 2026-03-31T22:56:28.261 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd last-stat-seq osd.0 2026-03-31T22:56:28.279 INFO:teuthology.orchestra.run.vm00.stdout:77309411330 2026-03-31T22:56:28.279 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd last-stat-seq osd.2 2026-03-31T22:56:28.377 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:28.449 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:28.455 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:28.650 INFO:teuthology.orchestra.run.vm00.stdout:55834574851 2026-03-31T22:56:28.690 INFO:tasks.cephadm.ceph_manager.ceph:need seq 55834574852 got 55834574851 for osd.1 2026-03-31T22:56:28.691 INFO:teuthology.orchestra.run.vm00.stdout:77309411329 2026-03-31T22:56:28.723 INFO:teuthology.orchestra.run.vm00.stdout:38654705668 2026-03-31T22:56:28.723 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:28 vm00 ceph-mon[61968]: osdmap e21: 3 total, 3 up, 3 in 2026-03-31T22:56:28.723 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:28 vm00 ceph-mon[61968]: mgrmap e14: a(active, since 56s), standbys: b 2026-03-31T22:56:28.723 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:28 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/1185326751' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:28.737 INFO:tasks.cephadm.ceph_manager.ceph:need seq 77309411330 got 77309411329 for osd.2 2026-03-31T22:56:28.765 INFO:tasks.cephadm.ceph_manager.ceph:need seq 38654705669 got 38654705668 for osd.0 2026-03-31T22:56:28.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:28 vm09 ceph-mon[98050]: osdmap e21: 3 total, 3 up, 3 in 2026-03-31T22:56:28.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:28 vm09 ceph-mon[98050]: mgrmap e14: a(active, since 56s), standbys: b 2026-03-31T22:56:28.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:28 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/1185326751' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:28.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:28 vm05 ceph-mon[69577]: osdmap e21: 3 total, 3 up, 3 in 2026-03-31T22:56:28.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:28 vm05 ceph-mon[69577]: mgrmap e14: a(active, since 56s), standbys: b 2026-03-31T22:56:28.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:28 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/1185326751' entity='client.admin' cmd={"prefix": "osd dump", "format": "json"} : dispatch 2026-03-31T22:56:29.578 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:29 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/558504623' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 1} : dispatch 2026-03-31T22:56:29.578 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:29 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/3569837897' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 2} : dispatch 2026-03-31T22:56:29.578 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:29 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/4210992064' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 0} : dispatch 2026-03-31T22:56:29.578 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:29 vm00 ceph-mon[61968]: pgmap v40: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:29.692 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd last-stat-seq osd.1 2026-03-31T22:56:29.738 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd last-stat-seq osd.2 2026-03-31T22:56:29.765 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph osd last-stat-seq osd.0 2026-03-31T22:56:29.813 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:29.873 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:29.875 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:29 vm09 ceph-mon[98050]: from='client.? 
192.168.123.100:0/558504623' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 1} : dispatch 2026-03-31T22:56:29.875 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:29 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/3569837897' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 2} : dispatch 2026-03-31T22:56:29.875 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:29 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/4210992064' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 0} : dispatch 2026-03-31T22:56:29.875 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:29 vm09 ceph-mon[98050]: pgmap v40: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:29.927 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:29.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:29 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/558504623' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 1} : dispatch 2026-03-31T22:56:29.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:29 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/3569837897' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 2} : dispatch 2026-03-31T22:56:29.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:29 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/4210992064' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 0} : dispatch 2026-03-31T22:56:29.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:29 vm05 ceph-mon[69577]: pgmap v40: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:30.061 INFO:teuthology.orchestra.run.vm00.stdout:55834574852 2026-03-31T22:56:30.107 INFO:tasks.cephadm.ceph_manager.ceph:need seq 55834574852 got 55834574852 for osd.1 2026-03-31T22:56:30.107 DEBUG:teuthology.parallel:result is None 2026-03-31T22:56:30.146 INFO:teuthology.orchestra.run.vm00.stdout:77309411330 2026-03-31T22:56:30.188 INFO:tasks.cephadm.ceph_manager.ceph:need seq 77309411330 got 77309411330 for osd.2 2026-03-31T22:56:30.188 DEBUG:teuthology.parallel:result is None 2026-03-31T22:56:30.211 INFO:teuthology.orchestra.run.vm00.stdout:38654705669 2026-03-31T22:56:30.252 INFO:tasks.cephadm.ceph_manager.ceph:need seq 38654705669 got 38654705669 for osd.0 2026-03-31T22:56:30.252 DEBUG:teuthology.parallel:result is None 2026-03-31T22:56:30.252 INFO:tasks.cephadm.ceph_manager.ceph:waiting for clean 2026-03-31T22:56:30.252 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph pg dump --format=json 2026-03-31T22:56:30.414 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:30.636 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:30.636 INFO:teuthology.orchestra.run.vm00.stderr:dumped all 2026-03-31T22:56:30.677 
INFO:teuthology.orchestra.run.vm00.stdout:{"pg_ready":true,"pg_map":{"version":40,"stamp":"2026-03-31T22:56:28.903665+0000","last_osdmap_epoch":0,"last_pg_scan":0,"pg_stats_sum":{"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":6,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":32,"ondisk_log_size":32,"up":3,"acting":3,"num_store_stats":0},"osd_stats_sum":{"up_from":0,"seq":0,"num_pgs":3,"num_osds":3,"num_per_pool_osds":3,"num_per_pool_omap_osds":3,"kb":62902272,"kb_used":83476,"kb_used_data":1884,"kb_used_omap":28,"kb_used_meta":81059,"kb_avail":62818796,"statfs":{"total":64411926528,"available":64326447104,"internally_reserved":0,"allocated":1929216,"data_stored":1544334,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":29566,"internal_metadata":83004546},"hb_peers":[],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[],"network_ping_times":[]},"pg_stats_delta":{"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":0,"ondisk_log_size":0,"up":0,"acting":0,"num_store_stats":0,"stamp_delta":"0.000000"},"pg_stats":[{"pgid":"1.0","version":"20'32","reported_seq":58,"reported_epoch":21,"state":"active+clean","last_fresh":"2026-03-31T22:56:27.571928+0000","last_change":"2026-03-31T22:56:26.553495+0000","last_active":"2026-03-31T22:
56:27.571928+0000","last_peered":"2026-03-31T22:56:27.571928+0000","last_clean":"2026-03-31T22:56:27.571928+0000","last_became_active":"2026-03-31T22:56:26.553327+0000","last_became_peered":"2026-03-31T22:56:26.553327+0000","last_unstale":"2026-03-31T22:56:27.571928+0000","last_undegraded":"2026-03-31T22:56:27.571928+0000","last_fullsized":"2026-03-31T22:56:27.571928+0000","mapping_epoch":19,"log_start":"0'0","ondisk_log_start":"0'0","created":19,"last_epoch_clean":20,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-03-31T22:56:25.541192+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-03-31T22:56:25.541192+0000","last_clean_scrub_stamp":"2026-03-31T22:56:25.541192+0000","objects_scrubbed":0,"log_size":32,"log_dups_size":0,"ondisk_log_size":32,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 2026-04-02T04:09:26.809094+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0,"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":6,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[1,2,0],"acting":[1,2,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":1,"acting_primary":1,"purged_snaps":[]}],"pool_stats":[{"poolid":1,"num_pg":1,"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":6,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":1388544,"data_stored":1377840,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":32,"ondisk_log_size":32,"up":3,"acting":3,"num_store_stats":3}],"osd_stats":[{"osd":2,"up_from":18,"seq":77309411330,"num_pgs":1,"num_osds":1,"num_per_pool_osds"
:1,"num_per_pool_omap_osds":1,"kb":20967424,"kb_used":27932,"kb_used_data":628,"kb_used_omap":6,"kb_used_meta":27129,"kb_avail":20939492,"statfs":{"total":21470642176,"available":21442039808,"internally_reserved":0,"allocated":643072,"data_stored":514778,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":7140,"internal_metadata":27780124},"hb_peers":[0,1],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":1,"up_from":13,"seq":55834574852,"num_pgs":1,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":20967424,"kb_used":27612,"kb_used_data":628,"kb_used_omap":9,"kb_used_meta":26806,"kb_avail":20939812,"statfs":{"total":21470642176,"available":21442367488,"internally_reserved":0,"allocated":643072,"data_stored":514778,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":9421,"internal_metadata":27450163},"hb_peers":[0,2],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":0,"up_from":9,"seq":38654705669,"num_pgs":1,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":20967424,"kb_used":27932,"kb_used_data":628,"kb_used_omap":12,"kb_used_meta":27123,"kb_avail":20939492,"statfs":{"total":21470642176,"available":21442039808,"internally_reserved":0,"allocated":643072,"data_stored":514778,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":13005,"internal_metadata":27774259},"hb_peers":[1,2],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]}],"pool_statfs":[{"poolid":1,"osd":0,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":1,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":2,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0}]}} 2026-03-31T22:56:30.677 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph pg dump --format=json 2026-03-31T22:56:30.799 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.034 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/1763924401' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 1} : dispatch 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/1925874990' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 2} : dispatch 2026-03-31T22:56:31.034 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:30 vm00 ceph-mon[61968]: from='client.? 
192.168.123.100:0/2350667203' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 0} : dispatch 2026-03-31T22:56:31.034 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:31.034 INFO:teuthology.orchestra.run.vm00.stderr:dumped all 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/1763924401' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 1} : dispatch 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/1925874990' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 2} : dispatch 2026-03-31T22:56:31.059 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:30 vm09 ceph-mon[98050]: from='client.? 
192.168.123.100:0/2350667203' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 0} : dispatch 2026-03-31T22:56:31.081 INFO:teuthology.orchestra.run.vm00.stdout:{"pg_ready":true,"pg_map":{"version":41,"stamp":"2026-03-31T22:56:30.903908+0000","last_osdmap_epoch":0,"last_pg_scan":0,"pg_stats_sum":{"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":6,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":32,"ondisk_log_size":32,"up":3,"acting":3,"num_store_stats":0},"osd_stats_sum":{"up_from":0,"seq":0,"num_pgs":3,"num_osds":3,"num_per_pool_osds":3,"num_per_pool_omap_osds":3,"kb":62902272,"kb_used":83476,"kb_used_data":1884,"kb_used_omap":28,"kb_used_meta":81059,"kb_avail":62818796,"statfs":{"total":64411926528,"available":64326447104,"internally_reserved":0,"allocated":1929216,"data_stored":1544334,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":29566,"internal_metadata":83004546},"hb_peers":[],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[],"network_ping_times":[]},"pg_stats_delta":{"stat_sum":{"num_bytes":0,"num_objects":0,"num_object_clones":0,"num_object_copies":0,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":0,"num_whiteouts":0,"num_read":0,"num_read_kb":0,"num_write":0,"num_write_kb":0,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":0,"data_stored":0,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":0,"ondisk_log_size":0,"up":0,"acting":0,"num_store_stats":0,"stamp_delta":"2.000243"},"pg_stats":[{"pgid":"1.0","version":"20'32","reported_seq":58,"reported_epoch":21,"state":"active
+clean","last_fresh":"2026-03-31T22:56:27.571928+0000","last_change":"2026-03-31T22:56:26.553495+0000","last_active":"2026-03-31T22:56:27.571928+0000","last_peered":"2026-03-31T22:56:27.571928+0000","last_clean":"2026-03-31T22:56:27.571928+0000","last_became_active":"2026-03-31T22:56:26.553327+0000","last_became_peered":"2026-03-31T22:56:26.553327+0000","last_unstale":"2026-03-31T22:56:27.571928+0000","last_undegraded":"2026-03-31T22:56:27.571928+0000","last_fullsized":"2026-03-31T22:56:27.571928+0000","mapping_epoch":19,"log_start":"0'0","ondisk_log_start":"0'0","created":19,"last_epoch_clean":20,"parent":"0.0","parent_split_bits":0,"last_scrub":"0'0","last_scrub_stamp":"2026-03-31T22:56:25.541192+0000","last_deep_scrub":"0'0","last_deep_scrub_stamp":"2026-03-31T22:56:25.541192+0000","last_clean_scrub_stamp":"2026-03-31T22:56:25.541192+0000","objects_scrubbed":0,"log_size":32,"log_dups_size":0,"ondisk_log_size":32,"stats_invalid":false,"dirty_stats_invalid":false,"omap_stats_invalid":false,"hitset_stats_invalid":false,"hitset_bytes_stats_invalid":false,"pin_stats_invalid":false,"manifest_stats_invalid":false,"snaptrimq_len":0,"last_scrub_duration":0,"scrub_schedule":"periodic scrub scheduled @ 2026-04-02T04:09:26.809094+0000","scrub_duration":0,"objects_trimmed":0,"snaptrim_duration":0,"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":6,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"up":[1,2,0],"acting":[1,2,0],"avail_no_missing":[],"object_location_counts":[],"blocked_by":[],"up_primary":1,"acting_primary":1,"purged_snaps":[]}],"pool_stats":[{"poolid":1,"num_pg":1,"stat_sum":{"num_bytes":459280,"num_objects":2,"num_object_clones":0,"num_object_copies":6,"num_objects_missing_on_primary":0,"num_objects_missing":0,"num_objects_degraded":0,"num_objects_misplaced":0,"num_objects_unfound":0,"num_objects_dirty":2,"num_whiteouts":0,"num_read":46,"num_read_kb":37,"num_write":57,"num_write_kb":584,"num_scrub_errors":0,"num_shallow_scrub_errors":0,"num_deep_scrub_errors":0,"num_objects_recovered":0,"num_bytes_recovered":0,"num_keys_recovered":0,"num_objects_omap":0,"num_objects_hit_set_archive":0,"num_bytes_hit_set_archive":0,"num_flush":0,"num_flush_kb":0,"num_evict":0,"num_evict_kb":0,"num_promote":0,"num_flush_mode_high":0,"num_flush_mode_low":0,"num_evict_mode_some":0,"num_evict_mode_full":0,"num_objects_pinned":0,"num_legacy_snapsets":0,"num_large_omap_objects":0,"num_objects_manifest":0,"num_omap_bytes":0,"num_omap_keys":0,"num_objects_repaired":0},"store_stats":{"total":0,"available":0,"internally_reserved":0,"allocated":1388544,"data_stored":1377840,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},"log_size":32,"ondisk_log_size":32,"up":
3,"acting":3,"num_store_stats":3}],"osd_stats":[{"osd":2,"up_from":18,"seq":77309411331,"num_pgs":1,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":20967424,"kb_used":27932,"kb_used_data":628,"kb_used_omap":6,"kb_used_meta":27129,"kb_avail":20939492,"statfs":{"total":21470642176,"available":21442039808,"internally_reserved":0,"allocated":643072,"data_stored":514778,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":7140,"internal_metadata":27780124},"hb_peers":[0,1],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":1,"up_from":13,"seq":55834574853,"num_pgs":1,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":20967424,"kb_used":27612,"kb_used_data":628,"kb_used_omap":9,"kb_used_meta":26806,"kb_avail":20939812,"statfs":{"total":21470642176,"available":21442367488,"internally_reserved":0,"allocated":643072,"data_stored":514778,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":9421,"internal_metadata":27450163},"hb_peers":[0,2],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]},{"osd":0,"up_from":9,"seq":38654705670,"num_pgs":1,"num_osds":1,"num_per_pool_osds":1,"num_per_pool_omap_osds":1,"kb":20967424,"kb_used":27932,"kb_used_data":628,"kb_used_omap":12,"kb_used_meta":27123,"kb_avail":20939492,"statfs":{"total":21470642176,"available":21442039808,"internally_reserved":0,"allocated":643072,"data_stored":514778,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":13005,"internal_metadata":27774259},"hb_peers":[1,2],"snap_trim_queue_len":0,"num_snap_trimming":0,"num_shards_repaired":0,"op_queue_age_hist":{"histogram":[],"upper_bound":1},"perf_stat":{"commit_latency_ms":0,"apply_latency_ms":0,"commit_latency_ns":0,"apply_latency_ns":0},"alerts":[]}],"pool_statfs":[{"poolid":1,"osd":0,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":1,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0},{"poolid":1,"osd":2,"total":0,"available":0,"internally_reserved":0,"allocated":462848,"data_stored":459280,"data_compressed":0,"data_compressed_allocated":0,"data_compressed_original":0,"omap_allocated":0,"internal_metadata":0}]}} 2026-03-31T22:56:31.081 INFO:tasks.cephadm.ceph_manager.ceph:clean! 2026-03-31T22:56:31.081 INFO:tasks.ceph:Waiting until ceph cluster ceph is healthy... 
2026-03-31T22:56:31.081 INFO:tasks.cephadm.ceph_manager.ceph:wait_until_healthy 2026-03-31T22:56:31.081 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph health --format=json 2026-03-31T22:56:31.210 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/1763924401' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 1} : dispatch 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='client.? 192.168.123.100:0/1925874990' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 2} : dispatch 2026-03-31T22:56:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:30 vm05 ceph-mon[69577]: from='client.? 
192.168.123.100:0/2350667203' entity='client.admin' cmd={"prefix": "osd last-stat-seq", "id": 0} : dispatch 2026-03-31T22:56:31.468 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T22:56:31.468 INFO:teuthology.orchestra.run.vm00.stdout:{"status":"HEALTH_OK","checks":{},"mutes":[]} 2026-03-31T22:56:31.513 INFO:tasks.cephadm.ceph_manager.ceph:wait_until_healthy done 2026-03-31T22:56:31.513 INFO:tasks.cephadm:Setup complete, yielding 2026-03-31T22:56:31.513 INFO:teuthology.run_tasks:Running task cephadm.shell... 2026-03-31T22:56:31.515 INFO:tasks.cephadm:Running commands on role host.a host ubuntu@vm00.local 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- bash -c 'set -e 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> set -x 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ceph orch apply node-exporter 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ceph orch apply grafana 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ceph orch apply alertmanager 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ceph orch apply prometheus 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> sleep 240 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ceph orch ls 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ceph orch ps 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ceph orch host ls 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> MON_DAEMON=$(ceph orch ps --daemon-type mon -f json | jq -r '"'"'last | .daemon_name'"'"') 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> GRAFANA_HOST=$(ceph orch ps --daemon-type grafana -f json | jq -e '"'"'.[]'"'"' | jq -r '"'"'.hostname'"'"') 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> PROM_HOST=$(ceph orch ps --daemon-type prometheus -f json | jq -e '"'"'.[]'"'"' | jq -r '"'"'.hostname'"'"') 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ALERTM_HOST=$(ceph orch ps --daemon-type alertmanager -f json | jq -e '"'"'.[]'"'"' | jq -r '"'"'.hostname'"'"') 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> GRAFANA_IP=$(ceph orch host ls -f json | jq -r --arg GRAFANA_HOST "$GRAFANA_HOST" '"'"'.[] | select(.hostname==$GRAFANA_HOST) | .addr'"'"') 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> PROM_IP=$(ceph orch host ls -f json | jq -r --arg PROM_HOST "$PROM_HOST" '"'"'.[] | select(.hostname==$PROM_HOST) | .addr'"'"') 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ALERTM_IP=$(ceph orch host ls -f json | jq -r --arg ALERTM_HOST "$ALERTM_HOST" '"'"'.[] | select(.hostname==$ALERTM_HOST) | .addr'"'"') 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> # check each host node-exporter metrics endpoint is responsive 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> ALL_HOST_IPS=$(ceph orch host ls -f json | jq -r '"'"'.[] | .addr'"'"') 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> for ip in $ALL_HOST_IPS; do 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> curl -s http://${ip}:9100/metric 2026-03-31T22:56:31.515 DEBUG:teuthology.orchestra.run.vm00:> done 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> # check grafana endpoints are responsive and database health is okay 
2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -k -s https://${GRAFANA_IP}:3000/api/health 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -k -s https://${GRAFANA_IP}:3000/api/health | jq -e '"'"'.database == "ok"'"'"' 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> # stop mon daemon in order to trigger an alert 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> ceph orch daemon stop $MON_DAEMON 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> sleep 120 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> # check prometheus endpoints are responsive and mon down alert is firing 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -s http://${PROM_IP}:9095/api/v1/status/config 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -s http://${PROM_IP}:9095/api/v1/status/config | jq -e '"'"'.status == "success"'"'"' 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -s http://${PROM_IP}:9095/api/v1/alerts 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -s http://${PROM_IP}:9095/api/v1/alerts | jq -e '"'"'.data | .alerts | .[] | select(.labels | .alertname == "CephMonDown") | .state == "firing"'"'"' 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> # check alertmanager endpoints are responsive and mon down alert is active 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -s http://${ALERTM_IP}:9093/api/v2/status 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -s http://${ALERTM_IP}:9093/api/v2/alerts 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -s http://${ALERTM_IP}:9093/api/v2/alerts | jq -e '"'"'.[] | select(.labels | .alertname == "CephMonDown") | .status | .state == "active"'"'"' 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> # check prometheus metrics endpoint is not empty and make sure we can get metrics 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> METRICS_URL=$(ceph mgr services | jq -r .prometheus) 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> [ -n "$METRICS_URL" ] || exit 1 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> curl -s "${METRICS_URL}metrics" | grep -q '"'"'^ceph_health_status'"'"' 2026-03-31T22:56:31.516 DEBUG:teuthology.orchestra.run.vm00:> ' 2026-03-31T22:56:31.583 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:31 vm09 systemd[1]: Stopping Ceph osd.2 for 8bb14950-2d54-11f1-a348-07063966e06c... 
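Note: in the script echoed above, the node-exporter probe fetches http://${ip}:9100/metric, but node-exporter serves its metrics at /metrics; because that curl runs without -f and its output is never checked, a 404 cannot trip set -e and the loop passes vacuously. A stricter variant of that loop (a sketch; the /metrics path, the -f flag, and the grep are the only changes):

    # Fail the step if any host's node-exporter endpoint is unreachable
    # or returns no node_* metrics (curl -f makes non-2xx fatal to the pipe).
    for ip in $ALL_HOST_IPS; do
      curl -sf "http://${ip}:9100/metrics" | grep -q '^node_'
    done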
2026-03-31T22:56:31.637 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T22:56:31.702 INFO:teuthology.orchestra.run.vm00.stderr:+ ceph orch apply node-exporter 2026-03-31T22:56:31.867 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:31 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[103572]: 2026-03-31T22:56:31.582+0000 7fdea17b3640 -1 received signal: Terminated from /run/podman-init -- /usr/bin/ceph-osd -n osd.2 -f --setuser ceph --setgroup ceph --default-log-to-file=false --default-log-to-journald=true --default-log-to-stderr=false --osd-objectstore=bluestore (PID: 1) UID: 0 2026-03-31T22:56:31.867 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:31 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[103572]: 2026-03-31T22:56:31.582+0000 7fdea17b3640 -1 osd.2 21 *** Got signal Terminated *** 2026-03-31T22:56:31.867 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:31 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[103572]: 2026-03-31T22:56:31.582+0000 7fdea17b3640 -1 osd.2 21 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-03-31T22:56:31.882 INFO:teuthology.orchestra.run.vm00.stdout:Scheduled node-exporter update... 2026-03-31T22:56:31.892 INFO:teuthology.orchestra.run.vm00.stderr:+ ceph orch apply grafana 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: from='client.14409 -' entity='client.admin' cmd=[{"prefix": "pg dump", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: pgmap v41: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: from='client.? 192.168.123.100:0/4019677401' entity='client.admin' cmd={"prefix": "health", "format": "json"} : dispatch 2026-03-31T22:56:31.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:31 vm00 ceph-mon[61968]: osd.2 marked itself down and dead 2026-03-31T22:56:32.069 INFO:teuthology.orchestra.run.vm00.stdout:Scheduled grafana update... 
2026-03-31T22:56:32.079 INFO:teuthology.orchestra.run.vm00.stderr:+ ceph orch apply alertmanager 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: from='client.14409 -' entity='client.admin' cmd=[{"prefix": "pg dump", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: pgmap v41: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: from='client.? 192.168.123.100:0/4019677401' entity='client.admin' cmd={"prefix": "health", "format": "json"} : dispatch 2026-03-31T22:56:32.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:31 vm09 ceph-mon[98050]: osd.2 marked itself down and dead 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: from='client.14409 -' entity='client.admin' cmd=[{"prefix": "pg dump", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "osd.2"} : dispatch 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: Deploying daemon osd.2 on vm09 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: pgmap v41: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: from='client.? 
192.168.123.100:0/4019677401' entity='client.admin' cmd={"prefix": "health", "format": "json"} : dispatch 2026-03-31T22:56:32.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:31 vm05 ceph-mon[69577]: osd.2 marked itself down and dead 2026-03-31T22:56:32.252 INFO:teuthology.orchestra.run.vm00.stdout:Scheduled alertmanager update... 2026-03-31T22:56:32.265 INFO:teuthology.orchestra.run.vm00.stderr:+ ceph orch apply prometheus 2026-03-31T22:56:32.453 INFO:teuthology.orchestra.run.vm00.stdout:Scheduled prometheus update... 2026-03-31T22:56:32.458 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105717]: 2026-03-31 22:56:32.202883243 +0000 UTC m=+0.632627186 container died a89671b4e6c26c3929fe1be4638967991589d640be09e66debc1e3db447e05ea (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2, org.label-schema.build-date=20260316, org.opencontainers.image.documentation=https://docs.ceph.com/, org.label-schema.schema-version=1.0, org.label-schema.license=GPLv2, io.buildah.version=1.43.0, org.label-schema.vendor=CentOS, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.name=CentOS Stream 9 Base Image, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.opencontainers.image.authors=Ceph Release Team , OSD_FLAVOR=default, ceph=True, CEPH_REF=tentacle-release, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2) 2026-03-31T22:56:32.458 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105717]: 2026-03-31 22:56:32.226531515 +0000 UTC m=+0.656275448 container remove a89671b4e6c26c3929fe1be4638967991589d640be09e66debc1e3db447e05ea (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2, org.label-schema.vendor=CentOS, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.license=GPLv2, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_REF=tentacle-release, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.build-date=20260316, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, ceph=True, io.buildah.version=1.43.0, org.label-schema.schema-version=1.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, OSD_FLAVOR=default, org.label-schema.name=CentOS Stream 9 Base Image) 2026-03-31T22:56:32.458 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 bash[105717]: ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2 2026-03-31T22:56:32.458 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105798]: 2026-03-31 22:56:32.364890473 +0000 UTC m=+0.016304688 container create d18b23fd08beb3f64fa21d410e838a5f7d0e59ecac3bf55eb2f369e6bdc92687 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-deactivate, org.label-schema.build-date=20260316, ceph=True, OSD_FLAVOR=default, org.label-schema.schema-version=1.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.vendor=CentOS, org.opencontainers.image.authors=Ceph Release Team , io.buildah.version=1.43.0, org.label-schema.name=CentOS Stream 9 Base Image, 
GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2, org.opencontainers.image.documentation=https://docs.ceph.com/) 2026-03-31T22:56:32.458 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105798]: 2026-03-31 22:56:32.418187831 +0000 UTC m=+0.069602036 container init d18b23fd08beb3f64fa21d410e838a5f7d0e59ecac3bf55eb2f369e6bdc92687 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-deactivate, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, FROM_IMAGE=quay.io/centos/centos:stream9, org.opencontainers.image.authors=Ceph Release Team , OSD_FLAVOR=default, org.label-schema.vendor=CentOS, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.build-date=20260316, ceph=True, CEPH_REF=tentacle-release, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.opencontainers.image.documentation=https://docs.ceph.com/, io.buildah.version=1.43.0) 2026-03-31T22:56:32.458 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105798]: 2026-03-31 22:56:32.421335302 +0000 UTC m=+0.072749517 container start d18b23fd08beb3f64fa21d410e838a5f7d0e59ecac3bf55eb2f369e6bdc92687 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-deactivate, org.opencontainers.image.documentation=https://docs.ceph.com/, org.label-schema.name=CentOS Stream 9 Base Image, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, ceph=True, OSD_FLAVOR=default, org.label-schema.vendor=CentOS, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.opencontainers.image.authors=Ceph Release Team , io.buildah.version=1.43.0, org.label-schema.build-date=20260316, CEPH_REF=tentacle-release, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git) 2026-03-31T22:56:32.458 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105798]: 2026-03-31 22:56:32.422353217 +0000 UTC m=+0.073767422 container attach d18b23fd08beb3f64fa21d410e838a5f7d0e59ecac3bf55eb2f369e6bdc92687 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-deactivate, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.opencontainers.image.documentation=https://docs.ceph.com/, org.label-schema.schema-version=1.0, OSD_FLAVOR=default, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.build-date=20260316, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, CEPH_REF=tentacle-release, FROM_IMAGE=quay.io/centos/centos:stream9, io.buildah.version=1.43.0, org.label-schema.name=CentOS Stream 9 Base Image, ceph=True, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.opencontainers.image.authors=Ceph Release Team ) 2026-03-31T22:56:32.465 INFO:teuthology.orchestra.run.vm00.stderr:+ sleep 
240 2026-03-31T22:56:32.739 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105798]: 2026-03-31 22:56:32.358649091 +0000 UTC m=+0.010063316 image pull 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072 2026-03-31T22:56:32.740 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105818]: 2026-03-31 22:56:32.577833171 +0000 UTC m=+0.009754519 container died d18b23fd08beb3f64fa21d410e838a5f7d0e59ecac3bf55eb2f369e6bdc92687 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-deactivate, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.schema-version=1.0, OSD_FLAVOR=default, org.label-schema.license=GPLv2, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_REF=tentacle-release, FROM_IMAGE=quay.io/centos/centos:stream9, ceph=True, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, io.buildah.version=1.43.0) 2026-03-31T22:56:32.740 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105818]: 2026-03-31 22:56:32.590642135 +0000 UTC m=+0.022563494 container remove d18b23fd08beb3f64fa21d410e838a5f7d0e59ecac3bf55eb2f369e6bdc92687 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-deactivate, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.license=GPLv2, org.label-schema.name=CentOS Stream 9 Base Image, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, OSD_FLAVOR=default, ceph=True, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.vendor=CentOS, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, io.buildah.version=1.43.0, org.label-schema.schema-version=1.0, CEPH_REF=tentacle-release, org.opencontainers.image.documentation=https://docs.ceph.com/) 2026-03-31T22:56:32.740 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 systemd[1]: ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.2.service: Deactivated successfully. 2026-03-31T22:56:32.740 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 systemd[1]: Stopped Ceph osd.2 for 8bb14950-2d54-11f1-a348-07063966e06c. 
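Note: the stop/deactivate cycle above is cephadm redeploying osd.2 on vm09 (see the "Deploying daemon osd.2 on vm09" mon entries earlier), so the OSD_DOWN and OSD_HOST_DOWN health checks that appear below are transient. One way to confirm the daemon rejoined, from the admin shell (a sketch; the daemon_id and status_desc fields are assumptions about the orch ps JSON):

    # osd.2 should report "running" again, and no OSDs should remain down.
    ceph orch ps --daemon-type osd -f json | \
      jq -e '.[] | select(.daemon_id == "2") | .status_desc == "running"'
    ceph osd tree down   # lists only down OSDs; expect none once recovered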
2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: from='client.14415 -' entity='client.admin' cmd=[{"prefix": "pg dump", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: from='client.14427 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "node-exporter", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: Saving service node-exporter spec with placement * 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: Health check failed: 1 osds down (OSD_DOWN) 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: Health check failed: 1 host (1 osds) down (OSD_HOST_DOWN) 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: osdmap e22: 3 total, 2 up, 3 in 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:32 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 systemd[1]: Starting Ceph osd.2 for 8bb14950-2d54-11f1-a348-07063966e06c... 
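Note: the -osd-2-activate container launched below re-primes the OSD data directory from its logical volume before ceph-osd starts; the "Failed to activate via raw" message further down is ceph-volume trying raw mode before falling back to lvm, which succeeds. Per the subsequent log lines, its effective steps reduce to (paths taken verbatim from the log):

    # Equivalent of the osd-2-activate container's work on vm09.
    ceph-bluestore-tool --cluster=ceph prime-osd-dir \
        --dev /dev/vg_nvme/lv_4 --path /var/lib/ceph/osd/ceph-2 --no-mon-config
    ln -snf /dev/vg_nvme/lv_4 /var/lib/ceph/osd/ceph-2/block
    chown -h ceph:ceph /var/lib/ceph/osd/ceph-2/block
    chown -R ceph:ceph /var/lib/ceph/osd/ceph-2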
2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105912]: 2026-03-31 22:56:32.840024907 +0000 UTC m=+0.017218790 container create 26f67a9be8d843f235b9cc44b9cf5a96963fd3796f5b6d4ffd0fb2146ada147a (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.build-date=20260316, ceph=True, org.opencontainers.image.authors=Ceph Release Team , org.opencontainers.image.documentation=https://docs.ceph.com/, io.buildah.version=1.43.0, org.label-schema.name=CentOS Stream 9 Base Image, OSD_FLAVOR=default, org.label-schema.license=GPLv2, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, CEPH_REF=tentacle-release, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.schema-version=1.0, org.label-schema.vendor=CentOS, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/) 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105912]: 2026-03-31 22:56:32.877201041 +0000 UTC m=+0.054394933 container init 26f67a9be8d843f235b9cc44b9cf5a96963fd3796f5b6d4ffd0fb2146ada147a (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release, org.opencontainers.image.documentation=https://docs.ceph.com/, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, OSD_FLAVOR=default, io.buildah.version=1.43.0, FROM_IMAGE=quay.io/centos/centos:stream9, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, ceph=True, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2) 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105912]: 2026-03-31 22:56:32.882647544 +0000 UTC m=+0.059841436 container start 26f67a9be8d843f235b9cc44b9cf5a96963fd3796f5b6d4ffd0fb2146ada147a (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.name=CentOS Stream 9 Base Image, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, OSD_FLAVOR=default, ceph=True, io.buildah.version=1.43.0, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.vendor=CentOS) 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105912]: 2026-03-31 22:56:32.883401145 +0000 UTC m=+0.060595037 container attach 26f67a9be8d843f235b9cc44b9cf5a96963fd3796f5b6d4ffd0fb2146ada147a (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate, 
CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.build-date=20260316, CEPH_REF=tentacle-release, io.buildah.version=1.43.0, org.label-schema.license=GPLv2, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, OSD_FLAVOR=default, ceph=True, org.opencontainers.image.authors=Ceph Release Team , org.opencontainers.image.documentation=https://docs.ceph.com/, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.schema-version=1.0) 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 podman[105912]: 2026-03-31 22:56:32.833404916 +0000 UTC m=+0.010598819 image pull 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/ceph-authtool --gen-print-key 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 bash[105912]: Running command: /usr/bin/ceph-authtool --gen-print-key 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/ceph-authtool --gen-print-key 2026-03-31T22:56:33.240 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:32 vm09 bash[105912]: Running command: /usr/bin/ceph-authtool --gen-print-key 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: from='client.14415 -' entity='client.admin' cmd=[{"prefix": "pg dump", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: from='client.14427 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "node-exporter", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: Saving service node-exporter spec with placement * 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: Health check failed: 1 osds down (OSD_DOWN) 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: Health check failed: 1 host (1 osds) down (OSD_HOST_DOWN) 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: osdmap e22: 3 total, 2 up, 3 in 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:32 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: from='client.14415 -' entity='client.admin' 
cmd=[{"prefix": "pg dump", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: from='client.14427 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "node-exporter", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: Saving service node-exporter spec with placement * 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: Health check failed: 1 osds down (OSD_DOWN) 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: Health check failed: 1 host (1 osds) down (OSD_HOST_DOWN) 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: osdmap e22: 3 total, 2 up, 3 in 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:32 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: --> Failed to activate via raw: did not find any matching OSD to activate 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/ceph-authtool --gen-print-key 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: --> Failed to activate via raw: did not find any matching OSD to activate 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: Running command: /usr/bin/ceph-authtool --gen-print-key 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/ceph-authtool --gen-print-key 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: Running command: /usr/bin/ceph-authtool --gen-print-key 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-2 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: Running command: /usr/bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-2 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/ceph-bluestore-tool --cluster=ceph prime-osd-dir --dev /dev/vg_nvme/lv_4 --path /var/lib/ceph/osd/ceph-2 --no-mon-config 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: Running command: /usr/bin/ceph-bluestore-tool --cluster=ceph prime-osd-dir --dev 
/dev/vg_nvme/lv_4 --path /var/lib/ceph/osd/ceph-2 --no-mon-config 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/ln -snf /dev/vg_nvme/lv_4 /var/lib/ceph/osd/ceph-2/block 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: Running command: /usr/bin/ln -snf /dev/vg_nvme/lv_4 /var/lib/ceph/osd/ceph-2/block 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/chown -h ceph:ceph /var/lib/ceph/osd/ceph-2/block 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: Running command: /usr/bin/chown -h ceph:ceph /var/lib/ceph/osd/ceph-2/block 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/chown -R ceph:ceph /dev/dm-3 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: Running command: /usr/bin/chown -R ceph:ceph /dev/dm-3 2026-03-31T22:56:33.712 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: Running command: /usr/bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-2 2026-03-31T22:56:33.713 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: Running command: /usr/bin/chown -R ceph:ceph /var/lib/ceph/osd/ceph-2 2026-03-31T22:56:33.713 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate[105924]: --> ceph-volume lvm activate successful for osd ID: 2 2026-03-31T22:56:33.713 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[105912]: --> ceph-volume lvm activate successful for osd ID: 2 2026-03-31T22:56:33.713 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 podman[105912]: 2026-03-31 22:56:33.580554003 +0000 UTC m=+0.757747895 container died 26f67a9be8d843f235b9cc44b9cf5a96963fd3796f5b6d4ffd0fb2146ada147a (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, CEPH_REF=tentacle-release, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.license=GPLv2, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.schema-version=1.0, org.label-schema.name=CentOS Stream 9 Base Image, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, ceph=True, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, OSD_FLAVOR=default, io.buildah.version=1.43.0, org.label-schema.vendor=CentOS) 2026-03-31T22:56:33.713 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 podman[105912]: 2026-03-31 22:56:33.594813503 +0000 UTC m=+0.772007395 container remove 26f67a9be8d843f235b9cc44b9cf5a96963fd3796f5b6d4ffd0fb2146ada147a (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2-activate, org.opencontainers.image.authors=Ceph Release Team , OSD_FLAVOR=default, FROM_IMAGE=quay.io/centos/centos:stream9, 
ceph=True, org.label-schema.license=GPLv2, io.buildah.version=1.43.0, org.opencontainers.image.documentation=https://docs.ceph.com/, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.build-date=20260316, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.schema-version=1.0, CEPH_REF=tentacle-release, org.label-schema.name=CentOS Stream 9 Base Image, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.vendor=CentOS) 2026-03-31T22:56:33.713 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 podman[106210]: 2026-03-31 22:56:33.679686691 +0000 UTC m=+0.016025627 container create 8574bf5f2cc1960f3c0747aad57cd66a2f6cd84623839d6d96fcab9a737ac0e8 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.schema-version=1.0, org.label-schema.build-date=20260316, org.label-schema.license=GPLv2, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, ceph=True, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.43.0, OSD_FLAVOR=default, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.opencontainers.image.documentation=https://docs.ceph.com/, org.label-schema.vendor=CentOS, CEPH_REF=tentacle-release) 2026-03-31T22:56:33.973 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 podman[106210]: 2026-03-31 22:56:33.721337029 +0000 UTC m=+0.057675974 container init 8574bf5f2cc1960f3c0747aad57cd66a2f6cd84623839d6d96fcab9a737ac0e8 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.license=GPLv2, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , CEPH_REF=tentacle-release, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, io.buildah.version=1.43.0, org.opencontainers.image.documentation=https://docs.ceph.com/, OSD_FLAVOR=default, org.label-schema.vendor=CentOS, ceph=True) 2026-03-31T22:56:33.973 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 podman[106210]: 2026-03-31 22:56:33.727079617 +0000 UTC m=+0.063418562 container start 8574bf5f2cc1960f3c0747aad57cd66a2f6cd84623839d6d96fcab9a737ac0e8 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2, ceph=True, org.label-schema.build-date=20260316, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.schema-version=1.0, OSD_FLAVOR=default, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_REF=tentacle-release, org.label-schema.name=CentOS Stream 9 Base Image, io.buildah.version=1.43.0, org.label-schema.license=GPLv2, org.opencontainers.image.authors=Ceph Release Team , 
org.label-schema.vendor=CentOS, org.opencontainers.image.documentation=https://docs.ceph.com/) 2026-03-31T22:56:33.973 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 bash[106210]: 8574bf5f2cc1960f3c0747aad57cd66a2f6cd84623839d6d96fcab9a737ac0e8 2026-03-31T22:56:33.973 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 podman[106210]: 2026-03-31 22:56:33.673937951 +0000 UTC m=+0.010276896 image pull 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072 2026-03-31T22:56:33.973 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 systemd[1]: Started Ceph osd.2 for 8bb14950-2d54-11f1-a348-07063966e06c. 2026-03-31T22:56:33.973 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-osd[106226]: -- 192.168.123.109:0/494775478 <== mon.1 v2:192.168.123.109:3300/0 4 ==== auth_reply(proto 2 0 (0) Success) ==== 194+0+0 (secure 0 0 0) 0x564928019860 con 0x564927e39c00 2026-03-31T22:56:33.973 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[106222]: 2026-03-31T22:56:33.854+0000 7f57496ab8c0 -1 Falling back to public interface 2026-03-31T22:56:33.974 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: from='client.14433 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "grafana", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:33.974 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: Saving service grafana spec with placement count:1 2026-03-31T22:56:33.974 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: from='client.14439 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "alertmanager", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:33.974 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: Saving service alertmanager spec with placement count:1 2026-03-31T22:56:33.974 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: from='client.14445 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "prometheus", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:33.975 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: Saving service prometheus spec with placement count:1 2026-03-31T22:56:33.975 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: pgmap v43: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:33.975 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: osdmap e23: 3 total, 2 up, 3 in 2026-03-31T22:56:33.975 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:33.975 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:33 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: from='client.14433 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "grafana", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: Saving service grafana spec with placement count:1 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 
ceph-mon[61968]: from='client.14439 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "alertmanager", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: Saving service alertmanager spec with placement count:1 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: from='client.14445 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "prometheus", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: Saving service prometheus spec with placement count:1 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: pgmap v43: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: osdmap e23: 3 total, 2 up, 3 in 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:33 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:34.236 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:33 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[106222]: 2026-03-31T22:56:33.975+0000 7f57496ab8c0 -1 osd.2 21 log_to_monitors true 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: from='client.14433 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "grafana", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: Saving service grafana spec with placement count:1 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: from='client.14439 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "alertmanager", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: Saving service alertmanager spec with placement count:1 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: from='client.14445 -' entity='client.admin' cmd=[{"prefix": "orch apply", "service_type": "prometheus", "target": ["mon-mgr", ""]}]: dispatch 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: Saving service prometheus spec with placement count:1 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: pgmap v43: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: osdmap e23: 3 total, 2 up, 3 in 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:33 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:35.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:34 vm00 ceph-mon[61968]: from='osd.2 
[v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770]' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:35.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:34 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:35.228 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 22:56:34 vm09 ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[106222]: 2026-03-31T22:56:34.911+0000 7f5740bfd640 -1 osd.2 21 set_numa_affinity unable to identify public interface '' numa node: (2) No such file or directory 2026-03-31T22:56:35.228 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:34 vm09 ceph-mon[98050]: from='osd.2 [v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770]' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:35.228 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:34 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:34 vm05 ceph-mon[69577]: from='osd.2 [v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770]' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:34 vm05 ceph-mon[69577]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]} : dispatch 2026-03-31T22:56:36.151 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: osdmap e24: 3 total, 2 up, 3 in 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='osd.2 [v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770]' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: pgmap v46: 1 pgs: 1 active+clean; 449 KiB data, 81 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : 
dispatch 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:36.152 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:35 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: osdmap e24: 3 total, 2 up, 3 in 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='osd.2 [v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770]' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: pgmap v46: 1 pgs: 1 active+clean; 449 KiB data, 81 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": 
["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:36.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:35 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='osd.2 ' entity='osd.2' cmd='[{"prefix": "osd crush set-device-class", "class": "hdd", "ids": ["2"]}]': finished 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: osdmap e24: 3 total, 2 up, 3 in 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='osd.2 [v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770]' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='osd.2 ' entity='osd.2' cmd={"prefix": "osd crush create-or-move", "id": 2, "weight":0.0195, "args": ["host=vm09", "root=default"]} : dispatch 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: pgmap v46: 1 pgs: 1 active+clean; 449 KiB data, 81 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:56:36.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:35 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: 
Health check cleared: OSD_DOWN (was: 1 osds down) 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: Health check cleared: OSD_HOST_DOWN (was: 1 host (1 osds) down) 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: Cluster is now healthy 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: osd.2 [v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770] boot 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: osdmap e25: 3 total, 3 up, 3 in 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.988 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:36.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:36 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: Health check cleared: OSD_DOWN (was: 1 osds down) 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: Health check cleared: OSD_HOST_DOWN (was: 1 host (1 osds) down) 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: Cluster is now healthy 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: osd.2 [v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770] boot 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: osdmap e25: 3 total, 3 up, 3 in 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 
ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:36 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: Health check cleared: OSD_DOWN (was: 1 osds down) 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: Health check cleared: OSD_HOST_DOWN (was: 1 host (1 osds) down) 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: Cluster is now healthy 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: osd.2 [v2:192.168.123.109:6800/1386248770,v1:192.168.123.109:6801/1386248770] boot 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: osdmap e25: 3 total, 3 up, 3 in 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:36 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:37 vm09 ceph-mon[98050]: Detected new or changed devices on vm00 2026-03-31T22:56:38.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:37 vm09 ceph-mon[98050]: pgmap v48: 1 pgs: 1 active+undersized+degraded; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 2/6 objects degraded (33.333%) 2026-03-31T22:56:38.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:37 vm09 ceph-mon[98050]: osdmap e26: 3 total, 3 up, 3 in 2026-03-31T22:56:38.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:37 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:37 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:37 vm09 ceph-mon[98050]: Health check failed: Degraded data redundancy: 2/6 objects degraded (33.333%), 1 pg degraded (PG_DEGRADED) 2026-03-31T22:56:38.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:37 vm05 ceph-mon[69577]: Detected new or changed devices on vm00 2026-03-31T22:56:38.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:37 vm05 ceph-mon[69577]: pgmap v48: 1 pgs: 1 active+undersized+degraded; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB 
avail; 2/6 objects degraded (33.333%) 2026-03-31T22:56:38.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:37 vm05 ceph-mon[69577]: osdmap e26: 3 total, 3 up, 3 in 2026-03-31T22:56:38.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:37 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:37 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:37 vm05 ceph-mon[69577]: Health check failed: Degraded data redundancy: 2/6 objects degraded (33.333%), 1 pg degraded (PG_DEGRADED) 2026-03-31T22:56:38.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:37 vm00 ceph-mon[61968]: Detected new or changed devices on vm00 2026-03-31T22:56:38.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:37 vm00 ceph-mon[61968]: pgmap v48: 1 pgs: 1 active+undersized+degraded; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 2/6 objects degraded (33.333%) 2026-03-31T22:56:38.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:37 vm00 ceph-mon[61968]: osdmap e26: 3 total, 3 up, 3 in 2026-03-31T22:56:38.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:37 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:37 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:37 vm00 ceph-mon[61968]: Health check failed: Degraded data redundancy: 2/6 objects degraded (33.333%), 1 pg degraded (PG_DEGRADED) 2026-03-31T22:56:38.992 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:38 vm05 ceph-mon[69577]: Deploying daemon node-exporter.vm00 on vm00 2026-03-31T22:56:38.992 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:38 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.992 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:38 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.992 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:38 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.992 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:38 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.992 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:38 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:38.992 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:38 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:38 vm00 ceph-mon[61968]: Deploying daemon node-exporter.vm00 on vm00 2026-03-31T22:56:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 
31 22:56:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:38 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:38 vm09 ceph-mon[98050]: Deploying daemon node-exporter.vm00 on vm00 2026-03-31T22:56:39.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:38 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:38 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:38 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:38 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:38 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:39.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:38 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:39 vm00 ceph-mon[61968]: pgmap v50: 1 pgs: 1 active+undersized+degraded; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 2/6 objects degraded (33.333%) 2026-03-31T22:56:40.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:39 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:39 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:39 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.214 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:39 vm05 ceph-mon[69577]: pgmap v50: 1 pgs: 1 active+undersized+degraded; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 2/6 objects degraded (33.333%) 2026-03-31T22:56:40.214 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:39 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.214 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:39 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.214 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:39 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:39 vm09 ceph-mon[98050]: pgmap v50: 1 pgs: 1 active+undersized+degraded; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 2/6 objects degraded (33.333%) 2026-03-31T22:56:40.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:39 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:39 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:40.239 
INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:39 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:41.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:40 vm09 ceph-mon[98050]: Deploying daemon node-exporter.vm05 on vm05 2026-03-31T22:56:41.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:40 vm05 ceph-mon[69577]: Deploying daemon node-exporter.vm05 on vm05 2026-03-31T22:56:41.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:40 vm00 ceph-mon[61968]: Deploying daemon node-exporter.vm05 on vm05 2026-03-31T22:56:42.202 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:41 vm05 ceph-mon[69577]: pgmap v51: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:42.202 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:41 vm05 ceph-mon[69577]: Health check cleared: PG_DEGRADED (was: Degraded data redundancy: 2/6 objects degraded (33.333%), 1 pg degraded) 2026-03-31T22:56:42.202 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:41 vm05 ceph-mon[69577]: Cluster is now healthy 2026-03-31T22:56:42.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:41 vm09 ceph-mon[98050]: pgmap v51: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:42.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:41 vm09 ceph-mon[98050]: Health check cleared: PG_DEGRADED (was: Degraded data redundancy: 2/6 objects degraded (33.333%), 1 pg degraded) 2026-03-31T22:56:42.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:41 vm09 ceph-mon[98050]: Cluster is now healthy 2026-03-31T22:56:42.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:41 vm00 ceph-mon[61968]: pgmap v51: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:42.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:41 vm00 ceph-mon[61968]: Health check cleared: PG_DEGRADED (was: Degraded data redundancy: 2/6 objects degraded (33.333%), 1 pg degraded) 2026-03-31T22:56:42.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:41 vm00 ceph-mon[61968]: Cluster is now healthy 2026-03-31T22:56:43.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:43 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:43 vm00 ceph-mon[61968]: Deploying daemon node-exporter.vm09 on vm09 2026-03-31T22:56:43.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:43 vm00 ceph-mon[61968]: pgmap v52: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:43.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:43 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:43 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:43 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:43 vm09 ceph-mon[98050]: Deploying 
daemon node-exporter.vm09 on vm09 2026-03-31T22:56:43.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:43 vm09 ceph-mon[98050]: pgmap v52: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:43.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:43 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:43 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:43 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:43.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:43 vm05 ceph-mon[69577]: Deploying daemon node-exporter.vm09 on vm09 2026-03-31T22:56:43.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:43 vm05 ceph-mon[69577]: pgmap v52: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: Generating cephadm-signed certificates for grafana_cert/grafana_key 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: pgmap v53: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.811 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"} : dispatch 2026-03-31T22:56:45.812 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:45 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 
192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: Generating cephadm-signed certificates for grafana_cert/grafana_key 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: pgmap v53: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"} : dispatch 2026-03-31T22:56:45.890 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:45 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:46.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:46.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:46.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:46.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: Generating cephadm-signed certificates for grafana_cert/grafana_key 2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: pgmap v53: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' 2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 
192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"} : dispatch
2026-03-31T22:56:46.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:45 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"}]: dispatch
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: Deploying daemon grafana.vm00 on vm00
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: Detected new or changed devices on vm09
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:46 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"}]: dispatch
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: Deploying daemon grafana.vm00 on vm00
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: Detected new or changed devices on vm09
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:46 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"}]: dispatch
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: Deploying daemon grafana.vm00 on vm00
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: Detected new or changed devices on vm09
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:46 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:48.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:47 vm00 ceph-mon[61968]: pgmap v54: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:48.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:47 vm09 ceph-mon[98050]: pgmap v54: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:48.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:47 vm05 ceph-mon[69577]: pgmap v54: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:50.319 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:49 vm00 ceph-mon[61968]: pgmap v55: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:50.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:49 vm09 ceph-mon[98050]: pgmap v55: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:50.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:49 vm05 ceph-mon[69577]: pgmap v55: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:51.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:51 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:51.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:51 vm00 ceph-mon[61968]: pgmap v56: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:51.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:51 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:51.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:51 vm09 ceph-mon[98050]: pgmap v56: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:51.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:51 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:51.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:51 vm05 ceph-mon[69577]: pgmap v56: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:53.481 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:53 vm00 ceph-mon[61968]: pgmap v57: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:53.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:53 vm09 ceph-mon[98050]: pgmap v57: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:53.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:53 vm05 ceph-mon[69577]: pgmap v57: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:55.046 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.047 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.047 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.047 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:55 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.047 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:55 vm00 ceph-mon[61968]: Deploying daemon alertmanager.vm09 on vm09
2026-03-31T22:56:55.047 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:55 vm00 ceph-mon[61968]: pgmap v58: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:55 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:55 vm09 ceph-mon[98050]: Deploying daemon alertmanager.vm09 on vm09
2026-03-31T22:56:55.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:55 vm09 ceph-mon[98050]: pgmap v58: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:55 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:55 vm05 ceph-mon[69577]: Deploying daemon alertmanager.vm09 on vm09
2026-03-31T22:56:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:55 vm05 ceph-mon[69577]: pgmap v58: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:56.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.389 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:56 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:56 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:56.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:56 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:57.179 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:57 vm09 ceph-mon[98050]: pgmap v59: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:57.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:57 vm00 ceph-mon[61968]: pgmap v59: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:57.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:57 vm05 ceph-mon[69577]: pgmap v59: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:56:58.942 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.942 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.942 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.942 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.942 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.942 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:58 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.942 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:58 vm09 ceph-mon[98050]: Deploying daemon prometheus.vm05 on vm05
2026-03-31T22:56:58.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:58 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:58.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:58 vm05 ceph-mon[69577]: Deploying daemon prometheus.vm05 on vm05
2026-03-31T22:56:59.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:59.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:59.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:59.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:59.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:59.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:58 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:56:59.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:58 vm00 ceph-mon[61968]: Deploying daemon prometheus.vm05 on vm05
2026-03-31T22:57:00.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:59 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:59 vm00 ceph-mon[61968]: pgmap v60: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:00.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:59 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:59 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:56:59 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:59 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:59 vm09 ceph-mon[98050]: pgmap v60: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:00.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:59 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:59 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:56:59 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:59 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:59 vm05 ceph-mon[69577]: pgmap v60: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:00.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:59 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:59 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:00.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:56:59 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:01.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:01 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:01.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:01 vm00 ceph-mon[61968]: pgmap v61: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:01 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:01.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:01 vm09 ceph-mon[98050]: pgmap v61: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:01 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:01 vm05 ceph-mon[69577]: pgmap v61: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:04.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:03 vm09 ceph-mon[98050]: pgmap v62: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:04.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:03 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:03 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:03 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:03 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr module enable", "module": "prometheus"} : dispatch
2026-03-31T22:57:04.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:03 vm05 ceph-mon[69577]: pgmap v62: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:04.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:03 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:03 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:03 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:03 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr module enable", "module": "prometheus"} : dispatch
2026-03-31T22:57:04.243 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:04 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: ignoring --setuser ceph since I am not root
2026-03-31T22:57:04.243 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:04 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: ignoring --setgroup ceph since I am not root
2026-03-31T22:57:04.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:03 vm00 ceph-mon[61968]: pgmap v62: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:04.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:03 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:03 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:03 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a'
2026-03-31T22:57:04.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:03 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd={"prefix": "mgr module enable", "module": "prometheus"} : dispatch
2026-03-31T22:57:04.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:04 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ignoring --setuser ceph since I am not root
2026-03-31T22:57:04.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:04 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ignoring --setgroup ceph since I am not root
2026-03-31T22:57:05.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:05 vm00 ceph-mon[61968]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "mgr module enable", "module": "prometheus"}]': finished
2026-03-31T22:57:05.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:05 vm00 ceph-mon[61968]: mgrmap e15: a(active, since 93s), standbys: b
2026-03-31T22:57:05.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:05 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: /lib64/python3.9/site-packages/scipy/__init__.py:73: UserWarning: NumPy was imported from a Python sub-interpreter but NumPy does not properly support sub-interpreters. This will likely work for most users but might cause hard to track down issues or subtle bugs. A common user of the rare sub-interpreter feature is wsgi which also allows single-interpreter mode.
2026-03-31T22:57:05.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:05 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: Improvements in the case of bugs are welcome, but is not on the NumPy roadmap, and full support may require significant effort to achieve.
2026-03-31T22:57:05.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:05 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: from numpy import show_config as show_numpy_config
2026-03-31T22:57:05.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:05 vm09 ceph-mon[98050]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "mgr module enable", "module": "prometheus"}]': finished
2026-03-31T22:57:05.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:05 vm09 ceph-mon[98050]: mgrmap e15: a(active, since 93s), standbys: b
2026-03-31T22:57:05.493 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:05 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: /lib64/python3.9/site-packages/scipy/__init__.py:73: UserWarning: NumPy was imported from a Python sub-interpreter but NumPy does not properly support sub-interpreters. This will likely work for most users but might cause hard to track down issues or subtle bugs. A common user of the rare sub-interpreter feature is wsgi which also allows single-interpreter mode.
2026-03-31T22:57:05.493 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:05 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: Improvements in the case of bugs are welcome, but is not on the NumPy roadmap, and full support may require significant effort to achieve.
2026-03-31T22:57:05.493 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:05 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: from numpy import show_config as show_numpy_config
2026-03-31T22:57:05.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:05 vm05 ceph-mon[69577]: from='mgr.14152 192.168.123.100:0/3222361356' entity='mgr.a' cmd='[{"prefix": "mgr module enable", "module": "prometheus"}]': finished
2026-03-31T22:57:05.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:05 vm05 ceph-mon[69577]: mgrmap e15: a(active, since 93s), standbys: b
2026-03-31T22:57:08.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:08 vm05 ceph-mon[69577]: Active manager daemon a restarted
2026-03-31T22:57:08.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:08 vm05 ceph-mon[69577]: Activating manager daemon a
2026-03-31T22:57:08.244 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:07 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: [31/Mar/2026:22:57:07] ENGINE Bus STARTING
2026-03-31T22:57:08.244 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:07 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: CherryPy Checker:
2026-03-31T22:57:08.244 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:07 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: The Application mounted at '' has an empty config.
2026-03-31T22:57:08.244 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:07 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]:
2026-03-31T22:57:08.244 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:08 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: [31/Mar/2026:22:57:08] ENGINE Serving on http://:::9283
2026-03-31T22:57:08.244 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 22:57:08 vm05 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-b[70969]: [31/Mar/2026:22:57:08] ENGINE Bus STARTED
2026-03-31T22:57:08.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:08 vm09 ceph-mon[98050]: Active manager daemon a restarted
2026-03-31T22:57:08.491 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:08 vm09 ceph-mon[98050]: Activating manager daemon a
2026-03-31T22:57:08.492 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:08 vm00 ceph-mon[61968]: Active manager daemon a restarted
2026-03-31T22:57:08.492 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:08 vm00 ceph-mon[61968]: Activating manager daemon a
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/crt"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: osdmap e27: 3 total, 3 up, 3 in
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: mgrmap e16: a(active, starting, since 0.304018s), standbys: b
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "a", "id": "a"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "b", "id": "b"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: Standby manager daemon b restarted
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: Standby manager daemon b started
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/crt"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mds metadata"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/key"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/key"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: Manager daemon a is now available
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm05", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm09", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm00", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd='[{"prefix": "config rm", "who": "osd/host:vm09", "name": "osd_memory_target"}]': finished
2026-03-31T22:57:09.491 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:09.491 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:09.491 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:57:09.491 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:09.491 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch
2026-03-31T22:57:09.491 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/mirror_snapshot_schedule"} : dispatch
2026-03-31T22:57:09.491 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/trash_purge_schedule"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/crt"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: osdmap e27: 3 total, 3 up, 3 in
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: mgrmap e16: a(active, starting, since 0.304018s), standbys: b
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "a", "id": "a"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "b", "id": "b"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: Standby manager daemon b restarted
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: Standby manager daemon b started
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/crt"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mds metadata"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata"} : dispatch
2026-03-31T22:57:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/key"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/key"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: Manager daemon a is now available
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm05", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm09", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm00", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd='[{"prefix": "config rm", "who": "osd/host:vm09", "name": "osd_memory_target"}]': finished
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/mirror_snapshot_schedule"} : dispatch
2026-03-31T22:57:09.494 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/trash_purge_schedule"} : dispatch
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:09] ENGINE Bus STARTING
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: CherryPy Checker:
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: The Application mounted at '' has an empty config.
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]:
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:09] ENGINE Serving on http://:::9283
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:09] ENGINE Bus STARTED
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/crt"} : dispatch
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: osdmap e27: 3 total, 3 up, 3 in
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: mgrmap e16: a(active, starting, since 0.304018s), standbys: b
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "a"} : dispatch
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "b"} : dispatch
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata", "id": "c"} : dispatch
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "a", "id": "a"} : dispatch
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mgr metadata", "who": "b", "id": "b"} : dispatch
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 0} : dispatch
2026-03-31T22:57:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 1} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: Standby manager daemon b restarted
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: Standby manager daemon b started
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata", "id": 2} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/crt"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mds metadata"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd metadata"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon metadata"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/b/key"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.? 192.168.123.105:0/3147357394' entity='mgr.b' cmd={"prefix": "config-key get", "key": "mgr/dashboard/key"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: Manager daemon a is now available
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm05", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm09", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config rm", "who": "osd/host:vm00", "name": "osd_memory_target"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd='[{"prefix": "config rm", "who": "osd/host:vm09", "name": "osd_memory_target"}]': finished
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/mirror_snapshot_schedule"} : dispatch
2026-03-31T22:57:09.666 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix":"config rm","who":"mgr","name":"mgr/rbd_support/a/trash_purge_schedule"} : dispatch
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm00:/etc/ceph/ceph.conf
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm05:/etc/ceph/ceph.conf
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm09:/etc/ceph/ceph.conf
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm09:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm05:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: Updating vm00:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: mgrmap e17: a(active, since 1.31599s), standbys: b
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: pgmap v3: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:10 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm00", "caps": []} : dispatch
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm00:/etc/ceph/ceph.conf
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm05:/etc/ceph/ceph.conf
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm09:/etc/ceph/ceph.conf
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm09:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm05:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: Updating vm00:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: mgrmap e17: a(active, since 1.31599s), standbys: b
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: pgmap v3: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:10 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm00", "caps": []} : dispatch
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm00:/etc/ceph/ceph.conf
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm05:/etc/ceph/ceph.conf
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm09:/etc/ceph/ceph.conf
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.conf
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm09:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm05:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: Updating vm00:/etc/ceph/ceph.client.admin.keyring
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: mgrmap e17: a(active, since 1.31599s), standbys: b
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: pgmap v3: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:10.672 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:10 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm00", "caps": []} : dispatch
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: [31/Mar/2026:22:57:09] ENGINE Bus STARTING
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: [31/Mar/2026:22:57:09] ENGINE Serving on http://192.168.123.100:8765
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: [31/Mar/2026:22:57:09] ENGINE Serving on https://192.168.123.100:7151
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: [31/Mar/2026:22:57:09] ENGINE Bus STARTED
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: [31/Mar/2026:22:57:09] ENGINE Client ('192.168.123.100', 45986) lost — peer dropped the TLS connection suddenly, during handshake: (6, 'TLS/SSL connection has been closed (EOF) (_ssl.c:1147)')
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: Reconfiguring grafana.vm00 deps ['secure_monitoring_stack:False'] -> ['prometheus.vm05', 'secure_monitoring_stack:False'] (diff {'prometheus.vm05'})
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: pgmap v4: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"} : dispatch
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: mgrmap e18: a(active, since 2s), standbys: b
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:11.272 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:11 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:11.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: [31/Mar/2026:22:57:09] ENGINE Bus STARTING
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: [31/Mar/2026:22:57:09] ENGINE Serving on http://192.168.123.100:8765
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: [31/Mar/2026:22:57:09] ENGINE Serving on https://192.168.123.100:7151
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: [31/Mar/2026:22:57:09] ENGINE Bus STARTED
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: [31/Mar/2026:22:57:09] ENGINE Client ('192.168.123.100', 45986) lost — peer dropped the TLS connection suddenly, during handshake: (6, 'TLS/SSL connection has been closed (EOF) (_ssl.c:1147)')
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: Reconfiguring grafana.vm00 deps ['secure_monitoring_stack:False'] -> ['prometheus.vm05', 'secure_monitoring_stack:False'] (diff {'prometheus.vm05'})
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: pgmap v4: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"} : dispatch
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: mgrmap e18: a(active, since 2s), standbys: b
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:11.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:11 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: [31/Mar/2026:22:57:09] ENGINE Bus STARTING
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: Updating vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: Updating vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: Updating vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/config/ceph.client.admin.keyring
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: [31/Mar/2026:22:57:09] ENGINE Serving on http://192.168.123.100:8765
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: [31/Mar/2026:22:57:09] ENGINE Serving on https://192.168.123.100:7151
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: [31/Mar/2026:22:57:09] ENGINE Bus STARTED
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: [31/Mar/2026:22:57:09] ENGINE Client ('192.168.123.100', 45986) lost — peer dropped the TLS connection suddenly, during handshake: (6, 'TLS/SSL connection has been closed (EOF) (_ssl.c:1147)')
2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: Metadata not up to date on all hosts.
Skipping non agent specs 2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: Reconfiguring grafana.vm00 deps ['secure_monitoring_stack:False'] -> ['prometheus.vm05', 'secure_monitoring_stack:False'] (diff {'prometheus.vm05'}) 2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: pgmap v4: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"} : dispatch 2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: mgrmap e18: a(active, since 2s), standbys: b 2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:11 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"}]: dispatch 2026-03-31T22:57:12.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: Reconfiguring daemon grafana.vm00 on vm00 2026-03-31T22:57:12.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:12 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm05", "caps": []} : dispatch 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' 
cmd=[{"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"}]: dispatch 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: Reconfiguring daemon grafana.vm00 on vm00 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:12 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm05", "caps": []} : dispatch 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-grafana-api-ssl-verify", "value": "false"}]: dispatch 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: Reconfiguring daemon grafana.vm00 on vm00 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:12.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:12 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm05", "caps": []} : dispatch 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: pgmap v5: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm09", "caps": []} : dispatch 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: 
from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: pgmap v6: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: Health check failed: 1 failed cephadm daemon(s) (CEPHADM_FAILED_DAEMON) 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: mgrmap e19: a(active, since 4s), standbys: b 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: Reconfiguring alertmanager.vm09 deps ['mgr.a', 'mgr.b', 'secure_monitoring_stack:False'] -> ['alertmanager.vm09', 'mgr.a', 'mgr.b', 'secure_monitoring_stack:False'] (diff {'alertmanager.vm09'}) 2026-03-31T22:57:13.530 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:13 vm00 ceph-mon[61968]: Reconfiguring daemon alertmanager.vm09 on vm09 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: pgmap v5: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm09", "caps": []} : dispatch 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: pgmap v6: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: Health check failed: 1 failed cephadm daemon(s) (CEPHADM_FAILED_DAEMON) 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: mgrmap e19: a(active, since 4s), standbys: b 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: Reconfiguring alertmanager.vm09 deps ['mgr.a', 'mgr.b', 'secure_monitoring_stack:False'] -> ['alertmanager.vm09', 'mgr.a', 'mgr.b', 'secure_monitoring_stack:False'] (diff {'alertmanager.vm09'}) 2026-03-31T22:57:13.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:13 vm09 ceph-mon[98050]: Reconfiguring daemon alertmanager.vm09 on vm09 2026-03-31T22:57:13.993 
INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: pgmap v5: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get-or-create", "entity": "client.agent.vm09", "caps": []} : dispatch 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: pgmap v6: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: Health check failed: 1 failed cephadm daemon(s) (CEPHADM_FAILED_DAEMON) 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: mgrmap e19: a(active, since 4s), standbys: b 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: Reconfiguring alertmanager.vm09 deps ['mgr.a', 'mgr.b', 'secure_monitoring_stack:False'] -> ['alertmanager.vm09', 'mgr.a', 'mgr.b', 'secure_monitoring_stack:False'] (diff {'alertmanager.vm09'}) 2026-03-31T22:57:13.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:13 vm05 ceph-mon[69577]: Reconfiguring daemon alertmanager.vm09 on vm09 2026-03-31T22:57:14.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:14 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:14 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:14.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:14 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:14.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:14 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:14.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:14 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:14.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:14 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.020 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:14 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.020 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:14 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.020 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:14 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.865 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: pgmap v7: 1 pgs: 1 
active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 34 KiB/s rd, 0 B/s wr, 14 op/s 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: pgmap v8: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 31 KiB/s rd, 0 B/s wr, 12 op/s 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: pgmap v9: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 31 KiB/s rd, 0 B/s wr, 12 op/s 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: pgmap v10: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 35 KiB/s rd, 0 B/s wr, 14 op/s 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: pgmap v11: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 47 KiB/s rd, 0 B/s wr, 19 op/s 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 
INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-grafana-api-url"} : dispatch 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-url", "value": "https://vm00.local:3000"} : dispatch 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-prometheus-api-host"} : dispatch 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-prometheus-api-host", "value": "http://vm05.local:9095"} : dispatch 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.866 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: pgmap v7: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 34 KiB/s rd, 0 B/s wr, 14 op/s 2026-03-31T22:57:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: pgmap v8: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 31 KiB/s rd, 0 B/s wr, 12 op/s 2026-03-31T22:57:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: pgmap v9: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 31 KiB/s rd, 0 B/s wr, 12 op/s 2026-03-31T22:57:15.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 
INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: pgmap v10: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 35 KiB/s rd, 0 B/s wr, 14 op/s 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: pgmap v11: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 47 KiB/s rd, 0 B/s wr, 19 op/s 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-grafana-api-url"} : dispatch 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-url", "value": "https://vm00.local:3000"} : dispatch 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: 
from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-prometheus-api-host"} : dispatch 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-prometheus-api-host", "value": "http://vm05.local:9095"} : dispatch 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:15.994 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE Bus STOPPING 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE HTTP Server cherrypy._cpwsgi_server.CPWSGIServer(('::', 9283)) shut down 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE Bus STOPPED 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE Bus STARTING 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE Serving on http://:::9283 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE Bus STARTED 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE Bus STOPPING 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE HTTP Server cherrypy._cpwsgi_server.CPWSGIServer(('::', 9283)) shut down 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE Bus STOPPED 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:15] ENGINE Bus STARTING 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:16] ENGINE Serving on http://:::9283 2026-03-31T22:57:16.040 
INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:16] ENGINE Bus STARTED 2026-03-31T22:57:16.040 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:16] ENGINE Bus STOPPING 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: pgmap v7: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 34 KiB/s rd, 0 B/s wr, 14 op/s 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: pgmap v8: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 31 KiB/s rd, 0 B/s wr, 12 op/s 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: pgmap v9: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 31 KiB/s rd, 0 B/s wr, 12 op/s 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: pgmap v10: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 35 KiB/s rd, 0 B/s wr, 14 op/s 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: pgmap v11: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 47 KiB/s rd, 0 B/s wr, 19 op/s 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-grafana-api-url"} : dispatch 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-grafana-api-url", "value": "https://vm00.local:3000"} : dispatch 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-prometheus-api-host"} : dispatch 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-prometheus-api-host", "value": "http://vm05.local:9095"} : dispatch 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.041 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:16] ENGINE HTTP Server cherrypy._cpwsgi_server.CPWSGIServer(('::', 9283)) shut down 2026-03-31T22:57:16.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:16] ENGINE Bus STOPPED 2026-03-31T22:57:16.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:16] ENGINE Bus STARTING 2026-03-31T22:57:16.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:16 vm00 
ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:16] ENGINE Serving on http://:::9283 2026-03-31T22:57:16.415 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:22:57:16] ENGINE Bus STARTED 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: pgmap v12: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 58 KiB/s rd, 0 B/s wr, 23 op/s 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: pgmap v13: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard get-grafana-api-url"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-grafana-api-url", "value": "https://vm00.local:3000"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard get-prometheus-api-host"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-prometheus-api-host", "value": "http://vm05.local:9095"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: pgmap v14: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-alertmanager-api-host"} : dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard get-alertmanager-api-host"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-alertmanager-api-host", "value": "http://vm09.local:9093"} : dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-alertmanager-api-host", "value": "http://vm09.local:9093"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' 
cmd=[{"prefix": "orch get-security-config"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: Certificate for "grafana_cert (vm00)" is still valid for 1094 days. 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: Metadata not up to date on all hosts. Skipping non agent specs 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mon.0 -' entity='mon.' 
cmd=[{"prefix": "orch get-security-config"}]: dispatch 2026-03-31T22:57:16.928 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: Health check cleared: CEPHADM_FAILED_DAEMON (was: 1 failed cephadm daemon(s)) 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: Cluster is now healthy 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch 2026-03-31T22:57:16.929 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:57:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: pgmap v12: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 58 KiB/s rd, 0 B/s wr, 23 op/s 2026-03-31T22:57:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: pgmap v13: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard get-grafana-api-url"}]: dispatch 2026-03-31T22:57:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-grafana-api-url", "value": "https://vm00.local:3000"}]: dispatch 2026-03-31T22:57:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' 
cmd=[{"prefix": "dashboard get-prometheus-api-host"}]: dispatch 2026-03-31T22:57:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-prometheus-api-host", "value": "http://vm05.local:9095"}]: dispatch 2026-03-31T22:57:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: pgmap v14: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-alertmanager-api-host"} : dispatch 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard get-alertmanager-api-host"}]: dispatch 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-alertmanager-api-host", "value": "http://vm09.local:9093"} : dispatch 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-alertmanager-api-host", "value": "http://vm09.local:9093"}]: dispatch 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch 2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: Certificate for "grafana_cert (vm00)" is still valid for 1094 days. 
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: Health check cleared: CEPHADM_FAILED_DAEMON (was: 1 failed cephadm daemon(s))
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: Cluster is now healthy
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch
2026-03-31T22:57:17.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: pgmap v12: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail; 58 KiB/s rd, 0 B/s wr, 23 op/s
2026-03-31T22:57:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: pgmap v13: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard get-grafana-api-url"}]: dispatch
2026-03-31T22:57:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-grafana-api-url", "value": "https://vm00.local:3000"}]: dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard get-prometheus-api-host"}]: dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-prometheus-api-host", "value": "http://vm05.local:9095"}]: dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: pgmap v14: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard get-alertmanager-api-host"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard get-alertmanager-api-host"}]: dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "dashboard set-alertmanager-api-host", "value": "http://vm09.local:9093"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "dashboard set-alertmanager-api-host", "value": "http://vm09.local:9093"}]: dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: Certificate for "grafana_cert (vm00)" is still valid for 1094 days.
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: Metadata not up to date on all hosts. Skipping non agent specs
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "orch get-security-config"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mon.0 -' entity='mon.' cmd=[{"prefix": "orch get-security-config"}]: dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: Health check cleared: CEPHADM_FAILED_DAEMON (was: 1 failed cephadm daemon(s))
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: Cluster is now healthy
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.bootstrap-osd"} : dispatch
2026-03-31T22:57:17.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:57:17.991 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:17 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.991 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:17 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:17.991 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:17 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:18.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:18.390 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:18.391 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:18.391 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:18.391 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:18.391 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:19.071 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:18 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:57:18] "GET /metrics HTTP/1.1" 200 23116 "" "Prometheus/3.6.0"
2026-03-31T22:57:19.390 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:19 vm09 ceph-mon[98050]: pgmap v15: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:19.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:19 vm00 ceph-mon[61968]: pgmap v15: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:19.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:19 vm05 ceph-mon[69577]: pgmap v15: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:20.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:20 vm00 ceph-mon[61968]: pgmap v16: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:20.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:20 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:20 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:20 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:20 vm09 ceph-mon[98050]: pgmap v16: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:20.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:20 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:20 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:20 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:20.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:20 vm05 ceph-mon[69577]: pgmap v16: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:23.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:22 vm00 ceph-mon[61968]: pgmap v17: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:23.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:22 vm09 ceph-mon[98050]: pgmap v17: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:23.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:22 vm05 ceph-mon[69577]: pgmap v17: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:24.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:23 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:24.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:23 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:24.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:23 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:25.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:24 vm00 ceph-mon[61968]: pgmap v18: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:25.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:24 vm09 ceph-mon[98050]: pgmap v18: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:25.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:24 vm05 ceph-mon[69577]: pgmap v18: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:27.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:26 vm00 ceph-mon[61968]: pgmap v19: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:27.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:26 vm09 ceph-mon[98050]: pgmap v19: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:27.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:26 vm05 ceph-mon[69577]: pgmap v19: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:29.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:28 vm00 ceph-mon[61968]: pgmap v20: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:29.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:28 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:57:28] "GET /metrics HTTP/1.1" 200 24366 "" "Prometheus/3.6.0"
2026-03-31T22:57:29.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:28 vm09 ceph-mon[98050]: pgmap v20: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:29.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:28 vm05 ceph-mon[69577]: pgmap v20: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:31.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:30 vm00 ceph-mon[61968]: pgmap v21: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:31.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:30 vm09 ceph-mon[98050]: pgmap v21: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:30 vm05 ceph-mon[69577]: pgmap v21: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:33.164 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:32 vm00 ceph-mon[61968]: pgmap v22: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:33.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:32 vm09 ceph-mon[98050]: pgmap v22: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:32 vm05 ceph-mon[69577]: pgmap v22: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:35.152 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:34 vm05 ceph-mon[69577]: pgmap v23: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:35.152 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:34 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:35.152 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:34 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:35.152 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:34 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:35.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:34 vm00 ceph-mon[61968]: pgmap v23: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:35.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:35.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:35.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:35.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:34 vm09 ceph-mon[98050]: pgmap v23: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:35.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:34 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:35.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:34 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:35.203 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:34 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:36 vm09 ceph-mon[98050]: pgmap v24: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:36.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.990 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:36 vm05 ceph-mon[69577]: pgmap v24: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:36.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:36.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:36 vm00 ceph-mon[61968]: pgmap v24: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:37.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:38 vm00 ceph-mon[61968]: pgmap v25: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:38 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:39.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:38 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:57:38] "GET /metrics HTTP/1.1" 200 24373 "" "Prometheus/3.6.0"
2026-03-31T22:57:39.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:38 vm09 ceph-mon[98050]: pgmap v25: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:39.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:38 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:39.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:38 vm05 ceph-mon[69577]: pgmap v25: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:39.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:38 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:40 vm00 ceph-mon[61968]: pgmap v26: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:41.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:40 vm09 ceph-mon[98050]: pgmap v26: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:41.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:40 vm05 ceph-mon[69577]: pgmap v26: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:42 vm00 ceph-mon[61968]: pgmap v27: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:43.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:42 vm09 ceph-mon[98050]: pgmap v27: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:43.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:42 vm05 ceph-mon[69577]: pgmap v27: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:45.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:44 vm00 ceph-mon[61968]: pgmap v28: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:45.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:44 vm09 ceph-mon[98050]: pgmap v28: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:45.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:44 vm05 ceph-mon[69577]: pgmap v28: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:46.873 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:46 vm05 ceph-mon[69577]: pgmap v29: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:46 vm00 ceph-mon[61968]: pgmap v29: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:47.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:46 vm09 ceph-mon[98050]: pgmap v29: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:49.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:48 vm00 ceph-mon[61968]: pgmap v30: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:49.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:48 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:57:48] "GET /metrics HTTP/1.1" 200 24373 "" "Prometheus/3.6.0"
2026-03-31T22:57:49.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:48 vm09 ceph-mon[98050]: pgmap v30: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:49.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:48 vm05 ceph-mon[69577]: pgmap v30: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:50 vm00 ceph-mon[61968]: pgmap v31: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:51.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:50 vm09 ceph-mon[98050]: pgmap v31: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:51.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:50 vm05 ceph-mon[69577]: pgmap v31: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:53.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:52 vm00 ceph-mon[61968]: pgmap v32: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:53.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:52 vm09 ceph-mon[98050]: pgmap v32: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:53.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:52 vm05 ceph-mon[69577]: pgmap v32: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:54.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:53 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:54.185 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:53 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:54.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:53 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:57:55.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:54 vm00 ceph-mon[61968]: pgmap v33: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:55.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:54 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:55.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:54 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:55.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:54 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:55.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:54 vm09 ceph-mon[98050]: pgmap v33: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:55.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:54 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:55.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:54 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:55.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:54 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:55.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:54 vm05 ceph-mon[69577]: pgmap v33: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:55.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:54 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:55.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:54 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:55.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:54 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:56 vm00 ceph-mon[61968]: pgmap v34: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:56 vm09 ceph-mon[98050]: pgmap v34: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:57.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:56 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:56 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:56 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:56 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:56 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:56 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:56 vm05 ceph-mon[69577]: pgmap v34: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:57:59.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:57:58 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:57:58] "GET /metrics HTTP/1.1" 200 24364 "" "Prometheus/3.6.0"
2026-03-31T22:57:59.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:57:58 vm00 ceph-mon[61968]: pgmap v35: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:59.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:57:58 vm09 ceph-mon[98050]: pgmap v35: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:57:59.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:57:58 vm05 ceph-mon[69577]: pgmap v35: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:01.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:00 vm00 ceph-mon[61968]: pgmap v36: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:01.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:00 vm09 ceph-mon[98050]: pgmap v36: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:01.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:00 vm05 ceph-mon[69577]: pgmap v36: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:03.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:02 vm00 ceph-mon[61968]: pgmap v37: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:03.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:02 vm09 ceph-mon[98050]: pgmap v37: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:03.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:02 vm05 ceph-mon[69577]: pgmap v37: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:05.047 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:04 vm09 ceph-mon[98050]: pgmap v38: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:05.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:04 vm00 ceph-mon[61968]: pgmap v38: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:05.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:04 vm05 ceph-mon[69577]: pgmap v38: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:07.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:06 vm00 ceph-mon[61968]: pgmap v39: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:07.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:06 vm09 ceph-mon[98050]: pgmap v39: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:07.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:06 vm05 ceph-mon[69577]: pgmap v39: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:09.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:58:08 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:58:08] "GET /metrics HTTP/1.1" 200 24372 "" "Prometheus/3.6.0"
2026-03-31T22:58:09.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:08 vm00 ceph-mon[61968]: pgmap v40: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:09.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:08 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:08 vm09 ceph-mon[98050]: pgmap v40: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:09.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:08 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:09.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:08 vm05 ceph-mon[69577]: pgmap v40: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:09.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:08 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:10 vm00 ceph-mon[61968]: pgmap v41: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:11.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:10 vm09 ceph-mon[98050]: pgmap v41: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:11.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:10 vm05 ceph-mon[69577]: pgmap v41: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:13.153 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:12 vm00 ceph-mon[61968]: pgmap v42: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:13.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:12 vm09 ceph-mon[98050]: pgmap v42: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:13.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:12 vm05 ceph-mon[69577]: pgmap v42: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:14.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:14 vm05 ceph-mon[69577]: pgmap v43: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:14.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:14 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:14.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:14 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:14.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:14 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:15.082 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:14 vm09 ceph-mon[98050]: pgmap v43: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:15.082 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:14 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:15.082 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:14 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:15.082 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:14 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:14 vm00 ceph-mon[61968]: pgmap v43: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:14 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:14 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:15.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:14 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:16 vm00 ceph-mon[61968]: pgmap v44: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:16 vm09 ceph-mon[98050]: pgmap v44: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:17.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:16 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:16 vm05 ceph-mon[69577]: pgmap v44: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:19.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:58:18 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:58:18] "GET /metrics HTTP/1.1" 200 24372 "" "Prometheus/3.6.0"
2026-03-31T22:58:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:18 vm00 ceph-mon[61968]: pgmap v45: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:19.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:18 vm09 ceph-mon[98050]: pgmap v45: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:18 vm05 ceph-mon[69577]: pgmap v45: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:20.230 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:19 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:58:20.230 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:19 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:58:20.230 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:19 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:58:20.230 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:19 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:20.230 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:19 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T22:58:20.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:19 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:58:20.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:19 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:58:20.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:19 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:58:20.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:19 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:20.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:19 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T22:58:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:19 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T22:58:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:19 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T22:58:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:19 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T22:58:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:19 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:20.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:19 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T22:58:21.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:20 vm00 ceph-mon[61968]: pgmap v46: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:21.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:20 vm09 ceph-mon[98050]: pgmap v46: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:21.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:20 vm05 ceph-mon[69577]: pgmap v46: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:23.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:23 vm00 ceph-mon[61968]: pgmap v47: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:23.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:23 vm09 ceph-mon[98050]: pgmap v47: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:23.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:23 vm05 ceph-mon[69577]: pgmap v47: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:24.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:24 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:24.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:24 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:24.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:24 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:25.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:25 vm00 ceph-mon[61968]: pgmap v48: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:25.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:25 vm09 ceph-mon[98050]: pgmap v48: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:25.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:25 vm05 ceph-mon[69577]: pgmap v48: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:27.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:27 vm00 ceph-mon[61968]: pgmap v49: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:27.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:27 vm09 ceph-mon[98050]: pgmap v49: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:27.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:27 vm05 ceph-mon[69577]: pgmap v49: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:29.099 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:58:28 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:58:28] "GET /metrics HTTP/1.1" 200 24367 "" "Prometheus/3.6.0"
2026-03-31T22:58:29.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:29 vm00 ceph-mon[61968]: pgmap v50: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:29.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:29 vm09 ceph-mon[98050]: pgmap v50: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:29.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:29 vm05 ceph-mon[69577]: pgmap v50: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:31.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:31 vm00 ceph-mon[61968]: pgmap v51: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:31.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:31 vm09 ceph-mon[98050]: pgmap v51: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:31.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:31 vm05 ceph-mon[69577]: pgmap v51: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:33.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:33 vm00 ceph-mon[61968]: pgmap v52: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:33.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:33 vm09 ceph-mon[98050]: pgmap v52: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:33.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:33 vm05 ceph-mon[69577]: pgmap v52: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:35.390 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:35 vm00 ceph-mon[61968]: pgmap v53: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:35.390 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:35 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:35.390 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:35 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:35.390 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:35 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:35.394 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:35 vm05 ceph-mon[69577]: pgmap v53: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:35.394 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:35 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:35.394 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:35 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:35.394 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:35 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:35.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:35 vm09 ceph-mon[98050]: pgmap v53: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:35.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:35 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:35.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:35 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:35.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:35 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:36 vm00 ceph-mon[61968]: pgmap v54: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:36 vm09 ceph-mon[98050]: pgmap v54: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:37.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.490 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:36 vm05 ceph-mon[69577]: pgmap v54: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:37.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:37.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:39.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:58:38 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:58:38] "GET /metrics HTTP/1.1" 200 24368 "" "Prometheus/3.6.0"
2026-03-31T22:58:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:38 vm00 ceph-mon[61968]: pgmap v55: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:39.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:38 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:39.183 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:38 vm09 ceph-mon[98050]: pgmap v55: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:39.183 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:38 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:39.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:38 vm05 ceph-mon[69577]: pgmap v55: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:39.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:38 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:41.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:41 vm00 ceph-mon[61968]: pgmap v56: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:41.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:41 vm09 ceph-mon[98050]: pgmap v56: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:41.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:41 vm05 ceph-mon[69577]: pgmap v56: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:43.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:43 vm00 ceph-mon[61968]: pgmap v57: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:43.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:43 vm09 ceph-mon[98050]: pgmap v57: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:43.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:43 vm05 ceph-mon[69577]: pgmap v57: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:45.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:45 vm00 ceph-mon[61968]: pgmap v58: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:45.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:45 vm09 ceph-mon[98050]: pgmap v58: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:45.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:45 vm05 ceph-mon[69577]: pgmap v58: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:47.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:47 vm00 ceph-mon[61968]: pgmap v59: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:47.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:47 vm09 ceph-mon[98050]: pgmap v59: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:47.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:47 vm05 ceph-mon[69577]: pgmap v59: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:49.121 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:58:48 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:58:48] "GET /metrics HTTP/1.1" 200 24368 "" "Prometheus/3.6.0"
2026-03-31T22:58:49.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:49 vm00 ceph-mon[61968]: pgmap v60: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:49.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:49 vm09 ceph-mon[98050]: pgmap v60: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:49.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:49 vm05 ceph-mon[69577]: pgmap v60: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:51.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:51 vm00 ceph-mon[61968]: pgmap v61: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:51.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:51 vm09 ceph-mon[98050]: pgmap v61: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:51.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:51 vm05 ceph-mon[69577]: pgmap v61: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:53.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:53 vm09 ceph-mon[98050]: pgmap v62: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:53.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:53 vm05 ceph-mon[69577]: pgmap v62: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:53.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:53 vm00 ceph-mon[61968]: pgmap v62: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:54.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:54 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:54.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:54 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:54.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:54 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:58:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:55 vm09 ceph-mon[98050]: pgmap v63: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:55 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:55 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:55 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:55 vm05 ceph-mon[69577]: pgmap v63: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:55 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:55 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:55 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:55.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:55 vm00 ceph-mon[61968]: pgmap v63: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:55.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:55 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:55.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:55 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:55.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:55 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:57 vm09 ceph-mon[98050]: pgmap v64: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:57.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:57 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:57 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:57 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:57 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:57 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.240 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:57 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:57 vm00 ceph-mon[61968]: pgmap v64: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:58:57.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:58:57.493
INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:57 vm05 ceph-mon[69577]: pgmap v64: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:58:57.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:58:57.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:58:57.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:58:57.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:58:57.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:58:57.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:58:59.083 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:58:58 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:58:58] "GET /metrics HTTP/1.1" 200 24366 "" "Prometheus/3.6.0" 2026-03-31T22:58:59.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:58:59 vm00 ceph-mon[61968]: pgmap v65: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:58:59.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:58:59 vm09 ceph-mon[98050]: pgmap v65: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:58:59.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:58:59 vm05 ceph-mon[69577]: pgmap v65: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:01.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:01 vm00 ceph-mon[61968]: pgmap v66: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:01.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:01 vm09 ceph-mon[98050]: pgmap v66: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:01 vm05 ceph-mon[69577]: pgmap v66: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:03.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:03 vm00 ceph-mon[61968]: pgmap v67: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:03 vm09 ceph-mon[98050]: pgmap v67: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:03.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:03 vm05 ceph-mon[69577]: pgmap v67: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:05.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:05 vm00 ceph-mon[61968]: pgmap v68: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:05.441 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:05 vm05 ceph-mon[69577]: pgmap v68: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:05.443 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:05 vm09 ceph-mon[98050]: pgmap v68: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 
GiB / 60 GiB avail 2026-03-31T22:59:07.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:07 vm00 ceph-mon[61968]: pgmap v69: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:07.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:07 vm09 ceph-mon[98050]: pgmap v69: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:07.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:07 vm05 ceph-mon[69577]: pgmap v69: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:09.106 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:59:08 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:59:08] "GET /metrics HTTP/1.1" 200 24371 "" "Prometheus/3.6.0" 2026-03-31T22:59:09.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:09 vm00 ceph-mon[61968]: pgmap v70: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:09.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:09.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:09 vm09 ceph-mon[98050]: pgmap v70: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:09.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:09 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:09 vm05 ceph-mon[69577]: pgmap v70: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:11.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:11 vm00 ceph-mon[61968]: pgmap v71: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:11.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:11 vm09 ceph-mon[98050]: pgmap v71: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:11 vm05 ceph-mon[69577]: pgmap v71: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:13.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:13 vm00 ceph-mon[61968]: pgmap v72: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:13.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:13 vm09 ceph-mon[98050]: pgmap v72: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:13.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:13 vm05 ceph-mon[69577]: pgmap v72: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:15.120 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:15 vm00 ceph-mon[61968]: pgmap v73: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:15.120 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:15.120 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:15.120 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:15 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:15.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:15 vm09 ceph-mon[98050]: pgmap v73: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:15.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:15.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:15.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:15 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:15.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:15 vm05 ceph-mon[69577]: pgmap v73: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:15.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:15.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:15.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:15 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:17 vm00 ceph-mon[61968]: pgmap v74: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.418 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:17 vm09 ceph-mon[98050]: pgmap v74: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:17.418 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:17 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.418 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:17 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.418 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:17 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.418 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:17 vm09 ceph-mon[98050]: from='mgr.14457 
192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.418 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:17 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.418 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:17 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:17 vm05 ceph-mon[69577]: pgmap v74: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:17.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:17.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:17 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:19.164 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:59:18 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:59:18] "GET /metrics HTTP/1.1" 200 24371 "" "Prometheus/3.6.0" 2026-03-31T22:59:19.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:19 vm00 ceph-mon[61968]: pgmap v75: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:19.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:19 vm09 ceph-mon[98050]: pgmap v75: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:19.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:19 vm05 ceph-mon[69577]: pgmap v75: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:20.446 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:20 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:59:20.446 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:20 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:59:20.446 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:20 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:59:20.446 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:20 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:20.446 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:20 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:59:20.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:20 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config 
dump", "format": "json"} : dispatch 2026-03-31T22:59:20.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:20 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:59:20.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:20 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:59:20.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:20 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:20.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:20 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:59:20.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T22:59:20.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T22:59:20.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T22:59:20.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:20.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T22:59:21.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:21 vm09 ceph-mon[98050]: pgmap v76: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:21.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:21 vm05 ceph-mon[69577]: pgmap v76: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:21.566 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:21 vm00 ceph-mon[61968]: pgmap v76: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:23.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:23 vm09 ceph-mon[98050]: pgmap v77: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:23.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:23 vm05 ceph-mon[69577]: pgmap v77: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:23.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:23 vm00 ceph-mon[61968]: pgmap v77: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:24.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:24 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:24.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:24 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:24.665 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:24 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:25.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:25 vm00 ceph-mon[61968]: pgmap v78: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:25.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:25 vm09 ceph-mon[98050]: pgmap v78: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:25.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:25 vm05 ceph-mon[69577]: pgmap v78: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:26.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:26 vm00 ceph-mon[61968]: pgmap v79: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:26.989 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:26 vm09 ceph-mon[98050]: pgmap v79: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:26.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:26 vm05 ceph-mon[69577]: pgmap v79: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:29.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:59:28 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:59:28] "GET /metrics HTTP/1.1" 200 24379 "" "Prometheus/3.6.0" 2026-03-31T22:59:29.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:28 vm00 ceph-mon[61968]: pgmap v80: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:29.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:28 vm09 ceph-mon[98050]: pgmap v80: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:29.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:28 vm05 ceph-mon[69577]: pgmap v80: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:31.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:30 vm00 ceph-mon[61968]: pgmap v81: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:31.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:30 vm09 ceph-mon[98050]: pgmap v81: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:31.242 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:30 vm05 ceph-mon[69577]: pgmap v81: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:32 vm00 ceph-mon[61968]: pgmap v82: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:33.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:32 vm09 ceph-mon[98050]: pgmap v82: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:32 vm05 ceph-mon[69577]: pgmap v82: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:34.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:34 vm05 ceph-mon[69577]: pgmap v83: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:35.083 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:34 vm00 ceph-mon[61968]: pgmap v83: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 
GiB / 60 GiB avail 2026-03-31T22:59:35.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:34 vm09 ceph-mon[98050]: pgmap v83: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:36.326 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:36.326 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:36.326 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:36 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:36.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:36.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:36.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:36 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:36.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:36.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:36.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:36 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:37 vm05 ceph-mon[69577]: pgmap v84: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:37 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:37 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:37 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:37 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:37 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:37 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:37 vm00 ceph-mon[61968]: pgmap v84: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:37 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:37 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:37 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:37 vm00 ceph-mon[61968]: 
from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:37 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:37 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:37 vm09 ceph-mon[98050]: pgmap v84: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:37.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:37 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:37 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:37 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:37 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:37 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:37.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:37 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T22:59:39.096 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:59:38 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:59:38] "GET /metrics HTTP/1.1" 200 24383 "" "Prometheus/3.6.0" 2026-03-31T22:59:39.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:39 vm00 ceph-mon[61968]: pgmap v85: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:39.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:39 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:39.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:39 vm09 ceph-mon[98050]: pgmap v85: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:39.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:39 vm09 ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:39.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:39 vm05 ceph-mon[69577]: pgmap v85: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:39.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:39 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T22:59:41.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:41 vm00 ceph-mon[61968]: pgmap v86: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:41.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:41 vm09 ceph-mon[98050]: pgmap v86: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T22:59:41.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:41 vm05 ceph-mon[69577]: pgmap v86: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 
2026-03-31T22:59:43.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:43 vm00 ceph-mon[61968]: pgmap v87: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:43.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:43 vm09 ceph-mon[98050]: pgmap v87: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:43.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:43 vm05 ceph-mon[69577]: pgmap v87: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:45.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:45 vm00 ceph-mon[61968]: pgmap v88: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:45.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:45 vm09 ceph-mon[98050]: pgmap v88: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:45.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:45 vm05 ceph-mon[69577]: pgmap v88: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:47 vm05 ceph-mon[69577]: pgmap v89: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:47.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:47 vm00 ceph-mon[61968]: pgmap v89: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:47.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:47 vm09 ceph-mon[98050]: pgmap v89: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:49.119 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:59:48 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:59:48] "GET /metrics HTTP/1.1" 200 24383 "" "Prometheus/3.6.0"
2026-03-31T22:59:49.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:49 vm00 ceph-mon[61968]: pgmap v90: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:49.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:49 vm09 ceph-mon[98050]: pgmap v90: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:49.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:49 vm05 ceph-mon[69577]: pgmap v90: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:51.416 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:51 vm00 ceph-mon[61968]: pgmap v91: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:51.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:51 vm09 ceph-mon[98050]: pgmap v91: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:51.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:51 vm05 ceph-mon[69577]: pgmap v91: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:53.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:53 vm00 ceph-mon[61968]: pgmap v92: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:53.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:53 vm09.local ceph-mon[98050]: pgmap v92: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:53.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:53 vm05 ceph-mon[69577]: pgmap v92: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:54.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:54 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:59:54.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:54 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:59:54.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:54 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T22:59:55.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:55 vm00 ceph-mon[61968]: pgmap v93: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:55.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:55 vm09.local ceph-mon[98050]: pgmap v93: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:55.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:55 vm05 ceph-mon[69577]: pgmap v93: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:56.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:56 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:56.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:56 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:56.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:56 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:56.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:56.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:56.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:56 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:56.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:56.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:56.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:56 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.455 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:57 vm05 ceph-mon[69577]: pgmap v94: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:57.455 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.455 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.455 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.455 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.455 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.455 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:57 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:57 vm09.local ceph-mon[98050]: pgmap v94: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:57.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:57 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:57 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:57 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:57 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:57 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:57 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:57 vm00 ceph-mon[61968]: pgmap v94: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:57.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:57.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:57 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T22:59:59.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 22:59:58 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:22:59:58] "GET /metrics HTTP/1.1" 200 24380 "" "Prometheus/3.6.0"
2026-03-31T22:59:59.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 22:59:59 vm09.local ceph-mon[98050]: pgmap v95: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:59.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 22:59:59 vm05 ceph-mon[69577]: pgmap v95: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T22:59:59.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 22:59:59 vm00 ceph-mon[61968]: pgmap v95: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:00.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:00 vm09.local ceph-mon[98050]: overall HEALTH_OK
2026-03-31T23:00:00.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:00 vm05 ceph-mon[69577]: overall HEALTH_OK
2026-03-31T23:00:00.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:00 vm00 ceph-mon[61968]: overall HEALTH_OK
2026-03-31T23:00:01.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:01 vm09.local ceph-mon[98050]: pgmap v96: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:01.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:01 vm05 ceph-mon[69577]: pgmap v96: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:01.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:01 vm00 ceph-mon[61968]: pgmap v96: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:03.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:03 vm09.local ceph-mon[98050]: pgmap v97: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:03.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:03 vm05 ceph-mon[69577]: pgmap v97: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:03.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:03 vm00 ceph-mon[61968]: pgmap v97: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:05.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:05 vm09.local ceph-mon[98050]: pgmap v98: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:05.492 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:05 vm05 ceph-mon[69577]: pgmap v98: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:05.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:05 vm00 ceph-mon[61968]: pgmap v98: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:07.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:07 vm09.local ceph-mon[98050]: pgmap v99: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:07.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:07 vm05 ceph-mon[69577]: pgmap v99: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:07.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:07 vm00 ceph-mon[61968]: pgmap v99: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:09.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:00:08 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:00:08] "GET /metrics HTTP/1.1" 200 24380 "" "Prometheus/3.6.0"
2026-03-31T23:00:09.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:09 vm09.local ceph-mon[98050]: pgmap v100: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:09.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:09 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T23:00:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:09 vm05 ceph-mon[69577]: pgmap v100: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:09.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:09 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T23:00:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:09 vm00 ceph-mon[61968]: pgmap v100: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:09.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:09 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T23:00:11.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:11 vm09.local ceph-mon[98050]: pgmap v101: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:11.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:11 vm05 ceph-mon[69577]: pgmap v101: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:11.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:11 vm00 ceph-mon[61968]: pgmap v101: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:13.489 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:13 vm09.local ceph-mon[98050]: pgmap v102: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:13.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:13 vm05 ceph-mon[69577]: pgmap v102: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:13.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:13 vm00 ceph-mon[61968]: pgmap v102: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:15.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:15 vm00 ceph-mon[61968]: pgmap v103: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:15.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:15 vm09.local ceph-mon[98050]: pgmap v103: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:15.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:15 vm05 ceph-mon[69577]: pgmap v103: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:16.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:16 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:16 vm00 ceph-mon[61968]: pgmap v104: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:16.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:16 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:16 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:16 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:16 vm09.local ceph-mon[98050]: pgmap v104: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:16.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:16 vm05 ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:16.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:16 vm05 ceph-mon[69577]: pgmap v104: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:17.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:17 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:17 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:17 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:17 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:17 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:17 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:17 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:17 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:17 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:17 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:17 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:17 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:17.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:17 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:18.739 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:18 vm09.local ceph-mon[98050]: pgmap v105: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:18.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:18 vm05.local ceph-mon[69577]: pgmap v105: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:18.766 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:18 vm00 ceph-mon[61968]: pgmap v105: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:19.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:00:18 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:00:18] "GET /metrics HTTP/1.1" 200 24380 "" "Prometheus/3.6.0"
2026-03-31T23:00:21.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:20 vm00 ceph-mon[61968]: pgmap v106: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:21.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T23:00:21.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T23:00:21.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T23:00:21.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:21.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:20 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T23:00:21.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:20 vm09.local ceph-mon[98050]: pgmap v106: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:21.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:20 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T23:00:21.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:20 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T23:00:21.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:20 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T23:00:21.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:20 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:21.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:20 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T23:00:21.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:20 vm05.local ceph-mon[69577]: pgmap v106: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:21.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:20 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T23:00:21.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:20 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T23:00:21.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:20 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T23:00:21.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:20 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:21.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:20 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T23:00:23.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:22 vm00 ceph-mon[61968]: pgmap v107: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:23.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:22 vm09.local ceph-mon[98050]: pgmap v107: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:23.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:22 vm05.local ceph-mon[69577]: pgmap v107: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:24.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:23 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T23:00:24.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:23 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T23:00:24.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:23 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch
2026-03-31T23:00:25.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:24 vm00 ceph-mon[61968]: pgmap v108: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:25.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:24 vm09.local ceph-mon[98050]: pgmap v108: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:25.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:24 vm05.local ceph-mon[69577]: pgmap v108: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:27.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:26 vm00 ceph-mon[61968]: pgmap v109: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:27.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:26 vm09.local ceph-mon[98050]: pgmap v109: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:27.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:26 vm05.local ceph-mon[69577]: pgmap v109: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:29.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:00:28 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:00:28] "GET /metrics HTTP/1.1" 200 24381 "" "Prometheus/3.6.0"
ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:00:28] "GET /metrics HTTP/1.1" 200 24381 "" "Prometheus/3.6.0"
2026-03-31T23:00:29.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:28 vm00 ceph-mon[61968]: pgmap v110: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:29.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:28 vm09.local ceph-mon[98050]: pgmap v110: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:29.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:28 vm05.local ceph-mon[69577]: pgmap v110: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:31.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:30 vm00 ceph-mon[61968]: pgmap v111: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:31.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:30 vm09.local ceph-mon[98050]: pgmap v111: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:30 vm05.local ceph-mon[69577]: pgmap v111: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:32.468 INFO:teuthology.orchestra.run.vm00.stderr:+ ceph orch ls
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:NAME PORTS RUNNING REFRESHED AGE PLACEMENT
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:agent 3/3 17s ago 3m *
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:alertmanager ?:9093,9094 1/1 16s ago 4m count:1
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:grafana ?:3000 1/1 17s ago 4m count:1
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:mgr 2/2 17s ago 4m vm00=a;vm05=b;count:2
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:mon 3/3 17s ago 4m vm00:192.168.123.100=a;vm05:192.168.123.105=b;vm09:192.168.123.109=c;count:3
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:node-exporter ?:9100 3/3 17s ago 4m *
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:osd.default 3 17s ago 4m vm09
2026-03-31T23:00:32.641 INFO:teuthology.orchestra.run.vm00.stdout:prometheus ?:9095 1/1 16s ago 4m count:1
2026-03-31T23:00:32.651 INFO:teuthology.orchestra.run.vm00.stderr:+ ceph orch ps
2026-03-31T23:00:32.822 INFO:teuthology.orchestra.run.vm00.stdout:NAME HOST PORTS STATUS REFRESHED AGE MEM USE MEM LIM VERSION IMAGE ID CONTAINER ID
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:agent.vm00 vm00 *:4721 running 17s ago 4m - -
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:agent.vm05 vm05 *:4721 running 16s ago 4m - -
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:agent.vm09 vm09 *:4721 running 16s ago 4m - -
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:alertmanager.vm09 vm09 *:9093,9094 running (3m) 16s ago 3m 23.4M - 0.28.1 91c01b3cec9b 83ad5071a764
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:grafana.vm00 vm00 *:3000 running (3m) 17s ago 3m 117M - 12.3.1 5cdab57891ea 0b472c4478a2
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:mgr.a vm00 *:9283,8765 running (5m) 17s ago 5m 555M - 20.2.0-721-g5bb32787 1e58a3cbf9ab 581ccc516b3e
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:mgr.b vm05 *:8443,8765 running (4m) 16s ago 4m 493M - 20.2.0-721-g5bb32787 1e58a3cbf9ab 51fbff09a17e
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:mon.a vm00 running (5m) 17s ago 5m 49.8M 2048M 20.2.0-721-g5bb32787 1e58a3cbf9ab ccaba7e50d34
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:mon.b vm05 running (4m) 16s ago 4m 43.2M 2048M 20.2.0-721-g5bb32787 1e58a3cbf9ab f20f58259247
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:mon.c vm09 running (4m) 16s ago 4m 40.1M 2048M 20.2.0-721-g5bb32787 1e58a3cbf9ab bfa0d523ec1a
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:node-exporter.vm00 vm00 *:9100 running (3m) 17s ago 3m 12.4M - 1.9.1 255ec253085f d75c31ddcd69
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:node-exporter.vm05 vm05 *:9100 running (3m) 16s ago 3m 13.5M - 1.9.1 255ec253085f cef93ed9bca4
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:node-exporter.vm09 vm09 *:9100 running (3m) 16s ago 3m 13.5M - 1.9.1 255ec253085f 8d28893e14e6
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:osd.0 vm00 running (4m) 17s ago 4m 40.5M 4096M 20.2.0-721-g5bb32787 1e58a3cbf9ab 797c01ef65d6
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:osd.1 vm05 running (4m) 16s ago 4m 41.9M 4096M 20.2.0-721-g5bb32787 1e58a3cbf9ab d562d5ab1cdb
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:osd.2 vm09 running (3m) 16s ago 4m 38.7M 4096M 20.2.0-721-g5bb32787 1e58a3cbf9ab 8574bf5f2cc1
2026-03-31T23:00:32.823 INFO:teuthology.orchestra.run.vm00.stdout:prometheus.vm05 vm05 *:9095 running (3m) 16s ago 3m 39.7M - 3.6.0 4fcecf061b74 ede076e43c6f
2026-03-31T23:00:32.832 INFO:teuthology.orchestra.run.vm00.stderr:+ ceph orch host ls
2026-03-31T23:00:33.000 INFO:teuthology.orchestra.run.vm00.stdout:HOST ADDR LABELS STATUS
2026-03-31T23:00:33.000 INFO:teuthology.orchestra.run.vm00.stdout:vm00 192.168.123.100
2026-03-31T23:00:33.000 INFO:teuthology.orchestra.run.vm00.stdout:vm05 192.168.123.105
2026-03-31T23:00:33.000 INFO:teuthology.orchestra.run.vm00.stdout:vm09 192.168.123.109
2026-03-31T23:00:33.000 INFO:teuthology.orchestra.run.vm00.stdout:3 hosts in cluster
2026-03-31T23:00:33.009 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph orch ps --daemon-type mon -f json
2026-03-31T23:00:33.009 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r 'last | .daemon_name'
2026-03-31T23:00:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:32 vm00 ceph-mon[61968]: pgmap v112: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:33.192 INFO:teuthology.orchestra.run.vm00.stderr:+ MON_DAEMON=mon.c
2026-03-31T23:00:33.192 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph orch ps --daemon-type grafana -f json
2026-03-31T23:00:33.192 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -e '.[]'
2026-03-31T23:00:33.192 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r .hostname
2026-03-31T23:00:33.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:32 vm09.local ceph-mon[98050]: pgmap v112: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:33.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:32 vm05.local ceph-mon[69577]: pgmap v112: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:33.377 INFO:teuthology.orchestra.run.vm00.stderr:+ GRAFANA_HOST=vm00
2026-03-31T23:00:33.378 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph orch ps --daemon-type prometheus -f json
2026-03-31T23:00:33.378 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -e '.[]'
2026-03-31T23:00:33.378 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r .hostname
2026-03-31T23:00:33.555 INFO:teuthology.orchestra.run.vm00.stderr:+ PROM_HOST=vm05
2026-03-31T23:00:33.555 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph orch ps --daemon-type alertmanager -f json
2026-03-31T23:00:33.555 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r .hostname
2026-03-31T23:00:33.555 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -e '.[]'
2026-03-31T23:00:33.730 INFO:teuthology.orchestra.run.vm00.stderr:+ ALERTM_HOST=vm09
2026-03-31T23:00:33.730 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph orch host ls -f json
2026-03-31T23:00:33.731 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r --arg GRAFANA_HOST vm00 '.[] | select(.hostname==$GRAFANA_HOST) | .addr'
2026-03-31T23:00:33.908 INFO:teuthology.orchestra.run.vm00.stderr:+ GRAFANA_IP=192.168.123.100
2026-03-31T23:00:33.908 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph orch host ls -f json
2026-03-31T23:00:33.908 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r --arg PROM_HOST vm05 '.[] | select(.hostname==$PROM_HOST) | .addr'
2026-03-31T23:00:34.081 INFO:teuthology.orchestra.run.vm00.stderr:+ PROM_IP=192.168.123.105
2026-03-31T23:00:34.082 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph orch host ls -f json
2026-03-31T23:00:34.082 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r --arg ALERTM_HOST vm09 '.[] | select(.hostname==$ALERTM_HOST) | .addr'
2026-03-31T23:00:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:33 vm00 ceph-mon[61968]: from='client.14487 -' entity='client.admin' cmd=[{"prefix": "orch ls", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:33 vm00 ceph-mon[61968]: from='client.14493 -' entity='client.admin' cmd=[{"prefix": "orch ps", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:33 vm00 ceph-mon[61968]: from='client.24314 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:33 vm00 ceph-mon[61968]: from='client.14505 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "mon", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:34.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:33 vm09.local ceph-mon[98050]: from='client.14487 -' entity='client.admin' cmd=[{"prefix": "orch ls", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:33 vm09.local ceph-mon[98050]: from='client.14493 -' entity='client.admin' cmd=[{"prefix": "orch ps", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:33 vm09.local ceph-mon[98050]: from='client.24314 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.239 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:33 vm09.local ceph-mon[98050]: from='client.14505 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "mon", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:33 vm05.local ceph-mon[69577]: from='client.14487 -' entity='client.admin' cmd=[{"prefix": "orch ls", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:33 vm05.local ceph-mon[69577]: from='client.14493 -' entity='client.admin' cmd=[{"prefix": "orch ps", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:33 vm05.local ceph-mon[69577]: from='client.24314 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""]}]: dispatch
2026-03-31T23:00:34.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:33 vm05.local ceph-mon[69577]: from='client.14505 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "mon", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:34.257 INFO:teuthology.orchestra.run.vm00.stderr:+ ALERTM_IP=192.168.123.109
2026-03-31T23:00:34.258 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph orch host ls -f json
2026-03-31T23:00:34.258 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r '.[] | .addr'
2026-03-31T23:00:34.433 INFO:teuthology.orchestra.run.vm00.stderr:+ ALL_HOST_IPS='192.168.123.100
2026-03-31T23:00:34.433 INFO:teuthology.orchestra.run.vm00.stderr:192.168.123.105
2026-03-31T23:00:34.433 INFO:teuthology.orchestra.run.vm00.stderr:192.168.123.109'
2026-03-31T23:00:34.433 INFO:teuthology.orchestra.run.vm00.stderr:+ for ip in $ALL_HOST_IPS
2026-03-31T23:00:34.433 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.100:9100/metric
2026-03-31T23:00:34.436 INFO:teuthology.orchestra.run.vm00.stdout:404 page not found
2026-03-31T23:00:34.436 INFO:teuthology.orchestra.run.vm00.stderr:+ for ip in $ALL_HOST_IPS
2026-03-31T23:00:34.436 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.105:9100/metric
2026-03-31T23:00:34.438 INFO:teuthology.orchestra.run.vm00.stdout:404 page not found
2026-03-31T23:00:34.439 INFO:teuthology.orchestra.run.vm00.stderr:+ for ip in $ALL_HOST_IPS
2026-03-31T23:00:34.439 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.109:9100/metric
2026-03-31T23:00:34.441 INFO:teuthology.orchestra.run.vm00.stdout:404 page not found
2026-03-31T23:00:34.441 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -k -s https://192.168.123.100:3000/api/health
2026-03-31T23:00:34.453 INFO:teuthology.orchestra.run.vm00.stdout:{
2026-03-31T23:00:34.453 INFO:teuthology.orchestra.run.vm00.stdout: "database": "ok",
2026-03-31T23:00:34.453 INFO:teuthology.orchestra.run.vm00.stdout: "version": "12.3.1",
2026-03-31T23:00:34.453 INFO:teuthology.orchestra.run.vm00.stdout: "commit": "3a1c80ca7ce612f309fdc99338dd3c5e486339be"
2026-03-31T23:00:34.453 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -k -s https://192.168.123.100:3000/api/health
2026-03-31T23:00:34.453 INFO:teuthology.orchestra.run.vm00.stderr:+ jq -e '.database == "ok"'
2026-03-31T23:00:34.465 INFO:teuthology.orchestra.run.vm00.stdout:}true
2026-03-31T23:00:34.465 INFO:teuthology.orchestra.run.vm00.stderr:+ ceph orch daemon stop mon.c
2026-03-31T23:00:34.641 INFO:teuthology.orchestra.run.vm00.stdout:Scheduled to stop mon.c on host 'vm09'
2026-03-31T23:00:34.659 INFO:teuthology.orchestra.run.vm00.stderr:+ sleep 120
2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='client.14511 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "grafana", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='client.14517 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "prometheus", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
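The three "404 page not found" responses above deserve a second look: the workunit probes http://$ip:9100/metric, but node-exporter serves its telemetry at /metrics, and plain curl -s exits 0 even on an HTTP 404, so this check cannot actually fail the script. A stricter probe, sketched below against the ALL_HOST_IPS value already computed in the trace, would fail on either a wrong path or a dead exporter:

    # Sketch only: -f makes curl return non-zero on HTTP errors, and we grep for
    # a metric family node-exporter always emits (node_cpu_seconds_total).
    for ip in $ALL_HOST_IPS; do
      curl -fsS "http://${ip}:9100/metrics" | grep -q '^node_cpu_seconds_total' || exit 1
    done

The Grafana check that follows is sound: /api/health returns "database": "ok" and jq -e '.database == "ok"' exits non-zero otherwise. The "}true" line is simply the closing brace of the first curl's un-piped output sharing a line with the second pipeline's jq output, since the response body ends without a newline.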
"daemon_type": "prometheus", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='client.14523 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "alertmanager", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: pgmap v113: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='client.14529 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='client.14535 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon ok-to-stop", "ids": ["c"]} : dispatch 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:34.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:34 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local systemd[1]: Stopping Ceph mon.c for 8bb14950-2d54-11f1-a348-07063966e06c... 
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='client.14511 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "grafana", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='client.14517 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "prometheus", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='client.14523 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "alertmanager", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: pgmap v113: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='client.14529 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='client.14535 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""], "format": "json"}]: dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon ok-to-stop", "ids": ["c"]} : dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-mon[98050]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-c[98046]: 2026-03-31T23:00:34.903+0000 7f279c4ee640 -1 received signal: Terminated from /run/podman-init -- /usr/bin/ceph-mon -n mon.c -f --setuser ceph --setgroup ceph --default-log-to-file=false --default-log-to-journald=true --default-log-to-stderr=false --default-mon-cluster-log-to-file=false --default-mon-cluster-log-to-journald=true --default-mon-cluster-log-to-stderr=false (PID: 1) UID: 0
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-c[98046]: 2026-03-31T23:00:34.903+0000 7f279c4ee640 -1 mon.c@1(peon) e3 *** Got Signal Terminated ***
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local podman[112548]: 2026-03-31 23:00:34.925040104 +0000 UTC m=+0.034949307 container died bfa0d523ec1aa41189e887e35a93a047efbbbba07ce487e5dd8aae49f41274a2 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-c, org.opencontainers.image.documentation=https://docs.ceph.com/, org.label-schema.name=CentOS Stream 9 Base Image, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.schema-version=1.0, OSD_FLAVOR=default, ceph=True, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, org.opencontainers.image.authors=Ceph Release Team , CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release, io.buildah.version=1.43.0, org.label-schema.build-date=20260316, FROM_IMAGE=quay.io/centos/centos:stream9)
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local podman[112548]: 2026-03-31 23:00:34.937890457 +0000 UTC m=+0.047799660 container remove bfa0d523ec1aa41189e887e35a93a047efbbbba07ce487e5dd8aae49f41274a2 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-c, org.label-schema.build-date=20260316, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, org.opencontainers.image.documentation=https://docs.ceph.com/, io.buildah.version=1.43.0, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, OSD_FLAVOR=default, org.label-schema.name=CentOS Stream 9 Base Image, ceph=True, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.opencontainers.image.authors=Ceph Release Team )
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:34 vm09.local bash[112548]: ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-c
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:35 vm09.local systemd[1]: ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.c.service: Deactivated successfully.
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:35 vm09.local systemd[1]: Stopped Ceph mon.c for 8bb14950-2d54-11f1-a348-07063966e06c.
2026-03-31T23:00:35.141 INFO:journalctl@ceph.mon.c.vm09.stdout:Mar 31 23:00:35 vm09.local systemd[1]: ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.c.service: Consumed 2.782s CPU time.
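The stop sequence above shows the whole cephadm path: mgr.a first dispatches mon ok-to-stop with ids ["c"] to confirm quorum survives the loss, systemd on vm09 then stops the per-daemon unit, podman-init forwards SIGTERM to the ceph-mon process (PID 1 in the container), and the container is removed before the unit deactivates. The result can be cross-checked from the hosts; a sketch, with the unit name taken verbatim from the log (cephadm names units ceph-$FSID@$DAEMON.service):

    # on vm09: the per-daemon unit should now be inactive
    systemctl is-active ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.c.service
    # and no mon.c container should be left behind
    sudo podman ps --filter name=mon-c
    # from any host with the admin keyring: the orchestrator's view
    ceph orch ps --daemon-type mon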
2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='client.14511 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "grafana", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='client.14517 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "prometheus", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='client.14523 -' entity='client.admin' cmd=[{"prefix": "orch ps", "daemon_type": "alertmanager", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: pgmap v113: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='client.14529 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='client.14535 -' entity='client.admin' cmd=[{"prefix": "orch host ls", "target": ["mon-mgr", ""], "format": "json"}]: dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "mon ok-to-stop", "ids": ["c"]} : dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:34 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T23:00:39.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:00:38 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:00:38] "GET /metrics HTTP/1.1" 200 24381 "" "Prometheus/3.6.0" 2026-03-31T23:00:49.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:00:48 vm00 ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:00:48] "GET /metrics 
HTTP/1.1" 200 24381 "" "Prometheus/3.6.0" 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: pgmap v114: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: pgmap v115: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: pgmap v116: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: pgmap v117: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: pgmap v118: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: mon.b calling monitor election 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: mon.a calling monitor election 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: pgmap v119: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: pgmap v120: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: pgmap v121: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: mon.a is new leader, mons a,b in quorum (ranks 0,2) 2026-03-31T23:00:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: monmap epoch 3 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: fsid 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: last_changed 2026-03-31T22:55:50.037853+0000 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: created 2026-03-31T22:55:09.387964+0000 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: min_mon_release 20 (tentacle) 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: election_strategy: 1 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: 1: [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] mon.c 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: 2: [v2:192.168.123.105:3300/0,v1:192.168.123.105:6789/0] mon.b 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: 
fsmap 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: osdmap e27: 3 total, 3 up, 3 in 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: mgrmap e19: a(active, since 3m), standbys: b 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: Health check failed: 1/3 mons down, quorum a,b (MON_DOWN) 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: Health detail: HEALTH_WARN 1/3 mons down, quorum a,b 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: [WRN] MON_DOWN: 1/3 mons down, quorum a,b 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: mon.c (rank 1) addr [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] is down (out of quorum) 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: [31/Mar/2026:23:00:49] ENGINE socket.error 5 2026-03-31T23:00:51.166 
INFO:journalctl@ceph.mon.a.vm00.stdout: Traceback (most recent call last):
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib/python3.9/site-packages/cheroot/server.py", line 1291, in communicate
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: req.respond()
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib/python3.9/site-packages/cheroot/server.py", line 1081, in respond
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: self.server.gateway(self).respond()
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib/python3.9/site-packages/cheroot/wsgi.py", line 141, in respond
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: self.write(chunk)
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib/python3.9/site-packages/cheroot/wsgi.py", line 223, in write
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: self.req.write(chunk)
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib/python3.9/site-packages/cheroot/server.py", line 1137, in write
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: self.conn.wfile.write(chunk)
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib/python3.9/site-packages/cheroot/makefile.py", line 68, in write
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: res = super().write(val, *args, **kwargs)
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib/python3.9/site-packages/cheroot/makefile.py", line 24, in write
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: self._flush_unlocked()
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib/python3.9/site-packages/cheroot/makefile.py", line 33, in _flush_unlocked
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: n = self.raw.write(bytes(self._write_buf))
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib64/python3.9/socket.py", line 734, in write
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: return self._sock.send(b)
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: File "/lib64/python3.9/ssl.py", line 1209, in send
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: return self._sslobj.write(data)
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout: ssl.SSLError: [SYS] unknown error (_ssl.c:2501)
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:51.166 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:50 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: pgmap v114: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail
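Two separate events are interleaved in this stretch. First, about 16 seconds after the stop, mon.a and mon.b hold an election, mon.a becomes leader with quorum a,b, and the expected MON_DOWN health warning fires; this is the state the workunit's sleep 120 gives the alerting pipeline time to observe. Second, the cheroot traceback is emitted by the mgr's embedded HTTP server (cheroot backs the dashboard and its TLS endpoints); an ssl.SSLError raised while writing a response usually just means a client dropped its connection mid-transfer, so it is most likely cosmetic here. The quorum state can be confirmed from the CLI; a sketch:

    ceph health detail    # expect HEALTH_WARN with MON_DOWN: 1/3 mons down, quorum a,b
    ceph quorum_status -f json | jq -r '.quorum_names[]'    # expect only a and b
    ceph mon stat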
2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: pgmap v115: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: pgmap v116: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: pgmap v117: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: pgmap v118: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: mon.b calling monitor election 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: mon.a calling monitor election 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: pgmap v119: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: pgmap v120: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: pgmap v121: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: mon.a is new leader, mons a,b in quorum (ranks 0,2) 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: monmap epoch 3 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: fsid 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: last_changed 2026-03-31T22:55:50.037853+0000 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: created 2026-03-31T22:55:09.387964+0000 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: min_mon_release 20 (tentacle) 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: election_strategy: 1 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: 0: [v2:192.168.123.100:3300/0,v1:192.168.123.100:6789/0] mon.a 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: 1: [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] mon.c 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: 2: [v2:192.168.123.105:3300/0,v1:192.168.123.105:6789/0] mon.b 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: fsmap 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local 
ceph-mon[69577]: osdmap e27: 3 total, 3 up, 3 in 2026-03-31T23:00:51.244 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: mgrmap e19: a(active, since 3m), standbys: b 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: Health check failed: 1/3 mons down, quorum a,b (MON_DOWN) 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: Health detail: HEALTH_WARN 1/3 mons down, quorum a,b 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: [WRN] MON_DOWN: 1/3 mons down, quorum a,b 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: mon.c (rank 1) addr [v2:192.168.123.109:3300/0,v1:192.168.123.109:6789/0] is down (out of quorum) 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: [31/Mar/2026:23:00:49] ENGINE socket.error 5 
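The block above, and the traceback that follows it, are mon.b's journal replaying the same records already captured from mon.a: the daemons run with --default-mon-cluster-log-to-journald=true (visible in the mon.c command line earlier), so each surviving monitor writes its own copy of the cluster and audit logs, and teuthology tails every unit. When reading a run like this, following a single unit avoids the duplication; a sketch using this cluster's unit name:

    journalctl -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a.service -f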
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: Traceback (most recent call last):
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib/python3.9/site-packages/cheroot/server.py", line 1291, in communicate
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: req.respond()
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib/python3.9/site-packages/cheroot/server.py", line 1081, in respond
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: self.server.gateway(self).respond()
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib/python3.9/site-packages/cheroot/wsgi.py", line 141, in respond
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: self.write(chunk)
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib/python3.9/site-packages/cheroot/wsgi.py", line 223, in write
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: self.req.write(chunk)
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib/python3.9/site-packages/cheroot/server.py", line 1137, in write
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: self.conn.wfile.write(chunk)
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib/python3.9/site-packages/cheroot/makefile.py", line 68, in write
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: res = super().write(val, *args, **kwargs)
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib/python3.9/site-packages/cheroot/makefile.py", line 24, in write
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: self._flush_unlocked()
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib/python3.9/site-packages/cheroot/makefile.py", line 33, in _flush_unlocked
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: n = self.raw.write(bytes(self._write_buf))
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib64/python3.9/socket.py", line 734, in write
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: return self._sock.send(b)
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: File "/lib64/python3.9/ssl.py", line 1209, in send
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: return self._sslobj.write(data)
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout: ssl.SSLError: [SYS] unknown error (_ssl.c:2501)
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a'
2026-03-31T23:00:51.245 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch
2026-03-31T23:00:53.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:52 vm00 ceph-mon[61968]: pgmap v122: 1 pgs: 1 active+clean; 449 KiB
data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:53.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:52 vm05.local ceph-mon[69577]: pgmap v122: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:54.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:54 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:54.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:54 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:00:54.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:54 vm05.local ceph-mon[69577]: pgmap v123: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:54.821 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:54 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:54.821 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:54 vm00 ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:00:54.821 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:54 vm00 ceph-mon[61968]: pgmap v123: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:56 vm00.local ceph-mon[61968]: pgmap v124: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:56 vm05.local ceph-mon[69577]: pgmap v124: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:58.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:58 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:58.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:58 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:58.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:58 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:58.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:00:58 vm00.local ceph-mon[61968]: pgmap v125: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:58.742 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:58 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:58.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:58 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:58.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:58 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:00:58.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:00:58 vm05.local ceph-mon[69577]: pgmap v125: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:00:59.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:00:58 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:00:58] "GET /metrics HTTP/1.1" 200 24442 "" "Prometheus/3.6.0" 2026-03-31T23:01:01.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:00 vm00.local ceph-mon[61968]: pgmap v126: 1 pgs: 1 active+clean; 449 KiB 
data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:01.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:00 vm05.local ceph-mon[69577]: pgmap v126: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:03.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:02 vm00.local ceph-mon[61968]: pgmap v127: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:03.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:02 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:03.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:02 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:03.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:02 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:03.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:02 vm05.local ceph-mon[69577]: pgmap v127: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:03.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:02 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:03.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:02 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:03.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:02 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:04.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:04 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:04.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:04 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:04.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:04 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:04.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:04 vm00.local ceph-mon[61968]: pgmap v128: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:04.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:04 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:04.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:04 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:04.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:04 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:04.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:04 vm05.local ceph-mon[69577]: pgmap v128: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:07.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:06 vm00.local ceph-mon[61968]: pgmap v129: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:07.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:06 vm05.local ceph-mon[69577]: pgmap v129: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:09.062 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:08 vm05.local ceph-mon[69577]: pgmap v130: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB 
avail 2026-03-31T23:01:09.062 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:08 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:01:09.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:01:08 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:01:08] "GET /metrics HTTP/1.1" 200 24444 "" "Prometheus/3.6.0" 2026-03-31T23:01:09.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:08 vm00.local ceph-mon[61968]: pgmap v130: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:09.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:08 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:01:11.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:10 vm00.local ceph-mon[61968]: pgmap v131: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:11.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:10 vm05.local ceph-mon[69577]: pgmap v131: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:13.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:12 vm00.local ceph-mon[61968]: pgmap v132: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:13.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:12 vm05.local ceph-mon[69577]: pgmap v132: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:14.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:14 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:14.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:14 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:14.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:14 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:14.665 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:14 vm00.local ceph-mon[61968]: pgmap v133: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:14.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:14 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:14.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:14 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:14.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:14 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:14.743 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:14 vm05.local ceph-mon[69577]: pgmap v133: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:16 vm00.local ceph-mon[61968]: pgmap v134: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:16 vm05.local ceph-mon[69577]: pgmap v134: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:19.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:01:18 vm00.local 
ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:01:18] "GET /metrics HTTP/1.1" 200 24444 "" "Prometheus/3.6.0" 2026-03-31T23:01:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:18 vm00.local ceph-mon[61968]: pgmap v135: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:19.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:18 vm05.local ceph-mon[69577]: pgmap v135: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:20.801 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:20 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:20.801 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:20 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:20.801 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:20 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:20.801 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:20 vm00.local ceph-mon[61968]: pgmap v136: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:20.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:20 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:20.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:20 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:20.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:20 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:20.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:20 vm05.local ceph-mon[69577]: pgmap v136: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:21.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:21 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:21.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:21 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:21.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:21 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:22.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:21 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:22.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:21 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:22.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:21 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:22.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:22 vm05.local ceph-mon[69577]: pgmap v137: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:23.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:22 vm00.local ceph-mon[61968]: pgmap v137: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:24.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:23 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:01:24.243 
INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:23 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:01:25.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:24 vm00.local ceph-mon[61968]: pgmap v138: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:25.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:24 vm05.local ceph-mon[69577]: pgmap v138: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:27.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:26 vm00.local ceph-mon[61968]: pgmap v139: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:27.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:26 vm05.local ceph-mon[69577]: pgmap v139: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:29.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:01:28 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:01:28] "GET /metrics HTTP/1.1" 200 24443 "" "Prometheus/3.6.0" 2026-03-31T23:01:29.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:28 vm00.local ceph-mon[61968]: pgmap v140: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:29.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:28 vm05.local ceph-mon[69577]: pgmap v140: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:31.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:30 vm00.local ceph-mon[61968]: pgmap v141: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:31.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:30 vm05.local ceph-mon[69577]: pgmap v141: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:32.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:32 vm05.local ceph-mon[69577]: pgmap v142: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:32.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:32 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:32.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:32 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:32.993 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:32 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:32 vm00.local ceph-mon[61968]: pgmap v142: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:32 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:32 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:33.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:32 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:35.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:34 vm00.local ceph-mon[61968]: pgmap v143: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 
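Note: the recurring from='mgr.14457 ...' entity='mgr.a' audit entries above are the mgr's periodic refresh chatter, and the cmd={"prefix": "osd blocklist ls", "format": "json"} dispatches are it polling the OSD blocklist. A minimal sketch of running the same query by hand (assumes an admin keyring on the host; not part of the test script):

  # same query the mgr dispatches above, pretty-printed
  ceph osd blocklist ls -f json | jq .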
2026-03-31T23:01:35.188 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:34 vm05.local ceph-mon[69577]: pgmap v143: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:36.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:36 vm00.local ceph-mon[61968]: pgmap v144: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:36.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:36 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:36.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:36 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:36.915 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:36 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:36 vm05.local ceph-mon[69577]: pgmap v144: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:36 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:36 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:37.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:36 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:38.823 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:01:38 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:01:38] "GET /metrics HTTP/1.1" 200 24441 "" "Prometheus/3.6.0" 2026-03-31T23:01:38.823 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:38 vm00.local ceph-mon[61968]: pgmap v145: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:38.823 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:38 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:38.823 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:38 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:38.823 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:38 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:38.823 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:38 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:01:39.144 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:38 vm05.local ceph-mon[69577]: pgmap v145: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:39.144 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:38 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:39.144 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:38 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:39.144 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:38 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:39.144 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:38 vm05.local ceph-mon[69577]: from='mgr.14457 
192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:01:41.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:40 vm00.local ceph-mon[61968]: pgmap v146: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:41.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:40 vm05.local ceph-mon[69577]: pgmap v146: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:43.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:42 vm00.local ceph-mon[61968]: pgmap v147: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:43.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:42 vm05.local ceph-mon[69577]: pgmap v147: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:45.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:44 vm00.local ceph-mon[61968]: pgmap v148: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:45.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:44 vm05.local ceph-mon[69577]: pgmap v148: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:47.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:46 vm00.local ceph-mon[61968]: pgmap v149: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:47.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:46 vm05.local ceph-mon[69577]: pgmap v149: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:49.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:48 vm00.local ceph-mon[61968]: pgmap v150: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:49.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:01:48 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:01:48] "GET /metrics HTTP/1.1" 200 24441 "" "Prometheus/3.6.0" 2026-03-31T23:01:49.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:48 vm05.local ceph-mon[69577]: pgmap v150: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:50 vm00.local ceph-mon[61968]: pgmap v151: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:50 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T23:01:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:50 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T23:01:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:50 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T23:01:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:50 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:51.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:50 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 
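Note: the burst of "config dump", "config generate-minimal-conf", "auth get client.admin", and "osd tree destroyed" dispatches at 23:01:50 is the cephadm mgr module refreshing the configuration it distributes to managed hosts. A hedged sketch of the equivalent manual commands (illustrative only, not run by this job):

  ceph config dump -f json | jq length   # full cluster config, as the mgr fetches it
  ceph config generate-minimal-conf      # the minimal ceph.conf cephadm pushes to hosts
  ceph osd tree destroyed -f json        # look for destroyed OSDs, as dispatched above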
2026-03-31T23:01:51.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:50 vm05.local ceph-mon[69577]: pgmap v151: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:51.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config dump", "format": "json"} : dispatch 2026-03-31T23:01:51.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "config generate-minimal-conf"} : dispatch 2026-03-31T23:01:51.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "auth get", "entity": "client.admin"} : dispatch 2026-03-31T23:01:51.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:51.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:50 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd tree", "states": ["destroyed"], "format": "json"} : dispatch 2026-03-31T23:01:53.111 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:52 vm00.local ceph-mon[61968]: pgmap v152: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:53.111 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:52 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:53.111 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:52 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:53.111 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:52 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:53.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:52 vm05.local ceph-mon[69577]: pgmap v152: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:53.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:52 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:53.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:52 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:53.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:52 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:54.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:53 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:01:54.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:53 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:01:55.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:54 vm00.local ceph-mon[61968]: pgmap v153: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:55.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:54 vm05.local ceph-mon[69577]: pgmap v153: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:57.165 
INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:56 vm00.local ceph-mon[61968]: pgmap v154: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:56 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:56 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:57.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:56 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:56 vm05.local ceph-mon[69577]: pgmap v154: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:56 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:56 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:57.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:56 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:59.058 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:58 vm00.local ceph-mon[61968]: pgmap v155: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:59.058 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:58 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:59.058 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:58 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:59.059 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:01:58 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:59.059 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:01:58 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:01:58] "GET /metrics HTTP/1.1" 200 24443 "" "Prometheus/3.6.0" 2026-03-31T23:01:59.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:58 vm05.local ceph-mon[69577]: pgmap v155: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:01:59.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:58 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:59.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:58 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:01:59.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:01:58 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:00.915 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:00 vm05.local ceph-mon[69577]: pgmap v156: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:01.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:00 vm00.local ceph-mon[61968]: pgmap v156: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:03.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:02 vm05.local ceph-mon[69577]: pgmap v157: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB 
avail 2026-03-31T23:02:03.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:02 vm00.local ceph-mon[61968]: pgmap v157: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:05.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:04 vm05.local ceph-mon[69577]: pgmap v158: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:05.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:04 vm00.local ceph-mon[61968]: pgmap v158: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:07.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:06 vm05.local ceph-mon[69577]: pgmap v159: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:07.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:06 vm00.local ceph-mon[61968]: pgmap v159: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:09.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:08 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:02:08] "GET /metrics HTTP/1.1" 200 24440 "" "Prometheus/3.6.0" 2026-03-31T23:02:09.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:08 vm00.local ceph-mon[61968]: pgmap v160: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:09.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:08 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:02:09.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:08 vm05.local ceph-mon[69577]: pgmap v160: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:09.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:08 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:02:11.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:10 vm05.local ceph-mon[69577]: pgmap v161: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:11.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:10 vm00.local ceph-mon[61968]: pgmap v161: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:13.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:12 vm05.local ceph-mon[69577]: pgmap v162: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:13.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:12 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:13.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:12 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:13.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:12 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:13.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:12 vm00.local ceph-mon[61968]: pgmap v162: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:13.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:12 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:13.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 
23:02:12 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:13.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:12 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:15.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:14 vm05.local ceph-mon[69577]: pgmap v163: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:15.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:14 vm00.local ceph-mon[61968]: pgmap v163: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:16 vm00.local ceph-mon[61968]: pgmap v164: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:16 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:16 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:17.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:16 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:16 vm05.local ceph-mon[69577]: pgmap v164: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:16 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:16 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:17.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:16 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:19.165 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:18 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:02:18] "GET /metrics HTTP/1.1" 200 24440 "" "Prometheus/3.6.0" 2026-03-31T23:02:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:18 vm00.local ceph-mon[61968]: pgmap v165: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:18 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:18 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:19.165 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:18 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:19.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:18 vm05.local ceph-mon[69577]: pgmap v165: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:19.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:18 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:19.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:18 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:19.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 
31 23:02:18 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:21.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:21 vm00.local ceph-mon[61968]: pgmap v166: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:21.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:21 vm05.local ceph-mon[69577]: pgmap v166: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:23.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:23 vm00.local ceph-mon[61968]: pgmap v167: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:23.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:23 vm05.local ceph-mon[69577]: pgmap v167: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:24.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:24 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:02:24.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:24 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' cmd={"prefix": "osd blocklist ls", "format": "json"} : dispatch 2026-03-31T23:02:25.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:25 vm00.local ceph-mon[61968]: pgmap v168: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:25.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:25 vm05.local ceph-mon[69577]: pgmap v168: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:27.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:27 vm00.local ceph-mon[61968]: pgmap v169: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:27.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:27 vm05.local ceph-mon[69577]: pgmap v169: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:29.027 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:28 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.105 - - [31/Mar/2026:23:02:28] "GET /metrics HTTP/1.1" 200 24436 "" "Prometheus/3.6.0" 2026-03-31T23:02:29.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:29 vm00.local ceph-mon[61968]: pgmap v170: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:29.492 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:29 vm05.local ceph-mon[69577]: pgmap v170: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:31.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:31 vm00.local ceph-mon[61968]: pgmap v171: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:31.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:31 vm05.local ceph-mon[69577]: pgmap v171: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:33.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:33 vm00.local ceph-mon[61968]: pgmap v172: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:33.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:33 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:33.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:33 vm00.local 
ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:33.415 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:33 vm00.local ceph-mon[61968]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:33.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:33 vm05.local ceph-mon[69577]: pgmap v172: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:33.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:33 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:33.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:33 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:33.493 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:33 vm05.local ceph-mon[69577]: from='mgr.14457 192.168.123.100:0/36412890' entity='mgr.a' 2026-03-31T23:02:34.662 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.105:9095/api/v1/status/config 2026-03-31T23:02:34.666 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.105:9095/api/v1/status/config 2026-03-31T23:02:34.666 INFO:teuthology.orchestra.run.vm00.stderr:+ jq -e '.status == "success"' 2026-03-31T23:02:34.668 INFO:teuthology.orchestra.run.vm00.stdout:{"status":"success","data":{"yaml":"global:\n scrape_interval: 10s\n scrape_timeout: 10s\n scrape_protocols:\n - OpenMetricsText1.0.0\n - OpenMetricsText0.0.1\n - PrometheusText1.0.0\n - PrometheusText0.0.4\n evaluation_interval: 10s\n external_labels:\n cluster: 8bb14950-2d54-11f1-a348-07063966e06c\nruntime:\n gogc: 75\nalerting:\n alertmanagers:\n - follow_redirects: true\n enable_http2: true\n scheme: http\n timeout: 10s\n api_version: v2\n http_sd_configs:\n - follow_redirects: true\n enable_http2: true\n refresh_interval: 1m\n url: http://192.168.123.100:8765/sd/prometheus/sd-config?service=alertmanager\nrule_files:\n- /etc/prometheus/alerting/*\nscrape_configs:\n- job_name: ceph\n honor_labels: true\n honor_timestamps: true\n track_timestamps_staleness: false\n scrape_interval: 10s\n scrape_timeout: 10s\n scrape_protocols:\n - OpenMetricsText1.0.0\n - OpenMetricsText0.0.1\n - PrometheusText1.0.0\n - PrometheusText0.0.4\n always_scrape_classic_histograms: false\n convert_classic_histograms_to_nhcb: false\n metrics_path: /metrics\n scheme: http\n enable_compression: true\n metric_name_validation_scheme: utf8\n metric_name_escaping_scheme: allow-utf-8\n follow_redirects: true\n enable_http2: true\n relabel_configs:\n - source_labels: [__address__]\n separator: ;\n target_label: cluster\n replacement: 8bb14950-2d54-11f1-a348-07063966e06c\n action: replace\n - source_labels: [instance]\n separator: ;\n target_label: instance\n replacement: ceph_cluster\n action: replace\n http_sd_configs:\n - follow_redirects: true\n enable_http2: true\n refresh_interval: 1m\n url: http://192.168.123.100:8765/sd/prometheus/sd-config?service=mgr-prometheus\n- job_name: node\n honor_timestamps: true\n track_timestamps_staleness: false\n scrape_interval: 10s\n scrape_timeout: 10s\n scrape_protocols:\n - OpenMetricsText1.0.0\n - OpenMetricsText0.0.1\n - PrometheusText1.0.0\n - PrometheusText0.0.4\n always_scrape_classic_histograms: false\n convert_classic_histograms_to_nhcb: false\n metrics_path: /metrics\n scheme: http\n enable_compression: true\n metric_name_validation_scheme: utf8\n metric_name_escaping_scheme: allow-utf-8\n follow_redirects: true\n enable_http2: true\n relabel_configs:\n 
- source_labels: [__address__]\n separator: ;\n target_label: cluster\n replacement: 8bb14950-2d54-11f1-a348-07063966e06c\n action: replace\n http_sd_configs:\n - follow_redirects: true\n enable_http2: true\n refresh_interval: 1m\n url: http://192.168.123.100:8765/sd/prometheus/sd-config?service=node-exporter\n- job_name: ceph-exporter\n honor_labels: true\n honor_timestamps: true\n track_timestamps_staleness: false\n scrape_interval: 10s\n scrape_timeout: 10s\n scrape_protocols:\n - OpenMetricsText1.0.0\n - OpenMetricsText0.0.1\n - PrometheusText1.0.0\n - PrometheusText0.0.4\n always_scrape_classic_histograms: false\n convert_classic_histograms_to_nhcb: false\n metrics_path: /metrics\n scheme: http\n enable_compression: true\n metric_name_validation_scheme: utf8\n metric_name_escaping_scheme: allow-utf-8\n follow_redirects: true\n enable_http2: true\n relabel_configs:\n - source_labels: [__address__]\n separator: ;\n target_label: cluster\n replacement: 8bb14950-2d54-11f1-a348-07063966e06c\n action: replace\n http_sd_configs:\n - follow_redirects: true\n enable_http2: true\n refresh_interval: 1m\n url: http://192.168.123.100:8765/sd/prometheus/sd-config?service=ceph-exporter\n- job_name: nvmeof\n honor_timestamps: true\n track_timestamps_staleness: false\n scrape_interval: 10s\n scrape_timeout: 10s\n scrape_protocols:\n - OpenMetricsText1.0.0\n - OpenMetricsText0.0.1\n - PrometheusText1.0.0\n - PrometheusText0.0.4\n always_scrape_classic_histograms: false\n convert_classic_histograms_to_nhcb: false\n metrics_path: /metrics\n scheme: http\n enable_compression: true\n metric_name_validation_scheme: utf8\n metric_name_escaping_scheme: allow-utf-8\n follow_redirects: true\n enable_http2: true\n http_sd_configs:\n - follow_redirects: true\n enable_http2: true\n refresh_interval: 1m\n url: http://192.168.123.100:8765/sd/prometheus/sd-config?service=nvmeof\n- job_name: nfs\n honor_timestamps: true\n track_timestamps_staleness: false\n scrape_interval: 10s\n scrape_timeout: 10s\n scrape_protocols:\n - OpenMetricsText1.0.0\n - OpenMetricsText0.0.1\n - PrometheusText1.0.0\n - PrometheusText0.0.4\n always_scrape_classic_histograms: false\n convert_classic_histograms_to_nhcb: false\n metrics_path: /metrics\n scheme: http\n enable_compression: true\n metric_name_validation_scheme: utf8\n metric_name_escaping_scheme: allow-utf-8\n follow_redirects: true\n enable_http2: true\n http_sd_configs:\n - follow_redirects: true\n enable_http2: true\n refresh_interval: 1m\n url: http://192.168.123.100:8765/sd/prometheus/sd-config?service=nfs\n- job_name: smb\n honor_timestamps: true\n track_timestamps_staleness: false\n scrape_interval: 10s\n scrape_timeout: 10s\n scrape_protocols:\n - OpenMetricsText1.0.0\n - OpenMetricsText0.0.1\n - PrometheusText1.0.0\n - PrometheusText0.0.4\n always_scrape_classic_histograms: false\n convert_classic_histograms_to_nhcb: false\n metrics_path: /metrics\n scheme: http\n enable_compression: true\n metric_name_validation_scheme: utf8\n metric_name_escaping_scheme: allow-utf-8\n follow_redirects: true\n enable_http2: true\n http_sd_configs:\n - follow_redirects: true\n enable_http2: true\n refresh_interval: 1m\n url: http://192.168.123.100:8765/sd/prometheus/sd-config?service=smb\notlp:\n translation_strategy: UnderscoreEscapingWithSuffixes\n"}}true 2026-03-31T23:02:34.669 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.105:9095/api/v1/alerts 2026-03-31T23:02:34.671 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.105:9095/api/v1/alerts 
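Note: the endpoint checks logged above and below all follow one pattern: jq -e maps the JSON predicate onto the process exit status, so under the script's set -e a non-"success" config status or an absent/non-firing CephMonDown alert fails the job. A condensed sketch of that pattern (PROM_IP taken from this run's log; substitute your own Prometheus host):

  PROM_IP=192.168.123.105
  curl -s "http://${PROM_IP}:9095/api/v1/status/config" | jq -e '.status == "success"'
  curl -s "http://${PROM_IP}:9095/api/v1/alerts" \
    | jq -e '.data.alerts[] | select(.labels.alertname == "CephMonDown") | .state == "firing"'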
2026-03-31T23:02:34.671 INFO:teuthology.orchestra.run.vm00.stderr:+ jq -e '.data | .alerts | .[] | select(.labels | .alertname == "CephMonDown") | .state == "firing"' 2026-03-31T23:02:34.673 INFO:teuthology.orchestra.run.vm00.stdout:{"status":"success","data":{"alerts":[{"labels":{"alertname":"CephHealthWarning","cluster":"8bb14950-2d54-11f1-a348-07063966e06c","instance":"ceph_cluster","job":"ceph","severity":"warning","type":"ceph_default"},"annotations":{"description":"The cluster state has been HEALTH_WARN for more than 15 minutes on cluster 8bb14950-2d54-11f1-a348-07063966e06c. Please check 'ceph health detail' for more information.","summary":"Ceph is in the WARNING state on cluster 8bb14950-2d54-11f1-a348-07063966e06c"},"state":"pending","activeAt":"2026-03-31T23:01:03.558815712Z","value":"1e+00"},{"labels":{"alertname":"CephMonDownQuorumAtRisk","cluster":"8bb14950-2d54-11f1-a348-07063966e06c","oid":"1.3.6.1.4.1.50495.1.2.1.3.1","severity":"critical","type":"ceph_default"},"annotations":{"description":"Quorum requires a majority of monitors (x 2) to be active. Without quorum the cluster will become inoperable, affecting all services and connected clients. The following monitors are down: - mon.c on vm09","documentation":"https://docs.ceph.com/en/latest/rados/operations/health-checks#mon-down","summary":"Monitor quorum is at risk on cluster 8bb14950-2d54-11f1-a348-07063966e06c"},"state":"firing","activeAt":"2026-03-31T23:01:02.639590217Z","value":"1e+00"},{"labels":{"alertname":"CephMonDown","cluster":"8bb14950-2d54-11f1-a348-07063966e06c","severity":"warning","type":"ceph_default"},"annotations":{"description":"You have 1 monitor down. Quorum is still intact, but the loss of an additional monitor will make your cluster inoperable. The following monitors are down: - mon.c on vm09","documentation":"https://docs.ceph.com/en/latest/rados/operations/health-checks#mon-down","summary":"One or more monitors down on cluster 8bb14950-2d54-11f1-a348-07063966e06c"},"state":"firing","activeAt":"2026-03-31T23:01:02.639590217Z","value":"1e+00"}]}}true 2026-03-31T23:02:34.674 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.109:9093/api/v2/status 2026-03-31T23:02:34.676 INFO:teuthology.orchestra.run.vm00.stdout:{"cluster":{"name":"01KN31V3W8DF62801K9JWZ29KP","peers":[{"address":"192.168.123.109:9094","name":"01KN31V3W8DF62801K9JWZ29KP"}],"status":"ready"},"config":{"original":"global:\n resolve_timeout: 5m\n http_config:\n tls_config:\n insecure_skip_verify: true\n follow_redirects: true\n enable_http2: true\n smtp_hello: localhost\n smtp_require_tls: true\n smtp_tls_config:\n insecure_skip_verify: false\n pagerduty_url: https://events.pagerduty.com/v2/enqueue\n opsgenie_api_url: https://api.opsgenie.com/\n wechat_api_url: https://qyapi.weixin.qq.com/cgi-bin/\n victorops_api_url: https://alert.victorops.com/integrations/generic/20131114/alert/\n telegram_api_url: https://api.telegram.org\n webex_api_url: https://webexapis.com/v1/messages\n rocketchat_api_url: https://open.rocket.chat/\nroute:\n receiver: default\n continue: false\n routes:\n - receiver: ceph-dashboard\n group_by:\n - alertname\n continue: false\n group_wait: 10s\n group_interval: 10s\n repeat_interval: 1h\nreceivers:\n- name: default\n- name: ceph-dashboard\n webhook_configs:\n - send_resolved: true\n http_config:\n tls_config:\n insecure_skip_verify: true\n follow_redirects: true\n enable_http2: true\n url: \n url_file: \"\"\n max_alerts: 0\n timeout: 0s\n - send_resolved: true\n http_config:\n tls_config:\n 
insecure_skip_verify: true\n follow_redirects: true\n enable_http2: true\n url: \n url_file: \"\"\n max_alerts: 0\n timeout: 0s\ntemplates: []\n"},"uptime":"2026-03-31T22:57:15.657Z","versionInfo":{"branch":"HEAD","buildDate":"20250307-15:05:18","buildUser":"root@fa3ca569dfe4","goVersion":"go1.23.7","revision":"b2099eaa2c9ebc25edb26517cb9c732738e93910","version":"0.28.1"}} 2026-03-31T23:02:34.676 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.109:9093/api/v2/alerts 2026-03-31T23:02:34.678 INFO:teuthology.orchestra.run.vm00.stdout:[{"annotations":{"description":"Quorum requires a majority of monitors (x 2) to be active. Without quorum the cluster will become inoperable, affecting all services and connected clients. The following monitors are down: - mon.c on vm09","documentation":"https://docs.ceph.com/en/latest/rados/operations/health-checks#mon-down","summary":"Monitor quorum is at risk on cluster 8bb14950-2d54-11f1-a348-07063966e06c"},"endsAt":"2026-03-31T23:05:32.639Z","fingerprint":"2cdceed55fa4481e","receivers":[{"name":"ceph-dashboard"}],"startsAt":"2026-03-31T23:01:32.639Z","status":{"inhibitedBy":[],"mutedBy":[],"silencedBy":[],"state":"active"},"updatedAt":"2026-03-31T23:01:32.642Z","generatorURL":"http://vm05.local:9095/graph?g0.expr=%28%28ceph_health_detail%7Bname%3D%22MON_DOWN%22%7D+%3D%3D+1%29+%2A+on+%28%29+group_right+%28cluster%29+%28count+by+%28cluster%29+%28ceph_mon_quorum_status+%3D%3D+1%29+%3D%3D+bool+%28floor%28count+by+%28cluster%29+%28ceph_mon_metadata%29+%2F+2%29+%2B+1%29%29%29+%3D%3D+1\u0026g0.tab=1","labels":{"alertname":"CephMonDownQuorumAtRisk","cluster":"8bb14950-2d54-11f1-a348-07063966e06c","oid":"1.3.6.1.4.1.50495.1.2.1.3.1","severity":"critical","type":"ceph_default"}},{"annotations":{"description":"You have 1 monitor down. Quorum is still intact, but the loss of an additional monitor will make your cluster inoperable. 
The following monitors are down: - mon.c on vm09","documentation":"https://docs.ceph.com/en/latest/rados/operations/health-checks#mon-down","summary":"One or more monitors down on cluster 8bb14950-2d54-11f1-a348-07063966e06c"},"endsAt":"2026-03-31T23:05:32.639Z","fingerprint":"fefadbee517a8163","receivers":[{"name":"ceph-dashboard"}],"startsAt":"2026-03-31T23:01:32.639Z","status":{"inhibitedBy":[],"mutedBy":[],"silencedBy":[],"state":"active"},"updatedAt":"2026-03-31T23:01:32.642Z","generatorURL":"http://vm05.local:9095/graph?g0.expr=%28count+by+%28cluster%29+%28ceph_mon_quorum_status+%3D%3D+0%29%29+%3C%3D+%28count+by+%28cluster%29+%28ceph_mon_metadata%29+-+floor%28%28count+by+%28cluster%29+%28ceph_mon_metadata%29+%2F+2+%2B+1%29%29%29\u0026g0.tab=1","labels":{"alertname":"CephMonDown","cluster":"8bb14950-2d54-11f1-a348-07063966e06c","severity":"warning","type":"ceph_default"}}] 2026-03-31T23:02:34.679 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.109:9093/api/v2/alerts 2026-03-31T23:02:34.679 INFO:teuthology.orchestra.run.vm00.stderr:+ jq -e '.[] | select(.labels | .alertname == "CephMonDown") | .status | .state == "active"' 2026-03-31T23:02:34.681 INFO:teuthology.orchestra.run.vm00.stdout:true 2026-03-31T23:02:34.681 INFO:teuthology.orchestra.run.vm00.stderr:++ ceph mgr services 2026-03-31T23:02:34.681 INFO:teuthology.orchestra.run.vm00.stderr:++ jq -r .prometheus 2026-03-31T23:02:34.881 INFO:teuthology.orchestra.run.vm00.stderr:+ METRICS_URL=http://192.168.123.100:9283/ 2026-03-31T23:02:34.881 INFO:teuthology.orchestra.run.vm00.stderr:+ '[' -n http://192.168.123.100:9283/ ']' 2026-03-31T23:02:34.882 INFO:teuthology.orchestra.run.vm00.stderr:+ curl -s http://192.168.123.100:9283/metrics 2026-03-31T23:02:34.882 INFO:teuthology.orchestra.run.vm00.stderr:+ grep -q '^ceph_health_status' 2026-03-31T23:02:34.915 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:34 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: ::ffff:192.168.123.100 - - [31/Mar/2026:23:02:34] "GET /metrics HTTP/1.1" 200 24436 "" "curl/7.76.1" 2026-03-31T23:02:34.941 DEBUG:teuthology.run_tasks:Unwinding manager cephadm 2026-03-31T23:02:34.943 INFO:tasks.cephadm:Teardown begin 2026-03-31T23:02:34.943 DEBUG:teuthology.orchestra.run.vm00:> sudo rm -f /etc/ceph/ceph.conf /etc/ceph/ceph.client.admin.keyring 2026-03-31T23:02:34.969 DEBUG:teuthology.orchestra.run.vm05:> sudo rm -f /etc/ceph/ceph.conf /etc/ceph/ceph.client.admin.keyring 2026-03-31T23:02:34.995 DEBUG:teuthology.orchestra.run.vm09:> sudo rm -f /etc/ceph/ceph.conf /etc/ceph/ceph.client.admin.keyring 2026-03-31T23:02:35.023 INFO:tasks.cephadm:Disabling cephadm mgr module 2026-03-31T23:02:35.023 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 8bb14950-2d54-11f1-a348-07063966e06c -- ceph mgr module disable cephadm 2026-03-31T23:02:35.146 INFO:teuthology.orchestra.run.vm00.stderr:Inferring config /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/mon.a/config 2026-03-31T23:02:35.165 INFO:teuthology.orchestra.run.vm00.stderr:Error: statfs /etc/ceph/ceph.client.admin.keyring: no such file or directory 2026-03-31T23:02:35.182 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-mon[61968]: pgmap v173: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:35.182 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 
23:02:35 vm00.local ceph-mon[61968]: from='client.? 192.168.123.100:0/2528815401' entity='client.admin' cmd={"prefix": "mgr services"} : dispatch 2026-03-31T23:02:35.183 DEBUG:teuthology.orchestra.run:got remote process result: 125 2026-03-31T23:02:35.183 INFO:tasks.cephadm:Cleaning up testdir ceph.* files... 2026-03-31T23:02:35.183 DEBUG:teuthology.orchestra.run.vm00:> rm -f /home/ubuntu/cephtest/seed.ceph.conf /home/ubuntu/cephtest/ceph.pub 2026-03-31T23:02:35.196 DEBUG:teuthology.orchestra.run.vm05:> rm -f /home/ubuntu/cephtest/seed.ceph.conf /home/ubuntu/cephtest/ceph.pub 2026-03-31T23:02:35.211 DEBUG:teuthology.orchestra.run.vm09:> rm -f /home/ubuntu/cephtest/seed.ceph.conf /home/ubuntu/cephtest/ceph.pub 2026-03-31T23:02:35.226 INFO:tasks.cephadm:Stopping all daemons... 2026-03-31T23:02:35.226 INFO:tasks.cephadm.mon.a:Stopping mon.a... 2026-03-31T23:02:35.226 DEBUG:teuthology.orchestra.run.vm00:> sudo systemctl stop ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a 2026-03-31T23:02:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:35 vm05.local ceph-mon[69577]: pgmap v173: 1 pgs: 1 active+clean; 449 KiB data, 82 MiB used, 60 GiB / 60 GiB avail 2026-03-31T23:02:35.243 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:35 vm05.local ceph-mon[69577]: from='client.? 192.168.123.100:0/2528815401' entity='client.admin' cmd={"prefix": "mgr services"} : dispatch 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:35 vm00.local systemd[1]: Stopping Ceph mon.a for 8bb14950-2d54-11f1-a348-07063966e06c... 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a[61963]: 2026-03-31T23:02:35.326+0000 7f55f2b07640 -1 received signal: Terminated from /run/podman-init -- /usr/bin/ceph-mon -n mon.a -f --setuser ceph --setgroup ceph --default-log-to-file=false --default-log-to-journald=true --default-log-to-stderr=false --default-mon-cluster-log-to-file=false --default-mon-cluster-log-to-journald=true --default-mon-cluster-log-to-stderr=false (PID: 1) UID: 0 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a[61963]: 2026-03-31T23:02:35.326+0000 7f55f2b07640 -1 mon.a@0(leader) e3 *** Got Signal Terminated *** 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:35 vm00.local podman[82468]: 2026-03-31 23:02:35.398980595 +0000 UTC m=+0.083295807 container died ccaba7e50d34f86eea5c2ddad21e7aa26b517bc56df8aba713a3ca611adaa61e (image=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, io.buildah.version=1.43.0, CEPH_REF=tentacle-release, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.schema-version=1.0, OSD_FLAVOR=default, org.label-schema.build-date=20260316, org.label-schema.name=CentOS Stream 9 Base Image, org.opencontainers.image.authors=Ceph Release Team , org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.license=GPLv2, ceph=True, org.label-schema.vendor=CentOS) 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:35 vm00.local podman[82468]: 2026-03-31 23:02:35.412484429 +0000 UTC m=+0.096799641 container remove 
ccaba7e50d34f86eea5c2ddad21e7aa26b517bc56df8aba713a3ca611adaa61e (image=quay.ceph.io/ceph-ci/ceph:5bb3278730741031382ca9c3dc9d221a942e06a2, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a, org.label-schema.license=GPLv2, OSD_FLAVOR=default, io.buildah.version=1.43.0, org.opencontainers.image.authors=Ceph Release Team , CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, ceph=True, org.label-schema.schema-version=1.0, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_REF=tentacle-release, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20260316, org.opencontainers.image.documentation=https://docs.ceph.com/) 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mon.a.vm00.stdout:Mar 31 23:02:35 vm00.local bash[82468]: ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-a 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:23:02:35] ENGINE Bus STOPPING 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:23:02:35] ENGINE HTTP Server cherrypy._cpwsgi_server.CPWSGIServer(('::', 9283)) shut down 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:23:02:35] ENGINE Bus STOPPED 2026-03-31T23:02:35.463 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:23:02:35] ENGINE Bus STARTING 2026-03-31T23:02:35.481 DEBUG:teuthology.orchestra.run.vm00:> sudo pkill -f 'journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.a.service' 2026-03-31T23:02:35.512 DEBUG:teuthology.orchestra.run:got remote process result: None 2026-03-31T23:02:35.512 INFO:tasks.cephadm.mon.a:Stopped mon.a 2026-03-31T23:02:35.513 INFO:tasks.cephadm.mon.c:Stopping mon.b... 2026-03-31T23:02:35.513 DEBUG:teuthology.orchestra.run.vm05:> sudo systemctl stop ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.b 2026-03-31T23:02:35.844 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:35 vm05.local systemd[1]: Stopping Ceph mon.b for 8bb14950-2d54-11f1-a348-07063966e06c... 
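Note: teardown stops each cephadm-managed daemon through its systemd unit, named ceph-<fsid>@<daemon>, then kills the journalctl follower teuthology attached to that unit, as the surrounding commands show. A sketch using the fsid from this run:

  FSID=8bb14950-2d54-11f1-a348-07063966e06c
  sudo systemctl stop "ceph-${FSID}@mon.b"
  sudo pkill -f "journalctl -f -n 0 -u ceph-${FSID}@mon.b.service"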
2026-03-31T23:02:35.844 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:35 vm05.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-b[69573]: 2026-03-31T23:02:35.608+0000 7f08b6430640 -1 received signal: Terminated from /run/podman-init -- /usr/bin/ceph-mon -n mon.b -f --setuser ceph --setgroup ceph --default-log-to-file=false --default-log-to-journald=true --default-log-to-stderr=false --default-mon-cluster-log-to-file=false --default-mon-cluster-log-to-journald=true --default-mon-cluster-log-to-stderr=false (PID: 1) UID: 0 2026-03-31T23:02:35.844 INFO:journalctl@ceph.mon.b.vm05.stdout:Mar 31 23:02:35 vm05.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mon-b[69573]: 2026-03-31T23:02:35.608+0000 7f08b6430640 -1 mon.b@2(peon) e3 *** Got Signal Terminated *** 2026-03-31T23:02:35.915 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:23:02:35] ENGINE Serving on http://:::9283 2026-03-31T23:02:35.915 INFO:journalctl@ceph.mgr.a.vm00.stdout:Mar 31 23:02:35 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-mgr-a[62204]: [31/Mar/2026:23:02:35] ENGINE Bus STARTED 2026-03-31T23:02:35.916 DEBUG:teuthology.orchestra.run.vm05:> sudo pkill -f 'journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.b.service' 2026-03-31T23:02:35.945 DEBUG:teuthology.orchestra.run:got remote process result: None 2026-03-31T23:02:35.945 INFO:tasks.cephadm.mon.c:Stopped mon.b 2026-03-31T23:02:35.945 INFO:tasks.cephadm.mon.c:Stopping mon.c... 2026-03-31T23:02:35.945 DEBUG:teuthology.orchestra.run.vm09:> sudo systemctl stop ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.c 2026-03-31T23:02:35.973 DEBUG:teuthology.orchestra.run.vm09:> sudo pkill -f 'journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mon.c.service' 2026-03-31T23:02:36.046 DEBUG:teuthology.orchestra.run:got remote process result: None 2026-03-31T23:02:36.046 INFO:tasks.cephadm.mon.c:Stopped mon.c 2026-03-31T23:02:36.046 INFO:tasks.cephadm.mgr.a:Stopping mgr.a... 2026-03-31T23:02:36.046 DEBUG:teuthology.orchestra.run.vm00:> sudo systemctl stop ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.a 2026-03-31T23:02:36.280 DEBUG:teuthology.orchestra.run.vm00:> sudo pkill -f 'journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.a.service' 2026-03-31T23:02:36.308 DEBUG:teuthology.orchestra.run:got remote process result: None 2026-03-31T23:02:36.308 INFO:tasks.cephadm.mgr.a:Stopped mgr.a 2026-03-31T23:02:36.309 INFO:tasks.cephadm.mgr.b:Stopping mgr.b... 2026-03-31T23:02:36.309 DEBUG:teuthology.orchestra.run.vm05:> sudo systemctl stop ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.b 2026-03-31T23:02:36.432 INFO:journalctl@ceph.mgr.b.vm05.stdout:Mar 31 23:02:36 vm05.local systemd[1]: Stopping Ceph mgr.b for 8bb14950-2d54-11f1-a348-07063966e06c... 2026-03-31T23:02:36.526 DEBUG:teuthology.orchestra.run.vm05:> sudo pkill -f 'journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@mgr.b.service' 2026-03-31T23:02:36.555 DEBUG:teuthology.orchestra.run:got remote process result: None 2026-03-31T23:02:36.555 INFO:tasks.cephadm.mgr.b:Stopped mgr.b 2026-03-31T23:02:36.555 INFO:tasks.cephadm.osd.0:Stopping osd.0... 2026-03-31T23:02:36.555 DEBUG:teuthology.orchestra.run.vm00:> sudo systemctl stop ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.0 2026-03-31T23:02:36.644 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:36 vm00.local systemd[1]: Stopping Ceph osd.0 for 8bb14950-2d54-11f1-a348-07063966e06c... 
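Each daemon above maps to an instance of the per-cluster templated unit `ceph-<fsid>@.service`, so all of a cluster's daemons on a host can be enumerated with a glob; a small example (the fsid is the one from this run):

    # List this cluster's cephadm-managed units on the current host,
    # including ones that have already stopped.
    sudo systemctl list-units 'ceph-8bb14950-2d54-11f1-a348-07063966e06c@*' --all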
2026-03-31T23:02:36.915 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:36 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0[72064]: 2026-03-31T23:02:36.642+0000 7facbaf8f640 -1 received signal: Terminated from /run/podman-init -- /usr/bin/ceph-osd -n osd.0 -f --setuser ceph --setgroup ceph --default-log-to-file=false --default-log-to-journald=true --default-log-to-stderr=false --osd-objectstore=bluestore (PID: 1) UID: 0 2026-03-31T23:02:36.915 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:36 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0[72064]: 2026-03-31T23:02:36.642+0000 7facbaf8f640 -1 osd.0 27 *** Got signal Terminated *** 2026-03-31T23:02:36.915 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:36 vm00.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0[72064]: 2026-03-31T23:02:36.642+0000 7facbaf8f640 -1 osd.0 27 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-03-31T23:02:41.960 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:41 vm00.local podman[82965]: 2026-03-31 23:02:41.684394983 +0000 UTC m=+5.052759043 container died 797c01ef65d602a3269e787a0143a816de64cbb3c127ea05daae4962e74694e7 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0, org.label-schema.vendor=CentOS, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.name=CentOS Stream 9 Base Image, FROM_IMAGE=quay.io/centos/centos:stream9, org.opencontainers.image.authors=Ceph Release Team , io.buildah.version=1.43.0, OSD_FLAVOR=default, ceph=True, org.label-schema.schema-version=1.0, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2, org.label-schema.build-date=20260316) 2026-03-31T23:02:41.960 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:41 vm00.local podman[82965]: 2026-03-31 23:02:41.703016676 +0000 UTC m=+5.071380747 container remove 797c01ef65d602a3269e787a0143a816de64cbb3c127ea05daae4962e74694e7 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0, FROM_IMAGE=quay.io/centos/centos:stream9, OSD_FLAVOR=default, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.vendor=CentOS, org.label-schema.build-date=20260316, org.label-schema.license=GPLv2, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.opencontainers.image.authors=Ceph Release Team , org.opencontainers.image.documentation=https://docs.ceph.com/, ceph=True, io.buildah.version=1.43.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_REF=tentacle-release, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.schema-version=1.0) 2026-03-31T23:02:41.960 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:41 vm00.local bash[82965]: ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0 2026-03-31T23:02:41.960 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:41 vm00.local podman[83189]: 2026-03-31 23:02:41.866093559 +0000 UTC m=+0.015025683 container create df3792e939229fe23f0a7bf3a7ea84dd5046487233f1f51970ed9ea0e2ff3a96 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, 
name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0-deactivate, FROM_IMAGE=quay.io/centos/centos:stream9, org.opencontainers.image.documentation=https://docs.ceph.com/, io.buildah.version=1.43.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.vendor=CentOS, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.schema-version=1.0, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.name=CentOS Stream 9 Base Image, ceph=True, OSD_FLAVOR=default, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2) 2026-03-31T23:02:41.960 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:41 vm00.local podman[83189]: 2026-03-31 23:02:41.908519935 +0000 UTC m=+0.057452059 container init df3792e939229fe23f0a7bf3a7ea84dd5046487233f1f51970ed9ea0e2ff3a96 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0-deactivate, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2, ceph=True, org.opencontainers.image.documentation=https://docs.ceph.com/, io.buildah.version=1.43.0, org.label-schema.vendor=CentOS, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.name=CentOS Stream 9 Base Image, OSD_FLAVOR=default, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.schema-version=1.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/) 2026-03-31T23:02:41.960 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:41 vm00.local podman[83189]: 2026-03-31 23:02:41.911914168 +0000 UTC m=+0.060846301 container start df3792e939229fe23f0a7bf3a7ea84dd5046487233f1f51970ed9ea0e2ff3a96 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0-deactivate, org.label-schema.vendor=CentOS, ceph=True, org.label-schema.build-date=20260316, io.buildah.version=1.43.0, CEPH_REF=tentacle-release, org.label-schema.name=CentOS Stream 9 Base Image, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.license=GPLv2, org.opencontainers.image.authors=Ceph Release Team , OSD_FLAVOR=default, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.schema-version=1.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git) 2026-03-31T23:02:41.960 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:41 vm00.local podman[83189]: 2026-03-31 23:02:41.912849769 +0000 UTC m=+0.061781883 container attach df3792e939229fe23f0a7bf3a7ea84dd5046487233f1f51970ed9ea0e2ff3a96 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-0-deactivate, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.build-date=20260316, org.opencontainers.image.authors=Ceph Release Team , CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.name=CentOS Stream 9 Base Image, ceph=True, 
org.opencontainers.image.documentation=https://docs.ceph.com/, org.label-schema.schema-version=1.0, io.buildah.version=1.43.0, CEPH_REF=tentacle-release, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, OSD_FLAVOR=default) 2026-03-31T23:02:41.960 INFO:journalctl@ceph.osd.0.vm00.stdout:Mar 31 23:02:41 vm00.local podman[83189]: 2026-03-31 23:02:41.860166386 +0000 UTC m=+0.009098521 image pull 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072 2026-03-31T23:02:42.074 DEBUG:teuthology.orchestra.run.vm00:> sudo pkill -f 'journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.0.service' 2026-03-31T23:02:42.102 DEBUG:teuthology.orchestra.run:got remote process result: None 2026-03-31T23:02:42.103 INFO:tasks.cephadm.osd.0:Stopped osd.0 2026-03-31T23:02:42.103 INFO:tasks.cephadm.osd.1:Stopping osd.1... 2026-03-31T23:02:42.103 DEBUG:teuthology.orchestra.run.vm05:> sudo systemctl stop ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.1 2026-03-31T23:02:42.493 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:42 vm05.local systemd[1]: Stopping Ceph osd.1 for 8bb14950-2d54-11f1-a348-07063966e06c... 2026-03-31T23:02:42.493 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:42 vm05.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1[74050]: 2026-03-31T23:02:42.211+0000 7fefa66cf640 -1 received signal: Terminated from /run/podman-init -- /usr/bin/ceph-osd -n osd.1 -f --setuser ceph --setgroup ceph --default-log-to-file=false --default-log-to-journald=true --default-log-to-stderr=false --osd-objectstore=bluestore (PID: 1) UID: 0 2026-03-31T23:02:42.493 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:42 vm05.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1[74050]: 2026-03-31T23:02:42.211+0000 7fefa66cf640 -1 osd.1 27 *** Got signal Terminated *** 2026-03-31T23:02:42.493 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:42 vm05.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1[74050]: 2026-03-31T23:02:42.211+0000 7fefa66cf640 -1 osd.1 27 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-03-31T23:02:47.510 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:47 vm05.local podman[81834]: 2026-03-31 23:02:47.247925267 +0000 UTC m=+5.049679474 container died d562d5ab1cdbb1b4a656eaedd42ec882e0130f4f9fc6edcd874274084191bd33 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1, io.buildah.version=1.43.0, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.license=GPLv2, ceph=True, org.label-schema.build-date=20260316, FROM_IMAGE=quay.io/centos/centos:stream9, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.label-schema.schema-version=1.0, OSD_FLAVOR=default, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.opencontainers.image.authors=Ceph Release Team , CEPH_REF=tentacle-release) 2026-03-31T23:02:47.510 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:47 vm05.local podman[81834]: 2026-03-31 23:02:47.262561791 +0000 UTC m=+5.064315998 container remove 
d562d5ab1cdbb1b4a656eaedd42ec882e0130f4f9fc6edcd874274084191bd33 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1, io.buildah.version=1.43.0, org.label-schema.name=CentOS Stream 9 Base Image, OSD_FLAVOR=default, org.opencontainers.image.documentation=https://docs.ceph.com/, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.build-date=20260316, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, ceph=True, org.opencontainers.image.authors=Ceph Release Team , FROM_IMAGE=quay.io/centos/centos:stream9) 2026-03-31T23:02:47.510 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:47 vm05.local bash[81834]: ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1 2026-03-31T23:02:47.510 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:47 vm05.local podman[81930]: 2026-03-31 23:02:47.418612521 +0000 UTC m=+0.017729897 container create debeffa97f809fe17fda92a491628ef460fac146561037f81512eb453c3c358b (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1-deactivate, org.label-schema.vendor=CentOS, org.label-schema.name=CentOS Stream 9 Base Image, org.opencontainers.image.documentation=https://docs.ceph.com/, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.build-date=20260316, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, org.label-schema.schema-version=1.0, CEPH_REF=tentacle-release, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.license=GPLv2, io.buildah.version=1.43.0, OSD_FLAVOR=default, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, ceph=True, org.opencontainers.image.authors=Ceph Release Team ) 2026-03-31T23:02:47.510 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:47 vm05.local podman[81930]: 2026-03-31 23:02:47.463523501 +0000 UTC m=+0.062640877 container init debeffa97f809fe17fda92a491628ef460fac146561037f81512eb453c3c358b (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1-deactivate, org.label-schema.name=CentOS Stream 9 Base Image, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.license=GPLv2, org.label-schema.vendor=CentOS, io.buildah.version=1.43.0, org.label-schema.schema-version=1.0, org.opencontainers.image.documentation=https://docs.ceph.com/, org.opencontainers.image.authors=Ceph Release Team , CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, OSD_FLAVOR=default, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, CEPH_REF=tentacle-release, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.build-date=20260316, ceph=True) 2026-03-31T23:02:47.510 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:47 vm05.local podman[81930]: 2026-03-31 23:02:47.466038347 +0000 UTC m=+0.065155723 container start debeffa97f809fe17fda92a491628ef460fac146561037f81512eb453c3c358b (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1-deactivate, 
org.opencontainers.image.documentation=https://docs.ceph.com/, ceph=True, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, OSD_FLAVOR=default, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.name=CentOS Stream 9 Base Image, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.label-schema.build-date=20260316, org.label-schema.vendor=CentOS, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2, org.opencontainers.image.authors=Ceph Release Team , FROM_IMAGE=quay.io/centos/centos:stream9, io.buildah.version=1.43.0, org.label-schema.schema-version=1.0) 2026-03-31T23:02:47.510 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:47 vm05.local podman[81930]: 2026-03-31 23:02:47.467355614 +0000 UTC m=+0.066472990 container attach debeffa97f809fe17fda92a491628ef460fac146561037f81512eb453c3c358b (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-1-deactivate, org.label-schema.build-date=20260316, org.label-schema.vendor=CentOS, org.label-schema.license=GPLv2, io.buildah.version=1.43.0, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, org.label-schema.name=CentOS Stream 9 Base Image, OSD_FLAVOR=default, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_REF=tentacle-release, FROM_IMAGE=quay.io/centos/centos:stream9, ceph=True, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.schema-version=1.0, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2) 2026-03-31T23:02:47.510 INFO:journalctl@ceph.osd.1.vm05.stdout:Mar 31 23:02:47 vm05.local podman[81930]: 2026-03-31 23:02:47.410981087 +0000 UTC m=+0.010098473 image pull 1e58a3cbf9abfa7cd4c97d6122dfc897574d910096f68804997a3e0f45bc44f0 quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072 2026-03-31T23:02:47.638 DEBUG:teuthology.orchestra.run.vm05:> sudo pkill -f 'journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.1.service' 2026-03-31T23:02:47.673 DEBUG:teuthology.orchestra.run:got remote process result: None 2026-03-31T23:02:47.674 INFO:tasks.cephadm.osd.1:Stopped osd.1 2026-03-31T23:02:47.674 INFO:tasks.cephadm.osd.2:Stopping osd.2... 2026-03-31T23:02:47.674 DEBUG:teuthology.orchestra.run.vm09:> sudo systemctl stop ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.2 2026-03-31T23:02:47.989 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 23:02:47 vm09.local systemd[1]: Stopping Ceph osd.2 for 8bb14950-2d54-11f1-a348-07063966e06c... 
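Two details in the OSD teardown above are worth calling out. First, each OSD logs "*** Immediate shutdown (osd_fast_shutdown=true) ***" on SIGTERM: with fast shutdown enabled (the default), the OSD exits immediately rather than walking the full orderly shutdown path. Second, the short-lived `...-osd-N-deactivate` podman containers appear to be cephadm's post-stop step releasing the OSD's devices and tmpfs mount (an inference from the container names; the log itself does not say what runs inside them). If an orderly shutdown is ever wanted for debugging, the option can be flipped through the normal config mechanism:

    # osd_fast_shutdown defaults to true; disabling it restores the slower,
    # fully orderly shutdown path when an OSD receives SIGTERM.
    ceph config set osd osd_fast_shutdown false
    ceph config get osd osd_fast_shutdown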
2026-03-31T23:02:47.989 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 23:02:47 vm09.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[106222]: 2026-03-31T23:02:47.772+0000 7f5746640640 -1 received signal: Terminated from /run/podman-init -- /usr/bin/ceph-osd -n osd.2 -f --setuser ceph --setgroup ceph --default-log-to-file=false --default-log-to-journald=true --default-log-to-stderr=false --osd-objectstore=bluestore (PID: 1) UID: 0 2026-03-31T23:02:47.989 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 23:02:47 vm09.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[106222]: 2026-03-31T23:02:47.772+0000 7f5746640640 -1 osd.2 27 *** Got signal Terminated *** 2026-03-31T23:02:47.989 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 23:02:47 vm09.local ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2[106222]: 2026-03-31T23:02:47.772+0000 7f5746640640 -1 osd.2 27 *** Immediate shutdown (osd_fast_shutdown=true) *** 2026-03-31T23:02:52.948 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 23:02:52 vm09.local podman[114152]: 2026-03-31 23:02:52.796857496 +0000 UTC m=+5.036134369 container died 8574bf5f2cc1960f3c0747aad57cd66a2f6cd84623839d6d96fcab9a737ac0e8 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, FROM_IMAGE=quay.io/centos/centos:stream9, org.label-schema.vendor=CentOS, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/, ceph=True, org.label-schema.build-date=20260316, org.label-schema.license=GPLv2, org.label-schema.schema-version=1.0, io.buildah.version=1.43.0, org.opencontainers.image.documentation=https://docs.ceph.com/, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, CEPH_REF=tentacle-release, org.opencontainers.image.authors=Ceph Release Team , OSD_FLAVOR=default, org.label-schema.name=CentOS Stream 9 Base Image) 2026-03-31T23:02:52.948 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 23:02:52 vm09.local podman[114152]: 2026-03-31 23:02:52.812751514 +0000 UTC m=+5.052028386 container remove 8574bf5f2cc1960f3c0747aad57cd66a2f6cd84623839d6d96fcab9a737ac0e8 (image=quay.ceph.io/ceph-ci/ceph@sha256:02c8d616f8a7af1a26efca44a51de7761356c5cb66c69c789e66f798c27c8072, name=ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2, org.label-schema.build-date=20260316, org.label-schema.name=CentOS Stream 9 Base Image, org.opencontainers.image.documentation=https://docs.ceph.com/, io.buildah.version=1.43.0, CEPH_REF=tentacle-release, org.label-schema.license=GPLv2, FROM_IMAGE=quay.io/centos/centos:stream9, CEPH_GIT_REPO=https://github.com/ceph/ceph-ci.git, org.opencontainers.image.authors=Ceph Release Team , org.label-schema.vendor=CentOS, org.label-schema.schema-version=1.0, OSD_FLAVOR=default, ceph=True, CEPH_SHA1=5bb3278730741031382ca9c3dc9d221a942e06a2, GANESHA_REPO_BASEURL=https://buildlogs.centos.org/centos/$releasever-stream/storage/$basearch/nfsganesha-5/) 2026-03-31T23:02:52.948 INFO:journalctl@ceph.osd.2.vm09.stdout:Mar 31 23:02:52 vm09.local bash[114152]: ceph-8bb14950-2d54-11f1-a348-07063966e06c-osd-2 2026-03-31T23:02:53.155 DEBUG:teuthology.orchestra.run.vm09:> sudo pkill -f 'journalctl -f -n 0 -u ceph-8bb14950-2d54-11f1-a348-07063966e06c@osd.2.service' 2026-03-31T23:02:53.190 DEBUG:teuthology.orchestra.run:got remote process result: None 2026-03-31T23:02:53.190 INFO:tasks.cephadm.osd.2:Stopped osd.2 2026-03-31T23:02:53.190 DEBUG:teuthology.orchestra.run.vm00:> sudo 
/home/ubuntu/cephtest/cephadm rm-cluster --fsid 8bb14950-2d54-11f1-a348-07063966e06c --force --keep-logs 2026-03-31T23:02:53.322 INFO:teuthology.orchestra.run.vm00.stdout:Deleting cluster with fsid: 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T23:02:54.994 DEBUG:teuthology.orchestra.run.vm05:> sudo /home/ubuntu/cephtest/cephadm rm-cluster --fsid 8bb14950-2d54-11f1-a348-07063966e06c --force --keep-logs 2026-03-31T23:02:55.123 INFO:teuthology.orchestra.run.vm05.stdout:Deleting cluster with fsid: 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T23:02:56.724 DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm rm-cluster --fsid 8bb14950-2d54-11f1-a348-07063966e06c --force --keep-logs 2026-03-31T23:02:56.852 INFO:teuthology.orchestra.run.vm09.stdout:Deleting cluster with fsid: 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T23:02:58.313 DEBUG:teuthology.orchestra.run.vm00:> sudo rm -f /etc/ceph/ceph.conf /etc/ceph/ceph.client.admin.keyring 2026-03-31T23:02:58.338 DEBUG:teuthology.orchestra.run.vm05:> sudo rm -f /etc/ceph/ceph.conf /etc/ceph/ceph.client.admin.keyring 2026-03-31T23:02:58.364 DEBUG:teuthology.orchestra.run.vm09:> sudo rm -f /etc/ceph/ceph.conf /etc/ceph/ceph.client.admin.keyring 2026-03-31T23:02:58.391 INFO:tasks.cephadm:Archiving crash dumps... 2026-03-31T23:02:58.391 DEBUG:teuthology.misc:Transferring archived files from vm00:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm00/crash 2026-03-31T23:02:58.392 DEBUG:teuthology.orchestra.run.vm00:> sudo tar c -f - -C /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash -- . 2026-03-31T23:02:58.414 INFO:teuthology.orchestra.run.vm00.stderr:tar: /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash: Cannot open: No such file or directory 2026-03-31T23:02:58.414 INFO:teuthology.orchestra.run.vm00.stderr:tar: Error is not recoverable: exiting now 2026-03-31T23:02:58.415 DEBUG:teuthology.misc:Transferring archived files from vm05:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm05/crash 2026-03-31T23:02:58.416 DEBUG:teuthology.orchestra.run.vm05:> sudo tar c -f - -C /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash -- . 2026-03-31T23:02:58.441 INFO:teuthology.orchestra.run.vm05.stderr:tar: /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash: Cannot open: No such file or directory 2026-03-31T23:02:58.442 INFO:teuthology.orchestra.run.vm05.stderr:tar: Error is not recoverable: exiting now 2026-03-31T23:02:58.443 DEBUG:teuthology.misc:Transferring archived files from vm09:/var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm09/crash 2026-03-31T23:02:58.443 DEBUG:teuthology.orchestra.run.vm09:> sudo tar c -f - -C /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash -- . 2026-03-31T23:02:58.467 INFO:teuthology.orchestra.run.vm09.stderr:tar: /var/lib/ceph/8bb14950-2d54-11f1-a348-07063966e06c/crash: Cannot open: No such file or directory 2026-03-31T23:02:58.467 INFO:teuthology.orchestra.run.vm09.stderr:tar: Error is not recoverable: exiting now 2026-03-31T23:02:58.468 INFO:tasks.cephadm:Checking cluster log for badness... 
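Note that this first `rm-cluster` pass runs with `--keep-logs`, so /var/log/ceph survives for the archiving steps that follow, and that the tar failures above are benign: no daemon crashed, so /var/lib/ceph/<fsid>/crash was never created. The crash-archiving step itself is just a streamed tar; a sketch of the same remote-to-local transfer, with illustrative host and paths:

    # Stream a remote directory into a local archive tree, as teuthology does.
    # HOST and the destination are placeholders; '--' guards against odd filenames.
    HOST=vm00.local
    FSID=8bb14950-2d54-11f1-a348-07063966e06c
    mkdir -p ./archive/crash
    ssh "$HOST" sudo tar c -f - -C "/var/lib/ceph/${FSID}/crash" -- . \
        | tar x -C ./archive/crash

If the remote directory is missing, tar fails exactly as in the log ("Cannot open: No such file or directory").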
2026-03-31T23:02:58.468 DEBUG:teuthology.orchestra.run.vm00:> sudo egrep '\[ERR\]|\[WRN\]|\[SEC\]' /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log | egrep CEPHADM_ | egrep -v '\(MDS_ALL_DOWN\)' | egrep -v '\(MDS_UP_LESS_THAN_MAX\)' | egrep -v MON_DOWN | egrep -v 'mons down' | egrep -v 'mon down' | egrep -v 'out of quorum' | egrep -v CEPHADM_STRAY_DAEMON | egrep -v CEPHADM_FAILED_DAEMON | head -n 1 2026-03-31T23:02:58.493 INFO:tasks.cephadm:Compressing logs... 2026-03-31T23:02:58.493 DEBUG:teuthology.orchestra.run.vm00:> time sudo find /var/log/ceph /var/log/rbd-target-api -name '*.log' -print0 | sudo xargs --max-args=1 --max-procs=0 --verbose -0 --no-run-if-empty -- gzip -5 --verbose -- 2026-03-31T23:02:58.535 DEBUG:teuthology.orchestra.run.vm05:> time sudo find /var/log/ceph /var/log/rbd-target-api -name '*.log' -print0 | sudo xargs --max-args=1 --max-procs=0 --verbose -0 --no-run-if-empty -- gzip -5 --verbose -- 2026-03-31T23:02:58.537 DEBUG:teuthology.orchestra.run.vm09:> time sudo find /var/log/ceph /var/log/rbd-target-api -name '*.log' -print0 | sudo xargs --max-args=1 --max-procs=0 --verbose -0 --no-run-if-empty -- gzip -5 --verbose -- 2026-03-31T23:02:58.556 INFO:teuthology.orchestra.run.vm00.stderr:find: ‘/var/log/rbd-target-api’: No such file or directory 2026-03-31T23:02:58.556 INFO:teuthology.orchestra.run.vm00.stderr:gzip -5 --verbose -- /var/log/ceph/cephadm.log 2026-03-31T23:02:58.557 INFO:teuthology.orchestra.run.vm00.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.a.log 2026-03-31T23:02:58.557 INFO:teuthology.orchestra.run.vm00.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log 2026-03-31T23:02:58.558 INFO:teuthology.orchestra.run.vm00.stderr:/var/log/ceph/cephadm.log: /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.a.log: gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mgr.a.log 2026-03-31T23:02:58.558 INFO:teuthology.orchestra.run.vm00.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log: gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log 2026-03-31T23:02:58.558 INFO:teuthology.orchestra.run.vm00.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mgr.a.log: 87.2% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log.gz 2026-03-31T23:02:58.558 INFO:teuthology.orchestra.run.vm00.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log 2026-03-31T23:02:58.559 INFO:teuthology.orchestra.run.vm09.stderr:find: gzip -5 --verbose -- /var/log/ceph/cephadm.log 2026-03-31T23:02:58.559 INFO:teuthology.orchestra.run.vm09.stderr:‘/var/log/rbd-target-api’: No such file or directory 2026-03-31T23:02:58.559 INFO:teuthology.orchestra.run.vm09.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log 2026-03-31T23:02:58.559 INFO:teuthology.orchestra.run.vm09.stderr:/var/log/ceph/cephadm.log: gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.c.log 2026-03-31T23:02:58.560 INFO:teuthology.orchestra.run.vm09.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log: gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log 2026-03-31T23:02:58.560 INFO:teuthology.orchestra.run.vm00.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log: 90.7% -- replaced with 
/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log.gz 2026-03-31T23:02:58.560 INFO:teuthology.orchestra.run.vm00.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log 2026-03-31T23:02:58.560 INFO:teuthology.orchestra.run.vm00.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log: 81.5% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log.gz 2026-03-31T23:02:58.560 INFO:teuthology.orchestra.run.vm09.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.c.log: gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log 2026-03-31T23:02:58.561 INFO:teuthology.orchestra.run.vm00.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.0.log 2026-03-31T23:02:58.561 INFO:teuthology.orchestra.run.vm09.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log: 82.5% -- replaced with /var/log/ceph/cephadm.log.gz 2026-03-31T23:02:58.561 INFO:teuthology.orchestra.run.vm09.stderr: 80.7% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log.gz 2026-03-31T23:02:58.563 INFO:teuthology.orchestra.run.vm05.stderr:find: gzip -5 --verbose -- /var/log/ceph/cephadm.log 2026-03-31T23:02:58.563 INFO:teuthology.orchestra.run.vm05.stderr:‘/var/log/rbd-target-api’: No such file or directory 2026-03-31T23:02:58.563 INFO:teuthology.orchestra.run.vm05.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log 2026-03-31T23:02:58.563 INFO:teuthology.orchestra.run.vm05.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.b.log 2026-03-31T23:02:58.563 INFO:teuthology.orchestra.run.vm05.stderr:/var/log/ceph/cephadm.log: gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log 2026-03-31T23:02:58.564 INFO:teuthology.orchestra.run.vm09.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log 2026-03-31T23:02:58.565 INFO:teuthology.orchestra.run.vm09.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log: gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.2.log 2026-03-31T23:02:58.565 INFO:teuthology.orchestra.run.vm05.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log: /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.b.log: 80.4% -- replaced with /var/log/ceph/cephadm.log.gz 2026-03-31T23:02:58.565 INFO:teuthology.orchestra.run.vm09.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log: 86.6% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log.gz 2026-03-31T23:02:58.565 INFO:teuthology.orchestra.run.vm09.stderr: 90.8% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log.gz 2026-03-31T23:02:58.566 INFO:teuthology.orchestra.run.vm00.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log: 95.9% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log.gz 2026-03-31T23:02:58.567 INFO:teuthology.orchestra.run.vm05.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log 2026-03-31T23:02:58.567 INFO:teuthology.orchestra.run.vm05.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log: gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log 2026-03-31T23:02:58.567 INFO:teuthology.orchestra.run.vm05.stderr: 
79.6% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.cephadm.log.gz 2026-03-31T23:02:58.568 INFO:teuthology.orchestra.run.vm05.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log: 95.9% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log.gz 2026-03-31T23:02:58.568 INFO:teuthology.orchestra.run.vm05.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mgr.b.log 2026-03-31T23:02:58.568 INFO:teuthology.orchestra.run.vm05.stderr:gzip -5 --verbose -- /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.1.log 2026-03-31T23:02:58.568 INFO:teuthology.orchestra.run.vm05.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log: /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mgr.b.log: 90.9% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.audit.log.gz 2026-03-31T23:02:58.569 INFO:teuthology.orchestra.run.vm05.stderr: 87.3% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph.log.gz 2026-03-31T23:02:58.570 INFO:teuthology.orchestra.run.vm09.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.2.log: 95.9% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-volume.log.gz 2026-03-31T23:02:58.572 INFO:teuthology.orchestra.run.vm05.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.1.log: 90.3% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mgr.b.log.gz 2026-03-31T23:02:58.574 INFO:teuthology.orchestra.run.vm00.stderr:/var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.0.log: 92.3% -- replaced with /var/log/ceph/cephadm.log.gz 2026-03-31T23:02:58.591 INFO:teuthology.orchestra.run.vm00.stderr: 93.2% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.0.log.gz 2026-03-31T23:02:58.594 INFO:teuthology.orchestra.run.vm09.stderr: 93.1% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.2.log.gz 2026-03-31T23:02:58.604 INFO:teuthology.orchestra.run.vm05.stderr: 93.3% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-osd.1.log.gz 2026-03-31T23:02:58.612 INFO:teuthology.orchestra.run.vm09.stderr: 92.8% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.c.log.gz 2026-03-31T23:02:58.613 INFO:teuthology.orchestra.run.vm09.stderr: 2026-03-31T23:02:58.613 INFO:teuthology.orchestra.run.vm09.stderr:real 0m0.062s 2026-03-31T23:02:58.613 INFO:teuthology.orchestra.run.vm09.stderr:user 0m0.100s 2026-03-31T23:02:58.613 INFO:teuthology.orchestra.run.vm09.stderr:sys 0m0.013s 2026-03-31T23:02:58.633 INFO:teuthology.orchestra.run.vm05.stderr: 92.7% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.b.log.gz 2026-03-31T23:02:58.634 INFO:teuthology.orchestra.run.vm05.stderr: 2026-03-31T23:02:58.634 INFO:teuthology.orchestra.run.vm05.stderr:real 0m0.083s 2026-03-31T23:02:58.634 INFO:teuthology.orchestra.run.vm05.stderr:user 0m0.119s 2026-03-31T23:02:58.634 INFO:teuthology.orchestra.run.vm05.stderr:sys 0m0.023s 2026-03-31T23:02:58.649 INFO:teuthology.orchestra.run.vm00.stderr: 89.8% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mgr.a.log.gz 2026-03-31T23:02:58.755 INFO:teuthology.orchestra.run.vm00.stderr: 91.5% -- replaced with /var/log/ceph/8bb14950-2d54-11f1-a348-07063966e06c/ceph-mon.a.log.gz 2026-03-31T23:02:58.757 INFO:teuthology.orchestra.run.vm00.stderr: 2026-03-31T23:02:58.757 
INFO:teuthology.orchestra.run.vm00.stderr:real 0m0.210s 2026-03-31T23:02:58.757 INFO:teuthology.orchestra.run.vm00.stderr:user 0m0.330s 2026-03-31T23:02:58.757 INFO:teuthology.orchestra.run.vm00.stderr:sys 0m0.028s 2026-03-31T23:02:58.757 INFO:tasks.cephadm:Archiving logs... 2026-03-31T23:02:58.758 DEBUG:teuthology.misc:Transferring archived files from vm00:/var/log/ceph to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm00/log 2026-03-31T23:02:58.758 DEBUG:teuthology.orchestra.run.vm00:> sudo tar c -f - -C /var/log/ceph -- . 2026-03-31T23:02:58.841 DEBUG:teuthology.misc:Transferring archived files from vm05:/var/log/ceph to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm05/log 2026-03-31T23:02:58.841 DEBUG:teuthology.orchestra.run.vm05:> sudo tar c -f - -C /var/log/ceph -- . 2026-03-31T23:02:58.872 DEBUG:teuthology.misc:Transferring archived files from vm09:/var/log/ceph to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm09/log 2026-03-31T23:02:58.872 DEBUG:teuthology.orchestra.run.vm09:> sudo tar c -f - -C /var/log/ceph -- . 2026-03-31T23:02:58.900 INFO:tasks.cephadm:Removing cluster... 2026-03-31T23:02:58.900 DEBUG:teuthology.orchestra.run.vm00:> sudo /home/ubuntu/cephtest/cephadm rm-cluster --fsid 8bb14950-2d54-11f1-a348-07063966e06c --force 2026-03-31T23:02:59.029 INFO:teuthology.orchestra.run.vm00.stdout:Deleting cluster with fsid: 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T23:02:59.202 DEBUG:teuthology.orchestra.run.vm05:> sudo /home/ubuntu/cephtest/cephadm rm-cluster --fsid 8bb14950-2d54-11f1-a348-07063966e06c --force 2026-03-31T23:02:59.334 INFO:teuthology.orchestra.run.vm05.stdout:Deleting cluster with fsid: 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T23:02:59.503 DEBUG:teuthology.orchestra.run.vm09:> sudo /home/ubuntu/cephtest/cephadm rm-cluster --fsid 8bb14950-2d54-11f1-a348-07063966e06c --force 2026-03-31T23:02:59.636 INFO:teuthology.orchestra.run.vm09.stdout:Deleting cluster with fsid: 8bb14950-2d54-11f1-a348-07063966e06c 2026-03-31T23:02:59.805 INFO:tasks.cephadm:Removing cephadm ... 2026-03-31T23:02:59.805 DEBUG:teuthology.orchestra.run.vm00:> rm -rf /home/ubuntu/cephtest/cephadm 2026-03-31T23:02:59.823 DEBUG:teuthology.orchestra.run.vm05:> rm -rf /home/ubuntu/cephtest/cephadm 2026-03-31T23:02:59.839 DEBUG:teuthology.orchestra.run.vm09:> rm -rf /home/ubuntu/cephtest/cephadm 2026-03-31T23:02:59.854 INFO:tasks.cephadm:Teardown complete 2026-03-31T23:02:59.854 DEBUG:teuthology.run_tasks:Unwinding manager install 2026-03-31T23:02:59.856 INFO:teuthology.task.install.util:Removing shipped files: /home/ubuntu/cephtest/valgrind.supp /usr/bin/daemon-helper /usr/bin/adjust-ulimits /usr/bin/stdin-killer... 
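Two steps back there deserve a gloss. The "badness" check greps ceph.log for [ERR]/[WRN]/[SEC] entries and filters out the job's ignorelist patterns; it apparently matched nothing, so the cluster log was clean. The compression step then runs one gzip per log file, all in parallel — which is why the stderr above looks shuffled: concurrent gzip processes interleave their --verbose output, so "filename:" and "NN% -- replaced with" fragments from different files land on adjacent lines. The command itself, minus the /var/log/rbd-target-api path that did not exist on these hosts:

    # Compress every *.log under the Ceph log tree, one gzip per file,
    # with as many parallel workers as possible (--max-procs=0). The
    # interleaved verbose output in the log above is a side effect of
    # this parallelism, not corruption.
    time sudo find /var/log/ceph -name '*.log' -print0 \
        | sudo xargs --max-args=1 --max-procs=0 --verbose -0 --no-run-if-empty -- \
            gzip -5 --verbose --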
2026-03-31T23:02:59.856 DEBUG:teuthology.orchestra.run.vm00:> sudo rm -f -- /home/ubuntu/cephtest/valgrind.supp /usr/bin/daemon-helper /usr/bin/adjust-ulimits /usr/bin/stdin-killer 2026-03-31T23:02:59.865 DEBUG:teuthology.orchestra.run.vm05:> sudo rm -f -- /home/ubuntu/cephtest/valgrind.supp /usr/bin/daemon-helper /usr/bin/adjust-ulimits /usr/bin/stdin-killer 2026-03-31T23:02:59.881 DEBUG:teuthology.orchestra.run.vm09:> sudo rm -f -- /home/ubuntu/cephtest/valgrind.supp /usr/bin/daemon-helper /usr/bin/adjust-ulimits /usr/bin/stdin-killer 2026-03-31T23:02:59.927 INFO:teuthology.task.install.rpm:Removing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd on rpm system. 2026-03-31T23:02:59.927 DEBUG:teuthology.orchestra.run.vm00:> 2026-03-31T23:02:59.927 DEBUG:teuthology.orchestra.run.vm00:> for d in ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd ; do 2026-03-31T23:02:59.927 DEBUG:teuthology.orchestra.run.vm00:> sudo yum -y remove $d || true 2026-03-31T23:02:59.927 DEBUG:teuthology.orchestra.run.vm00:> done 2026-03-31T23:02:59.932 INFO:teuthology.task.install.rpm:Removing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd on rpm system. 2026-03-31T23:02:59.932 DEBUG:teuthology.orchestra.run.vm05:> 2026-03-31T23:02:59.932 DEBUG:teuthology.orchestra.run.vm05:> for d in ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd ; do 2026-03-31T23:02:59.932 DEBUG:teuthology.orchestra.run.vm05:> sudo yum -y remove $d || true 2026-03-31T23:02:59.932 DEBUG:teuthology.orchestra.run.vm05:> done 2026-03-31T23:02:59.937 INFO:teuthology.task.install.rpm:Removing packages: ceph-radosgw, ceph-test, ceph, ceph-base, cephadm, ceph-immutable-object-cache, ceph-mgr, ceph-mgr-dashboard, ceph-mgr-diskprediction-local, ceph-mgr-rook, ceph-mgr-cephadm, ceph-fuse, ceph-volume, librados-devel, libcephfs2, libcephfs-devel, librados2, librbd1, python3-rados, python3-rgw, python3-cephfs, python3-rbd, rbd-fuse, rbd-mirror, rbd-nbd on rpm system. 
2026-03-31T23:02:59.937 DEBUG:teuthology.orchestra.run.vm09:> 2026-03-31T23:02:59.937 DEBUG:teuthology.orchestra.run.vm09:> for d in ceph-radosgw ceph-test ceph ceph-base cephadm ceph-immutable-object-cache ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-cephadm ceph-fuse ceph-volume librados-devel libcephfs2 libcephfs-devel librados2 librbd1 python3-rados python3-rgw python3-cephfs python3-rbd rbd-fuse rbd-mirror rbd-nbd ; do 2026-03-31T23:02:59.937 DEBUG:teuthology.orchestra.run.vm09:> sudo yum -y remove $d || true 2026-03-31T23:02:59.937 DEBUG:teuthology.orchestra.run.vm09:> done 2026-03-31T23:03:00.132 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved. 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================ 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repository Size 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================ 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:Removing: 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout: ceph-radosgw x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 103 M 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:Removing unused dependencies: 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout: mailcap noarch 2.1.49-5.el9 @baseos 78 k 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================ 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:Remove 2 Packages 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 103 M 2026-03-31T23:03:00.133 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check 2026-03-31T23:03:00.135 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded. 2026-03-31T23:03:00.135 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved. 
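The package removal above is deliberately one `yum remove` per package rather than a single transaction: with `|| true` on each invocation, a package that was never installed on a given host cannot abort the cleanup of the rest. A condensed sketch of the loop the harness emits (package list shortened for illustration):

    # Remove each package individually; '|| true' keeps the loop going
    # when a package is already absent, so one miss cannot stop the cleanup.
    for d in ceph-radosgw ceph-test ceph cephadm; do
        sudo yum -y remove "$d" || true
    done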
2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================ 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repository Size 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================ 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:Removing: 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout: ceph-radosgw x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 103 M 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:Removing unused dependencies: 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout: mailcap noarch 2.1.49-5.el9 @baseos 78 k 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================ 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:Remove 2 Packages 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 103 M 2026-03-31T23:03:00.149 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check 2026-03-31T23:03:00.152 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded. 2026-03-31T23:03:00.152 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test 2026-03-31T23:03:00.153 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded. 2026-03-31T23:03:00.154 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction 2026-03-31T23:03:00.171 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded. 2026-03-31T23:03:00.171 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved. 
2026-03-31T23:03:00.171 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================ 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repository Size 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================ 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:Removing: 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout: ceph-radosgw x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 103 M 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:Removing unused dependencies: 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout: mailcap noarch 2.1.49-5.el9 @baseos 78 k 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================ 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:Remove 2 Packages 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 103 M 2026-03-31T23:03:00.172 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check 2026-03-31T23:03:00.174 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded. 2026-03-31T23:03:00.174 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test 2026-03-31T23:03:00.191 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1 2026-03-31T23:03:00.192 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded. 2026-03-31T23:03:00.193 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction 2026-03-31T23:03:00.207 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1 2026-03-31T23:03:00.216 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.216 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-03-31T23:03:00.216 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service". 2026-03-31T23:03:00.216 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-radosgw.target". 2026-03-31T23:03:00.216 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-radosgw.target". 2026-03-31T23:03:00.216 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T23:03:00.224 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.228 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1 2026-03-31T23:03:00.229 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.229 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-03-31T23:03:00.229 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service". 
2026-03-31T23:03:00.229 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-radosgw.target". 2026-03-31T23:03:00.229 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-radosgw.target". 2026-03-31T23:03:00.229 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:00.233 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.237 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.247 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.250 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.250 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-03-31T23:03:00.250 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-radosgw@*.service" escaped as "ceph-radosgw@\x2a.service". 2026-03-31T23:03:00.250 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-radosgw.target". 2026-03-31T23:03:00.250 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-radosgw.target". 2026-03-31T23:03:00.250 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T23:03:00.252 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.259 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.262 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.269 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.289 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.327 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.327 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.336 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.337 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.369 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.369 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 1/2 2026-03-31T23:03:00.385 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.385 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T23:03:00.385 INFO:teuthology.orchestra.run.vm00.stdout:Removed: 2026-03-31T23:03:00.385 INFO:teuthology.orchestra.run.vm00.stdout: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 mailcap-2.1.49-5.el9.noarch 2026-03-31T23:03:00.385 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T23:03:00.385 INFO:teuthology.orchestra.run.vm00.stdout:Complete! 
2026-03-31T23:03:00.391 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.391 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:00.391 INFO:teuthology.orchestra.run.vm05.stdout:Removed: 2026-03-31T23:03:00.391 INFO:teuthology.orchestra.run.vm05.stdout: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 mailcap-2.1.49-5.el9.noarch 2026-03-31T23:03:00.391 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:00.391 INFO:teuthology.orchestra.run.vm05.stdout:Complete! 2026-03-31T23:03:00.423 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : mailcap-2.1.49-5.el9.noarch 2/2 2026-03-31T23:03:00.423 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T23:03:00.423 INFO:teuthology.orchestra.run.vm09.stdout:Removed: 2026-03-31T23:03:00.423 INFO:teuthology.orchestra.run.vm09.stdout: ceph-radosgw-2:20.2.0-721.g5bb32787.el9.x86_64 mailcap-2.1.49-5.el9.noarch 2026-03-31T23:03:00.423 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T23:03:00.423 INFO:teuthology.orchestra.run.vm09.stdout:Complete! 2026-03-31T23:03:00.601 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved. 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================ 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repository Size 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================ 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:Removing: 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout: ceph-test x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 362 M 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:Removing unused dependencies: 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout: socat x86_64 1.7.4.1-8.el9 @appstream 1.1 M 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout: xmlstarlet x86_64 1.6.1-20.el9 @appstream 195 k 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================ 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:Remove 3 Packages 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 363 M 2026-03-31T23:03:00.602 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check 2026-03-31T23:03:00.604 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded. 2026-03-31T23:03:00.604 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test 2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved. 
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repository Size
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:Removing:
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout: ceph-test x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 362 M
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:Removing unused dependencies:
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout: socat x86_64 1.7.4.1-8.el9 @appstream 1.1 M
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout: xmlstarlet x86_64 1.6.1-20.el9 @appstream 195 k
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:Remove 3 Packages
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 363 M
2026-03-31T23:03:00.614 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T23:03:00.617 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T23:03:00.617 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T23:03:00.623 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repository Size
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:Removing:
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout: ceph-test x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 362 M
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:Removing unused dependencies:
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout: socat x86_64 1.7.4.1-8.el9 @appstream 1.1 M
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout: xmlstarlet x86_64 1.6.1-20.el9 @appstream 195 k
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:Remove 3 Packages
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 363 M
2026-03-31T23:03:00.624 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T23:03:00.627 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T23:03:00.627 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T23:03:00.628 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T23:03:00.628 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T23:03:00.643 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T23:03:00.643 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T23:03:00.651 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T23:03:00.651 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T23:03:00.705 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T23:03:00.712 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 1/3
2026-03-31T23:03:00.714 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : socat-1.7.4.1-8.el9.x86_64 2/3
2026-03-31T23:03:00.716 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T23:03:00.722 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 1/3
2026-03-31T23:03:00.723 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1
2026-03-31T23:03:00.725 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : socat-1.7.4.1-8.el9.x86_64 2/3
2026-03-31T23:03:00.729 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 1/3
2026-03-31T23:03:00.730 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.731 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : socat-1.7.4.1-8.el9.x86_64 2/3
2026-03-31T23:03:00.740 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.746 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.798 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.798 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 1/3
2026-03-31T23:03:00.798 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 2/3
2026-03-31T23:03:00.815 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.815 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 1/3
2026-03-31T23:03:00.815 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 2/3
2026-03-31T23:03:00.817 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.817 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 1/3
2026-03-31T23:03:00.817 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : socat-1.7.4.1-8.el9.x86_64 2/3
2026-03-31T23:03:00.861 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.861 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:00.861 INFO:teuthology.orchestra.run.vm05.stdout:Removed:
2026-03-31T23:03:00.861 INFO:teuthology.orchestra.run.vm05.stdout: ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 socat-1.7.4.1-8.el9.x86_64
2026-03-31T23:03:00.861 INFO:teuthology.orchestra.run.vm05.stdout: xmlstarlet-1.6.1-20.el9.x86_64
2026-03-31T23:03:00.861 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:00.861 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:00.867 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.868 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:00.868 INFO:teuthology.orchestra.run.vm00.stdout:Removed:
2026-03-31T23:03:00.868 INFO:teuthology.orchestra.run.vm00.stdout: ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 socat-1.7.4.1-8.el9.x86_64
2026-03-31T23:03:00.868 INFO:teuthology.orchestra.run.vm00.stdout: xmlstarlet-1.6.1-20.el9.x86_64
2026-03-31T23:03:00.868 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:00.868 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:00.870 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : xmlstarlet-1.6.1-20.el9.x86_64 3/3
2026-03-31T23:03:00.870 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:00.870 INFO:teuthology.orchestra.run.vm09.stdout:Removed:
2026-03-31T23:03:00.870 INFO:teuthology.orchestra.run.vm09.stdout: ceph-test-2:20.2.0-721.g5bb32787.el9.x86_64 socat-1.7.4.1-8.el9.x86_64
2026-03-31T23:03:00.870 INFO:teuthology.orchestra.run.vm09.stdout: xmlstarlet-1.6.1-20.el9.x86_64
2026-03-31T23:03:00.870 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:00.870 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:01.071 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repository Size
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:Removing:
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: ceph x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 0
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:Removing unused dependencies:
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mds x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 6.8 M
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mon x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 19 M
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: lua x86_64 5.4.4-4.el9 @appstream 593 k
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: lua-devel x86_64 5.4.4-4.el9 @crb 49 k
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: luarocks noarch 3.9.2-5.el9 @epel 692 k
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: unzip x86_64 6.0-59.el9 @baseos 389 k
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout: zip x86_64 3.0-35.el9 @baseos 724 k
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:Remove 8 Packages
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 28 M
2026-03-31T23:03:01.072 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T23:03:01.075 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T23:03:01.076 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T23:03:01.086 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repository Size
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout:Removing:
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout: ceph x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 0
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout:Removing unused dependencies:
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mds x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 6.8 M
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mon x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 19 M
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout: lua x86_64 5.4.4-4.el9 @appstream 593 k
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout: lua-devel x86_64 5.4.4-4.el9 @crb 49 k
2026-03-31T23:03:01.087 INFO:teuthology.orchestra.run.vm00.stdout: luarocks noarch 3.9.2-5.el9 @epel 692 k
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout: unzip x86_64 6.0-59.el9 @baseos 389 k
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout: zip x86_64 3.0-35.el9 @baseos 724 k
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout:Remove 8 Packages
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 28 M
2026-03-31T23:03:01.088 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T23:03:01.090 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T23:03:01.091 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T23:03:01.100 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T23:03:01.100 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T23:03:01.100 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repository Size
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:Removing:
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: ceph x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 0
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:Removing unused dependencies:
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mds x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 6.8 M
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mon x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 19 M
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: lua x86_64 5.4.4-4.el9 @appstream 593 k
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: lua-devel x86_64 5.4.4-4.el9 @crb 49 k
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: luarocks noarch 3.9.2-5.el9 @epel 692 k
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: unzip x86_64 6.0-59.el9 @baseos 389 k
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout: zip x86_64 3.0-35.el9 @baseos 724 k
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:Remove 8 Packages
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 28 M
2026-03-31T23:03:01.102 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T23:03:01.105 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T23:03:01.105 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T23:03:01.115 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T23:03:01.115 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T23:03:01.133 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T23:03:01.133 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T23:03:01.145 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T23:03:01.151 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/8
2026-03-31T23:03:01.154 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : luarocks-3.9.2-5.el9.noarch 2/8
2026-03-31T23:03:01.156 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : lua-devel-5.4.4-4.el9.x86_64 3/8
2026-03-31T23:03:01.159 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : zip-3.0-35.el9.x86_64 4/8
2026-03-31T23:03:01.159 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1
2026-03-31T23:03:01.162 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : unzip-6.0-59.el9.x86_64 5/8
2026-03-31T23:03:01.164 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : lua-5.4.4-4.el9.x86_64 6/8
2026-03-31T23:03:01.165 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/8
2026-03-31T23:03:01.168 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : luarocks-3.9.2-5.el9.noarch 2/8
2026-03-31T23:03:01.171 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : lua-devel-5.4.4-4.el9.x86_64 3/8
2026-03-31T23:03:01.174 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : zip-3.0-35.el9.x86_64 4/8
2026-03-31T23:03:01.176 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T23:03:01.176 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : unzip-6.0-59.el9.x86_64 5/8
2026-03-31T23:03:01.178 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : lua-5.4.4-4.el9.x86_64 6/8
2026-03-31T23:03:01.181 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/8
2026-03-31T23:03:01.184 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : luarocks-3.9.2-5.el9.noarch 2/8
2026-03-31T23:03:01.185 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : lua-devel-5.4.4-4.el9.x86_64 3/8
2026-03-31T23:03:01.188 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : zip-3.0-35.el9.x86_64 4/8
2026-03-31T23:03:01.189 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.189 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.189 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service".
2026-03-31T23:03:01.189 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mds.target".
2026-03-31T23:03:01.189 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mds.target".
2026-03-31T23:03:01.189 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.190 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.190 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : unzip-6.0-59.el9.x86_64 5/8
2026-03-31T23:03:01.193 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : lua-5.4.4-4.el9.x86_64 6/8
2026-03-31T23:03:01.199 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.204 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.204 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.204 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service".
2026-03-31T23:03:01.204 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mds.target".
2026-03-31T23:03:01.204 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mds.target".
2026-03-31T23:03:01.204 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.205 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.210 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.210 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.210 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-mds@*.service" escaped as "ceph-mds@\x2a.service".
2026-03-31T23:03:01.210 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mds.target".
2026-03-31T23:03:01.210 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mds.target".
2026-03-31T23:03:01.210 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.211 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.212 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.217 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 7/8
2026-03-31T23:03:01.221 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.222 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.222 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service".
2026-03-31T23:03:01.222 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mon.target".
2026-03-31T23:03:01.222 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mon.target".
2026-03-31T23:03:01.222 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.223 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service".
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mon.target".
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mon.target".
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-mon@*.service" escaped as "ceph-mon@\x2a.service".
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mon.target".
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mon.target".
2026-03-31T23:03:01.234 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.235 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.237 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.320 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.320 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/8
2026-03-31T23:03:01.320 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 2/8
2026-03-31T23:03:01.320 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 3/8
2026-03-31T23:03:01.320 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : lua-5.4.4-4.el9.x86_64 4/8
2026-03-31T23:03:01.320 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 5/8
2026-03-31T23:03:01.320 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 6/8
2026-03-31T23:03:01.320 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : unzip-6.0-59.el9.x86_64 7/8
2026-03-31T23:03:01.325 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.325 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/8
2026-03-31T23:03:01.325 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 2/8
2026-03-31T23:03:01.325 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 3/8
2026-03-31T23:03:01.325 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : lua-5.4.4-4.el9.x86_64 4/8
2026-03-31T23:03:01.325 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 5/8
2026-03-31T23:03:01.325 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 6/8
2026-03-31T23:03:01.325 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : unzip-6.0-59.el9.x86_64 7/8
2026-03-31T23:03:01.330 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 8/8
2026-03-31T23:03:01.330 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-2:20.2.0-721.g5bb32787.el9.x86_64 1/8
2026-03-31T23:03:01.330 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64 2/8
2026-03-31T23:03:01.330 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64 3/8
2026-03-31T23:03:01.330 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : lua-5.4.4-4.el9.x86_64 4/8
2026-03-31T23:03:01.330 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : lua-devel-5.4.4-4.el9.x86_64 5/8
2026-03-31T23:03:01.330 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : luarocks-3.9.2-5.el9.noarch 6/8
2026-03-31T23:03:01.330 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : unzip-6.0-59.el9.x86_64 7/8
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : zip-3.0-35.el9.x86_64 8/8
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout:Removed:
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: ceph-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: lua-5.4.4-4.el9.x86_64
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: lua-devel-5.4.4-4.el9.x86_64
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: luarocks-3.9.2-5.el9.noarch
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: unzip-6.0-59.el9.x86_64
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout: zip-3.0-35.el9.x86_64
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.373 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:01.374 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : zip-3.0-35.el9.x86_64 8/8
2026-03-31T23:03:01.374 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout:Removed:
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout: ceph-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout: lua-5.4.4-4.el9.x86_64
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout: lua-devel-5.4.4-4.el9.x86_64
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout: luarocks-3.9.2-5.el9.noarch
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout: unzip-6.0-59.el9.x86_64
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout: zip-3.0-35.el9.x86_64
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.375 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:01.380 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : zip-3.0-35.el9.x86_64 8/8
2026-03-31T23:03:01.380 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.380 INFO:teuthology.orchestra.run.vm09.stdout:Removed:
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout: ceph-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mds-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout: lua-5.4.4-4.el9.x86_64
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout: lua-devel-5.4.4-4.el9.x86_64
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout: luarocks-3.9.2-5.el9.noarch
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout: unzip-6.0-59.el9.x86_64
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout: zip-3.0-35.el9.x86_64
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.381 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:01.579 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:01.582 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout:===========================================================================================
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repository Size
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout:===========================================================================================
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout:Removing:
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-base x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 24 M
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout:Removing dependent packages:
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 447 k
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.9 M
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-cephadm noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 940 k
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-dashboard noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 140 M
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 66 M
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-rook noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 567 k
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-osd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 54 M
2026-03-31T23:03:01.585 INFO:teuthology.orchestra.run.vm00.stdout: ceph-volume noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.4 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: rbd-mirror x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 11 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout:Removing unused dependencies:
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: abseil-cpp x86_64 20211102.0-4.el9 @epel 1.9 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 98 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: ceph-grafana-dashboards noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 996 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 60 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-modules-core noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.6 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: ceph-prometheus-alerts noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 59 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: ceph-selinux x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 138 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: cryptsetup x86_64 2.8.1-3.el9 @baseos 770 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas x86_64 3.0.4-9.el9 @appstream 68 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-netlib x86_64 3.0.4-9.el9 @appstream 11 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-openblas-openmp x86_64 3.0.4-9.el9 @appstream 39 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: gperftools-libs x86_64 2.9.1-3.el9 @epel 1.4 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: grpc-data noarch 1.46.7-10.el9 @epel 13 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: ledmon-libs x86_64 1.1.0-3.el9 @baseos 80 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: libcephsqlite x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 409 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: libconfig x86_64 1.7.2-9.el9 @baseos 220 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: libgfortran x86_64 11.5.0-14.el9 @baseos 2.8 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: liboath x86_64 2.6.12-1.el9 @epel 94 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: libquadmath x86_64 11.5.0-14.el9 @baseos 330 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: libradosstriper1 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 792 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: libstoragemgmt x86_64 1.10.1-1.el9 @appstream 685 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: libunwind x86_64 1.6.2-1.el9 @epel 170 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: libxslt x86_64 1.1.34-12.el9 @appstream 743 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: openblas x86_64 0.3.29-1.el9 @appstream 112 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: openblas-openmp x86_64 0.3.29-1.el9 @appstream 46 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: pciutils x86_64 3.7.0-7.el9 @baseos 216 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: protobuf x86_64 3.14.0-17.el9 @appstream 3.5 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: protobuf-compiler x86_64 3.14.0-17.el9 @crb 2.9 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-asyncssh noarch 2.13.2-5.el9 @epel 3.9 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-autocommand noarch 2.2.2-8.el9 @epel 82 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-babel noarch 2.9.1-2.el9 @appstream 27 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 @epel 254 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-bcrypt x86_64 3.2.2-1.el9 @epel 87 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-cachetools noarch 4.2.4-1.el9 @epel 93 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 855 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-certifi noarch 2023.05.07-4.el9 @epel 6.3 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-cffi x86_64 1.14.5-5.el9 @baseos 1.0 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-chardet noarch 4.0.0-5.el9 @anaconda 1.4 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-cheroot noarch 10.0.1-5.el9 @epel 682 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-cherrypy noarch 18.10.0-5.el9 @epel 1.0 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-cryptography x86_64 36.0.1-5.el9 @baseos 4.5 M
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-devel x86_64 3.9.25-3.el9 @appstream 765 k
2026-03-31T23:03:01.586 INFO:teuthology.orchestra.run.vm00.stdout: python3-google-auth noarch 1:2.45.0-1.el9 @epel 1.4 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-grpcio x86_64 1.46.7-10.el9 @epel 6.7 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 @epel 418 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-idna noarch 2.10-7.el9.1 @anaconda 513 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-influxdb noarch 5.3.1-1.el9 @epel 747 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-isodate noarch 0.6.1-3.el9 @epel 203 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco noarch 8.2.1-3.el9 @epel 3.7 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 @epel 24 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 @epel 55 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-context noarch 6.0.1-3.el9 @epel 31 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 @epel 33 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-text noarch 4.0.0-2.el9 @epel 51 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jinja2 noarch 2.11.3-8.el9 @appstream 1.1 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jsonpatch noarch 1.21-16.el9 @koji-override-0 55 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-jsonpointer noarch 2.0-4.el9 @koji-override-0 34 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 @epel 21 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 @appstream 832 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-lxml x86_64 4.6.5-3.el9 @appstream 4.2 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-markupsafe x86_64 1.1.1-12.el9 @appstream 60 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-more-itertools noarch 8.12.0-2.el9 @epel 378 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-msgpack x86_64 1.0.3-2.el9 @epel 264 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-natsort noarch 7.1.1-5.el9 @epel 215 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-numpy x86_64 1:1.23.5-2.el9 @appstream 30 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9 @appstream 1.7 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-oauthlib noarch 3.1.1-5.el9 @koji-override-0 888 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-packaging noarch 20.9-5.el9 @appstream 248 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-ply noarch 3.11-14.el9 @baseos 430 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-portend noarch 3.1.0-2.el9 @epel 20 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-prettytable noarch 0.7.2-27.el9 @koji-override-0 166 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-protobuf noarch 3.14.0-17.el9 @appstream 1.4 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 @epel 389 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyasn1 noarch 0.4.8-7.el9 @appstream 622 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9 @appstream 1.0 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-pycparser noarch 2.20-6.el9 @baseos 745 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-pysocks noarch 1.7.1-12.el9 @anaconda 88 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-pytz noarch 2021.1-5.el9 @koji-override-0 176 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-repoze-lru noarch 0.7-16.el9 @epel 83 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-requests noarch 2.25.1-10.el9 @baseos 405 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 @appstream 119 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-routes noarch 2.5.1-5.el9 @epel 459 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-rsa noarch 4.9-2.el9 @epel 202 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-saml noarch 1.16.0-1.el9 @epel 730 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-scipy x86_64 1.9.3-2.el9 @appstream 76 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-tempora noarch 5.0.0-2.el9 @epel 96 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-toml noarch 0.10.2-6.el9 @appstream 99 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-typing-extensions noarch 4.15.0-1.el9 @epel 447 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-urllib3 noarch 1.26.5-7.el9 @baseos 746 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-websocket-client noarch 1.2.3-2.el9 @epel 319 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-xmlsec x86_64 1.3.13-1.el9 @epel 158 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: python3-zc-lockfile noarch 2.0-10.el9 @epel 35 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: qatlib x86_64 25.08.0-2.el9 @appstream 639 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: qatlib-service x86_64 25.08.0-2.el9 @appstream 69 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: qatzip-libs x86_64 1.3.1-1.el9 @appstream 148 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: smartmontools x86_64 1:7.2-10.el9 @baseos 1.9 M
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: xmlsec1 x86_64 1.2.29-13.el9 @appstream 596 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 @appstream 281 k
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout:===========================================================================================
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout:Remove 108 Packages
2026-03-31T23:03:01.587 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 675 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout:===========================================================================================
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repository Size
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout:===========================================================================================
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout:Removing:
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-base x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 24 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout:Removing dependent packages:
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 447 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.9 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-cephadm noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 940 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-dashboard noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 140 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 66 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-rook noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 567 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-osd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 54 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-volume noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.4 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: rbd-mirror x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 11 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout:Removing unused dependencies:
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: abseil-cpp x86_64 20211102.0-4.el9 @epel 1.9 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 98 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-grafana-dashboards noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 996 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 60 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-modules-core noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.6 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-prometheus-alerts noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 59 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ceph-selinux x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 138 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: cryptsetup x86_64 2.8.1-3.el9 @baseos 770 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas x86_64 3.0.4-9.el9 @appstream 68 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-netlib x86_64 3.0.4-9.el9 @appstream 11 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-openblas-openmp x86_64 3.0.4-9.el9 @appstream 39 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: gperftools-libs x86_64 2.9.1-3.el9 @epel 1.4 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: grpc-data noarch 1.46.7-10.el9 @epel 13 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: ledmon-libs x86_64 1.1.0-3.el9 @baseos 80 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: libcephsqlite x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 409 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: libconfig x86_64 1.7.2-9.el9 @baseos 220 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: libgfortran x86_64 11.5.0-14.el9 @baseos 2.8 M
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: liboath x86_64 2.6.12-1.el9 @epel 94 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: libquadmath x86_64 11.5.0-14.el9 @baseos 330 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: libradosstriper1 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 792 k
2026-03-31T23:03:01.588 INFO:teuthology.orchestra.run.vm05.stdout: libstoragemgmt x86_64 1.10.1-1.el9 @appstream 685 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: libunwind x86_64 1.6.2-1.el9 @epel 170 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: libxslt x86_64 1.1.34-12.el9 @appstream 743 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: openblas x86_64 0.3.29-1.el9 @appstream 112 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: openblas-openmp x86_64 0.3.29-1.el9 @appstream 46 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: pciutils x86_64 3.7.0-7.el9 @baseos 216 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: protobuf x86_64 3.14.0-17.el9 @appstream 3.5 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: protobuf-compiler x86_64 3.14.0-17.el9 @crb 2.9 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-asyncssh noarch 2.13.2-5.el9 @epel 3.9 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-autocommand noarch 2.2.2-8.el9 @epel 82 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-babel noarch 2.9.1-2.el9 @appstream 27 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 @epel 254 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-bcrypt x86_64 3.2.2-1.el9 @epel 87 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-cachetools noarch 4.2.4-1.el9 @epel 93 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 855 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-certifi noarch 2023.05.07-4.el9 @epel 6.3 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-cffi x86_64 1.14.5-5.el9 @baseos 1.0 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-chardet noarch 4.0.0-5.el9 @anaconda 1.4 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-cheroot noarch 10.0.1-5.el9 @epel 682 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-cherrypy noarch 18.10.0-5.el9 @epel 1.0 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-cryptography x86_64 36.0.1-5.el9 @baseos 4.5 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-devel x86_64 3.9.25-3.el9 @appstream 765 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-google-auth noarch 1:2.45.0-1.el9 @epel 1.4 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-grpcio x86_64 1.46.7-10.el9 @epel 6.7 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 @epel 418 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-idna noarch 2.10-7.el9.1 @anaconda 513 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-influxdb noarch 5.3.1-1.el9 @epel 747 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-isodate noarch 0.6.1-3.el9 @epel 203 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco noarch 8.2.1-3.el9 @epel 3.7 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 @epel 24 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 @epel 55 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-context noarch 6.0.1-3.el9 @epel 31 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 @epel 33 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-text noarch 4.0.0-2.el9 @epel 51 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jinja2 noarch 2.11.3-8.el9 @appstream 1.1 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jsonpatch noarch 1.21-16.el9 @koji-override-0 55 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-jsonpointer noarch 2.0-4.el9 @koji-override-0 34 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 @epel 21 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 @appstream 832 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-lxml x86_64 4.6.5-3.el9 @appstream 4.2 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-markupsafe x86_64 1.1.1-12.el9 @appstream 60 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-more-itertools noarch 8.12.0-2.el9 @epel 378 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-msgpack x86_64 1.0.3-2.el9 @epel 264 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-natsort noarch 7.1.1-5.el9 @epel 215 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-numpy x86_64 1:1.23.5-2.el9 @appstream 30 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9 @appstream 1.7 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-oauthlib noarch 3.1.1-5.el9 @koji-override-0 888 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-packaging noarch 20.9-5.el9 @appstream 248 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-ply noarch 3.11-14.el9 @baseos 430 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-portend noarch 3.1.0-2.el9 @epel 20 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-prettytable noarch 0.7.2-27.el9 @koji-override-0 166 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-protobuf noarch 3.14.0-17.el9 @appstream 1.4 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 @epel 389 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyasn1 noarch 0.4.8-7.el9 @appstream 622 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9 @appstream 1.0 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-pycparser noarch 2.20-6.el9 @baseos 745 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-pysocks noarch 1.7.1-12.el9 @anaconda 88 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-pytz noarch 2021.1-5.el9 @koji-override-0 176 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-repoze-lru noarch 0.7-16.el9 @epel 83 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-requests noarch 2.25.1-10.el9 @baseos 405 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 @appstream 119 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-routes noarch 2.5.1-5.el9 @epel 459 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-rsa noarch 4.9-2.el9 @epel 202 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-saml noarch 1.16.0-1.el9 @epel 730 k
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-scipy x86_64 1.9.3-2.el9 @appstream 76 M
2026-03-31T23:03:01.589 INFO:teuthology.orchestra.run.vm05.stdout: python3-tempora noarch 5.0.0-2.el9 @epel 96 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: python3-toml noarch 0.10.2-6.el9 @appstream 99 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: python3-typing-extensions noarch 4.15.0-1.el9 @epel 447 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: python3-urllib3 noarch 1.26.5-7.el9 @baseos 746 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: python3-websocket-client noarch 1.2.3-2.el9 @epel 319 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: python3-xmlsec x86_64 1.3.13-1.el9 @epel 158 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: python3-zc-lockfile noarch 2.0-10.el9 @epel 35 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: qatlib x86_64 25.08.0-2.el9 @appstream 639 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: qatlib-service x86_64 25.08.0-2.el9 @appstream 69 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: qatzip-libs x86_64 1.3.1-1.el9 @appstream 148 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: smartmontools x86_64 1:7.2-10.el9 @baseos 1.9 M
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: xmlsec1 x86_64 1.2.29-13.el9 @appstream 596 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 @appstream 281 k
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout:===========================================================================================
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout:Remove 108 Packages
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 675 M
2026-03-31T23:03:01.590 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T23:03:01.594 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout:===========================================================================================
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repository Size
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout:===========================================================================================
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout:Removing:
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-base x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 24 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout:Removing dependent packages:
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-immutable-object-cache x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 447 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.9 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-cephadm noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 940 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-dashboard noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 140 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-diskprediction-local noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 66 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-rook noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 567 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-osd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 54 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-volume noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.4 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: rbd-mirror x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 11 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout:Removing unused dependencies:
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: abseil-cpp x86_64 20211102.0-4.el9 @epel 1.9 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 98 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-grafana-dashboards noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 996 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-k8sevents noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 60 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-modules-core noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.6 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-prometheus-alerts noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 59 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ceph-selinux x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 138 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: cryptsetup x86_64 2.8.1-3.el9 @baseos 770 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas x86_64 3.0.4-9.el9 @appstream 68 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-netlib x86_64 3.0.4-9.el9 @appstream 11 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-openblas-openmp x86_64 3.0.4-9.el9 @appstream 39 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: gperftools-libs x86_64 2.9.1-3.el9 @epel 1.4 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: grpc-data noarch 1.46.7-10.el9 @epel 13 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: ledmon-libs x86_64 1.1.0-3.el9 @baseos 80 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: libcephsqlite x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 409 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: libconfig x86_64 1.7.2-9.el9 @baseos 220 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: libgfortran x86_64 11.5.0-14.el9 @baseos 2.8 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: liboath x86_64 2.6.12-1.el9 @epel 94 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: libquadmath x86_64 11.5.0-14.el9 @baseos 330 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: libradosstriper1 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 792 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: libstoragemgmt x86_64 1.10.1-1.el9 @appstream 685 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: libunwind x86_64 1.6.2-1.el9 @epel 170 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: libxslt x86_64 1.1.34-12.el9 @appstream 743 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: openblas x86_64 0.3.29-1.el9 @appstream 112 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: openblas-openmp x86_64 0.3.29-1.el9 @appstream 46 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: pciutils x86_64 3.7.0-7.el9 @baseos 216 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: protobuf x86_64 3.14.0-17.el9 @appstream 3.5 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: protobuf-compiler x86_64 3.14.0-17.el9 @crb 2.9 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: python3-asyncssh noarch 2.13.2-5.el9 @epel 3.9 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: python3-autocommand noarch 2.2.2-8.el9 @epel 82 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: python3-babel noarch 2.9.1-2.el9 @appstream 27 M
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: python3-backports-tarfile noarch 1.2.0-1.el9 @epel 254 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: python3-bcrypt x86_64 3.2.2-1.el9 @epel 87 k
2026-03-31T23:03:01.601 INFO:teuthology.orchestra.run.vm09.stdout: python3-cachetools noarch 4.2.4-1.el9 @epel 93 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-ceph-common x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 855 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-certifi noarch 2023.05.07-4.el9 @epel 6.3 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-cffi x86_64 1.14.5-5.el9 @baseos 1.0 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-chardet noarch 4.0.0-5.el9 @anaconda 1.4 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-cheroot noarch 10.0.1-5.el9 @epel 682 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-cherrypy noarch 18.10.0-5.el9 @epel 1.0 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-cryptography x86_64 36.0.1-5.el9 @baseos 4.5 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-devel x86_64 3.9.25-3.el9 @appstream 765 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-google-auth noarch 1:2.45.0-1.el9 @epel 1.4 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-grpcio x86_64 1.46.7-10.el9 @epel 6.7 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-grpcio-tools x86_64 1.46.7-10.el9 @epel 418 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-idna noarch 2.10-7.el9.1 @anaconda 513 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-influxdb noarch 5.3.1-1.el9 @epel 747 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-isodate noarch 0.6.1-3.el9 @epel 203 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco noarch 8.2.1-3.el9 @epel 3.7 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-classes noarch 3.2.1-5.el9 @epel 24 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-collections noarch 3.0.0-8.el9 @epel 55 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-context noarch 6.0.1-3.el9 @epel 31 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-functools noarch 3.5.0-2.el9 @epel 33 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-text noarch 4.0.0-2.el9 @epel 51 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jinja2 noarch 2.11.3-8.el9 @appstream 1.1 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jsonpatch noarch 1.21-16.el9 @koji-override-0 55 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-jsonpointer noarch 2.0-4.el9 @koji-override-0 34 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-kubernetes noarch 1:26.1.0-3.el9 @epel 21 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-libstoragemgmt x86_64 1.10.1-1.el9 @appstream 832 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-lxml x86_64 4.6.5-3.el9 @appstream 4.2 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-markupsafe x86_64 1.1.1-12.el9 @appstream 60 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-more-itertools noarch 8.12.0-2.el9 @epel 378 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-msgpack x86_64 1.0.3-2.el9 @epel 264 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-natsort noarch 7.1.1-5.el9 @epel 215 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-numpy x86_64 1:1.23.5-2.el9 @appstream 30 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-numpy-f2py x86_64 1:1.23.5-2.el9 @appstream 1.7 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-oauthlib noarch 3.1.1-5.el9 @koji-override-0 888 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-packaging noarch 20.9-5.el9 @appstream 248 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-ply noarch 3.11-14.el9 @baseos 430 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-portend noarch 3.1.0-2.el9 @epel 20 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-prettytable noarch 0.7.2-27.el9 @koji-override-0 166 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-protobuf noarch 3.14.0-17.el9 @appstream 1.4 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyOpenSSL noarch 21.0.0-1.el9 @epel 389 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyasn1 noarch 0.4.8-7.el9 @appstream 622 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyasn1-modules noarch 0.4.8-7.el9 @appstream 1.0 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-pycparser noarch 2.20-6.el9 @baseos 745 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-pysocks noarch 1.7.1-12.el9 @anaconda 88 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-pytz noarch 2021.1-5.el9 @koji-override-0 176 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-repoze-lru noarch 0.7-16.el9 @epel 83 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-requests noarch 2.25.1-10.el9 @baseos 405 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-requests-oauthlib noarch 1.3.0-12.el9 @appstream 119 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-routes noarch 2.5.1-5.el9 @epel 459 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-rsa noarch 4.9-2.el9 @epel 202 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-saml noarch 1.16.0-1.el9 @epel 730 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-scipy x86_64 1.9.3-2.el9 @appstream 76 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-tempora noarch 5.0.0-2.el9 @epel 96 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-toml noarch 0.10.2-6.el9 @appstream 99 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-typing-extensions noarch 4.15.0-1.el9 @epel 447 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-urllib3 noarch 1.26.5-7.el9 @baseos 746 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-websocket-client noarch 1.2.3-2.el9 @epel 319 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-xmlsec x86_64 1.3.13-1.el9 @epel 158 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: python3-zc-lockfile noarch 2.0-10.el9 @epel 35 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: qatlib x86_64 25.08.0-2.el9 @appstream 639 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: qatlib-service x86_64 25.08.0-2.el9 @appstream 69 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: qatzip-libs x86_64 1.3.1-1.el9 @appstream 148 k
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: smartmontools x86_64 1:7.2-10.el9 @baseos 1.9 M
2026-03-31T23:03:01.602 INFO:teuthology.orchestra.run.vm09.stdout: xmlsec1 x86_64 1.2.29-13.el9 @appstream 596 k
2026-03-31T23:03:01.603 INFO:teuthology.orchestra.run.vm09.stdout: xmlsec1-openssl x86_64 1.2.29-13.el9 @appstream 281 k
2026-03-31T23:03:01.603 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.603 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T23:03:01.603 INFO:teuthology.orchestra.run.vm09.stdout:===========================================================================================
2026-03-31T23:03:01.603 INFO:teuthology.orchestra.run.vm09.stdout:Remove 108 Packages
2026-03-31T23:03:01.603 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.603 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 675 M
2026-03-31T23:03:01.603 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T23:03:01.614 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T23:03:01.614 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T23:03:01.618 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T23:03:01.618 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T23:03:01.632 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T23:03:01.632 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T23:03:01.724 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T23:03:01.724 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T23:03:01.731 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T23:03:01.731 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T23:03:01.743 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T23:03:01.743 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T23:03:01.878 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1
2026-03-31T23:03:01.878 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 1/108
2026-03-31T23:03:01.885 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T23:03:01.885 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 1/108
2026-03-31T23:03:01.886 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 1/108
2026-03-31T23:03:01.894 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 1/108
2026-03-31T23:03:01.896 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T23:03:01.896 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 1/108
2026-03-31T23:03:01.904 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 1/108
2026-03-31T23:03:01.907 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.907 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.907 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service".
2026-03-31T23:03:01.907 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mgr.target".
2026-03-31T23:03:01.907 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mgr.target".
2026-03-31T23:03:01.907 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:01.908 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.912 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.912 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.912 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service".
2026-03-31T23:03:01.912 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mgr.target".
2026-03-31T23:03:01.912 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mgr.target".
2026-03-31T23:03:01.912 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:01.912 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.920 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.920 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:01.920 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-mgr@*.service" escaped as "ceph-mgr@\x2a.service".
2026-03-31T23:03:01.920 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-mgr.target".
2026-03-31T23:03:01.920 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-mgr.target".
2026-03-31T23:03:01.920 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:01.920 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.922 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.927 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.932 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:01.983 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 3/108
2026-03-31T23:03:01.983 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 4/108
2026-03-31T23:03:01.988 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 3/108
2026-03-31T23:03:01.988 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 4/108
2026-03-31T23:03:01.993 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 3/108
2026-03-31T23:03:01.993 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 4/108
2026-03-31T23:03:02.000 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 4/108
2026-03-31T23:03:02.006 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-influxdb-5.3.1-1.el9.noarch 5/108
2026-03-31T23:03:02.006 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 6/108
2026-03-31T23:03:02.008 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 4/108
2026-03-31T23:03:02.010 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 4/108
2026-03-31T23:03:02.015 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-influxdb-5.3.1-1.el9.noarch 5/108
2026-03-31T23:03:02.015 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 6/108
2026-03-31T23:03:02.016 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-influxdb-5.3.1-1.el9.noarch 5/108
2026-03-31T23:03:02.016 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 6/108
2026-03-31T23:03:02.019 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 6/108
2026-03-31T23:03:02.026 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-cherrypy-18.10.0-5.el9.noarch 7/108
2026-03-31T23:03:02.028 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 6/108
2026-03-31T23:03:02.029 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 6/108
2026-03-31T23:03:02.030 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-cheroot-10.0.1-5.el9.noarch 8/108
2026-03-31T23:03:02.037 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-cherrypy-18.10.0-5.el9.noarch 7/108
2026-03-31T23:03:02.037 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-cherrypy-18.10.0-5.el9.noarch 7/108
2026-03-31T23:03:02.039 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-grpcio-tools-1.46.7-10.el9.x86_64 9/108
2026-03-31T23:03:02.042 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-cheroot-10.0.1-5.el9.noarch 8/108
2026-03-31T23:03:02.042 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-cheroot-10.0.1-5.el9.noarch 8/108
2026-03-31T23:03:02.043 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-grpcio-1.46.7-10.el9.x86_64 10/108
2026-03-31T23:03:02.051 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-grpcio-tools-1.46.7-10.el9.x86_64 9/108
2026-03-31T23:03:02.052 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-grpcio-tools-1.46.7-10.el9.x86_64 9/108
2026-03-31T23:03:02.056 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-grpcio-1.46.7-10.el9.x86_64 10/108
2026-03-31T23:03:02.056 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-grpcio-1.46.7-10.el9.x86_64 10/108
2026-03-31T23:03:02.066 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.066 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.066 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service".
2026-03-31T23:03:02.066 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-osd.target".
2026-03-31T23:03:02.066 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-osd.target".
2026-03-31T23:03:02.066 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:02.071 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.077 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service".
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-osd.target".
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-osd.target".
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-osd@*.service" escaped as "ceph-osd@\x2a.service".
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-osd.target".
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-osd.target".
2026-03-31T23:03:02.078 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:02.081 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.083 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.086 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.093 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.099 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 11/108
2026-03-31T23:03:02.101 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.101 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.101 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service".
2026-03-31T23:03:02.101 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:02.109 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.111 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.111 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.111 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service".
2026-03-31T23:03:02.111 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:02.115 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.116 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.116 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-volume@*.service" escaped as "ceph-volume@\x2a.service".
2026-03-31T23:03:02.116 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:02.120 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.121 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.124 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jaraco-collections-3.0.0-8.el9.noarch 13/108
2026-03-31T23:03:02.125 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.129 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jaraco-text-4.0.0-2.el9.noarch 14/108
2026-03-31T23:03:02.131 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.133 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jaraco-collections-3.0.0-8.el9.noarch 13/108
2026-03-31T23:03:02.134 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jinja2-2.11.3-8.el9.noarch 15/108
2026-03-31T23:03:02.136 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:02.138 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jaraco-text-4.0.0-2.el9.noarch 14/108
2026-03-31T23:03:02.139 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-jaraco-collections-3.0.0-8.el9.noarch 13/108
2026-03-31T23:03:02.144 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jinja2-2.11.3-8.el9.noarch 15/108
2026-03-31T23:03:02.144 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-jaraco-text-4.0.0-2.el9.noarch 14/108
2026-03-31T23:03:02.166 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-saml-1.16.0-1.el9.noarch 16/108
2026-03-31T23:03:02.213 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-saml-1.16.0-1.el9.noarch 16/108
2026-03-31T23:03:02.224 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-babel-2.9.1-2.el9.noarch 17/108
2026-03-31T23:03:02.224 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-babel-2.9.1-2.el9.noarch 17/108
2026-03-31T23:03:02.225 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-jinja2-2.11.3-8.el9.noarch 15/108
2026-03-31T23:03:02.226 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jaraco-classes-3.2.1-5.el9.noarch 18/108
2026-03-31T23:03:02.227 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jaraco-classes-3.2.1-5.el9.noarch 18/108
2026-03-31T23:03:02.235 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-pyOpenSSL-21.0.0-1.el9.noarch 19/108
2026-03-31T23:03:02.237 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-pyOpenSSL-21.0.0-1.el9.noarch 19/108
2026-03-31T23:03:02.246 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-asyncssh-2.13.2-5.el9.noarch 20/108
2026-03-31T23:03:02.246 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 21/108
2026-03-31T23:03:02.248 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-asyncssh-2.13.2-5.el9.noarch 20/108
2026-03-31T23:03:02.248 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 21/108
2026-03-31T23:03:02.254 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 21/108
2026-03-31T23:03:02.256 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-saml-1.16.0-1.el9.noarch 16/108
2026-03-31T23:03:02.256 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 21/108
2026-03-31T23:03:02.263 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-babel-2.9.1-2.el9.noarch 17/108
2026-03-31T23:03:02.266 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-jaraco-classes-3.2.1-5.el9.noarch 18/108
2026-03-31T23:03:02.276 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-pyOpenSSL-21.0.0-1.el9.noarch 19/108
2026-03-31T23:03:02.287 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-asyncssh-2.13.2-5.el9.noarch 20/108
2026-03-31T23:03:02.287 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 21/108
2026-03-31T23:03:02.296 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 21/108
2026-03-31T23:03:02.350 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jsonpatch-1.21-16.el9.noarch 22/108
2026-03-31T23:03:02.360 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jsonpatch-1.21-16.el9.noarch 22/108
2026-03-31T23:03:02.365 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-scipy-1.9.3-2.el9.x86_64 23/108
2026-03-31T23:03:02.372 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-xmlsec-1.3.13-1.el9.x86_64 24/108
2026-03-31T23:03:02.374 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-scipy-1.9.3-2.el9.x86_64 23/108
2026-03-31T23:03:02.376 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-lxml-4.6.5-3.el9.x86_64 25/108
2026-03-31T23:03:02.381 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-xmlsec-1.3.13-1.el9.x86_64 24/108
2026-03-31T23:03:02.385 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-lxml-4.6.5-3.el9.x86_64 25/108
2026-03-31T23:03:02.390 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.390 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/multi-user.target.wants/libstoragemgmt.service".
2026-03-31T23:03:02.390 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:02.391 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.392 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-jsonpatch-1.21-16.el9.noarch 22/108
2026-03-31T23:03:02.398 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.398 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/multi-user.target.wants/libstoragemgmt.service".
2026-03-31T23:03:02.398 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:02.399 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.407 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-scipy-1.9.3-2.el9.x86_64 23/108
2026-03-31T23:03:02.414 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-xmlsec-1.3.13-1.el9.x86_64 24/108
2026-03-31T23:03:02.418 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-lxml-4.6.5-3.el9.x86_64 25/108
2026-03-31T23:03:02.419 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.424 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 27/108
2026-03-31T23:03:02.426 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : xmlsec1-openssl-1.2.29-13.el9.x86_64 28/108
2026-03-31T23:03:02.427 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.433 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 27/108
2026-03-31T23:03:02.433 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.433 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/multi-user.target.wants/libstoragemgmt.service".
2026-03-31T23:03:02.433 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:02.435 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.436 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : xmlsec1-openssl-1.2.29-13.el9.x86_64 28/108
2026-03-31T23:03:02.441 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : xmlsec1-1.2.29-13.el9.x86_64 29/108
2026-03-31T23:03:02.447 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-cryptography-36.0.1-5.el9.x86_64 30/108
2026-03-31T23:03:02.450 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : xmlsec1-1.2.29-13.el9.x86_64 29/108
2026-03-31T23:03:02.450 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : protobuf-compiler-3.14.0-17.el9.x86_64 31/108
2026-03-31T23:03:02.452 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-bcrypt-3.2.2-1.el9.x86_64 32/108
2026-03-31T23:03:02.456 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-cryptography-36.0.1-5.el9.x86_64 30/108
2026-03-31T23:03:02.459 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : protobuf-compiler-3.14.0-17.el9.x86_64 31/108
2026-03-31T23:03:02.461 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libstoragemgmt-1.10.1-1.el9.x86_64 26/108
2026-03-31T23:03:02.461 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-bcrypt-3.2.2-1.el9.x86_64 32/108
2026-03-31T23:03:02.465 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 27/108
2026-03-31T23:03:02.468 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : xmlsec1-openssl-1.2.29-13.el9.x86_64 28/108
2026-03-31T23:03:02.475 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.475 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.475 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service".
2026-03-31T23:03:02.475 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target".
2026-03-31T23:03:02.475 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target".
2026-03-31T23:03:02.475 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:02.477 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.481 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : xmlsec1-1.2.29-13.el9.x86_64 29/108
2026-03-31T23:03:02.482 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.482 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.482 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service".
2026-03-31T23:03:02.482 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target".
2026-03-31T23:03:02.482 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target".
2026-03-31T23:03:02.482 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:02.483 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.487 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.487 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-cryptography-36.0.1-5.el9.x86_64 30/108
2026-03-31T23:03:02.489 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jaraco-context-6.0.1-3.el9.noarch 34/108
2026-03-31T23:03:02.490 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : protobuf-compiler-3.14.0-17.el9.x86_64 31/108
2026-03-31T23:03:02.492 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.492 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-portend-3.1.0-2.el9.noarch 35/108
2026-03-31T23:03:02.492 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-bcrypt-3.2.2-1.el9.x86_64 32/108
2026-03-31T23:03:02.494 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-tempora-5.0.0-2.el9.noarch 36/108
2026-03-31T23:03:02.494 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jaraco-context-6.0.1-3.el9.noarch 34/108
2026-03-31T23:03:02.497 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-portend-3.1.0-2.el9.noarch 35/108
2026-03-31T23:03:02.498 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jaraco-functools-3.5.0-2.el9.noarch 37/108
2026-03-31T23:03:02.500 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-tempora-5.0.0-2.el9.noarch 36/108
2026-03-31T23:03:02.501 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-routes-2.5.1-5.el9.noarch 38/108
2026-03-31T23:03:02.501 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 39/108
2026-03-31T23:03:02.504 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jaraco-functools-3.5.0-2.el9.noarch 37/108
2026-03-31T23:03:02.506 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-routes-2.5.1-5.el9.noarch 38/108
2026-03-31T23:03:02.506 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 39/108
2026-03-31T23:03:02.515 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.515 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this.
2026-03-31T23:03:02.515 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-rbd-mirror@*.service" escaped as "ceph-rbd-mirror@\x2a.service".
2026-03-31T23:03:02.515 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/multi-user.target.wants/ceph-rbd-mirror.target".
2026-03-31T23:03:02.515 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-rbd-mirror.target".
2026-03-31T23:03:02.515 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:02.516 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.526 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 33/108
2026-03-31T23:03:02.528 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-jaraco-context-6.0.1-3.el9.noarch 34/108
2026-03-31T23:03:02.532 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-portend-3.1.0-2.el9.noarch 35/108
2026-03-31T23:03:02.534 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-tempora-5.0.0-2.el9.noarch 36/108
2026-03-31T23:03:02.538 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-jaraco-functools-3.5.0-2.el9.noarch 37/108
2026-03-31T23:03:02.541 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-routes-2.5.1-5.el9.noarch 38/108
2026-03-31T23:03:02.541 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 39/108
2026-03-31T23:03:02.557 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 39/108
2026-03-31T23:03:02.560 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 39/108
2026-03-31T23:03:02.565 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-kubernetes-1:26.1.0-3.el9.noarch 40/108
2026-03-31T23:03:02.568 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-kubernetes-1:26.1.0-3.el9.noarch 40/108
2026-03-31T23:03:02.569 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-requests-oauthlib-1.3.0-12.el9.noarch 41/108
2026-03-31T23:03:02.572 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-requests-oauthlib-1.3.0-12.el9.noarch 41/108
2026-03-31T23:03:02.578 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-requests-2.25.1-10.el9.noarch 42/108
2026-03-31T23:03:02.581 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-requests-2.25.1-10.el9.noarch 42/108
2026-03-31T23:03:02.582 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-google-auth-1:2.45.0-1.el9.noarch 43/108
2026-03-31T23:03:02.586 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-google-auth-1:2.45.0-1.el9.noarch 43/108
2026-03-31T23:03:02.592 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-rsa-4.9-2.el9.noarch 44/108
2026-03-31T23:03:02.597 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-rsa-4.9-2.el9.noarch 44/108
2026-03-31T23:03:02.598 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 39/108
2026-03-31T23:03:02.599 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-pyasn1-modules-0.4.8-7.el9.noarch 45/108 2026-03-31T23:03:02.603 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-pyasn1-modules-0.4.8-7.el9.noarch 45/108 2026-03-31T23:03:02.603 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-urllib3-1.26.5-7.el9.noarch 46/108 2026-03-31T23:03:02.607 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-kubernetes-1:26.1.0-3.el9.noarch 40/108 2026-03-31T23:03:02.608 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-cffi-1.14.5-5.el9.x86_64 47/108 2026-03-31T23:03:02.608 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-urllib3-1.26.5-7.el9.noarch 46/108 2026-03-31T23:03:02.611 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-requests-oauthlib-1.3.0-12.el9.noarch 41/108 2026-03-31T23:03:02.613 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-cffi-1.14.5-5.el9.x86_64 47/108 2026-03-31T23:03:02.621 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-requests-2.25.1-10.el9.noarch 42/108 2026-03-31T23:03:02.626 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-google-auth-1:2.45.0-1.el9.noarch 43/108 2026-03-31T23:03:02.636 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-rsa-4.9-2.el9.noarch 44/108 2026-03-31T23:03:02.643 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-pyasn1-modules-0.4.8-7.el9.noarch 45/108 2026-03-31T23:03:02.648 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-urllib3-1.26.5-7.el9.noarch 46/108 2026-03-31T23:03:02.653 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-cffi-1.14.5-5.el9.x86_64 47/108 2026-03-31T23:03:02.656 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-pycparser-2.20-6.el9.noarch 48/108 2026-03-31T23:03:02.662 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-pycparser-2.20-6.el9.noarch 48/108 2026-03-31T23:03:02.668 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-numpy-1:1.23.5-2.el9.x86_64 49/108 2026-03-31T23:03:02.671 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : flexiblas-netlib-3.0.4-9.el9.x86_64 50/108 2026-03-31T23:03:02.673 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-numpy-1:1.23.5-2.el9.x86_64 49/108 2026-03-31T23:03:02.676 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : flexiblas-netlib-3.0.4-9.el9.x86_64 50/108 2026-03-31T23:03:02.677 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 51/108 2026-03-31T23:03:02.679 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : openblas-openmp-0.3.29-1.el9.x86_64 52/108 2026-03-31T23:03:02.682 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 51/108 2026-03-31T23:03:02.682 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libgfortran-11.5.0-14.el9.x86_64 53/108 2026-03-31T23:03:02.684 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : openblas-openmp-0.3.29-1.el9.x86_64 52/108 2026-03-31T23:03:02.684 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 54/108 2026-03-31T23:03:02.688 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libgfortran-11.5.0-14.el9.x86_64 53/108 2026-03-31T23:03:02.691 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 54/108 2026-03-31T23:03:02.703 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-pycparser-2.20-6.el9.noarch 48/108 2026-03-31T23:03:02.709 
INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.709 INFO:teuthology.orchestra.run.vm00.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-03-31T23:03:02.709 INFO:teuthology.orchestra.run.vm00.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 2026-03-31T23:03:02.709 INFO:teuthology.orchestra.run.vm00.stdout: 2026-03-31T23:03:02.709 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.710 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.710 INFO:teuthology.orchestra.run.vm05.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-03-31T23:03:02.710 INFO:teuthology.orchestra.run.vm05.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 2026-03-31T23:03:02.710 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:02.710 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.715 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-numpy-1:1.23.5-2.el9.x86_64 49/108 2026-03-31T23:03:02.718 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.719 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : flexiblas-netlib-3.0.4-9.el9.x86_64 50/108 2026-03-31T23:03:02.720 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : openblas-0.3.29-1.el9.x86_64 56/108 2026-03-31T23:03:02.720 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.722 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : openblas-0.3.29-1.el9.x86_64 56/108 2026-03-31T23:03:02.722 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : flexiblas-3.0.4-9.el9.x86_64 57/108 2026-03-31T23:03:02.724 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : flexiblas-3.0.4-9.el9.x86_64 57/108 2026-03-31T23:03:02.725 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 51/108 2026-03-31T23:03:02.725 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-ply-3.11-14.el9.noarch 58/108 2026-03-31T23:03:02.727 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-idna-2.10-7.el9.1.noarch 59/108 2026-03-31T23:03:02.728 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : openblas-openmp-0.3.29-1.el9.x86_64 52/108 2026-03-31T23:03:02.728 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-ply-3.11-14.el9.noarch 58/108 2026-03-31T23:03:02.731 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-idna-2.10-7.el9.1.noarch 59/108 2026-03-31T23:03:02.732 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libgfortran-11.5.0-14.el9.x86_64 53/108 2026-03-31T23:03:02.733 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-pysocks-1.7.1-12.el9.noarch 60/108 2026-03-31T23:03:02.736 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-libstoragemgmt-1.10.1-1.el9.x86_64 54/108 2026-03-31T23:03:02.737 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-pyasn1-0.4.8-7.el9.noarch 61/108 2026-03-31T23:03:02.737 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : 
python3-pysocks-1.7.1-12.el9.noarch 60/108 2026-03-31T23:03:02.742 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-pyasn1-0.4.8-7.el9.noarch 61/108 2026-03-31T23:03:02.743 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-cachetools-4.2.4-1.el9.noarch 62/108 2026-03-31T23:03:02.747 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-cachetools-4.2.4-1.el9.noarch 62/108 2026-03-31T23:03:02.753 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-chardet-4.0.0-5.el9.noarch 63/108 2026-03-31T23:03:02.756 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-chardet-4.0.0-5.el9.noarch 63/108 2026-03-31T23:03:02.756 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.756 INFO:teuthology.orchestra.run.vm09.stdout:Glob pattern passed to enable, but globs are not supported for this. 2026-03-31T23:03:02.756 INFO:teuthology.orchestra.run.vm09.stdout:Invalid unit name "ceph-immutable-object-cache@*.service" escaped as "ceph-immutable-object-cache@\x2a.service". 2026-03-31T23:03:02.756 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T23:03:02.757 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.758 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-oauthlib-3.1.1-5.el9.noarch 64/108 2026-03-31T23:03:02.761 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-websocket-client-1.2.3-2.el9.noarch 65/108 2026-03-31T23:03:02.762 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-oauthlib-3.1.1-5.el9.noarch 64/108 2026-03-31T23:03:02.764 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-certifi-2023.05.07-4.el9.noarch 66/108 2026-03-31T23:03:02.765 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-websocket-client-1.2.3-2.el9.noarch 65/108 2026-03-31T23:03:02.767 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-repoze-lru-0.7-16.el9.noarch 67/108 2026-03-31T23:03:02.767 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-immutable-object-cache-2:20.2.0-721.g5bb327 55/108 2026-03-31T23:03:02.768 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-certifi-2023.05.07-4.el9.noarch 66/108 2026-03-31T23:03:02.770 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : openblas-0.3.29-1.el9.x86_64 56/108 2026-03-31T23:03:02.770 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jaraco-8.2.1-3.el9.noarch 68/108 2026-03-31T23:03:02.770 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-repoze-lru-0.7-16.el9.noarch 67/108 2026-03-31T23:03:02.772 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-more-itertools-8.12.0-2.el9.noarch 69/108 2026-03-31T23:03:02.773 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : flexiblas-3.0.4-9.el9.x86_64 57/108 2026-03-31T23:03:02.773 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jaraco-8.2.1-3.el9.noarch 68/108 2026-03-31T23:03:02.775 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-more-itertools-8.12.0-2.el9.noarch 69/108 2026-03-31T23:03:02.776 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-toml-0.10.2-6.el9.noarch 70/108 2026-03-31T23:03:02.777 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-ply-3.11-14.el9.noarch 58/108 2026-03-31T23:03:02.778 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-toml-0.10.2-6.el9.noarch 70/108 2026-03-31T23:03:02.779 
INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-pytz-2021.1-5.el9.noarch 71/108 2026-03-31T23:03:02.780 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-idna-2.10-7.el9.1.noarch 59/108 2026-03-31T23:03:02.782 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-pytz-2021.1-5.el9.noarch 71/108 2026-03-31T23:03:02.786 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-pysocks-1.7.1-12.el9.noarch 60/108 2026-03-31T23:03:02.788 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-backports-tarfile-1.2.0-1.el9.noarch 72/108 2026-03-31T23:03:02.789 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-backports-tarfile-1.2.0-1.el9.noarch 72/108 2026-03-31T23:03:02.791 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-pyasn1-0.4.8-7.el9.noarch 61/108 2026-03-31T23:03:02.793 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-devel-3.9.25-3.el9.x86_64 73/108 2026-03-31T23:03:02.794 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-devel-3.9.25-3.el9.x86_64 73/108 2026-03-31T23:03:02.796 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-jsonpointer-2.0-4.el9.noarch 74/108 2026-03-31T23:03:02.797 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-jsonpointer-2.0-4.el9.noarch 74/108 2026-03-31T23:03:02.798 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-cachetools-4.2.4-1.el9.noarch 62/108 2026-03-31T23:03:02.800 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-typing-extensions-4.15.0-1.el9.noarch 75/108 2026-03-31T23:03:02.801 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-typing-extensions-4.15.0-1.el9.noarch 75/108 2026-03-31T23:03:02.804 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-isodate-0.6.1-3.el9.noarch 76/108 2026-03-31T23:03:02.804 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-isodate-0.6.1-3.el9.noarch 76/108 2026-03-31T23:03:02.807 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-chardet-4.0.0-5.el9.noarch 63/108 2026-03-31T23:03:02.808 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-autocommand-2.2.2-8.el9.noarch 77/108 2026-03-31T23:03:02.808 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-autocommand-2.2.2-8.el9.noarch 77/108 2026-03-31T23:03:02.812 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-packaging-20.9-5.el9.noarch 78/108 2026-03-31T23:03:02.812 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-packaging-20.9-5.el9.noarch 78/108 2026-03-31T23:03:02.813 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-oauthlib-3.1.1-5.el9.noarch 64/108 2026-03-31T23:03:02.817 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-websocket-client-1.2.3-2.el9.noarch 65/108 2026-03-31T23:03:02.818 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : grpc-data-1.46.7-10.el9.noarch 79/108 2026-03-31T23:03:02.818 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : grpc-data-1.46.7-10.el9.noarch 79/108 2026-03-31T23:03:02.820 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-certifi-2023.05.07-4.el9.noarch 66/108 2026-03-31T23:03:02.822 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-protobuf-3.14.0-17.el9.noarch 80/108 2026-03-31T23:03:02.822 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-protobuf-3.14.0-17.el9.noarch 80/108 2026-03-31T23:03:02.823 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-repoze-lru-0.7-16.el9.noarch 67/108 2026-03-31T23:03:02.825 
2026-03-31T23:03:02.826 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-zc-lockfile-2.0-10.el9.noarch 81/108
2026-03-31T23:03:02.826 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-zc-lockfile-2.0-10.el9.noarch 81/108
2026-03-31T23:03:02.829 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-more-itertools-8.12.0-2.el9.noarch 69/108
2026-03-31T23:03:02.830 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-natsort-7.1.1-5.el9.noarch 82/108
2026-03-31T23:03:02.830 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-natsort-7.1.1-5.el9.noarch 82/108
2026-03-31T23:03:02.832 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 83/108
2026-03-31T23:03:02.832 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 83/108
2026-03-31T23:03:02.832 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-toml-0.10.2-6.el9.noarch 70/108
2026-03-31T23:03:02.833 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 84/108
2026-03-31T23:03:02.834 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 84/108
2026-03-31T23:03:02.836 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-pytz-2021.1-5.el9.noarch 71/108
2026-03-31T23:03:02.845 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-backports-tarfile-1.2.0-1.el9.noarch 72/108
2026-03-31T23:03:02.851 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-devel-3.9.25-3.el9.x86_64 73/108
2026-03-31T23:03:02.854 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-jsonpointer-2.0-4.el9.noarch 74/108
2026-03-31T23:03:02.855 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.856 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-crash.service".
2026-03-31T23:03:02.856 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:02.857 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.857 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-crash.service".
2026-03-31T23:03:02.857 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:02.858 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-typing-extensions-4.15.0-1.el9.noarch 75/108
2026-03-31T23:03:02.863 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-isodate-0.6.1-3.el9.noarch 76/108
2026-03-31T23:03:02.864 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.865 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.867 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-autocommand-2.2.2-8.el9.noarch 77/108
2026-03-31T23:03:02.871 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-packaging-20.9-5.el9.noarch 78/108
2026-03-31T23:03:02.877 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : grpc-data-1.46.7-10.el9.noarch 79/108
2026-03-31T23:03:02.882 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-protobuf-3.14.0-17.el9.noarch 80/108
2026-03-31T23:03:02.886 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-zc-lockfile-2.0-10.el9.noarch 81/108
2026-03-31T23:03:02.889 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-natsort-7.1.1-5.el9.noarch 82/108
2026-03-31T23:03:02.891 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 83/108
2026-03-31T23:03:02.893 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 84/108
2026-03-31T23:03:02.895 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.895 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 86/108
2026-03-31T23:03:02.898 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.898 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 86/108
2026-03-31T23:03:02.908 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 86/108
2026-03-31T23:03:02.911 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 86/108
2026-03-31T23:03:02.913 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.913 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/ceph.target.wants/ceph-crash.service".
2026-03-31T23:03:02.913 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:02.914 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : qatzip-libs-1.3.1-1.el9.x86_64 87/108
2026-03-31T23:03:02.917 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : qatzip-libs-1.3.1-1.el9.x86_64 87/108
2026-03-31T23:03:02.918 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 88/108
2026-03-31T23:03:02.920 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-prettytable-0.7.2-27.el9.noarch 89/108
2026-03-31T23:03:02.920 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 90/108
2026-03-31T23:03:02.921 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 88/108
2026-03-31T23:03:02.921 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.923 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-prettytable-0.7.2-27.el9.noarch 89/108
2026-03-31T23:03:02.923 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 90/108
2026-03-31T23:03:02.955 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 85/108
2026-03-31T23:03:02.955 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 86/108
2026-03-31T23:03:02.968 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 86/108
2026-03-31T23:03:02.975 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : qatzip-libs-1.3.1-1.el9.x86_64 87/108
2026-03-31T23:03:02.978 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 88/108
2026-03-31T23:03:02.981 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-prettytable-0.7.2-27.el9.noarch 89/108
2026-03-31T23:03:02.981 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 90/108
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 90/108
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /sys
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /proc
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /mnt
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /var/tmp
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /home
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /root
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout:skipping the directory /tmp
2026-03-31T23:03:08.047 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:08.056 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : qatlib-25.08.0-2.el9.x86_64 91/108
2026-03-31T23:03:08.071 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.071 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.080 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.084 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : gperftools-libs-2.9.1-3.el9.x86_64 93/108
2026-03-31T23:03:08.087 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libunwind-1.6.2-1.el9.x86_64 94/108
2026-03-31T23:03:08.089 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : pciutils-3.7.0-7.el9.x86_64 95/108
2026-03-31T23:03:08.091 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : liboath-2.6.12-1.el9.x86_64 96/108
2026-03-31T23:03:08.092 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 97/108
2026-03-31T23:03:08.105 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 97/108
2026-03-31T23:03:08.119 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.119 INFO:teuthology.orchestra.run.vm00.stdout:Removed "/etc/systemd/system/multi-user.target.wants/smartd.service".
2026-03-31T23:03:08.119 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:08.121 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.131 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.154 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ledmon-libs-1.1.0-3.el9.x86_64 99/108
2026-03-31T23:03:08.157 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libquadmath-11.5.0-14.el9.x86_64 100/108
2026-03-31T23:03:08.160 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : protobuf-3.14.0-17.el9.x86_64 101/108
2026-03-31T23:03:08.164 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libxslt-1.1.34-12.el9.x86_64 102/108
2026-03-31T23:03:08.170 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libconfig-1.7.2-9.el9.x86_64 103/108
2026-03-31T23:03:08.177 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-markupsafe-1.1.1-12.el9.x86_64 104/108
2026-03-31T23:03:08.185 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : cryptsetup-2.8.1-3.el9.x86_64 105/108
2026-03-31T23:03:08.190 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : abseil-cpp-20211102.0-4.el9.x86_64 106/108
2026-03-31T23:03:08.192 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-msgpack-1.0.3-2.el9.x86_64 107/108
2026-03-31T23:03:08.192 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 108/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 108/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 1/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 3/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 4/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 5/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 6/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 7/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 8/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 9/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 10/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 11/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 13/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 14/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 15/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 16/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : cryptsetup-2.8.1-3.el9.x86_64 17/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : flexiblas-3.0.4-9.el9.x86_64 18/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : flexiblas-netlib-3.0.4-9.el9.x86_64 19/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 20/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 21/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 22/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 23/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 24/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 25/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libgfortran-11.5.0-14.el9.x86_64 26/108
2026-03-31T23:03:08.293 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 27/108
2026-03-31T23:03:08.294 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libquadmath-11.5.0-14.el9.x86_64 28/108
2026-03-31T23:03:08.294 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 29/108
2026-03-31T23:03:08.294 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libstoragemgmt-1.10.1-1.el9.x86_64 30/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 31/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libxslt-1.1.34-12.el9.x86_64 32/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 33/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 34/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 35/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : protobuf-3.14.0-17.el9.x86_64 36/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : protobuf-compiler-3.14.0-17.el9.x86_64 37/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 38/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 39/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 40/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 41/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-bcrypt-3.2.2-1.el9.x86_64 42/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 43/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 44/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 45/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 46/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-chardet-4.0.0-5.el9.noarch 47/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 48/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 49/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cryptography-36.0.1-5.el9.x86_64 50/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-devel-3.9.25-3.el9.x86_64 51/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 52/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 53/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 54/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-idna-2.10-7.el9.1.noarch 55/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 56/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 57/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 58/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 59/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 60/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 61/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 62/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 63/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jinja2-2.11.3-8.el9.noarch 64/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jsonpatch-1.21-16.el9.noarch 65/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-jsonpointer-2.0-4.el9.noarch 66/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 67/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 68/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 69/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 70/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 71/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 72/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-natsort-7.1.1-5.el9.noarch 73/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-numpy-1:1.23.5-2.el9.x86_64 74/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 75/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-oauthlib-3.1.1-5.el9.noarch 76/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 77/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-ply-3.11-14.el9.noarch 78/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 79/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-prettytable-0.7.2-27.el9.noarch 80/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-protobuf-3.14.0-17.el9.noarch 81/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 82/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pyasn1-0.4.8-7.el9.noarch 83/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9.noarch 84/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 85/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pysocks-1.7.1-12.el9.noarch 86/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-pytz-2021.1-5.el9.noarch 87/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 88/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-requests-2.25.1-10.el9.noarch 89/108
2026-03-31T23:03:08.295 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-requests-oauthlib-1.3.0-12.el9.noarch 90/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 91/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 92/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 93/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 94/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 95/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-toml-0.10.2-6.el9.noarch 96/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 97/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-urllib3-1.26.5-7.el9.noarch 98/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 99/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 100/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 101/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : qatlib-25.08.0-2.el9.x86_64 102/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : qatlib-service-25.08.0-2.el9.x86_64 103/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 104/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 105/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : smartmontools-1:7.2-10.el9.x86_64 106/108
2026-03-31T23:03:08.296 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : xmlsec1-1.2.29-13.el9.x86_64 107/108
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 108/108
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout:Removed:
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: abseil-cpp-20211102.0-4.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-immutable-object-cache-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: cryptsetup-2.8.1-3.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-netlib-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: flexiblas-openblas-openmp-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: gperftools-libs-2.9.1-3.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: grpc-data-1.46.7-10.el9.noarch
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: ledmon-libs-1.1.0-3.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: libconfig-1.7.2-9.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: libgfortran-11.5.0-14.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: liboath-2.6.12-1.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: libquadmath-11.5.0-14.el9.x86_64
2026-03-31T23:03:08.371 INFO:teuthology.orchestra.run.vm00.stdout: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: libunwind-1.6.2-1.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: libxslt-1.1.34-12.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: openblas-0.3.29-1.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: openblas-openmp-0.3.29-1.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: pciutils-3.7.0-7.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: protobuf-3.14.0-17.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: protobuf-compiler-3.14.0-17.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-asyncssh-2.13.2-5.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-autocommand-2.2.2-8.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-babel-2.9.1-2.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-bcrypt-3.2.2-1.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-cachetools-4.2.4-1.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-certifi-2023.05.07-4.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-cffi-1.14.5-5.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-chardet-4.0.0-5.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-cheroot-10.0.1-5.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-cherrypy-18.10.0-5.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-cryptography-36.0.1-5.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-devel-3.9.25-3.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-google-auth-1:2.45.0-1.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-grpcio-1.46.7-10.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-idna-2.10-7.el9.1.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-influxdb-5.3.1-1.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-isodate-0.6.1-3.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-8.2.1-3.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-context-6.0.1-3.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jaraco-text-4.0.0-2.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jinja2-2.11.3-8.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jsonpatch-1.21-16.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-jsonpointer-2.0-4.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-lxml-4.6.5-3.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-markupsafe-1.1.1-12.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-more-itertools-8.12.0-2.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-msgpack-1.0.3-2.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-natsort-7.1.1-5.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-numpy-1:1.23.5-2.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-numpy-f2py-1:1.23.5-2.el9.x86_64
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-oauthlib-3.1.1-5.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-packaging-20.9-5.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-ply-3.11-14.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-portend-3.1.0-2.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-prettytable-0.7.2-27.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-protobuf-3.14.0-17.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyasn1-0.4.8-7.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-pyasn1-modules-0.4.8-7.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-pycparser-2.20-6.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-pysocks-1.7.1-12.el9.noarch
2026-03-31T23:03:08.372 INFO:teuthology.orchestra.run.vm00.stdout: python3-pytz-2021.1-5.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-repoze-lru-0.7-16.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-requests-2.25.1-10.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-routes-2.5.1-5.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-rsa-4.9-2.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-saml-1.16.0-1.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-scipy-1.9.3-2.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-tempora-5.0.0-2.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-toml-0.10.2-6.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-typing-extensions-4.15.0-1.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-urllib3-1.26.5-7.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-websocket-client-1.2.3-2.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-xmlsec-1.3.13-1.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: python3-zc-lockfile-2.0-10.el9.noarch
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: qatlib-25.08.0-2.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: qatlib-service-25.08.0-2.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: qatzip-libs-1.3.1-1.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: smartmontools-1:7.2-10.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: xmlsec1-1.2.29-13.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:08.373 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 90/108
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /sys
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /proc
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /mnt
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /var/tmp
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /home
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /root
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout:skipping the directory /tmp
2026-03-31T23:03:08.547 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 90/108
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /sys
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /proc
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /mnt
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /var/tmp
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /home
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /root
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout:skipping the directory /tmp
2026-03-31T23:03:08.550 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:08.557 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : qatlib-25.08.0-2.el9.x86_64 91/108
2026-03-31T23:03:08.559 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : qatlib-25.08.0-2.el9.x86_64 91/108
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repository Size
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:Removing:
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout: cephadm noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.0 M
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:Remove 1 Package
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 1.0 M
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.575 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.577 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T23:03:08.577 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T23:03:08.577 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.577 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.578 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T23:03:08.578 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T23:03:08.585 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.587 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: qatlib-service-25.08.0-2.el9.x86_64 92/108
2026-03-31T23:03:08.589 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : gperftools-libs-2.9.1-3.el9.x86_64 93/108
2026-03-31T23:03:08.590 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : gperftools-libs-2.9.1-3.el9.x86_64 93/108
2026-03-31T23:03:08.591 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libunwind-1.6.2-1.el9.x86_64 94/108
2026-03-31T23:03:08.593 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libunwind-1.6.2-1.el9.x86_64 94/108
2026-03-31T23:03:08.594 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : pciutils-3.7.0-7.el9.x86_64 95/108
2026-03-31T23:03:08.595 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1
2026-03-31T23:03:08.595 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:08.596 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : pciutils-3.7.0-7.el9.x86_64 95/108
2026-03-31T23:03:08.596 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : liboath-2.6.12-1.el9.x86_64 96/108
2026-03-31T23:03:08.596 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 97/108
2026-03-31T23:03:08.598 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : liboath-2.6.12-1.el9.x86_64 96/108
2026-03-31T23:03:08.598 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 97/108
2026-03-31T23:03:08.611 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 97/108
2026-03-31T23:03:08.612 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 97/108
2026-03-31T23:03:08.625 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.626 INFO:teuthology.orchestra.run.vm09.stdout:Removed "/etc/systemd/system/multi-user.target.wants/smartd.service".
2026-03-31T23:03:08.626 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:08.627 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.627 INFO:teuthology.orchestra.run.vm05.stdout:Removed "/etc/systemd/system/multi-user.target.wants/smartd.service".
2026-03-31T23:03:08.627 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:08.628 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.629 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.638 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.641 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: smartmontools-1:7.2-10.el9.x86_64 98/108
2026-03-31T23:03:08.641 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ledmon-libs-1.1.0-3.el9.x86_64 99/108
2026-03-31T23:03:08.644 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ledmon-libs-1.1.0-3.el9.x86_64 99/108
2026-03-31T23:03:08.644 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libquadmath-11.5.0-14.el9.x86_64 100/108
2026-03-31T23:03:08.647 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : protobuf-3.14.0-17.el9.x86_64 101/108
2026-03-31T23:03:08.647 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libquadmath-11.5.0-14.el9.x86_64 100/108
2026-03-31T23:03:08.650 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libxslt-1.1.34-12.el9.x86_64 102/108
2026-03-31T23:03:08.651 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : protobuf-3.14.0-17.el9.x86_64 101/108
2026-03-31T23:03:08.653 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libconfig-1.7.2-9.el9.x86_64 103/108
2026-03-31T23:03:08.654 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libxslt-1.1.34-12.el9.x86_64 102/108
2026-03-31T23:03:08.658 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libconfig-1.7.2-9.el9.x86_64 103/108
2026-03-31T23:03:08.659 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-markupsafe-1.1.1-12.el9.x86_64 104/108
2026-03-31T23:03:08.664 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-markupsafe-1.1.1-12.el9.x86_64 104/108
2026-03-31T23:03:08.666 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : cryptsetup-2.8.1-3.el9.x86_64 105/108
2026-03-31T23:03:08.672 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : cryptsetup-2.8.1-3.el9.x86_64 105/108
2026-03-31T23:03:08.672 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : abseil-cpp-20211102.0-4.el9.x86_64 106/108
2026-03-31T23:03:08.675 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-msgpack-1.0.3-2.el9.x86_64 107/108
2026-03-31T23:03:08.675 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 108/108
2026-03-31T23:03:08.677 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : abseil-cpp-20211102.0-4.el9.x86_64 106/108
2026-03-31T23:03:08.680 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-msgpack-1.0.3-2.el9.x86_64 107/108
2026-03-31T23:03:08.680 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 108/108
2026-03-31T23:03:08.699 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:08.743 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:08.743 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:08.743 INFO:teuthology.orchestra.run.vm00.stdout:Removed:
2026-03-31T23:03:08.743 INFO:teuthology.orchestra.run.vm00.stdout: cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.743 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:08.743 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 108/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 1/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 2/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 3/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 4/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 5/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 6/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 7/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 8/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 9/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 10/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 11/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 12/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 13/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 14/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 15/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 16/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : cryptsetup-2.8.1-3.el9.x86_64 17/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : flexiblas-3.0.4-9.el9.x86_64 18/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : flexiblas-netlib-3.0.4-9.el9.x86_64 19/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 20/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 21/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 22/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 23/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 24/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 25/108
2026-03-31T23:03:08.780 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libgfortran-11.5.0-14.el9.x86_64 26/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 27/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libquadmath-11.5.0-14.el9.x86_64 28/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 29/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libstoragemgmt-1.10.1-1.el9.x86_64 30/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 31/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libxslt-1.1.34-12.el9.x86_64 32/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 33/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 34/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 35/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : protobuf-3.14.0-17.el9.x86_64 36/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : protobuf-compiler-3.14.0-17.el9.x86_64 37/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 38/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 39/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 40/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 41/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-bcrypt-3.2.2-1.el9.x86_64 42/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 43/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 44/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 45/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 46/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-chardet-4.0.0-5.el9.noarch 47/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 48/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 49/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cryptography-36.0.1-5.el9.x86_64 50/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-devel-3.9.25-3.el9.x86_64 51/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 52/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 53/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 54/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-idna-2.10-7.el9.1.noarch 55/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 56/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 57/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 58/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 59/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 60/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 61/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 62/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 63/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jinja2-2.11.3-8.el9.noarch 64/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jsonpatch-1.21-16.el9.noarch 65/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-jsonpointer-2.0-4.el9.noarch 66/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 67/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 68/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 69/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 70/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 71/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 72/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-natsort-7.1.1-5.el9.noarch 73/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-numpy-1:1.23.5-2.el9.x86_64 74/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 75/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-oauthlib-3.1.1-5.el9.noarch 76/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 77/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-ply-3.11-14.el9.noarch 78/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 79/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-prettytable-0.7.2-27.el9.noarch 80/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-protobuf-3.14.0-17.el9.noarch 81/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 82/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pyasn1-0.4.8-7.el9.noarch 83/108
2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9.noarch 84/108
python3-pyasn1-modules-0.4.8-7.el9.noarch 84/108 2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 85/108 2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pysocks-1.7.1-12.el9.noarch 86/108 2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-pytz-2021.1-5.el9.noarch 87/108 2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 88/108 2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-requests-2.25.1-10.el9.noarch 89/108 2026-03-31T23:03:08.781 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-requests-oauthlib-1.3.0-12.el9.noarch 90/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 91/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 92/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 93/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 94/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 95/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-toml-0.10.2-6.el9.noarch 96/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 97/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-urllib3-1.26.5-7.el9.noarch 98/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 99/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 100/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 101/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : qatlib-25.08.0-2.el9.x86_64 102/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : qatlib-service-25.08.0-2.el9.x86_64 103/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 104/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 105/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : smartmontools-1:7.2-10.el9.x86_64 106/108 2026-03-31T23:03:08.782 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : xmlsec1-1.2.29-13.el9.x86_64 107/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 108/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : abseil-cpp-20211102.0-4.el9.x86_64 1/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64 2/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64 3/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.e 4/108 2026-03-31T23:03:08.786 
INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-immutable-object-cache-2:20.2.0-721.g5bb327 5/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64 6/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noar 7/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.no 8/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb3 9/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.no 10/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9 11/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch 12/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64 13/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el 14/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64 15/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch 16/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : cryptsetup-2.8.1-3.el9.x86_64 17/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : flexiblas-3.0.4-9.el9.x86_64 18/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : flexiblas-netlib-3.0.4-9.el9.x86_64 19/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : flexiblas-openblas-openmp-3.0.4-9.el9.x86_64 20/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : gperftools-libs-2.9.1-3.el9.x86_64 21/108 2026-03-31T23:03:08.786 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : grpc-data-1.46.7-10.el9.noarch 22/108 2026-03-31T23:03:08.787 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ledmon-libs-1.1.0-3.el9.x86_64 23/108 2026-03-31T23:03:08.787 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64 24/108 2026-03-31T23:03:08.787 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libconfig-1.7.2-9.el9.x86_64 25/108 2026-03-31T23:03:08.787 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libgfortran-11.5.0-14.el9.x86_64 26/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : liboath-2.6.12-1.el9.x86_64 27/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libquadmath-11.5.0-14.el9.x86_64 28/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_ 29/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libstoragemgmt-1.10.1-1.el9.x86_64 30/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libunwind-1.6.2-1.el9.x86_64 31/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libxslt-1.1.34-12.el9.x86_64 32/108 2026-03-31T23:03:08.788 
INFO:teuthology.orchestra.run.vm05.stdout: Verifying : openblas-0.3.29-1.el9.x86_64 33/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : openblas-openmp-0.3.29-1.el9.x86_64 34/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : pciutils-3.7.0-7.el9.x86_64 35/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : protobuf-3.14.0-17.el9.x86_64 36/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : protobuf-compiler-3.14.0-17.el9.x86_64 37/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-asyncssh-2.13.2-5.el9.noarch 38/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-autocommand-2.2.2-8.el9.noarch 39/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-babel-2.9.1-2.el9.noarch 40/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-backports-tarfile-1.2.0-1.el9.noarch 41/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-bcrypt-3.2.2-1.el9.x86_64 42/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cachetools-4.2.4-1.el9.noarch 43/108 2026-03-31T23:03:08.788 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x 44/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-certifi-2023.05.07-4.el9.noarch 45/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cffi-1.14.5-5.el9.x86_64 46/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-chardet-4.0.0-5.el9.noarch 47/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cheroot-10.0.1-5.el9.noarch 48/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cherrypy-18.10.0-5.el9.noarch 49/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cryptography-36.0.1-5.el9.x86_64 50/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-devel-3.9.25-3.el9.x86_64 51/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-google-auth-1:2.45.0-1.el9.noarch 52/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-grpcio-1.46.7-10.el9.x86_64 53/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-grpcio-tools-1.46.7-10.el9.x86_64 54/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-idna-2.10-7.el9.1.noarch 55/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-influxdb-5.3.1-1.el9.noarch 56/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-isodate-0.6.1-3.el9.noarch 57/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-8.2.1-3.el9.noarch 58/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-classes-3.2.1-5.el9.noarch 59/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-collections-3.0.0-8.el9.noarch 60/108 2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-context-6.0.1-3.el9.noarch 61/108 
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-functools-3.5.0-2.el9.noarch 62/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jaraco-text-4.0.0-2.el9.noarch 63/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jinja2-2.11.3-8.el9.noarch 64/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jsonpatch-1.21-16.el9.noarch 65/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-jsonpointer-2.0-4.el9.noarch 66/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-kubernetes-1:26.1.0-3.el9.noarch 67/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-libstoragemgmt-1.10.1-1.el9.x86_64 68/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-lxml-4.6.5-3.el9.x86_64 69/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-markupsafe-1.1.1-12.el9.x86_64 70/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-more-itertools-8.12.0-2.el9.noarch 71/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-msgpack-1.0.3-2.el9.x86_64 72/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-natsort-7.1.1-5.el9.noarch 73/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-numpy-1:1.23.5-2.el9.x86_64 74/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-numpy-f2py-1:1.23.5-2.el9.x86_64 75/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-oauthlib-3.1.1-5.el9.noarch 76/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-packaging-20.9-5.el9.noarch 77/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-ply-3.11-14.el9.noarch 78/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-portend-3.1.0-2.el9.noarch 79/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-prettytable-0.7.2-27.el9.noarch 80/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-protobuf-3.14.0-17.el9.noarch 81/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pyOpenSSL-21.0.0-1.el9.noarch 82/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pyasn1-0.4.8-7.el9.noarch 83/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pyasn1-modules-0.4.8-7.el9.noarch 84/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pycparser-2.20-6.el9.noarch 85/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pysocks-1.7.1-12.el9.noarch 86/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-pytz-2021.1-5.el9.noarch 87/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-repoze-lru-0.7-16.el9.noarch 88/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-requests-2.25.1-10.el9.noarch 89/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-requests-oauthlib-1.3.0-12.el9.noarch 90/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-routes-2.5.1-5.el9.noarch 91/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-rsa-4.9-2.el9.noarch 92/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-saml-1.16.0-1.el9.noarch 93/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-scipy-1.9.3-2.el9.x86_64 94/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-tempora-5.0.0-2.el9.noarch 95/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-toml-0.10.2-6.el9.noarch 96/108
2026-03-31T23:03:08.789 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-typing-extensions-4.15.0-1.el9.noarch 97/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-urllib3-1.26.5-7.el9.noarch 98/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-websocket-client-1.2.3-2.el9.noarch 99/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-xmlsec-1.3.13-1.el9.x86_64 100/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-zc-lockfile-2.0-10.el9.noarch 101/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : qatlib-25.08.0-2.el9.x86_64 102/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : qatlib-service-25.08.0-2.el9.x86_64 103/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : qatzip-libs-1.3.1-1.el9.x86_64 104/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64 105/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : smartmontools-1:7.2-10.el9.x86_64 106/108
2026-03-31T23:03:08.790 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : xmlsec1-1.2.29-13.el9.x86_64 107/108
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 108/108
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout:Removed:
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: abseil-cpp-20211102.0-4.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-immutable-object-cache-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: cryptsetup-2.8.1-3.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-netlib-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: flexiblas-openblas-openmp-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: gperftools-libs-2.9.1-3.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: grpc-data-1.46.7-10.el9.noarch
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: ledmon-libs-1.1.0-3.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: libconfig-1.7.2-9.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: libgfortran-11.5.0-14.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: liboath-2.6.12-1.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: libquadmath-11.5.0-14.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: libunwind-1.6.2-1.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: libxslt-1.1.34-12.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: openblas-0.3.29-1.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: openblas-openmp-0.3.29-1.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: pciutils-3.7.0-7.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: protobuf-3.14.0-17.el9.x86_64
2026-03-31T23:03:08.859 INFO:teuthology.orchestra.run.vm09.stdout: protobuf-compiler-3.14.0-17.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-asyncssh-2.13.2-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-autocommand-2.2.2-8.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-babel-2.9.1-2.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-bcrypt-3.2.2-1.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-cachetools-4.2.4-1.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-certifi-2023.05.07-4.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-cffi-1.14.5-5.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-chardet-4.0.0-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-cheroot-10.0.1-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-cherrypy-18.10.0-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-cryptography-36.0.1-5.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-devel-3.9.25-3.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-google-auth-1:2.45.0-1.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-grpcio-1.46.7-10.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-idna-2.10-7.el9.1.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-influxdb-5.3.1-1.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-isodate-0.6.1-3.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-8.2.1-3.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-context-6.0.1-3.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jaraco-text-4.0.0-2.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jinja2-2.11.3-8.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jsonpatch-1.21-16.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-jsonpointer-2.0-4.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-lxml-4.6.5-3.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-markupsafe-1.1.1-12.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-more-itertools-8.12.0-2.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-msgpack-1.0.3-2.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-natsort-7.1.1-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-numpy-1:1.23.5-2.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-numpy-f2py-1:1.23.5-2.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-oauthlib-3.1.1-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-packaging-20.9-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-ply-3.11-14.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-portend-3.1.0-2.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-prettytable-0.7.2-27.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-protobuf-3.14.0-17.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyasn1-0.4.8-7.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-pyasn1-modules-0.4.8-7.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-pycparser-2.20-6.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-pysocks-1.7.1-12.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-pytz-2021.1-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-repoze-lru-0.7-16.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-requests-2.25.1-10.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-routes-2.5.1-5.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-rsa-4.9-2.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-saml-1.16.0-1.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-scipy-1.9.3-2.el9.x86_64
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-tempora-5.0.0-2.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-toml-0.10.2-6.el9.noarch
2026-03-31T23:03:08.860 INFO:teuthology.orchestra.run.vm09.stdout: python3-typing-extensions-4.15.0-1.el9.noarch
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: python3-urllib3-1.26.5-7.el9.noarch
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: python3-websocket-client-1.2.3-2.el9.noarch
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: python3-xmlsec-1.3.13-1.el9.x86_64
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: python3-zc-lockfile-2.0-10.el9.noarch
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: qatlib-25.08.0-2.el9.x86_64
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: qatlib-service-25.08.0-2.el9.x86_64
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: qatzip-libs-1.3.1-1.el9.x86_64
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: smartmontools-1:7.2-10.el9.x86_64
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: xmlsec1-1.2.29-13.el9.x86_64
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:08.861 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
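The vm09 transcript above is the tail of a single dnf erase transaction: after "Running transaction", every package is erased, re-verified (1/108 .. 108/108), and finally listed once under "Removed:". A minimal sketch of the kind of per-host purge that could produce output like this (host names are from this run; the helper and package list are illustrative, not teuthology's actual API):

    import subprocess

    # Illustrative subset; the real run erases the full 108-package ceph set.
    PACKAGES = ["ceph-base", "ceph-common", "ceph-mgr", "ceph-osd", "ceph-volume"]

    def purge(host: str, packages: list[str]) -> None:
        # A single 'dnf -y remove' resolves the whole set in one transaction,
        # which is why the log shows one Verifying pass per host.
        subprocess.run(["ssh", host, "sudo", "dnf", "-y", "remove", *packages],
                       check=True)

    for host in ("vm05.local", "vm09.local"):
        purge(host, PACKAGES)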
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : xmlsec1-openssl-1.2.29-13.el9.x86_64 108/108
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout:Removed:
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: abseil-cpp-20211102.0-4.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-base-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-grafana-dashboards-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-immutable-object-cache-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-dashboard-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-diskprediction-local-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-k8sevents-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-modules-core-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-mgr-rook-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-osd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-prometheus-alerts-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-selinux-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ceph-volume-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: cryptsetup-2.8.1-3.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-netlib-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: flexiblas-openblas-openmp-3.0.4-9.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: gperftools-libs-2.9.1-3.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: grpc-data-1.46.7-10.el9.noarch
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: ledmon-libs-1.1.0-3.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: libcephsqlite-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: libconfig-1.7.2-9.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: libgfortran-11.5.0-14.el9.x86_64
2026-03-31T23:03:08.867 INFO:teuthology.orchestra.run.vm05.stdout: liboath-2.6.12-1.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: libquadmath-11.5.0-14.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: libradosstriper1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: libunwind-1.6.2-1.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: libxslt-1.1.34-12.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: openblas-0.3.29-1.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: openblas-openmp-0.3.29-1.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: pciutils-3.7.0-7.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: protobuf-3.14.0-17.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: protobuf-compiler-3.14.0-17.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-asyncssh-2.13.2-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-autocommand-2.2.2-8.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-babel-2.9.1-2.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-backports-tarfile-1.2.0-1.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-bcrypt-3.2.2-1.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-cachetools-4.2.4-1.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-ceph-common-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-certifi-2023.05.07-4.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-cffi-1.14.5-5.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-chardet-4.0.0-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-cheroot-10.0.1-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-cherrypy-18.10.0-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-cryptography-36.0.1-5.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-devel-3.9.25-3.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-google-auth-1:2.45.0-1.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-grpcio-1.46.7-10.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-grpcio-tools-1.46.7-10.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-idna-2.10-7.el9.1.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-influxdb-5.3.1-1.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-isodate-0.6.1-3.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-8.2.1-3.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-classes-3.2.1-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-collections-3.0.0-8.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-context-6.0.1-3.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-functools-3.5.0-2.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jaraco-text-4.0.0-2.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jinja2-2.11.3-8.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jsonpatch-1.21-16.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-jsonpointer-2.0-4.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-kubernetes-1:26.1.0-3.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-libstoragemgmt-1.10.1-1.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-lxml-4.6.5-3.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-markupsafe-1.1.1-12.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-more-itertools-8.12.0-2.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-msgpack-1.0.3-2.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-natsort-7.1.1-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-numpy-1:1.23.5-2.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-numpy-f2py-1:1.23.5-2.el9.x86_64
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-oauthlib-3.1.1-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-packaging-20.9-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-ply-3.11-14.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-portend-3.1.0-2.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-prettytable-0.7.2-27.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-protobuf-3.14.0-17.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyOpenSSL-21.0.0-1.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyasn1-0.4.8-7.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-pyasn1-modules-0.4.8-7.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-pycparser-2.20-6.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-pysocks-1.7.1-12.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-pytz-2021.1-5.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-repoze-lru-0.7-16.el9.noarch
2026-03-31T23:03:08.868 INFO:teuthology.orchestra.run.vm05.stdout: python3-requests-2.25.1-10.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-requests-oauthlib-1.3.0-12.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-routes-2.5.1-5.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-rsa-4.9-2.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-saml-1.16.0-1.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-scipy-1.9.3-2.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-tempora-5.0.0-2.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-toml-0.10.2-6.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-typing-extensions-4.15.0-1.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-urllib3-1.26.5-7.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-websocket-client-1.2.3-2.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-xmlsec-1.3.13-1.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: python3-zc-lockfile-2.0-10.el9.noarch
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: qatlib-25.08.0-2.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: qatlib-service-25.08.0-2.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: qatzip-libs-1.3.1-1.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: rbd-mirror-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: smartmontools-1:7.2-10.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: xmlsec1-1.2.29-13.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout: xmlsec1-openssl-1.2.29-13.el9.x86_64
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:08.869 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:08.935 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: ceph-immutable-object-cache
2026-03-31T23:03:08.935 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:08.937 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:08.938 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:08.938 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repository Size
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:Removing:
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout: cephadm noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.0 M
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:Remove 1 Package
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 1.0 M
2026-03-31T23:03:09.054 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T23:03:09.056 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T23:03:09.056 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T23:03:09.057 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T23:03:09.057 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repository Size
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:Removing:
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout: cephadm noarch 2:20.2.0-721.g5bb32787.el9 @ceph-noarch 1.0 M
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:Remove 1 Package
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 1.0 M
2026-03-31T23:03:09.067 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T23:03:09.069 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T23:03:09.069 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T23:03:09.070 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T23:03:09.070 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T23:03:09.074 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T23:03:09.074 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:09.087 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T23:03:09.087 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:09.102 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: ceph-mgr
2026-03-31T23:03:09.103 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:09.105 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:09.105 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:09.105 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:09.186 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:09.201 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:09.222 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:09.222 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:09.222 INFO:teuthology.orchestra.run.vm09.stdout:Removed:
2026-03-31T23:03:09.222 INFO:teuthology.orchestra.run.vm09.stdout: cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:09.222 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:09.222 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:09.244 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : cephadm-2:20.2.0-721.g5bb32787.el9.noarch 1/1
2026-03-31T23:03:09.244 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:09.245 INFO:teuthology.orchestra.run.vm05.stdout:Removed:
2026-03-31T23:03:09.245 INFO:teuthology.orchestra.run.vm05.stdout: cephadm-2:20.2.0-721.g5bb32787.el9.noarch
2026-03-31T23:03:09.245 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:09.245 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:09.274 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: ceph-mgr-dashboard
2026-03-31T23:03:09.274 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:09.276 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:09.277 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:09.277 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:09.404 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: ceph-immutable-object-cache
2026-03-31T23:03:09.404 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:09.406 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:09.407 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:09.407 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:09.425 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: ceph-immutable-object-cache
2026-03-31T23:03:09.425 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:09.427 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:09.428 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:09.428 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:09.453 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: ceph-mgr-diskprediction-local
2026-03-31T23:03:09.453 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:09.455 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:09.456 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:09.456 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:09.582 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: ceph-mgr
2026-03-31T23:03:09.582 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:09.585 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:09.585 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:09.585 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:09.606 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: ceph-mgr
2026-03-31T23:03:09.607 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:09.609 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:09.609 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:09.609 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:09.627 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: ceph-mgr-rook
2026-03-31T23:03:09.627 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:09.629 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:09.630 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:09.630 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:09.763 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: ceph-mgr-dashboard
2026-03-31T23:03:09.763 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:09.765 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:09.766 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:09.766 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:09.775 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: ceph-mgr-dashboard
2026-03-31T23:03:09.775 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:09.777 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:09.778 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:09.778 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:09.789 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: ceph-mgr-cephadm
2026-03-31T23:03:09.790 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:09.791 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:09.792 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:09.792 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:09.933 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: ceph-mgr-diskprediction-local
2026-03-31T23:03:09.934 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:09.936 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:09.937 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:09.937 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:09.953 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: ceph-mgr-diskprediction-local
2026-03-31T23:03:09.953 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:09.955 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:09.956 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:09.956 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:09.976 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
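The interleaved "No match for argument: <pkg>" / "No packages marked for removal." / "Nothing to do." / "Complete!" blocks are dnf's normal reaction when a removal target is simply not installed on that host (vm00 never carried the mgr modules that vm05/vm09 had). Removal is evidently issued per package, and an absent package is tolerated rather than treated as fatal; a hedged sketch of that pattern (the helper name is illustrative, not teuthology's actual API):

    import subprocess

    def remove_if_present(host: str, package: str) -> bool:
        # dnf prints "No match for argument: <pkg>" and still finishes with
        # "Nothing to do. / Complete!" when the package is absent, so only a
        # genuinely failed transaction is treated as an error here.
        result = subprocess.run(
            ["ssh", host, "sudo", "dnf", "-y", "remove", package],
            capture_output=True, text=True,
        )
        if f"No match for argument: {package}" in result.stdout:
            return False              # not installed on this host; nothing removed
        result.check_returncode()     # real dnf failures still raise
        return True

    remove_if_present("vm00.local", "ceph-mgr-rook")  # False on vm00, per the log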
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repository Size
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:Removing:
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout: ceph-fuse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.7 M
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:Removing unused dependencies:
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout: fuse x86_64 2.9.9-17.el9 @baseos 214 k
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:Remove 2 Packages
2026-03-31T23:03:09.977 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:09.978 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 2.9 M
2026-03-31T23:03:09.978 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T23:03:09.980 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T23:03:09.980 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T23:03:09.994 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T23:03:09.995 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T23:03:10.023 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1
2026-03-31T23:03:10.027 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:10.041 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.104 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.104 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:10.127 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: ceph-mgr-rook
2026-03-31T23:03:10.127 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:10.128 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: ceph-mgr-rook
2026-03-31T23:03:10.128 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:10.130 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:10.130 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:10.130 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:10.131 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:10.132 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:10.132 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:10.141 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.141 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:10.141 INFO:teuthology.orchestra.run.vm00.stdout:Removed:
2026-03-31T23:03:10.141 INFO:teuthology.orchestra.run.vm00.stdout: ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 fuse-2.9.9-17.el9.x86_64
2026-03-31T23:03:10.141 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:10.141 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:10.306 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: ceph-mgr-cephadm
2026-03-31T23:03:10.307 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:10.309 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:10.309 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:10.309 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:10.316 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: ceph-mgr-cephadm
2026-03-31T23:03:10.316 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:10.318 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:10.319 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:10.319 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:10.325 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: ceph-volume
2026-03-31T23:03:10.326 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:10.328 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:10.329 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:10.329 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repository Size
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:Removing:
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout: ceph-fuse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.7 M
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:Removing unused dependencies:
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout: fuse x86_64 2.9.9-17.el9 @baseos 214 k
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:Remove 2 Packages
2026-03-31T23:03:10.506 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:10.507 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 2.9 M
2026-03-31T23:03:10.507 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T23:03:10.509 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T23:03:10.509 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repository Size
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:Removing:
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout: ceph-fuse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.7 M
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:Removing unused dependencies:
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout: fuse x86_64 2.9.9-17.el9 @baseos 214 k
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:Remove 2 Packages
2026-03-31T23:03:10.515 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:10.516 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 2.9 M
2026-03-31T23:03:10.516 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T23:03:10.518 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T23:03:10.518 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T23:03:10.521 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:10.521 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:10.521 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repo Size
2026-03-31T23:03:10.521 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:10.521 INFO:teuthology.orchestra.run.vm00.stdout:Removing:
2026-03-31T23:03:10.521 INFO:teuthology.orchestra.run.vm00.stdout: librados-devel x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 449 k
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout:Removing dependent packages:
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-devel x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 155 k
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout:Remove 2 Packages
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 604 k
2026-03-31T23:03:10.522 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T23:03:10.523 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T23:03:10.524 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T23:03:10.524 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T23:03:10.524 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T23:03:10.533 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T23:03:10.533 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T23:03:10.533 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T23:03:10.534 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T23:03:10.553 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T23:03:10.557 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:10.559 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1
2026-03-31T23:03:10.561 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:10.562 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T23:03:10.565 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:10.571 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.574 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:10.578 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.639 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.639 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:10.641 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:10.641 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:10.655 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.655 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:10.684 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.685 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:10.685 INFO:teuthology.orchestra.run.vm05.stdout:Removed:
2026-03-31T23:03:10.685 INFO:teuthology.orchestra.run.vm05.stdout: ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 fuse-2.9.9-17.el9.x86_64
2026-03-31T23:03:10.685 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:10.685 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:10.689 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:10.689 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:10.689 INFO:teuthology.orchestra.run.vm00.stdout:Removed:
2026-03-31T23:03:10.689 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:10.689 INFO:teuthology.orchestra.run.vm00.stdout: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:10.689 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:10.689 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:10.698 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : fuse-2.9.9-17.el9.x86_64 2/2
2026-03-31T23:03:10.698 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:10.698 INFO:teuthology.orchestra.run.vm09.stdout:Removed:
2026-03-31T23:03:10.698 INFO:teuthology.orchestra.run.vm09.stdout: ceph-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 fuse-2.9.9-17.el9.x86_64
2026-03-31T23:03:10.698 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:10.698 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:10.860 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: ceph-volume
2026-03-31T23:03:10.860 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:10.862 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:10.863 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:10.863 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:10.874 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repo Size
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:Removing:
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.4 M
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:Removing dependent packages:
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout: python3-cephfs x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 510 k
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:Removing unused dependencies:
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-daemon x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 90 k
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-proxy2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 52 k
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout: python3-ceph-argparse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 187 k
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:Remove 5 Packages
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 3.3 M
2026-03-31T23:03:10.875 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T23:03:10.877 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T23:03:10.877 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T23:03:10.878 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: ceph-volume
2026-03-31T23:03:10.878 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:10.880 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:10.881 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:10.881 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:10.890 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T23:03:10.890 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T23:03:10.916 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1
2026-03-31T23:03:10.918 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 1/5
2026-03-31T23:03:10.919 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86 2/5
2026-03-31T23:03:10.919 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:10.930 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:10.932 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64 4/5
2026-03-31T23:03:10.932 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:10.991 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:10.991 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64 1/5
2026-03-31T23:03:10.991 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 2/5
2026-03-31T23:03:10.991 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:10.991 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86 4/5
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout:Removed:
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout: python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout: python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:11.035 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:11.048 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repo Size
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:Removing:
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout: librados-devel x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 449 k
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:Removing dependent packages:
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-devel x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 155 k
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:Remove 2 Packages
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 604 k
2026-03-31T23:03:11.049 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T23:03:11.051 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T23:03:11.051 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T23:03:11.061 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T23:03:11.062 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T23:03:11.062 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:11.062 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.062 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repo Size
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:Removing:
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout: librados-devel x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 449 k
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:Removing dependent packages:
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-devel x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 155 k
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:Remove 2 Packages
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 604 k
2026-03-31T23:03:11.063 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T23:03:11.065 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T23:03:11.065 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T23:03:11.074 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T23:03:11.074 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T23:03:11.088 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T23:03:11.090 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:11.099 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T23:03:11.101 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:11.104 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:11.114 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:11.167 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:11.167 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:11.180 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:11.180 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64 1/2
2026-03-31T23:03:11.211 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:11.211 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:11.211 INFO:teuthology.orchestra.run.vm05.stdout:Removed:
2026-03-31T23:03:11.211 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.211 INFO:teuthology.orchestra.run.vm05.stdout: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.211 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:11.211 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:11.219 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: libcephfs-devel
2026-03-31T23:03:11.219 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:11.221 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:11.222 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:11.222 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:11.224 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64 2/2
2026-03-31T23:03:11.224 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.224 INFO:teuthology.orchestra.run.vm09.stdout:Removed:
2026-03-31T23:03:11.224 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.224 INFO:teuthology.orchestra.run.vm09.stdout: librados-devel-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.224 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.224 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:11.419 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:11.420 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:11.420 INFO:teuthology.orchestra.run.vm00.stdout: Package Arch Version Repository Size
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:Removing:
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: librados2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 12 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:Removing dependent packages:
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: python3-rados x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 1.1 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: python3-rbd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 1.1 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: python3-rgw x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 264 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: qemu-kvm-block-rbd x86_64 17:10.1.0-16.el9 @appstream 37 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: rbd-fuse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 238 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: rbd-nbd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 498 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:Removing unused dependencies:
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: boost-program-options x86_64 1.75.0-13.el9 @appstream 276 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: libarrow x86_64 9.0.0-15.el9 @epel 18 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: libarrow-doc noarch 9.0.0-15.el9 @epel 122 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: libnbd x86_64 1.20.3-4.el9 @appstream 453 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: libpmemobj x86_64 1.12.1-1.el9 @appstream 383 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: librabbitmq x86_64 0.11.0-7.el9 @appstream 102 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: librbd1 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 10 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: librdkafka x86_64 1.6.1-102.el9 @appstream 2.0 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: librgw2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 28 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: lttng-ust x86_64 2.12.0-6.el9 @appstream 1.0 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: parquet-libs x86_64 9.0.0-15.el9 @epel 2.8 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: re2 x86_64 1:20211101-20.el9 @epel 472 k
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout: thrift x86_64 0.15.0-4.el9 @epel 4.8 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:Transaction Summary
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:================================================================================
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:Remove 20 Packages
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:Freed space: 84 M
2026-03-31T23:03:11.421 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction check
2026-03-31T23:03:11.425 INFO:teuthology.orchestra.run.vm00.stdout:Transaction check succeeded.
2026-03-31T23:03:11.425 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction test
2026-03-31T23:03:11.428 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:11.428 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.428 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repo Size
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:Removing:
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.4 M
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:Removing dependent packages:
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout: python3-cephfs x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 510 k
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:Removing unused dependencies:
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-daemon x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 90 k
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-proxy2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 52 k
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout: python3-ceph-argparse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 187 k
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:Remove 5 Packages
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 3.3 M
2026-03-31T23:03:11.429 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T23:03:11.431 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T23:03:11.431 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T23:03:11.444 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T23:03:11.444 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T23:03:11.444 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repo Size
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:Removing:
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 2.4 M
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:Removing dependent packages:
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout: python3-cephfs x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 510 k
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:Removing unused dependencies:
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-daemon x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 90 k
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-proxy2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 52 k
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout: python3-ceph-argparse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 187 k
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:Remove 5 Packages
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 3.3 M
2026-03-31T23:03:11.445 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T23:03:11.447 INFO:teuthology.orchestra.run.vm00.stdout:Transaction test succeeded.
2026-03-31T23:03:11.447 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T23:03:11.447 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T23:03:11.448 INFO:teuthology.orchestra.run.vm00.stdout:Running transaction
2026-03-31T23:03:11.460 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T23:03:11.460 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T23:03:11.471 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T23:03:11.474 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 1/5
2026-03-31T23:03:11.475 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86 2/5
2026-03-31T23:03:11.475 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:11.487 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:11.489 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64 4/5
2026-03-31T23:03:11.489 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:11.490 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T23:03:11.490 INFO:teuthology.orchestra.run.vm00.stdout: Preparing : 1/1
2026-03-31T23:03:11.492 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 1/5
2026-03-31T23:03:11.493 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 1/20
2026-03-31T23:03:11.493 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86 2/5
2026-03-31T23:03:11.494 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:11.495 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 2/20
2026-03-31T23:03:11.499 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 3/20
2026-03-31T23:03:11.499 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 4/20
2026-03-31T23:03:11.506 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:11.507 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64 4/5
2026-03-31T23:03:11.507 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:11.512 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 4/20
2026-03-31T23:03:11.514 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : parquet-libs-9.0.0-15.el9.x86_64 5/20
2026-03-31T23:03:11.515 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 6/20
2026-03-31T23:03:11.517 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 7/20
2026-03-31T23:03:11.519 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64 8/20
2026-03-31T23:03:11.521 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libarrow-doc-9.0.0-15.el9.noarch 9/20
2026-03-31T23:03:11.521 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 10/20
2026-03-31T23:03:11.535 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 10/20
2026-03-31T23:03:11.536 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 11/20
2026-03-31T23:03:11.536 INFO:teuthology.orchestra.run.vm00.stdout:warning: file /etc/ceph: remove failed: No such file or directory
2026-03-31T23:03:11.536 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:11.551 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librados2-2:20.2.0-721.g5bb32787.el9.x86_64 11/20
2026-03-31T23:03:11.553 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:11.553 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64 1/5
2026-03-31T23:03:11.553 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 2/5
2026-03-31T23:03:11.554 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:11.554 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86 4/5
2026-03-31T23:03:11.554 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libarrow-9.0.0-15.el9.x86_64 12/20
2026-03-31T23:03:11.557 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : re2-1:20211101-20.el9.x86_64 13/20
2026-03-31T23:03:11.562 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : lttng-ust-2.12.0-6.el9.x86_64 14/20
2026-03-31T23:03:11.565 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : thrift-0.15.0-4.el9.x86_64 15/20
2026-03-31T23:03:11.568 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libnbd-1.20.3-4.el9.x86_64 16/20
2026-03-31T23:03:11.570 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : libpmemobj-1.12.1-1.el9.x86_64 17/20
2026-03-31T23:03:11.571 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:11.571 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64 1/5
2026-03-31T23:03:11.571 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64 2/5
2026-03-31T23:03:11.571 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64 3/5
2026-03-31T23:03:11.571 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86 4/5
2026-03-31T23:03:11.572 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : boost-program-options-1.75.0-13.el9.x86_64 18/20
2026-03-31T23:03:11.574 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : librabbitmq-0.11.0-7.el9.x86_64 19/20
2026-03-31T23:03:11.588 INFO:teuthology.orchestra.run.vm00.stdout: Erasing : librdkafka-1.6.1-102.el9.x86_64 20/20
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout:Removed:
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout: python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout: python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.595 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64 5/5
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout:Removed:
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-daemon-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs-proxy2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout: libcephfs2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout: python3-ceph-argparse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout: python3-cephfs-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:11.609 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Running scriptlet: librdkafka-1.6.1-102.el9.x86_64 20/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : boost-program-options-1.75.0-13.el9.x86_64 1/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 2/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 3/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 4/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 5/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 6/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 7/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 8/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 9/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 10/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 11/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 12/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 13/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 14/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 15/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64 16/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 17/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 18/20
2026-03-31T23:03:11.659 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : re2-1:20211101-20.el9.x86_64 19/20
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 20/20
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout:Removed:
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: boost-program-options-1.75.0-13.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: libarrow-9.0.0-15.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: libarrow-doc-9.0.0-15.el9.noarch
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: libnbd-1.20.3-4.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: libpmemobj-1.12.1-1.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: librabbitmq-0.11.0-7.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: librados2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: librdkafka-1.6.1-102.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: lttng-ust-2.12.0-6.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: parquet-libs-9.0.0-15.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: re2-1:20211101-20.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout: thrift-0.15.0-4.el9.x86_64
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout:
2026-03-31T23:03:11.707 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:11.787 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: libcephfs-devel
2026-03-31T23:03:11.788 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:11.790 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:11.791 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:11.791 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:11.795 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: libcephfs-devel
2026-03-31T23:03:11.795 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:11.797 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:11.798 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:11.798 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:11.902 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: librbd1
2026-03-31T23:03:11.902 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:11.904 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:11.905 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:11.905 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:11.982 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:11.983 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: Package Arch Version Repository Size
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:Removing:
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: librados2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 12 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:Removing dependent packages:
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: python3-rados x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 1.1 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: python3-rbd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 1.1 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: python3-rgw x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 264 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: qemu-kvm-block-rbd x86_64 17:10.1.0-16.el9 @appstream 37 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: rbd-fuse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 238 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: rbd-nbd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 498 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:Removing unused dependencies:
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: boost-program-options x86_64 1.75.0-13.el9 @appstream 276 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: libarrow x86_64 9.0.0-15.el9 @epel 18 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: libarrow-doc noarch 9.0.0-15.el9 @epel 122 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: libnbd x86_64 1.20.3-4.el9 @appstream 453 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: libpmemobj x86_64 1.12.1-1.el9 @appstream 383 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: librabbitmq x86_64 0.11.0-7.el9 @appstream 102 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: librbd1 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 10 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: librdkafka x86_64 1.6.1-102.el9 @appstream 2.0 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: librgw2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 28 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: lttng-ust x86_64 2.12.0-6.el9 @appstream 1.0 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: parquet-libs x86_64 9.0.0-15.el9 @epel 2.8 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: re2 x86_64 1:20211101-20.el9 @epel 472 k
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout: thrift x86_64 0.15.0-4.el9 @epel 4.8 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:Transaction Summary
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:================================================================================
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:Remove 20 Packages
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:Freed space: 84 M
2026-03-31T23:03:11.984 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction check
2026-03-31T23:03:11.988 INFO:teuthology.orchestra.run.vm09.stdout:Transaction check succeeded.
2026-03-31T23:03:11.988 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction test
2026-03-31T23:03:12.005 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: Package Arch Version Repository Size
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout:Removing:
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: librados2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 12 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout:Removing dependent packages:
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: python3-rados x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 1.1 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: python3-rbd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 1.1 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: python3-rgw x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 264 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: qemu-kvm-block-rbd x86_64 17:10.1.0-16.el9 @appstream 37 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: rbd-fuse x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 238 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: rbd-nbd x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 498 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout:Removing unused dependencies:
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: boost-program-options x86_64 1.75.0-13.el9 @appstream 276 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: libarrow x86_64 9.0.0-15.el9 @epel 18 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: libarrow-doc noarch 9.0.0-15.el9 @epel 122 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: libnbd x86_64 1.20.3-4.el9 @appstream 453 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: libpmemobj x86_64 1.12.1-1.el9 @appstream 383 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: librabbitmq x86_64 0.11.0-7.el9 @appstream 102 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: librbd1 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 10 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: librdkafka x86_64 1.6.1-102.el9 @appstream 2.0 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: librgw2 x86_64 2:20.2.0-721.g5bb32787.el9 @ceph 28 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: lttng-ust x86_64 2.12.0-6.el9 @appstream 1.0 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: parquet-libs x86_64 9.0.0-15.el9 @epel 2.8 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: re2 x86_64 1:20211101-20.el9 @epel 472 k
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout: thrift x86_64 0.15.0-4.el9 @epel 4.8 M
2026-03-31T23:03:12.006 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:12.007 INFO:teuthology.orchestra.run.vm05.stdout:Transaction Summary
2026-03-31T23:03:12.007 INFO:teuthology.orchestra.run.vm05.stdout:================================================================================
2026-03-31T23:03:12.007 INFO:teuthology.orchestra.run.vm05.stdout:Remove 20 Packages
2026-03-31T23:03:12.007 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:12.007 INFO:teuthology.orchestra.run.vm05.stdout:Freed space: 84 M
2026-03-31T23:03:12.007 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction check
2026-03-31T23:03:12.010 INFO:teuthology.orchestra.run.vm09.stdout:Transaction test succeeded.
2026-03-31T23:03:12.010 INFO:teuthology.orchestra.run.vm09.stdout:Running transaction
2026-03-31T23:03:12.010 INFO:teuthology.orchestra.run.vm05.stdout:Transaction check succeeded.
2026-03-31T23:03:12.010 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction test
2026-03-31T23:03:12.033 INFO:teuthology.orchestra.run.vm05.stdout:Transaction test succeeded.
2026-03-31T23:03:12.033 INFO:teuthology.orchestra.run.vm05.stdout:Running transaction
2026-03-31T23:03:12.052 INFO:teuthology.orchestra.run.vm09.stdout: Preparing : 1/1
2026-03-31T23:03:12.055 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 1/20
2026-03-31T23:03:12.057 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 2/20
2026-03-31T23:03:12.060 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 3/20
2026-03-31T23:03:12.060 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 4/20
2026-03-31T23:03:12.073 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 4/20
2026-03-31T23:03:12.075 INFO:teuthology.orchestra.run.vm05.stdout: Preparing : 1/1
2026-03-31T23:03:12.075 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : parquet-libs-9.0.0-15.el9.x86_64 5/20
2026-03-31T23:03:12.076 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 6/20
2026-03-31T23:03:12.078 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 1/20
2026-03-31T23:03:12.078 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 7/20
2026-03-31T23:03:12.078 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: python3-rados
2026-03-31T23:03:12.078 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:12.080 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 2/20
2026-03-31T23:03:12.080 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64 8/20
2026-03-31T23:03:12.081 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:12.081 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:12.081 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:12.082 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libarrow-doc-9.0.0-15.el9.noarch 9/20 2026-03-31T23:03:12.083 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 10/20 2026-03-31T23:03:12.083 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 3/20 2026-03-31T23:03:12.083 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 4/20 2026-03-31T23:03:12.096 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 10/20 2026-03-31T23:03:12.096 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 11/20 2026-03-31T23:03:12.097 INFO:teuthology.orchestra.run.vm09.stdout:warning: file /etc/ceph: remove failed: No such file or directory 2026-03-31T23:03:12.097 INFO:teuthology.orchestra.run.vm09.stdout: 2026-03-31T23:03:12.097 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 4/20 2026-03-31T23:03:12.098 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : parquet-libs-9.0.0-15.el9.x86_64 5/20 2026-03-31T23:03:12.100 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 6/20 2026-03-31T23:03:12.102 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 7/20 2026-03-31T23:03:12.103 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64 8/20 2026-03-31T23:03:12.106 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libarrow-doc-9.0.0-15.el9.noarch 9/20 2026-03-31T23:03:12.106 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 10/20 2026-03-31T23:03:12.111 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librados2-2:20.2.0-721.g5bb32787.el9.x86_64 11/20 2026-03-31T23:03:12.113 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libarrow-9.0.0-15.el9.x86_64 12/20 2026-03-31T23:03:12.116 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : re2-1:20211101-20.el9.x86_64 13/20 2026-03-31T23:03:12.119 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 10/20 2026-03-31T23:03:12.119 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 11/20 2026-03-31T23:03:12.119 INFO:teuthology.orchestra.run.vm05.stdout:warning: file /etc/ceph: remove failed: No such file or directory 2026-03-31T23:03:12.119 INFO:teuthology.orchestra.run.vm05.stdout: 2026-03-31T23:03:12.120 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : lttng-ust-2.12.0-6.el9.x86_64 14/20 2026-03-31T23:03:12.122 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : thrift-0.15.0-4.el9.x86_64 15/20 2026-03-31T23:03:12.124 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libnbd-1.20.3-4.el9.x86_64 16/20 2026-03-31T23:03:12.126 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : libpmemobj-1.12.1-1.el9.x86_64 17/20 2026-03-31T23:03:12.128 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : boost-program-options-1.75.0-13.el9.x86_64 18/20 2026-03-31T23:03:12.130 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : librabbitmq-0.11.0-7.el9.x86_64 19/20 2026-03-31T23:03:12.133 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librados2-2:20.2.0-721.g5bb32787.el9.x86_64 11/20 2026-03-31T23:03:12.135 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : 
2026-03-31T23:03:12.138 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : re2-1:20211101-20.el9.x86_64 13/20
2026-03-31T23:03:12.142 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : lttng-ust-2.12.0-6.el9.x86_64 14/20
2026-03-31T23:03:12.144 INFO:teuthology.orchestra.run.vm09.stdout: Erasing : librdkafka-1.6.1-102.el9.x86_64 20/20
2026-03-31T23:03:12.144 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : thrift-0.15.0-4.el9.x86_64 15/20
2026-03-31T23:03:12.147 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libnbd-1.20.3-4.el9.x86_64 16/20
2026-03-31T23:03:12.149 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : libpmemobj-1.12.1-1.el9.x86_64 17/20
2026-03-31T23:03:12.150 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : boost-program-options-1.75.0-13.el9.x86_64 18/20
2026-03-31T23:03:12.153 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : librabbitmq-0.11.0-7.el9.x86_64 19/20
2026-03-31T23:03:12.168 INFO:teuthology.orchestra.run.vm05.stdout: Erasing : librdkafka-1.6.1-102.el9.x86_64 20/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Running scriptlet: librdkafka-1.6.1-102.el9.x86_64 20/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : boost-program-options-1.75.0-13.el9.x86_64 1/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 2/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 3/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 4/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 5/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 6/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 7/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 8/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 9/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 10/20
2026-03-31T23:03:12.212 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 11/20
2026-03-31T23:03:12.213 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 12/20
2026-03-31T23:03:12.213 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 13/20
2026-03-31T23:03:12.213 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 14/20
2026-03-31T23:03:12.213 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 15/20
2026-03-31T23:03:12.213 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64 16/20
2026-03-31T23:03:12.213 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 17/20
2026-03-31T23:03:12.213 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 18/20
2026-03-31T23:03:12.213 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : re2-1:20211101-20.el9.x86_64 19/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Running scriptlet: librdkafka-1.6.1-102.el9.x86_64 20/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : boost-program-options-1.75.0-13.el9.x86_64 1/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libarrow-9.0.0-15.el9.x86_64 2/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libarrow-doc-9.0.0-15.el9.noarch 3/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libnbd-1.20.3-4.el9.x86_64 4/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : libpmemobj-1.12.1-1.el9.x86_64 5/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librabbitmq-0.11.0-7.el9.x86_64 6/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librados2-2:20.2.0-721.g5bb32787.el9.x86_64 7/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librbd1-2:20.2.0-721.g5bb32787.el9.x86_64 8/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librdkafka-1.6.1-102.el9.x86_64 9/20
2026-03-31T23:03:12.229 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : librgw2-2:20.2.0-721.g5bb32787.el9.x86_64 10/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : lttng-ust-2.12.0-6.el9.x86_64 11/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : parquet-libs-9.0.0-15.el9.x86_64 12/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64 13/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64 14/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64 15/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64 16/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64 17/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64 18/20
2026-03-31T23:03:12.230 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : re2-1:20211101-20.el9.x86_64 19/20
2026-03-31T23:03:12.256 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: python3-rgw
2026-03-31T23:03:12.256 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:12.258 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:12.259 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:12.259 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 20/20
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout:Removed:
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: boost-program-options-1.75.0-13.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: libarrow-9.0.0-15.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: libarrow-doc-9.0.0-15.el9.noarch
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: libnbd-1.20.3-4.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: libpmemobj-1.12.1-1.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: librabbitmq-0.11.0-7.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: librados2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: librdkafka-1.6.1-102.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: lttng-ust-2.12.0-6.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: parquet-libs-9.0.0-15.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: re2-1:20211101-20.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout: thrift-0.15.0-4.el9.x86_64
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout:
2026-03-31T23:03:12.269 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: Verifying : thrift-0.15.0-4.el9.x86_64 20/20
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout:Removed:
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: boost-program-options-1.75.0-13.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: libarrow-9.0.0-15.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: libarrow-doc-9.0.0-15.el9.noarch
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: libnbd-1.20.3-4.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: libpmemobj-1.12.1-1.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: librabbitmq-0.11.0-7.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: librados2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: librbd1-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: librdkafka-1.6.1-102.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: librgw2-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.285 INFO:teuthology.orchestra.run.vm05.stdout: lttng-ust-2.12.0-6.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: parquet-libs-9.0.0-15.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: python3-rados-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: python3-rbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: python3-rgw-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: qemu-kvm-block-rbd-17:10.1.0-16.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: rbd-fuse-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: rbd-nbd-2:20.2.0-721.g5bb32787.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: re2-1:20211101-20.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout: thrift-0.15.0-4.el9.x86_64
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout:
2026-03-31T23:03:12.286 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:12.460 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: python3-cephfs
2026-03-31T23:03:12.460 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:12.462 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:12.463 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:12.463 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:12.473 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: librbd1
2026-03-31T23:03:12.473 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:12.476 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:12.477 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:12.477 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:12.493 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: librbd1
2026-03-31T23:03:12.493 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:12.496 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:12.496 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:12.496 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:12.636 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: python3-rbd
2026-03-31T23:03:12.636 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:12.638 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:12.639 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:12.639 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:12.662 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: python3-rados
2026-03-31T23:03:12.662 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:12.665 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:12.665 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:12.665 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:12.676 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: python3-rados
2026-03-31T23:03:12.676 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:12.678 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:12.679 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:12.679 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:12.804 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: rbd-fuse
2026-03-31T23:03:12.804 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:12.806 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:12.807 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:12.807 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:12.831 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: python3-rgw
2026-03-31T23:03:12.831 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:12.833 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:12.834 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:12.834 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:12.844 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: python3-rgw
2026-03-31T23:03:12.845 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:12.847 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:12.847 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:12.847 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:12.981 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: rbd-mirror
2026-03-31T23:03:12.981 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:12.983 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:12.984 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:12.984 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:12.999 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: python3-cephfs
2026-03-31T23:03:13.000 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:13.002 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:13.003 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:13.003 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:13.020 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: python3-cephfs
2026-03-31T23:03:13.020 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:13.022 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:13.023 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:13.023 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:13.154 INFO:teuthology.orchestra.run.vm00.stdout:No match for argument: rbd-nbd
2026-03-31T23:03:13.155 INFO:teuthology.orchestra.run.vm00.stderr:No packages marked for removal.
2026-03-31T23:03:13.157 INFO:teuthology.orchestra.run.vm00.stdout:Dependencies resolved.
2026-03-31T23:03:13.158 INFO:teuthology.orchestra.run.vm00.stdout:Nothing to do.
2026-03-31T23:03:13.158 INFO:teuthology.orchestra.run.vm00.stdout:Complete!
2026-03-31T23:03:13.173 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: python3-rbd
2026-03-31T23:03:13.173 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:13.176 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:13.177 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:13.177 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:13.194 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: python3-rbd
2026-03-31T23:03:13.194 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:13.196 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:13.197 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:13.197 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:13.234 DEBUG:teuthology.orchestra.run.vm00:> sudo yum clean all
2026-03-31T23:03:13.367 INFO:teuthology.orchestra.run.vm00.stdout:56 files removed
2026-03-31T23:03:13.383 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: rbd-fuse
2026-03-31T23:03:13.383 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:13.383 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: rbd-fuse
2026-03-31T23:03:13.383 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:13.385 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:13.386 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:13.386 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:13.386 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:13.387 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:13.387 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:13.392 DEBUG:teuthology.orchestra.run.vm00:> sudo rm -f /etc/yum.repos.d/ceph.repo
2026-03-31T23:03:13.416 DEBUG:teuthology.orchestra.run.vm00:> sudo yum clean expire-cache
2026-03-31T23:03:13.552 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: rbd-mirror
2026-03-31T23:03:13.552 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:13.552 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: rbd-mirror
2026-03-31T23:03:13.552 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:13.554 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:13.554 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:13.555 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:13.555 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
2026-03-31T23:03:13.555 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:13.555 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:13.571 INFO:teuthology.orchestra.run.vm00.stdout:Cache was expired
2026-03-31T23:03:13.571 INFO:teuthology.orchestra.run.vm00.stdout:0 files removed
2026-03-31T23:03:13.591 DEBUG:teuthology.parallel:result is None
2026-03-31T23:03:13.719 INFO:teuthology.orchestra.run.vm09.stdout:No match for argument: rbd-nbd
2026-03-31T23:03:13.719 INFO:teuthology.orchestra.run.vm09.stderr:No packages marked for removal.
2026-03-31T23:03:13.721 INFO:teuthology.orchestra.run.vm09.stdout:Dependencies resolved.
2026-03-31T23:03:13.721 INFO:teuthology.orchestra.run.vm09.stdout:Nothing to do.
2026-03-31T23:03:13.721 INFO:teuthology.orchestra.run.vm09.stdout:Complete!
2026-03-31T23:03:13.725 INFO:teuthology.orchestra.run.vm05.stdout:No match for argument: rbd-nbd
2026-03-31T23:03:13.725 INFO:teuthology.orchestra.run.vm05.stderr:No packages marked for removal.
2026-03-31T23:03:13.728 INFO:teuthology.orchestra.run.vm05.stdout:Dependencies resolved.
2026-03-31T23:03:13.728 INFO:teuthology.orchestra.run.vm05.stdout:Nothing to do.
2026-03-31T23:03:13.728 INFO:teuthology.orchestra.run.vm05.stdout:Complete!
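The per-package removal pass is now complete on all three nodes. The install task issues one removal per Ceph-related package per host, and packages that were never installed on a node (rbd-mirror, rbd-nbd, and so on, above) simply report "No match for argument" and "No packages marked for removal" without failing the run. A minimal sketch of that tolerant pattern; the package list here is illustrative, not the exact list teuthology computes:

    # Sketch only: remove each test package individually; dnf treats a
    # package that is not installed as a no-op ("No match for argument",
    # exit status 0), so absent packages need no special handling.
    for pkg in librados2 librbd1 python3-rados python3-rbd python3-rgw \
               python3-cephfs rbd-fuse rbd-mirror rbd-nbd; do
        sudo dnf remove -y "$pkg"
    done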
2026-03-31T23:03:13.742 DEBUG:teuthology.orchestra.run.vm09:> sudo yum clean all
2026-03-31T23:03:13.750 DEBUG:teuthology.orchestra.run.vm05:> sudo yum clean all
2026-03-31T23:03:13.874 INFO:teuthology.orchestra.run.vm09.stdout:56 files removed
2026-03-31T23:03:13.878 INFO:teuthology.orchestra.run.vm05.stdout:56 files removed
2026-03-31T23:03:13.893 DEBUG:teuthology.orchestra.run.vm09:> sudo rm -f /etc/yum.repos.d/ceph.repo
2026-03-31T23:03:13.899 DEBUG:teuthology.orchestra.run.vm05:> sudo rm -f /etc/yum.repos.d/ceph.repo
2026-03-31T23:03:13.917 DEBUG:teuthology.orchestra.run.vm09:> sudo yum clean expire-cache
2026-03-31T23:03:13.923 DEBUG:teuthology.orchestra.run.vm05:> sudo yum clean expire-cache
2026-03-31T23:03:14.068 INFO:teuthology.orchestra.run.vm09.stdout:Cache was expired
2026-03-31T23:03:14.068 INFO:teuthology.orchestra.run.vm09.stdout:0 files removed
2026-03-31T23:03:14.074 INFO:teuthology.orchestra.run.vm05.stdout:Cache was expired
2026-03-31T23:03:14.074 INFO:teuthology.orchestra.run.vm05.stdout:0 files removed
2026-03-31T23:03:14.087 DEBUG:teuthology.parallel:result is None
2026-03-31T23:03:14.094 DEBUG:teuthology.parallel:result is None
2026-03-31T23:03:14.094 INFO:teuthology.task.install:Removing ceph sources lists on ubuntu@vm00.local
2026-03-31T23:03:14.094 INFO:teuthology.task.install:Removing ceph sources lists on ubuntu@vm05.local
2026-03-31T23:03:14.094 INFO:teuthology.task.install:Removing ceph sources lists on ubuntu@vm09.local
2026-03-31T23:03:14.094 DEBUG:teuthology.orchestra.run.vm00:> sudo rm -f /etc/yum.repos.d/ceph.repo
2026-03-31T23:03:14.094 DEBUG:teuthology.orchestra.run.vm05:> sudo rm -f /etc/yum.repos.d/ceph.repo
2026-03-31T23:03:14.094 DEBUG:teuthology.orchestra.run.vm09:> sudo rm -f /etc/yum.repos.d/ceph.repo
2026-03-31T23:03:14.117 DEBUG:teuthology.orchestra.run.vm05:> sudo mv -f /etc/yum/pluginconf.d/priorities.conf.orig /etc/yum/pluginconf.d/priorities.conf
2026-03-31T23:03:14.117 DEBUG:teuthology.orchestra.run.vm09:> sudo mv -f /etc/yum/pluginconf.d/priorities.conf.orig /etc/yum/pluginconf.d/priorities.conf
2026-03-31T23:03:14.119 DEBUG:teuthology.orchestra.run.vm00:> sudo mv -f /etc/yum/pluginconf.d/priorities.conf.orig /etc/yum/pluginconf.d/priorities.conf
2026-03-31T23:03:14.180 DEBUG:teuthology.parallel:result is None
2026-03-31T23:03:14.181 DEBUG:teuthology.parallel:result is None
2026-03-31T23:03:14.183 DEBUG:teuthology.parallel:result is None
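With the packages gone, every host runs the same repository cleanup: flush the package caches, delete the test Ceph repo file, expire the metadata cache, and put back the yum priorities plugin configuration saved during setup. As a sketch assembled from the commands logged above:

    # Per-host cleanup sequence (commands taken verbatim from the log).
    sudo yum clean all                     # drop cached packages and metadata
    sudo rm -f /etc/yum.repos.d/ceph.repo  # remove the test repository
    sudo yum clean expire-cache            # force a metadata refresh next time
    # restore the priorities plugin config saved during setup
    sudo mv -f /etc/yum/pluginconf.d/priorities.conf.orig \
               /etc/yum/pluginconf.d/priorities.conf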
2026-03-31T23:03:14.183 DEBUG:teuthology.run_tasks:Unwinding manager clock
2026-03-31T23:03:14.185 INFO:teuthology.task.clock:Checking final clock skew...
2026-03-31T23:03:14.185 DEBUG:teuthology.orchestra.run.vm00:> PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true
2026-03-31T23:03:14.223 DEBUG:teuthology.orchestra.run.vm05:> PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true
2026-03-31T23:03:14.224 DEBUG:teuthology.orchestra.run.vm09:> PATH=/usr/bin:/usr/sbin ntpq -p || PATH=/usr/bin:/usr/sbin chronyc sources || true
2026-03-31T23:03:14.236 INFO:teuthology.orchestra.run.vm00.stderr:bash: line 1: ntpq: command not found
2026-03-31T23:03:14.236 INFO:teuthology.orchestra.run.vm05.stderr:bash: line 1: ntpq: command not found
2026-03-31T23:03:14.239 INFO:teuthology.orchestra.run.vm09.stderr:bash: line 1: ntpq: command not found
2026-03-31T23:03:14.257 INFO:teuthology.orchestra.run.vm00.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample
2026-03-31T23:03:14.257 INFO:teuthology.orchestra.run.vm00.stdout:===============================================================================
2026-03-31T23:03:14.257 INFO:teuthology.orchestra.run.vm00.stdout:^* nbg01.muxx.net 2 6 377 32 +171us[ +169us] +/- 15ms
2026-03-31T23:03:14.257 INFO:teuthology.orchestra.run.vm00.stdout:^- telesto.hot-chilli.net 2 6 277 33 -111us[ -112us] +/- 47ms
2026-03-31T23:03:14.257 INFO:teuthology.orchestra.run.vm00.stdout:^+ ntp2.kernfusion.at 2 6 377 32 +353us[ +353us] +/- 24ms
2026-03-31T23:03:14.257 INFO:teuthology.orchestra.run.vm00.stdout:^+ gsuit.shop 2 6 377 30 -248us[ -248us] +/- 15ms
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm09.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm09.stdout:===============================================================================
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm09.stdout:^+ ntp2.kernfusion.at 2 6 377 30 -794us[ -794us] +/- 23ms
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm09.stdout:^- telesto.hot-chilli.net 2 7 17 86 +40us[ +19us] +/- 46ms
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm09.stdout:^* gsuit.shop 2 6 377 30 +136us[ +115us] +/- 15ms
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm09.stdout:^+ nbg01.muxx.net 2 6 377 30 +339us[ +319us] +/- 15ms
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm05.stdout:MS Name/IP address Stratum Poll Reach LastRx Last sample
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm05.stdout:===============================================================================
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm05.stdout:^* nbg01.muxx.net 2 6 377 30 +175us[ +183us] +/- 15ms
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm05.stdout:^- telesto.hot-chilli.net 2 7 340 615 -51us[-1163us] +/- 39ms
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm05.stdout:^+ ntp2.kernfusion.at 2 6 377 30 -237us[ -228us] +/- 25ms
2026-03-31T23:03:14.258 INFO:teuthology.orchestra.run.vm05.stdout:^+ gsuit.shop 2 6 377 30 -8892ns[ -52ns] +/- 15ms
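The final skew check tries ntpq first and falls back to chronyc; on these CentOS 9 hosts ntpq is not installed ("command not found" above), so chronyc reports the sources instead, and the trailing || true keeps the teardown non-fatal even if neither tool is present. The fallback, as logged:

    # Verbatim pattern from the log: query NTP, fall back to chrony,
    # and never fail teardown on a missing time daemon.
    PATH=/usr/bin:/usr/sbin ntpq -p \
        || PATH=/usr/bin:/usr/sbin chronyc sources \
        || true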
2026-03-31T23:03:14.258 DEBUG:teuthology.run_tasks:Unwinding manager ansible.cephlab
2026-03-31T23:03:14.261 INFO:teuthology.task.ansible:Skipping ansible cleanup...
2026-03-31T23:03:14.261 DEBUG:teuthology.run_tasks:Unwinding manager selinux
2026-03-31T23:03:14.263 DEBUG:teuthology.run_tasks:Unwinding manager pcp
2026-03-31T23:03:14.265 DEBUG:teuthology.run_tasks:Unwinding manager internal.timer
2026-03-31T23:03:14.267 INFO:teuthology.task.internal:Duration was 1908.564692 seconds
2026-03-31T23:03:14.267 DEBUG:teuthology.run_tasks:Unwinding manager internal.syslog
2026-03-31T23:03:14.269 INFO:teuthology.task.internal.syslog:Shutting down syslog monitoring...
2026-03-31T23:03:14.269 DEBUG:teuthology.orchestra.run.vm00:> sudo rm -f -- /etc/rsyslog.d/80-cephtest.conf && sudo service rsyslog restart
2026-03-31T23:03:14.300 DEBUG:teuthology.orchestra.run.vm05:> sudo rm -f -- /etc/rsyslog.d/80-cephtest.conf && sudo service rsyslog restart
2026-03-31T23:03:14.302 DEBUG:teuthology.orchestra.run.vm09:> sudo rm -f -- /etc/rsyslog.d/80-cephtest.conf && sudo service rsyslog restart
2026-03-31T23:03:14.338 INFO:teuthology.orchestra.run.vm05.stderr:Redirecting to /bin/systemctl restart rsyslog.service
2026-03-31T23:03:14.339 INFO:teuthology.orchestra.run.vm00.stderr:Redirecting to /bin/systemctl restart rsyslog.service
2026-03-31T23:03:14.342 INFO:teuthology.orchestra.run.vm09.stderr:Redirecting to /bin/systemctl restart rsyslog.service
2026-03-31T23:03:14.523 INFO:teuthology.task.internal.syslog:Checking logs for errors...
2026-03-31T23:03:14.523 DEBUG:teuthology.task.internal.syslog:Checking ubuntu@vm00.local
2026-03-31T23:03:14.523 DEBUG:teuthology.orchestra.run.vm00:> grep -E --binary-files=text '\bBUG\b|\bINFO\b|\bDEADLOCK\b' /home/ubuntu/cephtest/archive/syslog/kern.log | grep -v 'task .* blocked for more than .* seconds' | grep -v 'lockdep is turned off' | grep -v 'trying to register non-static key' | grep -v 'DEBUG: fsize' | grep -v CRON | grep -v 'BUG: bad unlock balance detected' | grep -v 'inconsistent lock state' | grep -v '*** DEADLOCK ***' | grep -v 'INFO: possible irq lock inversion dependency detected' | grep -v 'INFO: NMI handler (perf_event_nmi_handler) took too long to run' | grep -v 'INFO: recovery required on readonly' | grep -v 'ceph-create-keys: INFO' | grep -v INFO:ceph-create-keys | grep -v 'Loaded datasource DataSourceOpenStack' | grep -v 'container-storage-setup: INFO: Volume group backing root filesystem could not be determined' | grep -E -v '\bsalt-master\b|\bsalt-minion\b|\bsalt-api\b' | grep -v ceph-crash | grep -E -v '\btcmu-runner\b.*\bINFO\b' | head -n 1
2026-03-31T23:03:14.542 DEBUG:teuthology.task.internal.syslog:Checking ubuntu@vm05.local
2026-03-31T23:03:14.542 DEBUG:teuthology.orchestra.run.vm05:> grep -E --binary-files=text '\bBUG\b|\bINFO\b|\bDEADLOCK\b' /home/ubuntu/cephtest/archive/syslog/kern.log | grep -v 'task .* blocked for more than .* seconds' | grep -v 'lockdep is turned off' | grep -v 'trying to register non-static key' | grep -v 'DEBUG: fsize' | grep -v CRON | grep -v 'BUG: bad unlock balance detected' | grep -v 'inconsistent lock state' | grep -v '*** DEADLOCK ***' | grep -v 'INFO: possible irq lock inversion dependency detected' | grep -v 'INFO: NMI handler (perf_event_nmi_handler) took too long to run' | grep -v 'INFO: recovery required on readonly' | grep -v 'ceph-create-keys: INFO' | grep -v INFO:ceph-create-keys | grep -v 'Loaded datasource DataSourceOpenStack' | grep -v 'container-storage-setup: INFO: Volume group backing root filesystem could not be determined' | grep -E -v '\bsalt-master\b|\bsalt-minion\b|\bsalt-api\b' | grep -v ceph-crash | grep -E -v '\btcmu-runner\b.*\bINFO\b' | head -n 1
2026-03-31T23:03:14.581 DEBUG:teuthology.task.internal.syslog:Checking ubuntu@vm09.local
2026-03-31T23:03:14.581 DEBUG:teuthology.orchestra.run.vm09:> grep -E --binary-files=text '\bBUG\b|\bINFO\b|\bDEADLOCK\b' /home/ubuntu/cephtest/archive/syslog/kern.log | grep -v 'task .* blocked for more than .* seconds' | grep -v 'lockdep is turned off' | grep -v 'trying to register non-static key' | grep -v 'DEBUG: fsize' | grep -v CRON | grep -v 'BUG: bad unlock balance detected' | grep -v 'inconsistent lock state' | grep -v '*** DEADLOCK ***' | grep -v 'INFO: possible irq lock inversion dependency detected' | grep -v 'INFO: NMI handler (perf_event_nmi_handler) took too long to run' | grep -v 'INFO: recovery required on readonly' | grep -v 'ceph-create-keys: INFO' | grep -v INFO:ceph-create-keys | grep -v 'Loaded datasource DataSourceOpenStack' | grep -v 'container-storage-setup: INFO: Volume group backing root filesystem could not be determined' | grep -E -v '\bsalt-master\b|\bsalt-minion\b|\bsalt-api\b' | grep -v ceph-crash | grep -E -v '\btcmu-runner\b.*\bINFO\b' | head -n 1
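Each host's kern.log is scanned with the same pipeline: select lines carrying likely error markers (BUG, INFO, DEADLOCK), strip a long allowlist of known-benign messages, and keep only the first survivor; a non-empty result would flag the host. An abbreviated sketch of the shape, with only two allowlist entries shown (the real exclusion list above is much longer):

    # Sketch: report the first kernel-log line that looks like an error
    # and is not on the known-benign allowlist.
    grep -E --binary-files=text '\bBUG\b|\bINFO\b|\bDEADLOCK\b' \
        /home/ubuntu/cephtest/archive/syslog/kern.log \
      | grep -v 'lockdep is turned off' \
      | grep -v CRON \
      | head -n 1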
2026-03-31T23:03:14.603 INFO:teuthology.task.internal.syslog:Gathering journalctl...
2026-03-31T23:03:14.604 DEBUG:teuthology.orchestra.run.vm00:> sudo journalctl > /home/ubuntu/cephtest/archive/syslog/journalctl.log
2026-03-31T23:03:14.605 DEBUG:teuthology.orchestra.run.vm05:> sudo journalctl > /home/ubuntu/cephtest/archive/syslog/journalctl.log
2026-03-31T23:03:14.623 DEBUG:teuthology.orchestra.run.vm09:> sudo journalctl > /home/ubuntu/cephtest/archive/syslog/journalctl.log
2026-03-31T23:03:16.248 INFO:teuthology.task.internal.syslog:Compressing syslogs...
2026-03-31T23:03:16.249 DEBUG:teuthology.orchestra.run.vm00:> find /home/ubuntu/cephtest/archive/syslog -name '*.log' -print0 | sudo xargs -0 --max-args=1 --max-procs=0 --verbose --no-run-if-empty -- gzip -5 --verbose --
2026-03-31T23:03:16.250 DEBUG:teuthology.orchestra.run.vm05:> find /home/ubuntu/cephtest/archive/syslog -name '*.log' -print0 | sudo xargs -0 --max-args=1 --max-procs=0 --verbose --no-run-if-empty -- gzip -5 --verbose --
2026-03-31T23:03:16.252 DEBUG:teuthology.orchestra.run.vm09:> find /home/ubuntu/cephtest/archive/syslog -name '*.log' -print0 | sudo xargs -0 --max-args=1 --max-procs=0 --verbose --no-run-if-empty -- gzip -5 --verbose --
2026-03-31T23:03:16.274 INFO:teuthology.orchestra.run.vm05.stderr:gzip -5 --verbose -- /home/ubuntu/cephtest/archive/syslog/kern.log
2026-03-31T23:03:16.274 INFO:teuthology.orchestra.run.vm05.stderr:gzip -5 --verbose -- /home/ubuntu/cephtest/archive/syslog/misc.log
2026-03-31T23:03:16.275 INFO:teuthology.orchestra.run.vm05.stderr:/home/ubuntu/cephtest/archive/syslog/kern.log: 0.0% -- replaced with /home/ubuntu/cephtest/archive/syslog/kern.log.gz
2026-03-31T23:03:16.275 INFO:teuthology.orchestra.run.vm05.stderr:gzip -5 --verbose -- /home/ubuntu/cephtest/archive/syslog/journalctl.log
2026-03-31T23:03:16.275 INFO:teuthology.orchestra.run.vm00.stderr:gzip -5 --verbose -- /home/ubuntu/cephtest/archive/syslog/kern.log
2026-03-31T23:03:16.275 INFO:teuthology.orchestra.run.vm05.stderr:/home/ubuntu/cephtest/archive/syslog/misc.log: 0.0% -- replaced with /home/ubuntu/cephtest/archive/syslog/misc.log.gz
2026-03-31T23:03:16.275 INFO:teuthology.orchestra.run.vm00.stderr:gzip -5 --verbose -- /home/ubuntu/cephtest/archive/syslog/misc.log
2026-03-31T23:03:16.276 INFO:teuthology.orchestra.run.vm00.stderr:/home/ubuntu/cephtest/archive/syslog/kern.log: gzip -5 0.0% -- replaced with /home/ubuntu/cephtest/archive/syslog/kern.log.gz
2026-03-31T23:03:16.276 INFO:teuthology.orchestra.run.vm00.stderr: --verbose -- /home/ubuntu/cephtest/archive/syslog/journalctl.log
2026-03-31T23:03:16.276 INFO:teuthology.orchestra.run.vm00.stderr:/home/ubuntu/cephtest/archive/syslog/misc.log: 0.0% -- replaced with /home/ubuntu/cephtest/archive/syslog/misc.log.gz
2026-03-31T23:03:16.276 INFO:teuthology.orchestra.run.vm09.stderr:gzip -5 --verbose -- /home/ubuntu/cephtest/archive/syslog/kern.log
2026-03-31T23:03:16.276 INFO:teuthology.orchestra.run.vm09.stderr:gzip -5 --verbose -- /home/ubuntu/cephtest/archive/syslog/misc.log
2026-03-31T23:03:16.277 INFO:teuthology.orchestra.run.vm09.stderr:/home/ubuntu/cephtest/archive/syslog/kern.log: gzip -5 --verbose -- /home/ubuntu/cephtest/archive/syslog/journalctl.log
2026-03-31T23:03:16.278 INFO:teuthology.orchestra.run.vm09.stderr: 0.0% -- replaced with /home/ubuntu/cephtest/archive/syslog/kern.log.gz
2026-03-31T23:03:16.278 INFO:teuthology.orchestra.run.vm09.stderr:/home/ubuntu/cephtest/archive/syslog/misc.log: 0.0% -- replaced with /home/ubuntu/cephtest/archive/syslog/misc.log.gz
2026-03-31T23:03:16.526 INFO:teuthology.orchestra.run.vm00.stderr:/home/ubuntu/cephtest/archive/syslog/journalctl.log: 98.2% -- replaced with /home/ubuntu/cephtest/archive/syslog/journalctl.log.gz
2026-03-31T23:03:16.575 INFO:teuthology.orchestra.run.vm05.stderr:/home/ubuntu/cephtest/archive/syslog/journalctl.log: 98.5% -- replaced with /home/ubuntu/cephtest/archive/syslog/journalctl.log.gz
2026-03-31T23:03:16.879 INFO:teuthology.orchestra.run.vm09.stderr:/home/ubuntu/cephtest/archive/syslog/journalctl.log: 98.6% -- replaced with /home/ubuntu/cephtest/archive/syslog/journalctl.log.gz
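The syslog archive is compressed with a find | xargs pipeline: -print0/-0 keep arbitrary file names safe, and --max-args=1 with --max-procs=0 runs one gzip per file with as many processes in parallel as there are files, which is also why the gzip --verbose messages for the three logs interleave in the stderr capture above. The command, as logged:

    # Verbatim from the log: compress every .log in the syslog archive,
    # one gzip process per file, all run in parallel (--max-procs=0).
    find /home/ubuntu/cephtest/archive/syslog -name '*.log' -print0 \
      | sudo xargs -0 --max-args=1 --max-procs=0 --verbose --no-run-if-empty \
          -- gzip -5 --verbose --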
2026-03-31T23:03:16.881 DEBUG:teuthology.run_tasks:Unwinding manager internal.sudo
2026-03-31T23:03:16.884 INFO:teuthology.task.internal:Restoring /etc/sudoers...
2026-03-31T23:03:16.884 DEBUG:teuthology.orchestra.run.vm00:> sudo mv -f /etc/sudoers.orig.teuthology /etc/sudoers
2026-03-31T23:03:16.909 DEBUG:teuthology.orchestra.run.vm05:> sudo mv -f /etc/sudoers.orig.teuthology /etc/sudoers
2026-03-31T23:03:16.936 DEBUG:teuthology.orchestra.run.vm09:> sudo mv -f /etc/sudoers.orig.teuthology /etc/sudoers
2026-03-31T23:03:16.961 DEBUG:teuthology.run_tasks:Unwinding manager internal.coredump
2026-03-31T23:03:16.963 DEBUG:teuthology.orchestra.run.vm00:> sudo sysctl -w kernel.core_pattern=core && sudo bash -c 'for f in `find /home/ubuntu/cephtest/archive/coredump -type f`; do file $f | grep -q systemd-sysusers && rm $f || true ; done' && rmdir --ignore-fail-on-non-empty -- /home/ubuntu/cephtest/archive/coredump
2026-03-31T23:03:16.965 DEBUG:teuthology.orchestra.run.vm05:> sudo sysctl -w kernel.core_pattern=core && sudo bash -c 'for f in `find /home/ubuntu/cephtest/archive/coredump -type f`; do file $f | grep -q systemd-sysusers && rm $f || true ; done' && rmdir --ignore-fail-on-non-empty -- /home/ubuntu/cephtest/archive/coredump
2026-03-31T23:03:16.978 DEBUG:teuthology.orchestra.run.vm09:> sudo sysctl -w kernel.core_pattern=core && sudo bash -c 'for f in `find /home/ubuntu/cephtest/archive/coredump -type f`; do file $f | grep -q systemd-sysusers && rm $f || true ; done' && rmdir --ignore-fail-on-non-empty -- /home/ubuntu/cephtest/archive/coredump
2026-03-31T23:03:16.986 INFO:teuthology.orchestra.run.vm00.stdout:kernel.core_pattern = core
2026-03-31T23:03:17.005 INFO:teuthology.orchestra.run.vm05.stdout:kernel.core_pattern = core
2026-03-31T23:03:17.026 INFO:teuthology.orchestra.run.vm09.stdout:kernel.core_pattern = core
2026-03-31T23:03:17.039 DEBUG:teuthology.orchestra.run.vm00:> test -e /home/ubuntu/cephtest/archive/coredump
2026-03-31T23:03:17.054 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T23:03:17.054 DEBUG:teuthology.orchestra.run.vm05:> test -e /home/ubuntu/cephtest/archive/coredump
2026-03-31T23:03:17.070 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T23:03:17.070 DEBUG:teuthology.orchestra.run.vm09:> test -e /home/ubuntu/cephtest/archive/coredump
2026-03-31T23:03:17.093 DEBUG:teuthology.orchestra.run:got remote process result: 1
2026-03-31T23:03:17.093 DEBUG:teuthology.run_tasks:Unwinding manager internal.archive
2026-03-31T23:03:17.096 INFO:teuthology.task.internal:Transferring archived files...
2026-03-31T23:03:17.096 DEBUG:teuthology.misc:Transferring archived files from vm00:/home/ubuntu/cephtest/archive to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm00
2026-03-31T23:03:17.096 DEBUG:teuthology.orchestra.run.vm00:> sudo tar c -f - -C /home/ubuntu/cephtest/archive -- .
2026-03-31T23:03:17.124 DEBUG:teuthology.misc:Transferring archived files from vm05:/home/ubuntu/cephtest/archive to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm05
2026-03-31T23:03:17.124 DEBUG:teuthology.orchestra.run.vm05:> sudo tar c -f - -C /home/ubuntu/cephtest/archive -- .
2026-03-31T23:03:17.152 DEBUG:teuthology.misc:Transferring archived files from vm09:/home/ubuntu/cephtest/archive to /archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm09
2026-03-31T23:03:17.152 DEBUG:teuthology.orchestra.run.vm09:> sudo tar c -f - -C /home/ubuntu/cephtest/archive -- .
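The per-host archives are pulled by streaming a tar of the remote archive directory to stdout and unpacking it on the teuthology node. A sketch of the equivalent plumbing; the ssh framing and local extraction shown here are illustrative, since teuthology drives the remote command through its own connection layer rather than a literal ssh pipeline:

    # Sketch only: stream the remote archive over SSH and unpack it into
    # the local per-job directory (destination path taken from the log).
    DEST=/archive/kyr-2026-03-31_11:18:10-rados-tentacle-none-default-vps/4360/remote/vm00
    mkdir -p "$DEST"
    ssh ubuntu@vm00.local 'sudo tar c -f - -C /home/ubuntu/cephtest/archive -- .' \
      | tar x -f - -C "$DEST"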
2026-03-31T23:03:17.187 INFO:teuthology.task.internal:Removing archive directory...
2026-03-31T23:03:17.187 DEBUG:teuthology.orchestra.run.vm00:> rm -rf -- /home/ubuntu/cephtest/archive
2026-03-31T23:03:17.188 DEBUG:teuthology.orchestra.run.vm05:> rm -rf -- /home/ubuntu/cephtest/archive
2026-03-31T23:03:17.191 DEBUG:teuthology.orchestra.run.vm09:> rm -rf -- /home/ubuntu/cephtest/archive
2026-03-31T23:03:17.240 DEBUG:teuthology.run_tasks:Unwinding manager internal.archive_upload
2026-03-31T23:03:17.243 INFO:teuthology.task.internal:Not uploading archives.
2026-03-31T23:03:17.243 DEBUG:teuthology.run_tasks:Unwinding manager internal.base
2026-03-31T23:03:17.249 INFO:teuthology.task.internal:Tidying up after the test...
2026-03-31T23:03:17.249 DEBUG:teuthology.orchestra.run.vm00:> find /home/ubuntu/cephtest -ls ; rmdir -- /home/ubuntu/cephtest
2026-03-31T23:03:17.251 DEBUG:teuthology.orchestra.run.vm05:> find /home/ubuntu/cephtest -ls ; rmdir -- /home/ubuntu/cephtest
2026-03-31T23:03:17.253 DEBUG:teuthology.orchestra.run.vm09:> find /home/ubuntu/cephtest -ls ; rmdir -- /home/ubuntu/cephtest
2026-03-31T23:03:17.267 INFO:teuthology.orchestra.run.vm00.stdout: 8532141 0 drwxr-xr-x 2 ubuntu ubuntu 6 Mar 31 23:03 /home/ubuntu/cephtest
2026-03-31T23:03:17.268 INFO:teuthology.orchestra.run.vm05.stdout: 8532146 0 drwxr-xr-x 2 ubuntu ubuntu 6 Mar 31 23:03 /home/ubuntu/cephtest
2026-03-31T23:03:17.298 INFO:teuthology.orchestra.run.vm09.stdout: 8532145 0 drwxr-xr-x 2 ubuntu ubuntu 6 Mar 31 23:03 /home/ubuntu/cephtest
2026-03-31T23:03:17.299 DEBUG:teuthology.run_tasks:Unwinding manager console_log
2026-03-31T23:03:17.304 INFO:teuthology.run:Summary data:
description: rados/cephadm/workunits/{0-distro/centos_9.stream agent/on mon_election/connectivity
  task/test_monitoring_stack_basic}
duration: 1908.564691543579
flavor: default
owner: kyr
success: true
2026-03-31T23:03:17.304 DEBUG:teuthology.report:Pushing job info to http://localhost:8080
2026-03-31T23:03:17.332 INFO:teuthology.run:pass