Nodes: smithi204

Description: fs/thrash/{begin ceph-thrash/default clusters/1-mds-1-client-coloc conf/{client mds mon osd} mount/fuse msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout whitelist_health whitelist_wrongly_marked_down} supported-random-distros$/{ubuntu_16.04} tasks/cfuse_workunit_suites_pjd}

Log: http://qa-proxy.ceph.com/teuthology/yuriw-2021-06-18_15:20:52-fs-nautilus-distro-basic-smithi/6179295/teuthology.log

Sentry event: https://sentry.ceph.com/organizations/ceph/?query=530aa06c1bd34730a82e1961b89e2722

Failure Reason:

{'Failure object was': {'smithi081.front.sepia.ceph.com': {'results': [{'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'loop0', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'loop0', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}}, {'changed': True, 'end': '2021-06-18 16:33:39.388851', 'stdout': 'Creating new GPT entries.\\nWarning: The kernel is still using the old partition table.\\nThe new table will be used at the next reboot or after you\\nrun partprobe(8) or kpartx(8)\\nGPT data structures destroyed! You may now partition the disk using fdisk or\\nother utilities.', 'cmd': 'sgdisk --zap-all /dev/dm-2 || sgdisk --zap-all /dev/dm-2', 'rc': 0, 'start': '2021-06-18 16:33:37.855286', 'stderr': '', 'delta': '0:00:01.533565', 'invocation': {'module_args': {'creates': 'None', 'executable': 'None', '_uses_shell': True, 'strip_empty_ends': True, '_raw_params': 'sgdisk --zap-all /dev/dm-2 || sgdisk --zap-all /dev/dm-2', 'removes': 'None', 'argv': 'None', 'warn': True, 'chdir': 'None', 'stdin_add_newline': True, 'stdin': 'None'}}, 'stdout_lines': ['Creating new GPT entries.', 'Warning: The kernel is still using the old partition table.', 'The new table will be used at the next reboot or after you', 'run partprobe(8) or kpartx(8)', 'GPT data structures destroyed! 
You may now partition the disk using fdisk or', 'other utilities.'], 'stderr_lines': [], '_ansible_no_log': False, 'failed': False, 'item': {'key': 'dm-2', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '187490304', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_3', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvTFTri7Yflz1IDmOa73TmRv7BJk1Se1F93'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '89.40 GB'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'dm-2', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '187490304', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_3', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvTFTri7Yflz1IDmOa73TmRv7BJk1Se1F93'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '89.40 GB'}}}, {'changed': True, 'end': '2021-06-18 16:33:41.355148', 'stdout': 'Creating new GPT entries.\\nWarning: The kernel is still using the old partition table.\\nThe new table will be used at the next reboot or after you\\nrun partprobe(8) or kpartx(8)\\nGPT data structures destroyed! You may now partition the disk using fdisk or\\nother utilities.', 'cmd': 'sgdisk --zap-all /dev/dm-3 || sgdisk --zap-all /dev/dm-3', 'rc': 0, 'start': '2021-06-18 16:33:39.550281', 'stderr': '', 'delta': '0:00:01.804867', 'invocation': {'module_args': {'creates': 'None', 'executable': 'None', '_uses_shell': True, 'strip_empty_ends': True, '_raw_params': 'sgdisk --zap-all /dev/dm-3 || sgdisk --zap-all /dev/dm-3', 'removes': 'None', 'argv': 'None', 'warn': True, 'chdir': 'None', 'stdin_add_newline': True, 'stdin': 'None'}}, 'stdout_lines': ['Creating new GPT entries.', 'Warning: The kernel is still using the old partition table.', 'The new table will be used at the next reboot or after you', 'run partprobe(8) or kpartx(8)', 'GPT data structures destroyed! 
You may now partition the disk using fdisk or', 'other utilities.'], 'stderr_lines': [], '_ansible_no_log': False, 'failed': False, 'item': {'key': 'dm-3', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '187490304', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_4', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvTtEeUlP3Hf9e5X1wqx3Bey3EeGCclsy3v'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '89.40 GB'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'dm-3', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '187490304', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_4', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvTtEeUlP3Hf9e5X1wqx3Bey3EeGCclsy3v'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '89.40 GB'}}}, {'changed': True, 'end': '2021-06-18 16:33:42.580833', 'stdout': 'Creating new GPT entries.\\nWarning: The kernel is still using the old partition table.\\nThe new table will be used at the next reboot or after you\\nrun partprobe(8) or kpartx(8)\\nGPT data structures destroyed! You may now partition the disk using fdisk or\\nother utilities.', 'cmd': 'sgdisk --zap-all /dev/dm-1 || sgdisk --zap-all /dev/dm-1', 'rc': 0, 'start': '2021-06-18 16:33:41.531683', 'stderr': '', 'delta': '0:00:01.049150', 'invocation': {'module_args': {'creates': 'None', 'executable': 'None', '_uses_shell': True, 'strip_empty_ends': True, '_raw_params': 'sgdisk --zap-all /dev/dm-1 || sgdisk --zap-all /dev/dm-1', 'removes': 'None', 'argv': 'None', 'warn': True, 'chdir': 'None', 'stdin_add_newline': True, 'stdin': 'None'}}, 'stdout_lines': ['Creating new GPT entries.', 'Warning: The kernel is still using the old partition table.', 'The new table will be used at the next reboot or after you', 'run partprobe(8) or kpartx(8)', 'GPT data structures destroyed! 
You may now partition the disk using fdisk or', 'other utilities.'], 'stderr_lines': [], '_ansible_no_log': False, 'failed': False, 'item': {'key': 'dm-1', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '187490304', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_2', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvTv76DsoooFe2uAqwOSMTiPXoEtGpEev99'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '89.40 GB'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'dm-1', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '187490304', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_2', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvTv76DsoooFe2uAqwOSMTiPXoEtGpEev99'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '89.40 GB'}}}, {'changed': True, 'end': '2021-06-18 16:33:43.795806', 'stdout': 'Creating new GPT entries.\\nWarning: The kernel is still using the old partition table.\\nThe new table will be used at the next reboot or after you\\nrun partprobe(8) or kpartx(8)\\nGPT data structures destroyed! You may now partition the disk using fdisk or\\nother utilities.', 'cmd': 'sgdisk --zap-all /dev/dm-0 || sgdisk --zap-all /dev/dm-0', 'rc': 0, 'start': '2021-06-18 16:33:42.743696', 'stderr': '', 'delta': '0:00:01.052110', 'invocation': {'module_args': {'creates': 'None', 'executable': 'None', '_uses_shell': True, 'strip_empty_ends': True, '_raw_params': 'sgdisk --zap-all /dev/dm-0 || sgdisk --zap-all /dev/dm-0', 'removes': 'None', 'argv': 'None', 'warn': True, 'chdir': 'None', 'stdin_add_newline': True, 'stdin': 'None'}}, 'stdout_lines': ['Creating new GPT entries.', 'Warning: The kernel is still using the old partition table.', 'The new table will be used at the next reboot or after you', 'run partprobe(8) or kpartx(8)', 'GPT data structures destroyed! 
You may now partition the disk using fdisk or', 'other utilities.'], 'stderr_lines': [], '_ansible_no_log': False, 'failed': False, 'item': {'key': 'dm-0', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '187490304', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_1', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvT5LPiYqED1gX5spN9NCDbsxzHc3qbpqJN'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '89.40 GB'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'dm-0', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '187490304', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_1', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvT5LPiYqED1gX5spN9NCDbsxzHc3qbpqJN'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '89.40 GB'}}}, {'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'sda', 'value': {'scheduler_mode': 'deadline', 'rotational': '1', 'vendor': 'ATA', 'sectors': '1953525168', 'links': {'masters': [], 'labels': [], 'ids': ['ata-ST1000NM0033-9ZM173_Z1W5L4BN', 'wwn-0x5000c500918e822f'], 'uuids': []}, 'partitions': {'sda1': {'sectorsize': 512, 'uuid': 'fb4c3e9c-b8ee-4845-8343-667c8b1acb0e', 'links': {'masters': [], 'labels': [], 'ids': ['ata-ST1000NM0033-9ZM173_Z1W5L4BN-part1', 'wwn-0x5000c500918e822f-part1'], 'uuids': ['fb4c3e9c-b8ee-4845-8343-667c8b1acb0e']}, 'sectors': '1953522688', 'start': '2048', 'holders': [], 'size': '931.51 GB'}}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': 'SATA controller: Intel Corporation C610/X99 series chipset 6-Port SATA Controller [AHCI mode] (rev 05)', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'holders': [], 'wwn': '0x5000c500918e822f', 'model': 'ST1000NM0033-9ZM', 'serial': 'Z1W5L4BN', 'size': '931.51 GB'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'sda', 'value': {'scheduler_mode': 'deadline', 'rotational': '1', 'vendor': 'ATA', 'sectors': '1953525168', 'links': {'masters': [], 'labels': [], 'ids': ['ata-ST1000NM0033-9ZM173_Z1W5L4BN', 'wwn-0x5000c500918e822f'], 'uuids': []}, 'partitions': {'sda1': {'sectorsize': 512, 'uuid': 'fb4c3e9c-b8ee-4845-8343-667c8b1acb0e', 'links': {'masters': [], 'labels': [], 'ids': ['ata-ST1000NM0033-9ZM173_Z1W5L4BN-part1', 'wwn-0x5000c500918e822f-part1'], 'uuids': ['fb4c3e9c-b8ee-4845-8343-667c8b1acb0e']}, 'sectors': '1953522688', 'start': '2048', 'holders': [], 'size': '931.51 GB'}}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': 'SATA controller: Intel Corporation C610/X99 series chipset 6-Port SATA Controller [AHCI mode] (rev 05)', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'holders': [], 'wwn': '0x5000c500918e822f', 'model': 'ST1000NM0033-9ZM', 'serial': 'Z1W5L4BN', 'size': '931.51 GB'}}}, {'unreachable': True, 'msg': 'Data could not be sent to remote host "smithi081.front.sepia.ceph.com". 
Make sure this host can be reached over ssh: ssh: connect to host smithi081.front.sepia.ceph.com port 22: No route to host\\r\\n', 'item': {'key': 'nvme0n1', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '781422768', 'links': {'masters': ['dm-0', 'dm-1', 'dm-2', 'dm-3', 'dm-4'], 'labels': [], 'ids': ['lvm-pv-uuid-NfV2it-eMQ7-luCD-tRov-tkz3-PoAk-xxhhLW', 'nvme-INTEL_SSDPEDMD400G4_PHFT620400N7400BGN'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': 'Non-Volatile memory controller: Intel Corporation PCIe Data Center SSD (rev 01)', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': ['vg_nvme-lv_1', 'vg_nvme-lv_2', 'vg_nvme-lv_3', 'vg_nvme-lv_4', 'vg_nvme-lv_5'], 'partitions': {}, 'model': 'INTEL SSDPEDMD400G4', 'serial': 'PHFT620400N7400BGN', 'size': '372.61 GB'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'nvme0n1', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '781422768', 'links': {'masters': ['dm-0', 'dm-1', 'dm-2', 'dm-3', 'dm-4'], 'labels': [], 'ids': ['lvm-pv-uuid-NfV2it-eMQ7-luCD-tRov-tkz3-PoAk-xxhhLW', 'nvme-INTEL_SSDPEDMD400G4_PHFT620400N7400BGN'], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': 'Non-Volatile memory controller: Intel Corporation PCIe Data Center SSD (rev 01)', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': ['vg_nvme-lv_1', 'vg_nvme-lv_2', 'vg_nvme-lv_3', 'vg_nvme-lv_4', 'vg_nvme-lv_5'], 'partitions': {}, 'model': 'INTEL SSDPEDMD400G4', 'serial': 'PHFT620400N7400BGN', 'size': '372.61 GB'}}}, {'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'loop3', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'loop3', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}}, {'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'loop2', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'loop2', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}}, {'changed': False, 'skipped': True, 
'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'loop1', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'loop1', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}}, {'unreachable': True, 'msg': 'Data could not be sent to remote host "smithi081.front.sepia.ceph.com". Make sure this host can be reached over ssh: ssh: connect to host smithi081.front.sepia.ceph.com port 22: No route to host\\r\\n', 'item': {'key': 'dm-4', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '31252480', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_5', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvTXE9cD7tFbBporHAv5zawsiKTjWRsKufD'], 'uuids': ['c7bbc032-dbbf-4889-a320-e4f3b8c314fe']}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '14.90 GB'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'dm-4', 'value': {'scheduler_mode': '', 'rotational': '0', 'vendor': 'None', 'sectors': '31252480', 'links': {'masters': [], 'labels': [], 'ids': ['dm-name-vg_nvme-lv_5', 'dm-uuid-LVM-Ao4PXq1wPST5f5KMc8PAtaXZbyRyxYvTXE9cD7tFbBporHAv5zawsiKTjWRsKufD'], 'uuids': ['c7bbc032-dbbf-4889-a320-e4f3b8c314fe']}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '512', 'holders': [], 'partitions': {}, 'model': 'None', 'serial': 'PHFT620400N7400BGN', 'size': '14.90 GB'}}}, {'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'loop7', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'loop7', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}}, {'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'loop6', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 
'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'loop6', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}}, {'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'loop5', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'loop5', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}}, {'changed': False, 'skipped': True, 'skip_reason': 'Conditional result was False', '_ansible_no_log': False, 'item': {'key': 'loop4', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}, 'ansible_loop_var': 'item', '_ansible_item_label': {'key': 'loop4', 'value': {'scheduler_mode': '', 'rotational': '1', 'vendor': 'None', 'sectors': '0', 'links': {'masters': [], 'labels': [], 'ids': [], 'uuids': []}, 'sas_device_handle': 'None', 'sas_address': 'None', 'virtual': 1, 'host': '', 'sectorsize': '512', 'removable': '0', 'support_discard': '0', 'model': 'None', 'partitions': {}, 'holders': [], 'size': '0.00 Bytes'}}}], 'changed': True, 'msg': 'All items completed'}}, 'Traceback (most recent call last)': 'File "/home/teuthworker/src/git.ceph.com_git_ceph-cm-ansible_master/callback_plugins/failure_log.py", line 44, in log_failure log.error(yaml.safe_dump(failure)) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/__init__.py", line 306, in safe_dump return dump_all([data], stream, Dumper=SafeDumper, **kwds) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/__init__.py", line 278, in dump_all dumper.represent(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 27, in represent node = self.represent_data(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 48, in 
represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 207, in represent_dict return self.represent_mapping(\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 118, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 48, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 207, in represent_dict return self.represent_mapping(\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 118, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 48, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 199, in represent_list return self.represent_sequence(\'tag:yaml.org,2002:seq\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 92, in represent_sequence node_item = self.represent_data(item) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 48, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 207, in represent_dict return self.represent_mapping(\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 118, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 48, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 207, in represent_dict return self.represent_mapping(\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 117, in represent_mapping node_key = self.represent_data(item_key) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[None](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_f359b10daba6e0103d42ccfc021bc797f3cd7edc/virtualenv/lib/python3.6/site-packages/yaml/representer.py", line 231, in represent_undefined raise RepresenterError("cannot represent an object", data)', 'yaml.representer.RepresenterError': "('cannot represent an object', 'key')"}

  • log_href: http://qa-proxy.ceph.com/teuthology/yuriw-2021-06-18_15:20:52-fs-nautilus-distro-basic-smithi/6179295/teuthology.log
  • archive_path: /home/teuthworker/archive/yuriw-2021-06-18_15:20:52-fs-nautilus-distro-basic-smithi/6179295
  • description: fs/thrash/{begin ceph-thrash/default clusters/1-mds-1-client-coloc conf/{client mds mon osd} mount/fuse msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout whitelist_health whitelist_wrongly_marked_down} supported-random-distros$/{ubuntu_16.04} tasks/cfuse_workunit_suites_pjd}
  • duration: 0:05:18
  • email: ceph-qa@ceph.io
  • failure_reason: (same as the Failure Reason section above)
  • flavor:
  • job_id: 6179295
  • kernel:
    • sha1: distro
    • kdb: True
  • last_in_suite: False
  • machine_type: smithi
  • name: yuriw-2021-06-18_15:20:52-fs-nautilus-distro-basic-smithi
  • nuke_on_error: True
  • os_type: ubuntu
  • os_version: 16.04
  • overrides:
    • ceph-deploy:
      • conf:
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
          • osd default pool size: 2
    • workunit:
      • sha1: 406a6d1d612dab3e9944852882b0070cf6c1e913
      • branch: nautilus
    • ceph:
      • log-whitelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
        • not responding, replacing
        • \(OSD_SLOW_PING_TIME
        • overall HEALTH_
        • \(FS_DEGRADED\)
        • \(MDS_FAILED\)
        • \(MDS_DEGRADED\)
        • \(FS_WITH_FAILED_MDS\)
        • \(MDS_DAMAGE\)
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
        • \(TOO_FEW_PGS\)
        • overall HEALTH_
        • \(OSD_DOWN\)
        • \(OSD_
        • but it is still running
        • is not responding
      • cephfs_ec_profile:
        • m=2
        • k=2
        • crush-failure-domain=osd
      • fs: xfs
      • conf:
        • global:
          • ms inject delay type: osd mds
          • ms inject delay probability: 0.005
          • ms inject socket failures: 2500
          • ms inject delay max: 1
        • mgr:
          • debug ms: 1
          • debug mgr: 20
        • client:
          • fuse set user groups: True
          • debug ms: 1
          • fuse default permissions: False
          • debug client: 20
          • client mount timeout: 600
        • mon:
          • debug paxos: 20
          • debug mon: 20
          • debug ms: 1
          • mon op complaint time: 120
        • mds:
          • mds bal split bits: 3
          • mds bal split size: 100
          • osd op complaint time: 180
          • debug mds: 20
          • mds bal merge size: 5
          • debug ms: 1
          • mds bal frag: True
          • mds verify scatter: True
          • mds bal fragment size max: 10000
          • mds op complaint time: 180
          • mds debug scatterstat: True
          • mds debug frag: True
        • osd:
          • mon osd full ratio: 0.9
          • debug ms: 1
          • bluestore fsck on mount: True
          • debug osd: 20
          • bluestore compression mode: aggressive
          • debug bluestore: 20
          • debug bluefs: 20
          • osd objectstore: bluestore
          • mon osd backfillfull_ratio: 0.85
          • osd op complaint time: 180
          • bluestore block size: 96636764160
          • debug rocksdb: 10
          • mon osd nearfull ratio: 0.8
          • osd failsafe full ratio: 0.95
      • sha1: 406a6d1d612dab3e9944852882b0070cf6c1e913
      • cephfs:
        • session_timeout: 300
      • log-ignorelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
    • install:
      • ceph:
        • sha1: 406a6d1d612dab3e9944852882b0070cf6c1e913
    • admin_socket:
      • branch: nautilus
    • thrashosds:
      • bdev_inject_crash_probability: 0.5
      • bdev_inject_crash: 2
  • owner: scheduled_yuriw@teuthology
  • pid:
  • roles:
    • ['mon.a', 'mgr.y', 'mds.a', 'osd.0', 'osd.1', 'osd.2', 'osd.3', 'client.0']
    • ['mon.b', 'mon.c', 'mgr.x', 'mds.b', 'osd.4', 'osd.5', 'osd.6', 'osd.7']
  • sentry_event: https://sentry.ceph.com/organizations/ceph/?query=530aa06c1bd34730a82e1961b89e2722
  • status: dead
  • success: False
  • branch: nautilus
  • seed:
  • sha1: 406a6d1d612dab3e9944852882b0070cf6c1e913
  • subset:
  • suite:
  • suite_branch: nautilus
  • suite_path:
  • suite_relpath:
  • suite_repo:
  • suite_sha1: 406a6d1d612dab3e9944852882b0070cf6c1e913
  • targets:
    • smithi204.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDoZ+9zHXX2YAAfQTn14F7xQ6QL6CgXobo3xxwmPRO1ESD20I6EXcDRgM3mOUTra+jcrS6wMCl3DYQ6J2VykvecCXkolS7u/ocPl/XIDXOrACzGec5qXNj8qR30r11WK5H3kwdBj/ALqSudpTu/ciVB1D9QAgmvce2+Y5S2Eve6Nv7C/pouEl6+7MW0aUPdumTMXD2PRlY6tXcHgB6DOKNKaKtaDfzvIVXJ2UL4GKxeKd2uGB6IFytERT9ES17YjnOFR+TrJRL9AtuYFqsOY0PAVNChojTtMtmmpmCPBmryr1pBVkSW0X4f8KwOUOhfZkEgdejKEbMb67bl5XcxwlJX
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • console_log:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • sha1: distro
      • kdb: True
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock:
    • install:
      • extra_system_packages:
        • deb:
          • bison
          • flex
          • libelf-dev
          • libssl-dev
        • rpm:
          • bison
          • flex
          • elfutils-libelf-devel
          • openssl-devel
      • extra_packages:
        • deb:
          • python3-cephfs
          • cephfs-shell
        • rpm:
          • python3-cephfs
    • ceph:
    • mds_thrash:
    • ceph-fuse:
    • workunit:
      • clients:
        • all:
          • suites/pjd.sh
      • timeout: 6h
  • teuthology_branch: master
  • verbose: True
  • pcp_grafana_url:
  • priority:
  • user:
  • queue:
  • posted: 2021-06-18 15:23:32
  • started: 2021-06-18 16:22:11
  • updated: 2021-06-18 16:37:18
  • status_class: danger
  • runtime: 0:15:07
  • wait_time: 0:09:49
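As a sanity check on the derived timing fields, runtime matches updated minus started, and wait_time matches runtime minus duration; that relationship is an assumption about how these fields are computed, but the numbers in this record line up. A quick sketch:

    from datetime import datetime, timedelta

    fmt = "%Y-%m-%d %H:%M:%S"
    started = datetime.strptime("2021-06-18 16:22:11", fmt)
    updated = datetime.strptime("2021-06-18 16:37:18", fmt)
    duration = timedelta(minutes=5, seconds=18)   # reported duration: 0:05:18

    runtime = updated - started                   # 0:15:07 -> matches "runtime"
    wait_time = runtime - duration                # 0:09:49 -> matches "wait_time"
    print(runtime, wait_time)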