Description: fs/verify/{begin.yaml centos_latest.yaml clusters/fixed-2-ucephfs.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} tasks/cfuse_workunit_suites_fsstress.yaml validater/lockdep.yaml}

Log: http://qa-proxy.ceph.com/teuthology/pdonnell-2020-02-11_15:01:19-fs-wip-pdonnell-testing-20200211.032856-distro-basic-smithi/4754606/teuthology.log

Failure Reason:

The recorded failure reason is a truncated dump of Ansible's per-device loop results on smithi071 (disk facts for sda, nvme0n1 and dm-0 through dm-4), followed by the traceback of the callback plugin that tried to log them. The underlying errors, in order:

  • Several loop items (sda, nvme0n1, dm-4, dm-2) were marked unreachable with: Data could not be sent to remote host "smithi071.front.sepia.ceph.com". Make sure this host can be reached over ssh: ... Permission denied (publickey,password,keyboard-interactive).
  • Later items (dm-3, dm-0) failed with: ssh: connect to host smithi071.front.sepia.ceph.com port 22: No route to host
  • The zap step on dm-1 (cmd: sgdisk --zap-all /dev/dm-1 || sgdisk --zap-all /dev/dm-1, at 2020-02-11 16:49:17) exited with rc 2: "Problem opening /dev/dm-1 for reading! Error is 2. The specified file does not exist! Problem opening '' for writing! Program will now terminate. Warning! MBR not overwritten! Error is 2!" (repeated once for the retry).
  • While logging these failures, the ceph-cm-ansible callback itself crashed:

    Traceback (most recent call last):
      File "/home/teuthworker/src/git.ceph.com_git_ceph-cm-ansible_master/callback_plugins/failure_log.py", line 44, in log_failure
        log.error(yaml.safe_dump(failure))
      ...
      File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 251, in represent_undefined
        raise RepresenterError("cannot represent an object", data)
    RepresenterError: ('cannot represent an object', u'sda')

The full, unabridged output is in the teuthology log linked above.
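The RepresenterError at the end means yaml.safe_dump() hit a value it has no representer for. A plain unicode 'sda' would serialize fine, so the offending value is almost certainly a str/unicode subclass (likely Ansible's AnsibleUnsafeText wrapper around facts — an assumption; the dump does not show the type). A minimal sketch that reproduces the error and an illustrative workaround, not the ceph-cm-ansible code:

    import yaml


    class UnsafeText(str):
        """Stand-in for a str subclass with no SafeDumper representer
        (e.g. Ansible's AnsibleUnsafeText); hypothetical, for illustration."""


    failure = {'key': UnsafeText('sda'), 'msg': UnsafeText('non-zero return code')}

    try:
        yaml.safe_dump(failure)
    except yaml.representer.RepresenterError as err:
        # Same shape as the traceback above: ('cannot represent an object', 'sda')
        print(err)


    def to_plain(obj):
        """Recursively coerce str/list/dict subclasses back to plain built-ins
        so SafeDumper can represent them."""
        if isinstance(obj, dict):
            return {to_plain(k): to_plain(v) for k, v in obj.items()}
        if isinstance(obj, (list, tuple)):
            return [to_plain(v) for v in obj]
        if isinstance(obj, str):
            return str(obj)
        return obj


    print(yaml.safe_dump(to_plain(failure)))

Coercing the structure to plain built-ins, as to_plain() does here, is one way to let the failure dump succeed instead of truncating the report.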

  • log_href: http://qa-proxy.ceph.com/teuthology/pdonnell-2020-02-11_15:01:19-fs-wip-pdonnell-testing-20200211.032856-distro-basic-smithi/4754606/teuthology.log
  • archive_path: /home/teuthworker/archive/pdonnell-2020-02-11_15:01:19-fs-wip-pdonnell-testing-20200211.032856-distro-basic-smithi/4754606
  • description: fs/verify/{begin.yaml centos_latest.yaml clusters/fixed-2-ucephfs.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} tasks/cfuse_workunit_suites_fsstress.yaml validater/lockdep.yaml}
  • duration: 0:08:04
  • email: pdonnell@redhat.com
  • failure_reason: ansible.cephlab could not prepare smithi071.front.sepia.ceph.com: ssh became unreachable (Permission denied, then "No route to host"), sgdisk --zap-all /dev/dm-1 returned rc 2, and the failure_log.py callback then crashed with RepresenterError: ('cannot represent an object', u'sda') while dumping the results (full output under Failure Reason above and in the teuthology log).
  • flavor:
  • job_id: 4754606
  • kernel:
    • sha1: distro
    • kdb: True
  • last_in_suite: False
  • machine_type: smithi
  • name: pdonnell-2020-02-11_15:01:19-fs-wip-pdonnell-testing-20200211.032856-distro-basic-smithi
  • nuke_on_error: True
  • os_type: centos
  • os_version: 8.1
  • overrides:
    • ceph-deploy:
      • fs: xfs
      • filestore: True
      • conf:
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
          • osd default pool size: 2
        • osd:
          • osd sloppy crc: True
          • osd objectstore: filestore
    • selinux:
      • whitelist:
        • scontext=system_u:system_r:logrotate_t:s0
    • workunit:
      • sha1: 71281e48a442b0f38f812cad32658ed72c6d8ff5
      • branch: wip-pdonnell-testing-20200211.032856
    • ceph:
      • log-whitelist: (each entry is a regular expression; see the sketch after this overrides section)
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
        • overall HEALTH_
        • \(FS_DEGRADED\)
        • \(MDS_FAILED\)
        • \(MDS_DEGRADED\)
        • \(FS_WITH_FAILED_MDS\)
        • \(MDS_DAMAGE\)
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
        • \(FS_INLINE_DATA_DEPRECATED\)
        • overall HEALTH_
        • \(OSD_DOWN\)
        • \(OSD_
        • but it is still running
        • is not responding
      • fs: xfs
      • conf:
        • mds:
          • mds bal split bits: 3
          • mds bal split size: 100
          • osd op complaint time: 180
          • debug mds: 20
          • mds bal merge size: 5
          • debug ms: 1
          • mds bal frag: True
          • mds verify scatter: True
          • mds bal fragment size max: 10000
          • mds op complaint time: 180
          • mds debug scatterstat: True
          • mds debug frag: True
        • client:
          • debug ms: 1
          • debug client: 20
          • client mount timeout: 600
        • global:
          • lockdep: True
        • osd:
          • debug ms: 1
          • debug journal: 20
          • debug osd: 25
          • osd objectstore: filestore
          • osd sloppy crc: True
          • debug filestore: 20
          • osd op complaint time: 180
        • mon:
          • debug ms: 1
          • debug mon: 20
          • debug paxos: 20
          • mon op complaint time: 120
      • sha1: 71281e48a442b0f38f812cad32658ed72c6d8ff5
    • install:
      • ceph:
        • sha1: 71281e48a442b0f38f812cad32658ed72c6d8ff5
    • admin_socket:
      • branch: wip-pdonnell-testing-20200211.032856
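The log-whitelist entries under the ceph override above are regular expressions: a warning or error line in the cluster log only fails the job if it matches none of them (hence the escaped parentheses and the deliberately open-ended \(OSD_ prefix). A rough sketch of that matching, for illustration only and not the teuthology implementation:

    import re

    # Subset of the log-whitelist from the ceph override above.
    whitelist = [
        r'\(MDS_ALL_DOWN\)',
        r'\(MDS_UP_LESS_THAN_MAX\)',
        r'overall HEALTH_',
        r'\(OSD_',
        r'but it is still running',
    ]

    # Hypothetical cluster-log line, for illustration only.
    line = '2020-02-11 16:40:00 mon.a [WRN] Health check failed: 1 osds down (OSD_DOWN)'

    # A line matching any whitelist pattern is ignored instead of failing the run.
    ignored = any(re.search(pattern, line) for pattern in whitelist)
    print(ignored)  # True: r'\(OSD_' matches '(OSD_DOWN)'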
  • owner: scheduled_pdonnell@teuthology
  • pid:
  • roles:
    • ['mon.a', 'mgr.y', 'mds.a', 'osd.0', 'osd.1', 'osd.2', 'osd.3', 'client.0']
    • ['mon.b', 'mon.c', 'mgr.x', 'mds.b', 'osd.4', 'osd.5', 'osd.6', 'osd.7']
  • sentry_event:
  • status: fail
  • success: False
  • branch: wip-pdonnell-testing-20200211.032856
  • seed:
  • sha1: 71281e48a442b0f38f812cad32658ed72c6d8ff5
  • subset:
  • suite:
  • suite_branch: wip-pdonnell-testing-20200211.032856
  • suite_path:
  • suite_relpath:
  • suite_repo:
  • suite_sha1: 71281e48a442b0f38f812cad32658ed72c6d8ff5
  • targets:
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.lock_machines:
      • 2
      • smithi
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • console_log:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • sha1: distro
      • kdb: True
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock:
    • install:
      • extra_system_packages:
        • deb:
          • bison
          • flex
          • libelf-dev
          • libssl-dev
          • dump
          • indent
        • rpm:
          • bison
          • flex
          • elfutils-libelf-devel
          • openssl-devel
          • libacl-devel
          • libaio-devel
          • libattr-devel
          • libtool
          • libuuid-devel
          • xfsdump
          • xfsprogs
          • xfsprogs-devel
      • extra_packages:
        • deb:
          • python3-cephfs
          • cephfs-shell
        • rpm:
          • python3-cephfs
    • ceph:
    • ceph-fuse:
    • workunit:
      • clients:
        • all:
          • suites/fsstress.sh
      • timeout: 6h
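The workunit step above runs qa/workunits/suites/fsstress.sh from the ceph repository on each client, in a directory on the ceph-fuse mount, and is killed if it exceeds the 6h timeout. A minimal sketch of what that amounts to (the checkout path and mount point are assumptions, not teuthology code):

    import subprocess

    # Illustration only: roughly what 'workunit: clients: all: suites/fsstress.sh'
    # with 'timeout: 6h' amounts to. Paths below are hypothetical.
    ceph_checkout = '/home/ubuntu/ceph'        # assumed ceph source checkout
    mounts = {'client.0': '/mnt/client.0'}     # assumed ceph-fuse mount point

    for role, mountpoint in mounts.items():
        subprocess.run(
            ['bash', f'{ceph_checkout}/qa/workunits/suites/fsstress.sh'],
            cwd=mountpoint,          # run against the mounted CephFS
            timeout=6 * 60 * 60,     # 'timeout: 6h'
            check=True,              # a non-zero exit fails the job
        )

In this run the job never reached this step; it failed earlier, during ansible.cephlab, while preparing smithi071.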
  • teuthology_branch: master
  • verbose: False
  • pcp_grafana_url:
  • priority:
  • user:
  • queue:
  • posted: 2020-02-11 15:04:37
  • started: 2020-02-11 16:28:39
  • updated: 2020-02-11 16:50:38
  • status_class: danger
  • runtime: 0:21:59
  • wait_time: 0:13:55
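Note: runtime appears to be wait_time plus duration (0:13:55 + 0:08:04 = 0:21:59), which matches started 16:28:39 through updated 16:50:38; roughly two thirds of the 22 minutes were spent waiting before the 8-minute run failed in ansible.cephlab.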