Status | Job ID | Links | Posted | Started | Updated | Runtime | Duration | In Waiting | Machine | Teuthology Branch | OS Type | OS Version | Description | Nodes
dead 4134464 2019-07-21 01:25:43 2019-07-21 01:26:07 2019-07-21 02:00:06 0:33:59 mira master centos 7.6 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{centos_7.yaml} tasks/rados_striper.yaml} 2
pass 4134465 2019-07-21 01:25:44 2019-07-21 01:26:07 2019-07-21 01:54:05 0:27:58 0:20:12 0:07:46 mira master rhel 7.6 rados/multimon/{clusters/9.yaml msgr-failures/few.yaml msgr/async-v1only.yaml no_pools.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{rhel_7.yaml} tasks/mon_clock_no_skews.yaml} 3
fail 4134466 2019-07-21 01:25:45 2019-07-21 01:26:05 2019-07-21 01:42:04 0:15:59 0:07:40 0:08:19 mira master centos rados/verify/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-thrash/none.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-bitmap.yaml rados.yaml tasks/mon_recovery.yaml validater/valgrind.yaml} 2
Failure Reason:

a2 || wipefs --all /dev/mpatha2', 'item': u'mpatha2', u'stderr': u'wipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory\nwipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory', u'rc': 1, u'msg': u'non-zero return code'}, {'stderr_lines': [u'wipefs: error: /dev/mpatha1: probing initialization failed: No such file or directory', u'wipefs: error: /dev/mpatha1: probing initialization failed: No such file or directory'], u'changed': True, u'stdout': u'', u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': True, u'strip_empty_ends': True, u'_raw_params': u'wipefs --force --all /dev/mpatha1 || wipefs --all /dev/mpatha1', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin_add_newline': True, u'stdin': None}}, u'delta': u'0:00:00.011918', 'stdout_lines': [], '_ansible_item_label': u'mpatha1', 'ansible_loop_var': u'item', u'end': u'2019-07-21 01:40:23.661576', '_ansible_no_log': False, u'start': u'2019-07-21 01:40:23.649658', u'failed': True, u'cmd': u'wipefs --force --all /dev/mpatha1 || wipefs --all /dev/mpatha1', 'item': u'mpatha1', u'stderr': u'wipefs: error: /dev/mpatha1: probing initialization failed: No such file or directory\nwipefs: error: /dev/mpatha1: probing initialization failed: No such file or directory', u'rc': 1, u'msg': u'non-zero return code'}, {'stderr_lines': [u'wipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory', u'wipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory'], u'changed': True, u'stdout': u'', u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': True, u'strip_empty_ends': True, u'_raw_params': u'wipefs --force --all /dev/mpatha2 || wipefs --all /dev/mpatha2', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin_add_newline': True, u'stdin': None}}, u'delta': u'0:00:00.010455', 'stdout_lines': [], 
'_ansible_item_label': u'mpatha2', 'ansible_loop_var': u'item', u'end': u'2019-07-21 01:40:24.126321', '_ansible_no_log': False, u'start': u'2019-07-21 01:40:24.115866', u'failed': True, u'cmd': u'wipefs --force --all /dev/mpatha2 || wipefs --all /dev/mpatha2', 'item': u'mpatha2', u'stderr': u'wipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory\nwipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory', u'rc': 1, u'msg': u'non-zero return code'}, {'stderr_lines': [u'wipefs: error: /dev/mpatha1: probing initialization failed: No such file or directory', u'wipefs: error: /dev/mpatha1: probing initialization failed: No such file or directory'], u'changed': True, u'stdout': u'', u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': True, u'strip_empty_ends': True, u'_raw_params': u'wipefs --force --all /dev/mpatha1 || wipefs --all /dev/mpatha1', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin_add_newline': True, u'stdin': None}}, u'delta': u'0:00:00.011123', 'stdout_lines': [], '_ansible_item_label': u'mpatha1', 'ansible_loop_var': u'item', u'end': u'2019-07-21 01:40:24.578832', '_ansible_no_log': False, u'start': u'2019-07-21 01:40:24.567709', u'failed': True, u'cmd': u'wipefs --force --all /dev/mpatha1 || wipefs --all /dev/mpatha1', 'item': u'mpatha1', u'stderr': u'wipefs: error: /dev/mpatha1: probing initialization failed: No such file or directory\nwipefs: error: /dev/mpatha1: probing initialization failed: No such file or directory', u'rc': 1, u'msg': u'non-zero return code'}, {'stderr_lines': [u'wipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory', u'wipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory'], u'changed': True, u'stdout': u'', u'invocation': {u'module_args': {u'warn': True, u'executable': None, u'_uses_shell': True, u'strip_empty_ends': True, u'_raw_params': u'wipefs 
--force --all /dev/mpatha2 || wipefs --all /dev/mpatha2', u'removes': None, u'argv': None, u'creates': None, u'chdir': None, u'stdin_add_newline': True, u'stdin': None}}, u'delta': u'0:00:00.012158', 'stdout_lines': [], '_ansible_item_label': u'mpatha2', 'ansible_loop_var': u'item', u'end': u'2019-07-21 01:40:25.026227', '_ansible_no_log': False, u'start': u'2019-07-21 01:40:25.014069', u'failed': True, u'cmd': u'wipefs --force --all /dev/mpatha2 || wipefs --all /dev/mpatha2', 'item': u'mpatha2', u'stderr': u'wipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory\nwipefs: error: /dev/mpatha2: probing initialization failed: No such file or directory', u'rc': 1, u'msg': u'non-zero return code'}]}}Traceback (most recent call last): File "/home/teuthworker/src/git.ceph.com_git_ceph-cm-ansible_master/callback_plugins/failure_log.py", line 44, in log_failure log.error(yaml.safe_dump(failure)) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 309, in safe_dump return dump_all([data], stream, Dumper=SafeDumper, **kwds) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 281, in dump_all dumper.represent(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 29, in represent node = self.represent_data(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 219, in represent_list return self.represent_sequence(u'tag:yaml.org,2002:seq', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 102, in represent_sequence node_item = self.represent_data(item) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 68, in represent_data node = self.yaml_representers[None](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 251, in represent_undefined raise RepresenterError("cannot represent an object", data)RepresenterError: ('cannot represent an object', u'mpatha1')

fail 4134467 2019-07-21 01:25:46 2019-07-21 01:26:06 2019-07-21 01:44:05 0:17:59 0:11:23 0:06:36 mira master ubuntu 18.04 rados/mgr/{clusters/{2-node-mgr.yaml openstack.yaml} debug/mgr.yaml objectstore/filestore-xfs.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/ssh_orchestrator.yaml} 2
Failure Reason:

Command failed on mira026 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 1'

pass 4134468 2019-07-21 01:25:47 2019-07-21 01:26:07 2019-07-21 01:50:06 0:23:59 0:18:23 0:05:36 mira master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-stupid.yaml openstack.yaml settings/optimized.yaml supported-random-distro$/{ubuntu_latest.yaml} workloads/radosbench_omap_write.yaml} 1
dead 4134469 2019-07-21 01:25:48 2019-07-21 01:26:06 2019-07-21 02:00:05 0:33:59 mira master rhel 7.6 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/fastclose.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{more-active-recovery.yaml} supported-random-distro$/{rhel_7.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-rados-plugin=jerasure-k=3-m=1.yaml} 2
fail 4134470 2019-07-21 01:25:49 2019-07-21 01:26:07 2019-07-21 01:44:05 0:17:58 0:11:50 0:06:08 mira master ubuntu 18.04 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{default.yaml} backoff/peering_and_degraded.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/osd-delay.yaml msgr/async-v2only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/cache-agent-small.yaml} 2
Failure Reason:

Command failed on mira082 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 1'

fail 4134471 2019-07-21 01:25:50 2019-07-21 01:26:07 2019-07-21 01:44:05 0:17:58 0:10:34 0:07:24 mira master ubuntu 18.04 rados/singleton/{all/osd-backfill.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
Failure Reason:

Command failed on mira038 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 1'

dead 4134472 2019-07-21 01:25:51 2019-07-21 01:26:07 2019-07-21 02:00:06 0:33:59 mira master ubuntu 18.04 rados/standalone/{supported-random-distro$/{ubuntu_latest.yaml} workloads/scrub.yaml} 1
fail 4134473 2019-07-21 01:25:52 2019-07-21 01:26:07 2019-07-21 01:46:06 0:19:59 0:11:43 0:08:16 mira master ubuntu 18.04 rados/thrash-erasure-code-isa/{arch/x86_64.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/osd-delay.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-rados-plugin=isa-k=2-m=1.yaml} 2
Failure Reason:

Command failed on mira069 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 1'

dead 4134474 2019-07-21 01:25:53 2019-07-21 01:42:18 2019-07-21 02:00:17 0:17:59 mira master rhel 7.6 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-active-recovery.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/fastclose.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{rhel_7.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/cache-pool-snaps-readproxy.yaml} 2
dead 4134475 2019-07-21 01:25:53 2019-07-21 01:44:14 2019-07-21 02:00:13 0:15:59 mira master rhel 7.6 rados/singleton/{all/osd-recovery-incomplete.yaml msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{rhel_7.yaml}} 1
dead 4134476 2019-07-21 01:25:54 2019-07-21 01:44:13 2019-07-21 02:00:12 0:15:59 mira master centos 7.6 rados/perf/{ceph.yaml objectstore/filestore-xfs.yaml openstack.yaml settings/optimized.yaml supported-random-distro$/{centos_7.yaml} workloads/sample_fio.yaml} 1
dead 4134477 2019-07-21 01:25:55 2019-07-21 01:44:13 2019-07-21 02:00:13 0:16:00 mira master ubuntu 18.04 rados/singleton-nomsgr/{all/recovery-unfound-found.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
dead 4134478 2019-07-21 01:25:56 2019-07-21 01:46:17 2019-07-21 02:00:16 0:13:59 mira master centos 7.6 rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-install/luminous-v1only.yaml backoff/normal.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/crush-compat.yaml distro$/{centos_latest.yaml} msgr-failures/fastclose.yaml rados.yaml thrashers/careful.yaml thrashosds-health.yaml workloads/snaps-few-objects.yaml}
dead 4134479 2019-07-21 01:25:57 2019-07-21 01:50:21 2019-07-21 02:00:20 0:09:59 mira master centos 7.6 rados/monthrash/{ceph.yaml clusters/3-mons.yaml msgr-failures/mon-delay.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{centos_7.yaml} thrashers/force-sync-many.yaml workloads/rados_api_tests.yaml} 2
dead 4134480 2019-07-21 01:25:58 2019-07-21 01:54:16 2019-07-21 02:00:15 0:05:59 mira master ubuntu 18.04 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/rados_workunit_loadgen_big.yaml}