Status | Job ID | Links | Posted | Started | Updated | Runtime | Duration | In Waiting | Machine | Teuthology Branch | OS Type | OS Version | Description | Nodes
pass 4180382 2019-08-04 06:37:41 2019-08-04 20:51:17 2019-08-04 21:15:16 0:23:59 0:13:06 0:10:53 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
pass 4180383 2019-08-04 06:37:42 2019-08-04 20:51:17 2019-08-04 21:23:16 0:31:59 0:17:47 0:14:12 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180384 2019-08-04 06:37:43 2019-08-04 20:52:41 2019-08-04 21:54:41 1:02:00 0:50:11 0:11:49 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
fail 4180385 2019-08-04 06:37:43 2019-08-04 20:54:45 2019-08-04 21:18:44 0:23:59 0:12:50 0:11:09 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi049 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180386 2019-08-04 06:37:44 2019-08-04 20:54:52 2019-08-04 21:14:51 0:19:59 0:11:32 0:08:27 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Command failed on smithi050 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180387 2019-08-04 06:37:45 2019-08-04 20:55:40 2019-08-04 21:19:40 0:24:00 0:12:05 0:11:55 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi026 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180388 2019-08-04 06:37:46 2019-08-04 20:56:52 2019-08-04 22:34:52 1:38:00 1:26:53 0:11:07 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
pass 4180389 2019-08-04 06:37:47 2019-08-04 20:57:08 2019-08-04 21:39:07 0:41:59 0:15:38 0:26:21 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
fail 4180390 2019-08-04 06:37:47 2019-08-04 20:57:19 2019-08-04 21:19:19 0:22:00 0:11:49 0:10:11 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi162 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180391 2019-08-04 06:37:48 2019-08-04 20:57:47 2019-08-04 21:17:46 0:19:59 0:11:43 0:08:16 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi103 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180392 2019-08-04 06:37:49 2019-08-04 20:59:57 2019-08-04 22:05:57 1:06:00 0:54:55 0:11:05 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
pass 4180393 2019-08-04 06:37:50 2019-08-04 21:01:20 2019-08-04 21:51:19 0:49:59 0:34:41 0:15:18 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
fail 4180394 2019-08-04 06:37:51 2019-08-04 21:01:34 2019-08-04 21:19:33 0:17:59 0:10:46 0:07:13 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi104 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180395 2019-08-04 06:37:52 2019-08-04 21:03:38 2019-08-04 21:29:38 0:26:00 0:12:14 0:13:46 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180396 2019-08-04 06:37:52 2019-08-04 21:08:09 2019-08-04 23:42:10 2:34:01 2:19:20 0:14:41 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
pass 4180397 2019-08-04 06:37:53 2019-08-04 21:08:40 2019-08-04 22:20:40 1:12:00 0:51:13 0:20:47 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
pass 4180398 2019-08-04 06:37:54 2019-08-04 21:08:48 2019-08-04 21:36:47 0:27:59 0:14:48 0:13:11 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180399 2019-08-04 06:37:55 2019-08-04 21:14:09 2019-08-04 21:32:09 0:18:00 0:09:28 0:08:32 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

Command failed on smithi136 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180400 2019-08-04 06:37:55 2019-08-04 21:14:52 2019-08-04 21:44:52 0:30:00 0:17:12 0:12:48 smithi master centos 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180401 2019-08-04 06:37:56 2019-08-04 21:15:30 2019-08-04 21:51:29 0:35:59 0:22:37 0:13:22 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-04T21:41:24.531254+0000 mon.b (mon.0) 962 : cluster [WRN] Health check failed: 4 daemons have recently crashed (RECENT_CRASH)" in cluster log

fail 4180402 2019-08-04 06:37:57 2019-08-04 21:18:01 2019-08-04 21:40:00 0:21:59 0:10:41 0:11:18 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Command failed on smithi157 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180403 2019-08-04 06:37:58 2019-08-04 21:18:34 2019-08-04 21:40:34 0:22:00 0:09:48 0:12:12 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi065 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180404 2019-08-04 06:37:59 2019-08-04 21:18:46 2019-08-04 21:54:45 0:35:59 0:09:41 0:26:18 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
Failure Reason:

Command failed on smithi205 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180405 2019-08-04 06:37:59 2019-08-04 21:19:33 2019-08-04 21:45:32 0:25:59 0:15:53 0:10:06 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
fail 4180406 2019-08-04 06:38:00 2019-08-04 21:19:35 2019-08-04 21:47:34 0:27:59 0:10:31 0:17:28 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi077 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180407 2019-08-04 06:38:01 2019-08-04 21:19:41 2019-08-04 21:57:40 0:37:59 0:16:52 0:21:07 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180408 2019-08-04 06:38:02 2019-08-04 21:23:30 2019-08-04 22:31:30 1:08:00 0:50:02 0:17:58 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
fail 4180409 2019-08-04 06:38:03 2019-08-04 21:27:04 2019-08-04 21:53:03 0:25:59 0:09:36 0:16:23 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on smithi146 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180410 2019-08-04 06:38:03 2019-08-04 21:29:52 2019-08-04 22:35:52 1:06:00 0:45:45 0:20:15 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
pass 4180411 2019-08-04 06:38:04 2019-08-04 21:32:22 2019-08-04 22:26:22 0:54:00 0:19:32 0:34:28 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180412 2019-08-04 06:38:05 2019-08-04 21:37:01 2019-08-04 22:05:00 0:27:59 0:15:52 0:12:07 smithi master centos 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180413 2019-08-04 06:38:06 2019-08-04 21:39:21 2019-08-04 21:59:20 0:19:59 0:09:29 0:10:30 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
Failure Reason:

Command failed on smithi135 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180414 2019-08-04 06:38:07 2019-08-04 21:40:01 2019-08-04 22:08:01 0:28:00 0:11:28 0:16:32 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180415 2019-08-04 06:38:07 2019-08-04 21:40:48 2019-08-04 22:08:47 0:27:59 0:13:42 0:14:17 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
fail 4180416 2019-08-04 06:38:08 2019-08-04 21:41:47 2019-08-04 22:11:46 0:29:59 0:09:47 0:20:12 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi064 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180417 2019-08-04 06:38:09 2019-08-04 21:45:05 2019-08-04 22:47:05 1:02:00 0:10:12 0:51:48 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi192 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180418 2019-08-04 06:38:10 2019-08-04 21:45:33 2019-08-05 00:01:35 2:16:02 2:00:34 0:15:28 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
pass 4180419 2019-08-04 06:38:11 2019-08-04 21:47:00 2019-08-04 22:13:00 0:26:00 0:12:47 0:13:13 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180420 2019-08-04 06:38:11 2019-08-04 21:47:35 2019-08-05 00:45:37 2:58:02 2:24:03 0:33:59 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
pass 4180421 2019-08-04 06:38:12 2019-08-04 21:47:55 2019-08-04 23:09:56 1:22:01 0:36:36 0:45:25 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
fail 4180422 2019-08-04 06:38:13 2019-08-04 21:51:34 2019-08-04 22:09:33 0:17:59 0:09:49 0:08:10 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
Failure Reason:

Command failed on smithi018 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180423 2019-08-04 06:38:14 2019-08-04 21:51:34 2019-08-04 22:23:33 0:31:59 0:21:38 0:10:21 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
pass 4180424 2019-08-04 06:38:14 2019-08-04 21:53:17 2019-08-04 22:35:17 0:42:00 0:13:53 0:28:07 smithi master ubuntu 18.04 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180425 2019-08-04 06:38:15 2019-08-04 21:54:54 2019-08-04 23:04:54 1:10:00 0:48:52 0:21:08 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
fail 4180426 2019-08-04 06:38:16 2019-08-04 21:54:55 2019-08-04 22:40:54 0:45:59 0:09:57 0:36:02 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi146 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180427 2019-08-04 06:38:17 2019-08-04 21:57:54 2019-08-04 22:23:54 0:26:00 0:15:37 0:10:23 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180428 2019-08-04 06:38:18 2019-08-04 21:59:34 2019-08-04 22:21:34 0:22:00 0:09:37 0:12:23 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi074 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180429 2019-08-04 06:38:18 2019-08-04 22:05:14 2019-08-04 22:41:14 0:36:00 0:26:48 0:09:12 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
fail 4180430 2019-08-04 06:38:19 2019-08-04 22:05:58 2019-08-04 22:51:58 0:46:00 0:10:07 0:35:53 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi168 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180431 2019-08-04 06:38:20 2019-08-04 22:07:34 2019-08-04 22:31:34 0:24:00 0:13:19 0:10:41 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
pass 4180432 2019-08-04 06:38:21 2019-08-04 22:08:02 2019-08-04 22:36:01 0:27:59 0:16:58 0:11:01 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180433 2019-08-04 06:38:21 2019-08-04 22:09:01 2019-08-04 23:21:01 1:12:00 0:49:17 0:22:43 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
fail 4180434 2019-08-04 06:38:22 2019-08-04 22:09:34 2019-08-04 23:11:34 1:02:00 0:11:30 0:50:30 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi132 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180435 2019-08-04 06:38:23 2019-08-04 22:11:59 2019-08-04 22:55:59 0:44:00 0:09:35 0:34:25 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Command failed on smithi124 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180436 2019-08-04 06:38:24 2019-08-04 22:13:13 2019-08-04 22:47:13 0:34:00 0:15:39 0:18:21 smithi master centos 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180437 2019-08-04 06:38:25 2019-08-04 22:20:55 2019-08-04 23:36:55 1:16:00 0:55:23 0:20:37 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
pass 4180438 2019-08-04 06:38:26 2019-08-04 22:21:35 2019-08-04 22:51:34 0:29:59 0:16:16 0:13:43 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
fail 4180439 2019-08-04 06:38:26 2019-08-04 22:23:47 2019-08-04 22:51:46 0:27:59 0:10:02 0:17:57 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi182 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180440 2019-08-04 06:38:27 2019-08-04 22:23:55 2019-08-04 22:45:54 0:21:59 0:10:14 0:11:45 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi067 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180441 2019-08-04 06:38:28 2019-08-04 22:26:36 2019-08-04 23:32:36 1:06:00 0:49:39 0:16:21 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
pass 4180442 2019-08-04 06:38:29 2019-08-04 22:28:43 2019-08-04 23:50:43 1:22:00 0:36:45 0:45:15 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
fail 4180443 2019-08-04 06:38:30 2019-08-04 22:29:59 2019-08-04 22:51:58 0:21:59 0:10:12 0:11:47 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi112 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180444 2019-08-04 06:38:30 2019-08-04 22:31:44 2019-08-04 23:37:43 1:05:59 0:12:03 0:53:56 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180445 2019-08-04 06:38:31 2019-08-04 22:31:44 2019-08-05 00:59:45 2:28:01 2:10:18 0:17:43 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
pass 4180446 2019-08-04 06:38:32 2019-08-04 22:35:06 2019-08-04 23:15:06 0:40:00 0:24:54 0:15:06 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
pass 4180447 2019-08-04 06:38:33 2019-08-04 22:35:18 2019-08-04 23:13:17 0:37:59 0:12:17 0:25:42 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180448 2019-08-04 06:38:34 2019-08-04 22:35:53 2019-08-04 23:07:52 0:31:59 0:11:17 0:20:42 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

Command failed on smithi041 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180449 2019-08-04 06:38:34 2019-08-04 22:36:03 2019-08-04 23:10:02 0:33:59 0:11:57 0:22:02 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi074 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180450 2019-08-04 06:38:35 2019-08-04 22:41:08 2019-08-04 23:21:08 0:40:00 0:26:50 0:13:10 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-04T23:07:52.724340+0000 mon.b (mon.0) 865 : cluster [WRN] Health check failed: 3 daemons have recently crashed (RECENT_CRASH)" in cluster log

pass 4180451 2019-08-04 06:38:36 2019-08-04 22:41:15 2019-08-05 00:19:15 1:38:00 0:50:04 0:47:56 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
fail 4180452 2019-08-04 06:38:37 2019-08-04 22:41:52 2019-08-04 23:09:51 0:27:59 0:11:02 0:16:57 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi157 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180453 2019-08-04 06:38:38 2019-08-04 22:44:12 2019-08-04 23:04:11 0:19:59 0:10:25 0:09:34 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
Failure Reason:

Command failed on smithi104 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180454 2019-08-04 06:38:38 2019-08-04 22:46:08 2019-08-04 23:36:08 0:50:00 0:15:06 0:34:54 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
pass 4180455 2019-08-04 06:38:39 2019-08-04 22:47:07 2019-08-04 23:29:07 0:42:00 0:28:14 0:13:46 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
pass 4180456 2019-08-04 06:38:40 2019-08-04 22:47:26 2019-08-04 23:49:26 1:02:00 0:17:30 0:44:30 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180457 2019-08-04 06:38:41 2019-08-04 22:48:25 2019-08-04 23:58:25 1:10:00 0:51:33 0:18:27 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
fail 4180458 2019-08-04 06:38:42 2019-08-04 22:51:48 2019-08-04 23:23:47 0:31:59 0:10:03 0:21:56 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on smithi041 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180459 2019-08-04 06:38:42 2019-08-04 22:51:48 2019-08-04 23:53:48 1:02:00 0:44:27 0:17:33 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
pass 4180460 2019-08-04 06:38:43 2019-08-04 22:51:59 2019-08-04 23:35:59 0:44:00 0:17:34 0:26:26 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180461 2019-08-04 06:38:44 2019-08-04 22:52:00 2019-08-04 23:33:59 0:41:59 0:13:53 0:28:06 smithi master ubuntu 18.04 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180462 2019-08-04 06:38:45 2019-08-04 22:52:10 2019-08-04 23:26:10 0:34:00 0:10:02 0:23:58 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
Failure Reason:

Command failed on smithi074 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180463 2019-08-04 06:38:46 2019-08-04 22:56:13 2019-08-04 23:26:12 0:29:59 0:11:28 0:18:31 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180464 2019-08-04 06:38:47 2019-08-04 23:00:02 2019-08-04 23:48:01 0:47:59 0:13:42 0:34:17 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
fail 4180465 2019-08-04 06:38:48 2019-08-04 23:04:24 2019-08-04 23:46:24 0:42:00 0:09:55 0:32:05 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi192 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180466 2019-08-04 06:38:48 2019-08-04 23:04:56 2019-08-04 23:44:55 0:39:59 0:09:46 0:30:13 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi141 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180467 2019-08-04 06:38:49 2019-08-04 23:08:06 2019-08-05 01:44:08 2:36:02 2:07:58 0:28:04 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
pass 4180468 2019-08-04 06:38:50 2019-08-04 23:08:33 2019-08-05 00:04:32 0:55:59 0:12:21 0:43:38 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180469 2019-08-04 06:38:51 2019-08-04 23:10:04 2019-08-05 01:40:06 2:30:02 2:19:35 0:10:27 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
pass 4180470 2019-08-04 06:38:52 2019-08-04 23:10:05 2019-08-05 00:40:05 1:30:00 0:39:05 0:50:55 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
fail 4180471 2019-08-04 06:38:53 2019-08-04 23:10:05 2019-08-05 00:08:04 0:57:59 0:10:36 0:47:23 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
Failure Reason:

Command failed on smithi200 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180472 2019-08-04 06:38:53 2019-08-04 23:11:48 2019-08-05 00:07:47 0:55:59 0:20:34 0:35:25 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi130 with status 32: 'sync && sudo umount -f /var/lib/ceph/osd/ceph-6'

pass 4180473 2019-08-04 06:38:54 2019-08-04 23:13:31 2019-08-04 23:51:31 0:38:00 0:13:47 0:24:13 smithi master ubuntu 18.04 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180474 2019-08-04 06:38:55 2019-08-04 23:14:22 2019-08-05 00:40:22 1:26:00 0:56:24 0:29:36 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
fail 4180475 2019-08-04 06:38:56 2019-08-04 23:15:20 2019-08-04 23:49:19 0:33:59 0:09:53 0:24:06 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi077 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180476 2019-08-04 06:38:57 2019-08-04 23:21:15 2019-08-04 23:51:15 0:30:00 0:17:23 0:12:37 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180477 2019-08-04 06:38:58 2019-08-04 23:21:15 2019-08-04 23:53:15 0:32:00 0:09:54 0:22:06 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi008 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180478 2019-08-04 06:38:58 2019-08-04 23:24:01 2019-08-05 00:12:01 0:48:00 0:19:03 0:28:57 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
fail 4180479 2019-08-04 06:38:59 2019-08-04 23:26:24 2019-08-05 00:04:24 0:38:00 0:10:23 0:27:37 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi145 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180480 2019-08-04 06:39:00 2019-08-04 23:26:24 2019-08-05 00:12:24 0:46:00 0:12:28 0:33:32 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
pass 4180481 2019-08-04 06:39:01 2019-08-04 23:29:20 2019-08-05 00:31:20 1:02:00 0:15:36 0:46:24 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180482 2019-08-04 06:39:02 2019-08-04 23:30:41 2019-08-05 00:40:41 1:10:00 0:53:47 0:16:13 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
fail 4180483 2019-08-04 06:39:03 2019-08-04 23:32:50 2019-08-05 00:24:49 0:51:59 0:11:20 0:40:39 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi172 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180484 2019-08-04 06:39:03 2019-08-04 23:34:14 2019-08-04 23:52:13 0:17:59 0:09:42 0:08:17 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Command failed on smithi163 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180485 2019-08-04 06:39:04 2019-08-04 23:36:13 2019-08-05 00:06:17 0:30:04 0:13:38 0:16:26 smithi master ubuntu 18.04 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180486 2019-08-04 06:39:05 2019-08-04 23:36:13 2019-08-05 01:14:18 1:38:05 1:20:21 0:17:44 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
fail 4180487 2019-08-04 06:39:06 2019-08-04 23:36:56 2019-08-05 00:10:56 0:34:00 0:10:53 0:23:07 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
Failure Reason:

Command failed on smithi049 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180488 2019-08-04 06:39:06 2019-08-04 23:37:58 2019-08-05 00:07:58 0:30:00 0:10:41 0:19:19 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi038 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180489 2019-08-04 06:39:07 2019-08-04 23:42:24 2019-08-05 00:04:23 0:21:59 0:10:19 0:11:40 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi136 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180490 2019-08-04 06:39:08 2019-08-04 23:44:17 2019-08-05 01:12:17 1:28:00 0:51:02 0:36:58 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
fail 4180491 2019-08-04 06:39:09 2019-08-04 23:44:57 2019-08-05 00:24:56 0:39:59 0:11:12 0:28:47 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi160 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180492 2019-08-04 06:39:10 2019-08-04 23:46:38 2019-08-05 00:18:38 0:32:00 0:09:47 0:22:13 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi121 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180493 2019-08-04 06:39:10 2019-08-04 23:48:15 2019-08-05 00:12:15 0:24:00 0:12:30 0:11:30 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
dead 4180494 2019-08-04 06:39:11 2019-08-04 23:49:33 2019-08-05 04:35:37 4:46:04 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
pass 4180495 2019-08-04 06:39:12 2019-08-04 23:49:33 2019-08-05 00:49:33 1:00:00 0:25:23 0:34:37 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
pass 4180496 2019-08-04 06:39:13 2019-08-04 23:50:57 2019-08-05 00:50:57 1:00:00 0:14:16 0:45:44 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180497 2019-08-04 06:39:14 2019-08-04 23:51:16 2019-08-05 00:09:15 0:17:59 0:10:09 0:07:50 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

Command failed on smithi104 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180498 2019-08-04 06:39:15 2019-08-04 23:51:32 2019-08-05 00:29:32 0:38:00 0:10:52 0:27:08 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi067 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180499 2019-08-04 06:39:15 2019-08-04 23:52:28 2019-08-05 01:02:28 1:10:00 0:23:14 0:46:46 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-05T00:52:40.179840+0000 mon.a (mon.0) 839 : cluster [WRN] Health check failed: 5 daemons have recently crashed (RECENT_CRASH)" in cluster log

pass 4180500 2019-08-04 06:39:16 2019-08-04 23:53:16 2019-08-05 00:45:16 0:52:00 0:38:17 0:13:43 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
fail 4180501 2019-08-04 06:39:17 2019-08-04 23:54:02 2019-08-05 00:24:02 0:30:00 0:10:23 0:19:37 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi008 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180502 2019-08-04 06:39:18 2019-08-04 23:58:39 2019-08-05 00:54:38 0:55:59 0:10:52 0:45:07 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
Failure Reason:

Command failed on smithi204 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180503 2019-08-04 06:39:19 2019-08-05 00:01:49 2019-08-05 01:31:49 1:30:00 0:16:21 1:13:39 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
fail 4180504 2019-08-04 06:39:19 2019-08-05 00:04:37 2019-08-05 00:30:36 0:25:59 0:10:43 0:15:16 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi104 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180505 2019-08-04 06:39:20 2019-08-05 00:04:37 2019-08-05 00:38:37 0:34:00 0:23:05 0:10:55 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180506 2019-08-04 06:39:21 2019-08-05 00:04:37 2019-08-05 00:34:37 0:30:00 0:16:12 0:13:48 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/lockdep.yaml} 3
fail 4180507 2019-08-04 06:39:22 2019-08-05 00:06:31 2019-08-05 01:06:31 1:00:00 0:12:35 0:47:25 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on smithi196 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180508 2019-08-04 06:39:23 2019-08-05 00:08:01 2019-08-05 00:48:01 0:40:00 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi164 with status 100: 'sudo DEBIAN_FRONTEND=noninteractive apt-get -y install linux-image-generic'

pass 4180509 2019-08-04 06:39:23 2019-08-05 00:08:02 2019-08-05 02:22:03 2:14:01 0:16:15 1:57:46 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180510 2019-08-04 06:39:24 2019-08-05 00:08:06 2019-08-05 00:40:05 0:31:59 0:10:31 0:21:28 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi026 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180511 2019-08-04 06:39:25 2019-08-05 00:09:29 2019-08-05 00:29:28 0:19:59 0:10:01 0:09:58 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
Failure Reason:

Command failed on smithi103 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180512 2019-08-04 06:39:26 2019-08-05 00:09:54 2019-08-05 00:31:54 0:22:00 0:11:38 0:10:22 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180513 2019-08-04 06:39:27 2019-08-05 00:11:09 2019-08-05 02:03:10 1:52:01 0:10:59 1:41:02 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

Command failed on smithi172 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180514 2019-08-04 06:39:27 2019-08-05 00:11:50 2019-08-05 00:49:50 0:38:00 0:10:11 0:27:49 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi170 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180515 2019-08-04 06:39:28 2019-08-05 00:12:02 2019-08-05 01:06:02 0:54:00 0:11:48 0:42:12 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi156 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180516 2019-08-04 06:39:29 2019-08-05 00:12:29 2019-08-05 02:58:30 2:46:01 2:01:54 0:44:07 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
pass 4180517 2019-08-04 06:39:30 2019-08-05 00:12:29 2019-08-05 00:58:28 0:45:59 0:11:16 0:34:43 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180518 2019-08-04 06:39:31 2019-08-05 00:14:25 2019-08-05 00:50:24 0:35:59 0:24:38 0:11:21 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/valgrind.yaml} 2
Failure Reason:

Command failed on smithi089 with status 1: "sudo sh -c 'cat > /home/ubuntu/cephtest/valgrind.supp'"

fail 4180519 2019-08-04 06:39:32 2019-08-05 00:15:01 2019-08-05 00:53:01 0:38:00 0:11:22 0:26:38 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
Failure Reason:

Command failed on smithi017 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180520 2019-08-04 06:39:32 2019-08-05 00:17:54 2019-08-05 01:03:53 0:45:59 0:12:49 0:33:10 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
Failure Reason:

Command failed on smithi039 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180521 2019-08-04 06:39:33 2019-08-05 00:18:39 2019-08-05 00:56:38 0:37:59 0:22:29 0:15:30 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
pass 4180522 2019-08-04 06:39:34 2019-08-05 00:19:29 2019-08-05 01:03:29 0:44:00 0:23:19 0:20:41 smithi master centos 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180523 2019-08-04 06:39:35 2019-08-05 00:21:46 2019-08-05 01:33:46 1:12:00 1:00:41 0:11:19 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
fail 4180524 2019-08-04 06:39:35 2019-08-05 00:21:58 2019-08-05 00:39:57 0:17:59 0:09:45 0:08:14 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi072 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180525 2019-08-04 06:39:36 2019-08-05 00:22:27 2019-08-05 01:56:28 1:34:01 0:17:35 1:16:26 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180526 2019-08-04 06:39:37 2019-08-05 00:24:16 2019-08-05 01:06:15 0:41:59 0:11:30 0:30:29 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi170 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180527 2019-08-04 06:39:38 2019-08-05 00:24:51 2019-08-05 01:32:51 1:08:00 0:11:24 0:56:36 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
Failure Reason:

Command failed on smithi049 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180528 2019-08-04 06:39:39 2019-08-05 00:24:57 2019-08-05 00:48:57 0:24:00 0:10:01 0:13:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi104 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180529 2019-08-04 06:39:39 2019-08-05 00:29:42 2019-08-05 03:13:43 2:44:01 0:12:43 2:31:18 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
pass 4180530 2019-08-04 06:39:40 2019-08-05 00:29:42 2019-08-05 01:45:42 1:16:00 0:24:45 0:51:15 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180531 2019-08-04 06:39:41 2019-08-05 00:30:38 2019-08-05 01:04:37 0:33:59 0:15:35 0:18:24 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/lockdep.yaml} 3
fail 4180532 2019-08-04 06:39:42 2019-08-05 00:31:35 2019-08-05 01:03:34 0:31:59 0:13:40 0:18:19 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi087 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180533 2019-08-04 06:39:43 2019-08-05 00:31:55 2019-08-05 00:57:54 0:25:59 0:09:50 0:16:09 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

worker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 219, in represent_list return self.represent_sequence(u'tag:yaml.org,2002:seq', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 102, in represent_sequence node_item = self.represent_data(item) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 68, in represent_data node = self.yaml_representers[None](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 251, in represent_undefined raise RepresenterError("cannot represent an object", data)RepresenterError: ('cannot represent an object', u'rhel-7-server-rpms')

fail 4180534 2019-08-04 06:39:43 2019-08-05 00:34:51 2019-08-05 01:02:50 0:27:59 0:13:30 0:14:29 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi110 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180535 2019-08-04 06:39:44 2019-08-05 00:38:28 2019-08-05 01:50:28 1:12:00 0:47:55 0:24:05 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
pass 4180536 2019-08-04 06:39:45 2019-08-05 00:38:28 2019-08-05 01:12:27 0:33:59 0:15:17 0:18:42 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
fail 4180537 2019-08-04 06:39:46 2019-08-05 00:38:28 2019-08-05 01:26:27 0:47:59 0:13:42 0:34:17 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi191 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180538 2019-08-04 06:39:47 2019-08-05 00:38:38 2019-08-05 01:06:37 0:27:59 0:12:47 0:15:12 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi062 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

dead 4180539 2019-08-04 06:39:48 2019-08-05 00:40:12 2019-08-05 00:52:11 0:11:59 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
pass 4180540 2019-08-04 06:39:48 2019-08-05 00:40:12 2019-08-05 03:20:13 2:40:01 1:48:01 0:52:00 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
fail 4180541 2019-08-04 06:39:49 2019-08-05 00:40:12 2019-08-05 01:26:11 0:45:59 0:13:38 0:32:21 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi090 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180542 2019-08-04 06:39:50 2019-08-05 00:40:22 2019-08-05 01:06:21 0:25:59 0:09:17 0:16:42 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 2
Failure Reason:

L5dhoC2vYPyQoQkxeMX3BYlhLwkSjCAnjaceJJZyOT7KsqbpanvJxvudMzeyCosntnMuxyPzjH9CkVKqycddZnscdHMkDIZ3TjNhnL8GKK/QCPB60tkCO6HWN/g4HxidRjrp2VjQgIa5MWN9LBR6mGspZI6+rR1ygLhbl1scr1cCCumh65r+zmCC/ZY33YCfRXdmQZo+ShsBa+KbcvakEx0Bqgx7DsMoiR ncutler@pantograf'}, 'ansible_loop_var': u'item', '_ansible_item_label': {u'ovpn': u'smithfarm@wilbur vEk7281ao7DkpXE2Cn3rpw 20058c04c4991caf38b9517faedb75c8562e8d8669808b6be9f70da6a5b64007', u'name': u'smithfarm', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC6jqQtqO7pnW39mpSQQC4L2FuxPOx/5j34lp2PrwuTWJoDwtUVCZhpNwSs5UAj3wdXGXCzWYWxu9d9Gh0UEBkiC+2ZHtLvWrwS1TDVRy5g5jYVm9vX3kL5dhoC2vYPyQoQkxeMX3BYlhLwkSjCAnjaceJJZyOT7KsqbpanvJxvudMzeyCosntnMuxyPzjH9CkVKqycddZnscdHMkDIZ3TjNhnL8GKK/QCPB60tkCO6HWN/g4HxidRjrp2VjQgIa5MWN9LBR6mGspZI6+rR1ygLhbl1scr1cCCumh65r+zmCC/ZY33YCfRXdmQZo+ShsBa+KbcvakEx0Bqgx7DsMoiR ncutler@pantograf'}, 'msg': u'SSH Error: data could not be sent to remote host "smithi142.front.sepia.ceph.com". Make sure this host can be reached over ssh', 'unreachable': True}, {'item': {u'ovpn': u'mbenjamin@ferret N7Ln6KHUAYT/iiP4Swkb0A 244b90574d7e294f6f80949ef5e640bab103c2c34e1902a69cdcf592607d950c\nmbenjamin@duel PRRFsQiomHSJX8sVgRHblA f3009adb7502a6c5554e1157d36c63d84f40fe5074a01d7d0280e5c48da374dc\nmbenjamin@rael 8ixfNNx3j2m2+nGhXkHsnQ 62fda7ec724f0fdb3f05add4c3739e07a5c7eeb2380d5f1f7eeeb13db35e16a8\nmbenjamin@shatner hJ1oEl5wSZaKPJC1Ugss8g 9c9161b53da4d842308f80e5b490a0c1539076f129bd0e34ee2253272dd55c21\n', u'name': u'mbenjamin'}, 'ansible_loop_var': u'item', '_ansible_item_label': {u'ovpn': u'mbenjamin@ferret N7Ln6KHUAYT/iiP4Swkb0A 244b90574d7e294f6f80949ef5e640bab103c2c34e1902a69cdcf592607d950c\nmbenjamin@duel PRRFsQiomHSJX8sVgRHblA f3009adb7502a6c5554e1157d36c63d84f40fe5074a01d7d0280e5c48da374dc\nmbenjamin@rael 8ixfNNx3j2m2+nGhXkHsnQ 62fda7ec724f0fdb3f05add4c3739e07a5c7eeb2380d5f1f7eeeb13db35e16a8\nmbenjamin@shatner hJ1oEl5wSZaKPJC1Ugss8g 9c9161b53da4d842308f80e5b490a0c1539076f129bd0e34ee2253272dd55c21\n', u'name': u'mbenjamin'}, 
'msg': u'SSH Error: data could not be sent to remote host "smithi142.front.sepia.ceph.com". Make sure this host can be reached over ssh', 'unreachable': True}, {'item': {u'ovpn': u'aemerson@seasalt enqtMOCQ0a24hizRK46SZg 42f36c9c14c1eb7c468e41cf4c5649e30037f0aaf5eefbd6c07be9637224ca01', u'name': u'aemerson', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCxUDk9jEQOSrRwe0WMGBPw28fzgOhvqMgcG90SvuUjR20Rx3KUfy9JMxPJPesjBQz0xgH5adOVg2JQeLrt3uSJdIFNpqox8BBrS1u/bWT2dorQjLnFEhrtA8Ae/h2kwLQ8w3yYNWB/PxDSuMRnyH4G8EWtVydzQcV/QnNZ9Y6eBcHaI5C2yVnFl7Fi4zBYc2eeL8o8gWqmM6DobBCvVrxD0pCNAsLhOW6IA140BNGT9F/hQZYje9RQRWbFKh/iKiUhqFYCzLcxFfYXkw2HZMJA2p/bLmuc8ZbgYaIiU6b90kpfDB37Xw0S6toIj9E8h+E3nkNnwraCQcbralhz/bdz aemerson@seasalt'}, 'ansible_loop_var': u'item', '_ansible_item_label': {u'ovpn': u'aemerson@seasalt enqtMOCQ0a24hizRK46SZg 42f36c9c14c1eb7c468e41cf4c5649e30037f0aaf5eefbd6c07be9637224ca01', u'name': u'aemerson', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCxUDk9jEQOSrRwe0WMGBPw28fzgOhvqMgcG90SvuUjR20Rx3KUfy9JMxPJPesjBQz0xgH5adOVg2JQeLrt3uSJdIFNpqox8BBrS1u/bWT2dorQjLnFEhrtA8Ae/h2kwLQ8w3yYNWB/PxDSuMRnyH4G8EWtVydzQcV/QnNZ9Y6eBcHaI5C2yVnFl7Fi4zBYc2eeL8o8gWqmM6DobBCvVrxD0pCNAsLhOW6IA140BNGT9F/hQZYje9RQRWbFKh/iKiUhqFYCzLcxFfYXkw2HZMJA2p/bLmuc8ZbgYaIiU6b90kpfDB37Xw0S6toIj9E8h+E3nkNnwraCQcbralhz/bdz aemerson@seasalt'}, 'msg': u'SSH Error: data could not be sent to remote host "smithi142.front.sepia.ceph.com". 
Make sure this host can be reached over ssh', 'unreachable': True}, {'item': {u'ovpn': u'sbillah@syed-machine qVcw+LuFQQxYW7QpzZ3aLA d028c4635289a781f3ebe26a545e084572613b03cc9cde7770018ad0259e4dc9', u'name': u'sbillah', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDEgeHHS5jx9k7QdOEGqZjaxEAPJ6vv/WZXJNpifBpm9Ba1FRA1U3qHV8oX/bBZ08HIBGg8hQOeZ5I7d5HyzR0971W6KVgDF+s6mRN7d+heNi3XmikbJrozLoEiVQNHIsXjUoc655c2y9NR9Lf5FBweSSrbE34jCUqTA3XmZOdbkjY+ngOcDIfNixRG0yZ57p6UqYW0I+Mg68CB7N+Lv4gFvH/968aML7ConABPGs+vnLdNSQbjuibnaoZwzeSgPoaBJEqBCgNkwO8TyaC04okMj2X7/FGxgZNhwF0V5SVpBllWlGqdAigEF0dher88PbzSIFSm/x8PeACSZWkU0QWV Masum@MASUM-PC'}, 'ansible_loop_var': u'item', '_ansible_item_label': {u'ovpn': u'sbillah@syed-machine qVcw+LuFQQxYW7QpzZ3aLA d028c4635289a781f3ebe26a545e084572613b03cc9cde7770018ad0259e4dc9', u'name': u'sbillah', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDEgeHHS5jx9k7QdOEGqZjaxEAPJ6vv/WZXJNpifBpm9Ba1FRA1U3qHV8oX/bBZ08HIBGg8hQOeZ5I7d5HyzR0971W6KVgDF+s6mRN7d+heNi3XmikbJrozLoEiVQNHIsXjUoc655c2y9NR9Lf5FBweSSrbE34jCUqTA3XmZOdbkjY+ngOcDIfNixRG0yZ57p6UqYW0I+Mg68CB7N+Lv4gFvH/968aML7ConABPGs+vnLdNSQbjuibnaoZwzeSgPoaBJEqBCgNkwO8TyaC04okMj2X7/FGxgZNhwF0V5SVpBllWlGqdAigEF0dher88PbzSIFSm/x8PeACSZWkU0QWV Masum@MASUM-PC'}, 'msg': u'SSH Error: data could not be sent to remote host "smithi142.front.sepia.ceph.com". 
Make sure this host can be reached over ssh', 'unreachable': True}, {'item': {u'ovpn': u'ali@freerunner yQjRpRVG7D5KN2HAUjI30g 9d677a34ae98477e6cc8ba1d975d81dcae43a102013b265c63f3ea91e7dacd78', u'name': u'amaredia', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCc1ZbHNXuJn7gxyt7QJ579gEM13MuFdTzsrnedYuRIW2Zlm4rCFr8Oj5SGs5DWBIJnd3W4O2v7PjuVQMxU/zbwnj7mdwBmLFe1cSzOJv2eP1R2uaU5z5C7KNmyPLU++pGKClzb6v5wcEQUq2K35xnuXUU9B935dK+Fm7bK7+HAxj+1vpVeycbPFyPhf6mwbx8dZv4uvZGV2+CGBuyIB/5U2AMJZy9LWim3AR35bip4ftXvSKlAON+RHhnS0toG/6uwp0XlFuGn5H8snaca7L6hGtB4xg1PqA5aMf33Jiv2NVLQo8emHU9J/HeNVS7ksoSZ6InynpLZ6b9uXa9OM9XL ali@parkour'}, 'ansible_loop_var': u'item', '_ansible_item_label': {u'ovpn': u'ali@freerunner yQjRpRVG7D5KN2HAUjI30g 9d677a34ae98477e6cc8ba1d975d81dcae43a102013b265c63f3ea91e7dacd78', u'name': u'amaredia', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCc1ZbHNXuJn7gxyt7QJ579gEM13MuFdTzsrnedYuRIW2Zlm4rCFr8Oj5SGs5DWBIJnd3W4O2v7PjuVQMxU/zbwnj7mdwBmLFe1cSzOJv2eP1R2uaU5z5C7KNmyPLU++pGKClzb6v5wcEQUq2K35xnuXUU9B935dK+Fm7bK7+HAxj+1vpVeycbPFyPhf6mwbx8dZv4uvZGV2+CGBuyIB/5U2AMJZy9LWim3AR35bip4ftXvSKlAON+RHhnS0toG/6uwp0XlFuGn5H8snaca7L6hGtB4xg1PqA5aMf33Jiv2NVLQo8emHU9J/HeNVS7ksoSZ6InynpLZ6b9uXa9OM9XL ali@parkour'}, 'msg': u'SSH Error: data could not be sent to remote host "smithi142.front.sepia.ceph.com". 
Make sure this host can be reached over ssh', 'unreachable': True}, {'item': {u'ovpn': u'tserlin@annarbor DlKe+OWBPcFAQtWMUAHnwg 6b268bd737ffa5dd38865575ccd444b92cb912c70f5b82dac41f9c50505df4a5', u'name': u'tserlin', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA2ok6CBUpOVGv2RFws44GtEP5SVxOi4Vie0WSZYLpD55rTfmOtsItN1d1EciVNTUyWuyzMeQGWC4JAd3/2l3gR/5ZwSvd7b/7TCNYxjAMEubRand0GxEoiKhpkJMMmJqcT0KefP8pr31MASWPuERj1+0/IbjJExsvrJaUjqeIfZ+DWR8dC2VYdcH3hsp6AE3mqKX/9693sxe8ROt6qY4WkpZcO4M90unOVa2CnJsYqKaaIC4z3fmKuHZpJZjiJMrg8rtuN4r7bnKWPEVGcahj+i74JWwKR5+2gntLpxw2chIBmf4qFu6HDplddig4V3I/2NLB8soBpgc+m8O7YyYl0w== thomas@easystreet'}, 'ansible_loop_var': u'item', '_ansible_item_label': {u'ovpn': u'tserlin@annarbor DlKe+OWBPcFAQtWMUAHnwg 6b268bd737ffa5dd38865575ccd444b92cb912c70f5b82dac41f9c50505df4a5', u'name': u'tserlin', u'key': u'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA2ok6CBUpOVGv2RFws44GtEP5SVxOi4Vie0WSZYLpD55rTfmOtsItN1d1EciVNTUyWuyzMeQGWC4JAd3/2l3gR/5ZwSvd7b/7TCNYxjAMEubRand0GxEoiKhpkJMMmJqcT0KefP8pr31MASWPuERj1+0/IbjJExsvrJaUjqeIfZ+DWR8dC2VYdcH3hsp6AE3mqKX/9693sxe8ROt6qY4WkpZcO4M90unOVa2CnJsYqKaaIC4z3fmKuHZpJZjiJMrg8rtuN4r7bnKWPEVGcahj+i74JWwKR5+2gntLpxw2chIBmf4qFu6HDplddig4V3I/2NLB8soBpgc+m8O7YyYl0w== thomas@easystreet'}, 'msg': u'SSH Error: data could not be sent to remote host "smithi142.front.sepia.ceph.com". Make sure this host can be reached over ssh', 'unreachable': True}, {'item': {u'ovpn': u'dis@zambezi wXYUFvWPBlkCFz+mC4RD6A 11c464dfb2a27986e029f1915732a4f237baba4eade02bb045c8f0d13dfada28', u'name': u'dis'}, 'ansible_loop_var': u'item', '_ansible_item_label': {u'ovpn': u'dis@zambezi wXYUFvWPBlkCFz+mC4RD6A 11c464dfb2a27986e029f1915732a4f237baba4eade02bb045c8f0d13dfada28', u'name': u'dis'}, 'msg': u'SSH Error: data could not be sent to remote host "smithi142.front.sepia.ceph.com". 
Make sure this host can be reached over ssh', 'unreachable': True}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1116, u'groups': u'sudo', u'home': u'/home/gregf', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1116, '_ansible_item_label': {u'ovpn': u'gregf@kai YhNrPfedhZjbvGjOfmotOA 440cf8595a87cd307790bbf79c3668455a0405945e2b271d873325de222cd72f\ngregf@pudgy VZrk8nWzg7pYLOrZru8dBg c1d1a0e469a134ccf5f5a5525631a6b83efa6970beec3b23809eb0daa3dca47f\ngfarnum@WorkMini2 +bAqcTdU7Ok9bGMcB3A84w 3cff1326561a23cf81dd6495373cb83ed149cee026c6374d72c19b483f4f1f07\ngfarnum@Macbook bxVtolCC9SY3QNlpx3cE1w aff8d28bfb4d693253511d29e8d399196e964fc096594ec705748a5469d44654\ngregf@fedoragreg Jdn8I/sFGcr5Aa/dici6lw 50f88afc35c05ef8454742226f7baf2cd20cb1e2d4d0c9f4a393013877736bfa\n', u'name': u'gregf'}, 'item': {u'ovpn': u'gregf@kai YhNrPfedhZjbvGjOfmotOA 440cf8595a87cd307790bbf79c3668455a0405945e2b271d873325de222cd72f\ngregf@pudgy VZrk8nWzg7pYLOrZru8dBg c1d1a0e469a134ccf5f5a5525631a6b83efa6970beec3b23809eb0daa3dca47f\ngfarnum@WorkMini2 +bAqcTdU7Ok9bGMcB3A84w 3cff1326561a23cf81dd6495373cb83ed149cee026c6374d72c19b483f4f1f07\ngfarnum@Macbook bxVtolCC9SY3QNlpx3cE1w aff8d28bfb4d693253511d29e8d399196e964fc096594ec705748a5469d44654\ngregf@fedoragreg Jdn8I/sFGcr5Aa/dici6lw 50f88afc35c05ef8454742226f7baf2cd20cb1e2d4d0c9f4a393013877736bfa\n', u'name': u'gregf'}, u'changed': False, u'name': u'gregf', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': 
None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'gregf', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1117, u'groups': u'sudo', u'home': u'/home/joshd', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1117, '_ansible_item_label': {u'ovpn': u'jdurgin@glarthnir ElGaAgbs5VZujuzsQMfCmA 2a156febba037d02d1099bc11d1e697d34300b2c420f2df664b5b0de1248f983\njdurgin@new-angeles jqa015PRJcHSp5WHcwJjUg 42113e1156382fde866d691f30584f6b30c3dfc21317ae89b4267efb177d982c\n', u'name': u'joshd'}, 'item': {u'ovpn': u'jdurgin@glarthnir ElGaAgbs5VZujuzsQMfCmA 2a156febba037d02d1099bc11d1e697d34300b2c420f2df664b5b0de1248f983\njdurgin@new-angeles jqa015PRJcHSp5WHcwJjUg 42113e1156382fde866d691f30584f6b30c3dfc21317ae89b4267efb177d982c\n', u'name': u'joshd'}, u'changed': False, u'name': u'joshd', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'joshd', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1118, u'groups': u'sudo', u'home': u'/home/davidz', u'move_home': False, u'append': True, '_ansible_no_log': 
False, 'ansible_loop_var': u'item', u'group': 1118, '_ansible_item_label': {u'ovpn': u'dzafman@ubuntu-laptop-vm NY9y9tqLY1beEcXDwMavsQ c869d42fae1890574577a8014d97d1247f1a13cb6337037d2714f1d236fc65d2\ndzafman@ubuntu16 2a0rAy5QmNFSEcATNz2h9A b7c11fbb0911fc4ac0216a1a8eac8359a9e8f43d69126db6b45cbeabd358c2b4\ndzafman@ubuntu-1804 PN1pkeGHGloB0K+IZrfB0g f1c01b447b9ec3fc048c32f606a33fb488ff621e11aa305ac979501030202658\n', u'name': u'davidz'}, 'item': {u'ovpn': u'dzafman@ubuntu-laptop-vm NY9y9tqLY1beEcXDwMavsQ c869d42fae1890574577a8014d97d1247f1a13cb6337037d2714f1d236fc65d2\ndzafman@ubuntu16 2a0rAy5QmNFSEcATNz2h9A b7c11fbb0911fc4ac0216a1a8eac8359a9e8f43d69126db6b45cbeabd358c2b4\ndzafman@ubuntu-1804 PN1pkeGHGloB0K+IZrfB0g f1c01b447b9ec3fc048c32f606a33fb488ff621e11aa305ac979501030202658\n', u'name': u'davidz'}, u'changed': False, u'name': u'davidz', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'davidz', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1119, u'groups': u'sudo', u'home': u'/home/gmeno', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1119, '_ansible_item_label': {u'ovpn': u'gmeno@gmeno-virtual-machine FKFu8B2pMqotpmEVAO1few 
8229574e499eaf767a408909f5afdf2e2a0bb8f3e61b18d63a651f7102c68dbc', u'name': u'gmeno'}, 'item': {u'ovpn': u'gmeno@gmeno-virtual-machine FKFu8B2pMqotpmEVAO1few 8229574e499eaf767a408909f5afdf2e2a0bb8f3e61b18d63a651f7102c68dbc', u'name': u'gmeno'}, u'changed': False, u'name': u'gmeno', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'gmeno', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1120, u'groups': u'sudo', u'home': u'/home/ivancich', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1120, '_ansible_item_label': {u'ovpn': u'ivancich@ann.arbor Kt2vxZ3Ge609mHfjx0W4Cw aaa55a9e2b5276b62a21cd3c401b365c5c2693e39efccb2f9edefafefa1dc8b1', u'name': u'ivancich'}, 'item': {u'ovpn': u'ivancich@ann.arbor Kt2vxZ3Ge609mHfjx0W4Cw aaa55a9e2b5276b62a21cd3c401b365c5c2693e39efccb2f9edefafefa1dc8b1', u'name': u'ivancich'}, u'changed': False, u'name': u'ivancich', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': 
u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'ivancich', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1121, u'groups': u'sudo', u'home': u'/home/wusui', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1121, '_ansible_item_label': {u'ovpn': u'wusui@ubuntu /nElLTGqxiOr/hD6eziF5A 2e244e2b62fa42dadf3a3a1dbcc29475410cee6550b0c6b3603c135853937551\nwusui@thinkpad tu2DxDcllIdwb5ewldgT0g 1590a7d9f1377b0094e9ba4277e7bcbe6374791f0b3d3df93026345c058c93f5\n', u'name': u'wusui'}, 'item': {u'ovpn': u'wusui@ubuntu /nElLTGqxiOr/hD6eziF5A 2e244e2b62fa42dadf3a3a1dbcc29475410cee6550b0c6b3603c135853937551\nwusui@thinkpad tu2DxDcllIdwb5ewldgT0g 1590a7d9f1377b0094e9ba4277e7bcbe6374791f0b3d3df93026345c058c93f5\n', u'name': u'wusui'}, u'changed': False, u'name': u'wusui', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'wusui', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, 
u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1122, u'groups': u'sudo', u'home': u'/home/zyan', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1122, '_ansible_item_label': {u'ovpn': u'zyan@redhat OP16IkReCatMfA4Mf3pkdQ b0262be71ef008e2f7d585e34431dc2757c1e22ac1aa844863be533bf873d304', u'name': u'zyan'}, 'item': {u'ovpn': u'zyan@redhat OP16IkReCatMfA4Mf3pkdQ b0262be71ef008e2f7d585e34431dc2757c1e22ac1aa844863be533bf873d304', u'name': u'zyan'}, u'changed': False, u'name': u'zyan', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'zyan', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1123, u'groups': u'sudo', u'home': u'/home/yuriw', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1123, '_ansible_item_label': {u'ovpn': u'yuriweinstein@Yuris-MacBook-Pro.local wKA8mCcsdsk/CE+1d9GiPw caaf5bf5eb31ef269e3d0bc34d45dd761c0bb0907172eea6e754434de4603db7\nyuriw@home 02TZyR3JHJMxEQob80ICNA 85b4aa0f69f6d469dae0bb3dca4baaf222e164927ed7eed2082caae8f4717e48\nyuriweinstein@xenon1 C9eVdLb/i18lMcMG20rGPw eaddd0e831a77de3f35cb19e307bd6f9aeb0cc03eff1ae58490d7db33ced4311\nyuriw@yuriw-RH 5ivdxgFO4eIkbXVhl8xkvw 
59212d29b8b42d9fe457c1b2c43d774e1d25807be10dcc1252d4aec63b97a467\n', u'name': u'yuriw'}, 'item': {u'ovpn': u'yuriweinstein@Yuris-MacBook-Pro.local wKA8mCcsdsk/CE+1d9GiPw caaf5bf5eb31ef269e3d0bc34d45dd761c0bb0907172eea6e754434de4603db7\nyuriw@home 02TZyR3JHJMxEQob80ICNA 85b4aa0f69f6d469dae0bb3dca4baaf222e164927ed7eed2082caae8f4717e48\nyuriweinstein@xenon1 C9eVdLb/i18lMcMG20rGPw eaddd0e831a77de3f35cb19e307bd6f9aeb0cc03eff1ae58490d7db33ced4311\nyuriw@yuriw-RH 5ivdxgFO4eIkbXVhl8xkvw 59212d29b8b42d9fe457c1b2c43d774e1d25807be10dcc1252d4aec63b97a467\n', u'name': u'yuriw'}, u'changed': False, u'name': u'yuriw', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'yuriw', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1124, u'groups': u'sudo', u'home': u'/home/tamil', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1124, '_ansible_item_label': {u'ovpn': u'tmuthamizhan@mac /1CBLC6rxqzJzPspffZV0g 4d1dfff2e097a7fc2a83ea73eccad2f0e453a6338e18c87b4d89bf370733e29c\ntamil@tamil-VirtualBox M22QdmhkSPj9aEcTiuIVfg 8e76e06b14de99318441c75a96e635a92f5bddc54a40b45276191f6829c6b239\n', u'name': u'tamil'}, 'item': {u'ovpn': u'tmuthamizhan@mac /1CBLC6rxqzJzPspffZV0g 
4d1dfff2e097a7fc2a83ea73eccad2f0e453a6338e18c87b4d89bf370733e29c\ntamil@tamil-VirtualBox M22QdmhkSPj9aEcTiuIVfg 8e76e06b14de99318441c75a96e635a92f5bddc54a40b45276191f6829c6b239\n', u'name': u'tamil'}, u'changed': False, u'name': u'tamil', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'tamil', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1125, u'groups': u'sudo', u'home': u'/home/jowilkin', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1125, '_ansible_item_label': {u'ovpn': u'jowilkin@jowilkin 2r8xOv/eCTcHr+HSsMkPYg 0ac416d2dc139628144dfa0822af8cc0a455f5f5f3e4d1b9713c14115c062218\njohn@osd-host 7zjDTxAYhCmTX+Az4SJaoA 7d924233fdef168e2c5c01258aa349de108629ef2ff90d17c0b96acf22dac7c2\njohn@admin-host 7cpk7iJ1Hg2vk4bPDovKmA 05765178f27af6dc4e43e47f52d773aac3bc1b3f1dd998bdbf479b951bfd2efb\n', u'name': u'jowilkin'}, 'item': {u'ovpn': u'jowilkin@jowilkin 2r8xOv/eCTcHr+HSsMkPYg 0ac416d2dc139628144dfa0822af8cc0a455f5f5f3e4d1b9713c14115c062218\njohn@osd-host 7zjDTxAYhCmTX+Az4SJaoA 7d924233fdef168e2c5c01258aa349de108629ef2ff90d17c0b96acf22dac7c2\njohn@admin-host 7cpk7iJ1Hg2vk4bPDovKmA 05765178f27af6dc4e43e47f52d773aac3bc1b3f1dd998bdbf479b951bfd2efb\n', u'name': u'jowilkin'}, 
u'changed': False, u'name': u'jowilkin', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'jowilkin', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1126, u'groups': u'sudo', u'home': u'/home/bhubbard', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1126, '_ansible_item_label': {u'ovpn': u'brad@dhcp-1-165.bne.redhat.com 4oShQI9+vYtX5gA47np/Sw 3fc7df5afa772752d8eee15c01d550cc1dcc88b6e940abc9f9f8f26102d239d4', u'name': u'bhubbard'}, 'item': {u'ovpn': u'brad@dhcp-1-165.bne.redhat.com 4oShQI9+vYtX5gA47np/Sw 3fc7df5afa772752d8eee15c01d550cc1dcc88b6e940abc9f9f8f26102d239d4', u'name': u'bhubbard'}, u'changed': False, u'name': u'bhubbard', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, 
u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'bhubbard', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1127, u'groups': u'sudo', u'home': u'/home/yehudasa', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1127, '_ansible_item_label': {u'ovpn': u'yehudasa@yehuda-2 NGXHeO0lAvRFfYLalffzEw 0d3489b09f48ad5fa9d1ec5944d9c8020daf852488f389f3d44fe63d3f651f34\nyehuda@yehuda-940X3G shisK5LjI6fr3ZBJy/xX8A 49522899bd26130086ce668079f0062d987d85dfa5767dd5c34e5953db97997a\nyehudasa@yehudasa-desktop OT1MhoO0WihhvkKztqW0Uw 12a4d6b54390b9df7f5af3bd6b533f3c1fee0c7b9fbb79f0a87bcb28b182c7d4\n', u'name': u'yehudasa'}, 'item': {u'ovpn': u'yehudasa@yehuda-2 NGXHeO0lAvRFfYLalffzEw 0d3489b09f48ad5fa9d1ec5944d9c8020daf852488f389f3d44fe63d3f651f34\nyehuda@yehuda-940X3G shisK5LjI6fr3ZBJy/xX8A 49522899bd26130086ce668079f0062d987d85dfa5767dd5c34e5953db97997a\nyehudasa@yehudasa-desktop OT1MhoO0WihhvkKztqW0Uw 12a4d6b54390b9df7f5af3bd6b533f3c1fee0c7b9fbb79f0a87bcb28b182c7d4\n', u'name': u'yehudasa'}, u'changed': False, u'name': u'yehudasa', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'yehudasa', u'local': None, 
u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1128, u'groups': u'sudo', u'home': u'/home/dang', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1128, '_ansible_item_label': {u'ovpn': u'dang@sidious w0CNW2g9K1WiRenVGYWNUA 4f59d761bfab3659115da2b3b80a486266f77b09d8527983217d15648b4f92b4', u'name': u'dang'}, 'item': {u'ovpn': u'dang@sidious w0CNW2g9K1WiRenVGYWNUA 4f59d761bfab3659115da2b3b80a486266f77b09d8527983217d15648b4f92b4', u'name': u'dang'}, u'changed': False, u'name': u'dang', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'dang', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1129, u'groups': u'sudo', u'home': u'/home/branto', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1129, '_ansible_item_label': {u'ovpn': u'branto@work ganye+HpG3dkMEik6WtTng 018f3f9b9d49dcefa701ea304a8e58f002c46f0650edae220a0a7ab1bce36aeb', u'name': u'branto'}, 'item': {u'ovpn': u'branto@work ganye+HpG3dkMEik6WtTng 018f3f9b9d49dcefa701ea304a8e58f002c46f0650edae220a0a7ab1bce36aeb', u'name': u'branto'}, u'changed': False, u'name': u'branto', 'failed': False, 
u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'branto', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1130, u'groups': u'sudo', u'home': u'/home/xiaoxichen', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1130, '_ansible_item_label': {u'ovpn': u'xiaoxichen@ebay RvJJ7BhIehpoPtggrwnskQ 862ecfe7e15dfab86d61df86856bfe06cbb99f240f6f03851f7f9e1a255327d6', u'name': u'xiaoxichen'}, 'item': {u'ovpn': u'xiaoxichen@ebay RvJJ7BhIehpoPtggrwnskQ 862ecfe7e15dfab86d61df86856bfe06cbb99f240f6f03851f7f9e1a255327d6', u'name': u'xiaoxichen'}, u'changed': False, u'name': u'xiaoxichen', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], 
u'move_home': False, u'password': None, u'name': u'xiaoxichen', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1131, u'groups': u'sudo', u'home': u'/home/ffilz', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1131, '_ansible_item_label': {u'ovpn': u'ffilz@redhat.com 6YdvqxkKfmDWGD2s0wA7Ww 4ce64d08686e34e559ccec2498df433b155b70c9ebccaec616b6b34f0f0c246e', u'name': u'ffilz'}, 'item': {u'ovpn': u'ffilz@redhat.com 6YdvqxkKfmDWGD2s0wA7Ww 4ce64d08686e34e559ccec2498df433b155b70c9ebccaec616b6b34f0f0c246e', u'name': u'ffilz'}, u'changed': False, u'name': u'ffilz', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'ffilz', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1132, u'groups': u'sudo', u'home': u'/home/joao', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1132, '_ansible_item_label': {u'ovpn': u'joao@magrathea eSS2/gvK7ALE6L9bITfuMA c3caaeecee3f43e39b7a81fad50e0d874359c70a9c41b77c661511c71f733909\njoao@timesink 9S3oER36HheVupjRpnLz6A 9dbc964184244e9da269942dc73ec9ebba6594bcccfdc0eb09562b58b4542162\n', u'name': u'joao'}, 
'item': {u'ovpn': u'joao@magrathea eSS2/gvK7ALE6L9bITfuMA c3caaeecee3f43e39b7a81fad50e0d874359c70a9c41b77c661511c71f733909\njoao@timesink 9S3oER36HheVupjRpnLz6A 9dbc964184244e9da269942dc73ec9ebba6594bcccfdc0eb09562b58b4542162\n', u'name': u'joao'}, u'changed': False, u'name': u'joao', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'joao', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1133, u'groups': u'sudo', u'home': u'/home/nhm', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1133, '_ansible_item_label': {u'ovpn': u'nh@latte JCH9icAtDPj951rgbPKJyw 9a3d30ec1ec9467ccdc8bdbbfacffd7396fb38e046199ae37b2b7b69dbf37480\nnhm@espresso +YYZPT29wYzY5ooaRzabCQ 1ee041dd58b9ec6eb678c47632ece7cf6c24e23bcbac28a77a82af05ba6cc148\nnhm@mocha HgOGOfkBEzJihFsKmPRfKQ 2e17f3ba0b90df7a36f19a7c8f64d2aa8f966b2794c94caa110d313e927a1c1b\n', u'name': u'nhm'}, 'item': {u'ovpn': u'nh@latte JCH9icAtDPj951rgbPKJyw 9a3d30ec1ec9467ccdc8bdbbfacffd7396fb38e046199ae37b2b7b69dbf37480\nnhm@espresso +YYZPT29wYzY5ooaRzabCQ 1ee041dd58b9ec6eb678c47632ece7cf6c24e23bcbac28a77a82af05ba6cc148\nnhm@mocha HgOGOfkBEzJihFsKmPRfKQ 2e17f3ba0b90df7a36f19a7c8f64d2aa8f966b2794c94caa110d313e927a1c1b\n', u'name': u'nhm'}, u'changed': 
False, u'name': u'nhm', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'nhm', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1134, u'groups': u'sudo', u'home': u'/home/jj', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1134, '_ansible_item_label': {u'ovpn': u'jj@aurora PFzIfG7NjrCYxJvxd//soA 395884ddf2caa0043f97e4f98530cc3ad646d6f269802a2b79ca1ea7278ee006\njj@metatron iQogIw/KQtewT7oj9Mkivw 0881e5ceb5897e8a370bacee69ad58eb5098090ea4b0d53972214ea7c751e35a\njj@laptop O1e31whZbQ0S7MUtglCRLg 96e39257989ce36e240b5d368e0308d38009d3d923ec398dc9cc6eba371acaa4\njj@aurora2 EtAvlrozxiL3PLYp6mvATg 1018928736c33ed06246f208cd02aa10c0a6efa5b4e34e32408d7a6c72c32e11\n', u'name': u'jj'}, 'item': {u'ovpn': u'jj@aurora PFzIfG7NjrCYxJvxd//soA 395884ddf2caa0043f97e4f98530cc3ad646d6f269802a2b79ca1ea7278ee006\njj@metatron iQogIw/KQtewT7oj9Mkivw 0881e5ceb5897e8a370bacee69ad58eb5098090ea4b0d53972214ea7c751e35a\njj@laptop O1e31whZbQ0S7MUtglCRLg 96e39257989ce36e240b5d368e0308d38009d3d923ec398dc9cc6eba371acaa4\njj@aurora2 EtAvlrozxiL3PLYp6mvATg 1018928736c33ed06246f208cd02aa10c0a6efa5b4e34e32408d7a6c72c32e11\n', u'name': u'jj'}, u'changed': False, u'name': u'jj', 'failed': False, u'state': u'present', 
u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'jj', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1135, u'groups': u'sudo', u'home': u'/home/nwatkins', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1135, '_ansible_item_label': {u'ovpn': u'nwatkins@daq unHfXkUpAHOnzeptznuLLA 33b2003c30d0cc7a6b194e76be92c7b5d270c2d2a4b4a8b6e673f0f0dc1db313', u'name': u'nwatkins'}, 'item': {u'ovpn': u'nwatkins@daq unHfXkUpAHOnzeptznuLLA 33b2003c30d0cc7a6b194e76be92c7b5d270c2d2a4b4a8b6e673f0f0dc1db313', u'name': u'nwatkins'}, u'changed': False, u'name': u'nwatkins', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, 
u'name': u'nwatkins', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1136, u'groups': u'sudo', u'home': u'/home/mkidd', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1136, '_ansible_item_label': {u'ovpn': u'linuxkidd@zenbook oYp1WWV0JwpikHCWCV52Lg 9aca455b601bf3a365d31068154150ac63dd76f32cef29a55f9685dd1a88aa22', u'name': u'mkidd'}, 'item': {u'ovpn': u'linuxkidd@zenbook oYp1WWV0JwpikHCWCV52Lg 9aca455b601bf3a365d31068154150ac63dd76f32cef29a55f9685dd1a88aa22', u'name': u'mkidd'}, u'changed': False, u'name': u'mkidd', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'mkidd', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1137, u'groups': u'sudo', u'home': u'/home/jlopez', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1137, '_ansible_item_label': {u'ovpn': u'JCLO@oh-mbp-jcl vZhmBh/1LjLFEu+atRec6w 1f13f591373b4dc798a9b701fabf1eb99bf4aa58f87b6420d6c916716f0965af', u'name': u'jlopez'}, 'item': {u'ovpn': u'JCLO@oh-mbp-jcl vZhmBh/1LjLFEu+atRec6w 1f13f591373b4dc798a9b701fabf1eb99bf4aa58f87b6420d6c916716f0965af', u'name': 
u'jlopez'}, u'changed': False, u'name': u'jlopez', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'jlopez', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1138, u'groups': u'sudo', u'home': u'/home/haomaiwang', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1138, '_ansible_item_label': {u'ovpn': u'yuyuyu101@desktop XAUS/1Geh1T2WY//5mRahw fda03bdaf79c2f39ac3ba6cd9c3a1cb2e66b842a921169f20a00481a4cd3d9cb', u'name': u'haomaiwang'}, 'item': {u'ovpn': u'yuyuyu101@desktop XAUS/1Geh1T2WY//5mRahw fda03bdaf79c2f39ac3ba6cd9c3a1cb2e66b842a921169f20a00481a4cd3d9cb', u'name': u'haomaiwang'}, u'changed': False, u'name': u'haomaiwang', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142.front.sepia.ceph.com', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': 
u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'haomaiwang', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1139, u'groups': u'sudo', u'home': u'/home/jdillaman', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1139, '_ansible_item_label': {u'ovpn': u'jdillaman@ceph1 kQ96pIpKTocwIj9H1Udb1g 6c087451af94ac18c144712bcc9329799d86f6d90376839dcd6fa41cd73e3608', u'name': u'jdillaman'}, 'item': {u'ovpn': u'jdillaman@ceph1 kQ96pIpKTocwIj9H1Udb1g 6c087451af94ac18c144712bcc9329799d86f6d90376839dcd6fa41cd73e3608', u'name': u'jdillaman'}, u'changed': False, u'name': u'jdillaman', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'jdillaman', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1140, u'groups': u'sudo', u'home': u'/home/kchai', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1140, '_ansible_item_label': {u'ovpn': u'kefu@gen8 HVoNrB5C8+bYxuqIfByeEQ 4dddde1890af2d6df367d3d832cc3b9b660160a1db69f0135e0d09364b2cb9b3', u'name': u'kchai'}, 'item': {u'ovpn': u'kefu@gen8 
HVoNrB5C8+bYxuqIfByeEQ 4dddde1890af2d6df367d3d832cc3b9b660160a1db69f0135e0d09364b2cb9b3', u'name': u'kchai'}, u'changed': False, u'name': u'kchai', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'kchai', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1141, u'groups': u'sudo', u'home': u'/home/vumrao', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1141, '_ansible_item_label': {u'ovpn': u'vumrao@vumrao VaMHdnIGTl6y9LIkurxfjQ 71de53c4a0f212b8f919437d7d433d24a33d7a33bc6fe5df5d047e24499994b2', u'name': u'vumrao'}, 'item': {u'ovpn': u'vumrao@vumrao VaMHdnIGTl6y9LIkurxfjQ 71de53c4a0f212b8f919437d7d433d24a33d7a33bc6fe5df5d047e24499994b2', u'name': u'vumrao'}, u'changed': False, u'name': u'vumrao', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, 
u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'vumrao', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1142, u'groups': u'sudo', u'home': u'/home/dfuller', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1142, '_ansible_item_label': {u'ovpn': u'dfuller@laptop 6U0lNVidRy7Ye/5q6cGN1A aea3d019f68a95094c99385aff099818224455a829615cfc774587e4519398a7\ndfuller@rex001 6Z8bfQDgPXVSGMeeQHoItg 3abd41920b72683fbba7f25be88ff992fcd753119c4d2086c12daaf20798e684\n', u'name': u'dfuller'}, 'item': {u'ovpn': u'dfuller@laptop 6U0lNVidRy7Ye/5q6cGN1A aea3d019f68a95094c99385aff099818224455a829615cfc774587e4519398a7\ndfuller@rex001 6Z8bfQDgPXVSGMeeQHoItg 3abd41920b72683fbba7f25be88ff992fcd753119c4d2086c12daaf20798e684\n', u'name': u'dfuller'}, u'changed': False, u'name': u'dfuller', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'dfuller', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1143, u'groups': u'sudo', u'home': u'/home/owasserm', u'move_home': False, u'append': True, '_ansible_no_log': False, 
'ansible_loop_var': u'item', u'group': 1143, '_ansible_item_label': {u'ovpn': u'owasserm@owasserm.redhat.com hsdVTbVub6eRnhlO9B02rQ 7c9baf41670ff9ab612f75d4be42d0aaf0d7ecaa3c8928032b61f1be91725890\n', u'name': u'owasserm'}, 'item': {u'ovpn': u'owasserm@owasserm.redhat.com hsdVTbVub6eRnhlO9B02rQ 7c9baf41670ff9ab612f75d4be42d0aaf0d7ecaa3c8928032b61f1be91725890\n', u'name': u'owasserm'}, u'changed': False, u'name': u'owasserm', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'owasserm', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1144, u'groups': u'sudo', u'home': u'/home/abhishekvrshny', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1144, '_ansible_item_label': {u'ovpn': u'abhishekvrshny@flipkart QTqbWHaqvXwB+yBy6CVO7A 25d026c49dc49b3a1f445d2dc0099d5ed916645b0adb8d0306269ace7a2096e9', u'name': u'abhishekvrshny'}, 'item': {u'ovpn': u'abhishekvrshny@flipkart QTqbWHaqvXwB+yBy6CVO7A 25d026c49dc49b3a1f445d2dc0099d5ed916645b0adb8d0306269ace7a2096e9', u'name': u'abhishekvrshny'}, u'changed': False, u'name': u'abhishekvrshny', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, 
u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'abhishekvrshny', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1145, u'groups': u'sudo', u'home': u'/home/vasu', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1145, '_ansible_item_label': {u'ovpn': u'vasu@ceph1 +1D1qNUAk8h7OF9LF9qkrQ 963aa256fb99bc4b77e7085f57cf910a04c7f143603f81252331411eb8e37ec3\nvakulkar@vakulkar.sjc.csb O8ac1k0Dh3xkIFX8NFyIZw 471538eeb22384b58921e4f11af272c00c0a953dc7fe8d95ba057e65d141fbd2\nvasu@vasuSrv waJqYAARY/LnfuP1x/KQzQ 68915d3a1eb3dd00a562c149791cec5f43a96f5fd0b851ec855ec3f5dab496b4\n', u'name': u'vasu'}, 'item': {u'ovpn': u'vasu@ceph1 +1D1qNUAk8h7OF9LF9qkrQ 963aa256fb99bc4b77e7085f57cf910a04c7f143603f81252331411eb8e37ec3\nvakulkar@vakulkar.sjc.csb O8ac1k0Dh3xkIFX8NFyIZw 471538eeb22384b58921e4f11af272c00c0a953dc7fe8d95ba057e65d141fbd2\nvasu@vasuSrv waJqYAARY/LnfuP1x/KQzQ 68915d3a1eb3dd00a562c149791cec5f43a96f5fd0b851ec855ec3f5dab496b4\n', u'name': u'vasu'}, u'changed': False, u'name': u'vasu', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, 
u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'vasu', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1146, u'groups': u'sudo', u'home': u'/home/smohan', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1146, '_ansible_item_label': {u'ovpn': u'shmohan@laptop 7wwHLZZNa4ShUV1imXDDjw 0aca19a8ff6dbeee2821dd75a329a05b8052170204b2b242ced9b1a68ca8df37', u'name': u'smohan'}, 'item': {u'ovpn': u'shmohan@laptop 7wwHLZZNa4ShUV1imXDDjw 0aca19a8ff6dbeee2821dd75a329a05b8052170204b2b242ced9b1a68ca8df37', u'name': u'smohan'}, u'changed': False, u'name': u'smohan', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'smohan', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1147, u'groups': u'sudo', u'home': u'/home/abhi', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1147, '_ansible_item_label': {u'ovpn': u'abhi@trusty SKarQTpBigBobP9sLjdLiw 
868a74ed21b46f7f64255897d824f4e3eb21f8dde844bbdaa386681c942d8114', u'name': u'abhi'}, 'item': {u'ovpn': u'abhi@trusty SKarQTpBigBobP9sLjdLiw 868a74ed21b46f7f64255897d824f4e3eb21f8dde844bbdaa386681c942d8114', u'name': u'abhi'}, u'changed': False, u'name': u'abhi', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'abhi', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1148, u'groups': u'sudo', u'home': u'/home/shehbazj', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1148, '_ansible_item_label': {u'ovpn': u'shehbazj@smrhost oEliXj/jPmfXv9AUAFKB5A 8bb3a682b4ff15de655c0fe610d350c5805d0a970471e4810b648f47e2811246', u'name': u'shehbazj'}, 'item': {u'ovpn': u'shehbazj@smrhost oEliXj/jPmfXv9AUAFKB5A 8bb3a682b4ff15de655c0fe610d350c5805d0a970471e4810b648f47e2811246', u'name': u'shehbazj'}, u'changed': False, u'name': u'shehbazj', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': 
None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'shehbazj', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1149, u'groups': u'sudo', u'home': u'/home/cbodley', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1149, '_ansible_item_label': {u'ovpn': u'cbodley@redhat.com W4gFS4oU8PtUOrdHuBYwXQ af7dafd992687f5d85a79866838a78e5070a4934fb0a935e8094adb31ec28611', u'name': u'cbodley'}, 'item': {u'ovpn': u'cbodley@redhat.com W4gFS4oU8PtUOrdHuBYwXQ af7dafd992687f5d85a79866838a78e5070a4934fb0a935e8094adb31ec28611', u'name': u'cbodley'}, u'changed': False, u'name': u'cbodley', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'cbodley', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1150, u'groups': u'sudo', u'home': u'/home/fche', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1150, '_ansible_item_label': {u'ovpn': 
u'fche@redhat.com LiziWkWg2uWoEHb3Ln92dQ 9c5497793758b069adbba9284dd55944276ba4dac0bb95d9357c81b58174a3c3', u'name': u'fche'}, 'item': {u'ovpn': u'fche@redhat.com LiziWkWg2uWoEHb3Ln92dQ 9c5497793758b069adbba9284dd55944276ba4dac0bb95d9357c81b58174a3c3', u'name': u'fche'}, u'changed': False, u'name': u'fche', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'fche', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1151, u'groups': u'sudo', u'home': u'/home/onyb', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1151, '_ansible_item_label': {u'ovpn': u'ani@orchid AgbLf9Dufuji5r+9WxM69g b73b7eacb9b628387a17cb1e0a84ff19c29d45dca8f0768e407aa599bc6996c4', u'name': u'onyb'}, 'item': {u'ovpn': u'ani@orchid AgbLf9Dufuji5r+9WxM69g b73b7eacb9b628387a17cb1e0a84ff19c29d45dca8f0768e407aa599bc6996c4', u'name': u'onyb'}, u'changed': False, u'name': u'onyb', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on 
smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'onyb', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1152, u'groups': u'sudo', u'home': u'/home/mwatts', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1152, '_ansible_item_label': {u'ovpn': u'mdw@degu IPiZqcFT2BLuf2h3+tw58g 7af390a631ec11bddd7d1ac506d29af65e1e01e19f7dc931b4f459030cb7a195', u'name': u'mwatts'}, 'item': {u'ovpn': u'mdw@degu IPiZqcFT2BLuf2h3+tw58g 7af390a631ec11bddd7d1ac506d29af65e1e01e19f7dc931b4f459030cb7a195', u'name': u'mwatts'}, u'changed': False, u'name': u'mwatts', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'mwatts', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1153, u'groups': u'sudo', u'home': u'/home/oprypin', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1153, '_ansible_item_label': {u'ovpn': 
u'oprypin@bx-pc-arch 5WYD7PWKwdSQjfGo95ehWw f3c9f170d74c6c443cf0f82f0d87045e1d3a0dbcd01474f6e667ea20a00978b3', u'name': u'oprypin'}, 'item': {u'ovpn': u'oprypin@bx-pc-arch 5WYD7PWKwdSQjfGo95ehWw f3c9f170d74c6c443cf0f82f0d87045e1d3a0dbcd01474f6e667ea20a00978b3', u'name': u'oprypin'}, u'changed': False, u'name': u'oprypin', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'oprypin', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1154, u'groups': u'sudo', u'home': u'/home/prsrivas', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1154, '_ansible_item_label': {u'ovpn': u'pritha@dhcp35-190.lab.eng.blr.redhat.com GCk3PIYzUNZ6/xrsKoq8VQ acbfc4279970b44c3d008990e0cf1bb5eb280299218441a0f25fda988bc555f6', u'name': u'prsrivas'}, 'item': {u'ovpn': u'pritha@dhcp35-190.lab.eng.blr.redhat.com GCk3PIYzUNZ6/xrsKoq8VQ acbfc4279970b44c3d008990e0cf1bb5eb280299218441a0f25fda988bc555f6', u'name': u'prsrivas'}, u'changed': False, u'name': u'prsrivas', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, 
u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'prsrivas', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1155, u'groups': u'sudo', u'home': u'/home/pdonnell', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1155, '_ansible_item_label': {u'ovpn': u'pdonnell@redhat.com Q9j56CPPXipXScmKi57PlQ fb616603b6d27cf65bfa1da83fc0ca39399861ad1c02bfed37ce9be17cdfa8ea', u'name': u'pdonnell'}, 'item': {u'ovpn': u'pdonnell@redhat.com Q9j56CPPXipXScmKi57PlQ fb616603b6d27cf65bfa1da83fc0ca39399861ad1c02bfed37ce9be17cdfa8ea', u'name': u'pdonnell'}, u'changed': False, u'name': u'pdonnell', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'pdonnell', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1156, u'groups': u'sudo', u'home': u'/home/jlayton', u'move_home': False, u'append': 
True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1156, '_ansible_item_label': {u'ovpn': u'jlayton@redhat aNfzMdXOhhmWRb25hwXJIg f51fee42c5268f7b8e00d57092dc522b0a07b31154ea52cf542da9cac5885868', u'name': u'jlayton'}, 'item': {u'ovpn': u'jlayton@redhat aNfzMdXOhhmWRb25hwXJIg f51fee42c5268f7b8e00d57092dc522b0a07b31154ea52cf542da9cac5885868', u'name': u'jlayton'}, u'changed': False, u'name': u'jlayton', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'jlayton', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1157, u'groups': u'sudo', u'home': u'/home/rzarzynski', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1157, '_ansible_item_label': {u'ovpn': u'rzarzyns@redhat.com h4UkibgFG40ygfpfKTMBrg e20ca28c60144dbabc97953cd4c273c1b92cd45ebcddd0f0299679d7a5c87d7f', u'name': u'rzarzynski'}, 'item': {u'ovpn': u'rzarzyns@redhat.com h4UkibgFG40ygfpfKTMBrg e20ca28c60144dbabc97953cd4c273c1b92cd45ebcddd0f0299679d7a5c87d7f', u'name': u'rzarzynski'}, u'changed': False, u'name': u'rzarzynski', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, 
u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'rzarzynski', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1158, u'groups': u'sudo', u'home': u'/home/rdias', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1158, '_ansible_item_label': {u'ovpn': u'rdias@rdias-suse-laptop 0bh54sdB69mh95n5rWME5g 452e3338e48d04d4e816f4f1cb54d637746a7acc1ffe5e8ed4c1506c8e07a72e', u'name': u'rdias'}, 'item': {u'ovpn': u'rdias@rdias-suse-laptop 0bh54sdB69mh95n5rWME5g 452e3338e48d04d4e816f4f1cb54d637746a7acc1ffe5e8ed4c1506c8e07a72e', u'name': u'rdias'}, u'changed': False, u'name': u'rdias', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'rdias', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1159, 
u'groups': u'sudo', u'home': u'/home/asheplyakov', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1159, '_ansible_item_label': {u'ovpn': u'asheplyakov@asheplyakov.srt.mirantis.net wFW0ZgT4cNhKRAGXiUtevQ 1b11f0702b2db42a42aae6579737ece2caad3b80a8186b971686575cb76b3051', u'name': u'asheplyakov'}, 'item': {u'ovpn': u'asheplyakov@asheplyakov.srt.mirantis.net wFW0ZgT4cNhKRAGXiUtevQ 1b11f0702b2db42a42aae6579737ece2caad3b80a8186b971686575cb76b3051', u'name': u'asheplyakov'}, u'changed': False, u'name': u'asheplyakov', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'asheplyakov', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1160, u'groups': u'sudo', u'home': u'/home/vshankar', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1160, '_ansible_item_label': {u'ovpn': u'vshankar@h3ckers-pride r39frQXXj1GUJZwq1GS7fw 1170ef4c918c5ff15334d10f666441b0dfe0bb869a5e15218fdfad2e8cc4e953', u'name': u'vshankar'}, 'item': {u'ovpn': u'vshankar@h3ckers-pride r39frQXXj1GUJZwq1GS7fw 1170ef4c918c5ff15334d10f666441b0dfe0bb869a5e15218fdfad2e8cc4e953', u'name': u'vshankar'}, u'changed': False, u'name': u'vshankar', 'failed': False, u'state': u'present', u'invocation': 
{u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'vshankar', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1161, u'groups': u'sudo', u'home': u'/home/akupczyk', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1161, '_ansible_item_label': {u'ovpn': u'adam@TP50 C0YuBT9bYaNhdDmjbF56xg 5d298b33b9dbaef364b037561aa5c5de374405bb8afead5280db5b212506ea58', u'name': u'akupczyk'}, 'item': {u'ovpn': u'adam@TP50 C0YuBT9bYaNhdDmjbF56xg 5d298b33b9dbaef364b037561aa5c5de374405bb8afead5280db5b212506ea58', u'name': u'akupczyk'}, u'changed': False, u'name': u'akupczyk', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'akupczyk', u'local': None, u'seuser': None, 
u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1162, u'groups': u'sudo', u'home': u'/home/nojha', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1162, '_ansible_item_label': {u'ovpn': u'nojha@localhost YQTw/h6ZMgdJn7EPBmbEnw 253574eae62759c4c5d3bc4bf949c59948a0488e4dfe4c91ee754a3b5494847e', u'name': u'nojha'}, 'item': {u'ovpn': u'nojha@localhost YQTw/h6ZMgdJn7EPBmbEnw 253574eae62759c4c5d3bc4bf949c59948a0488e4dfe4c91ee754a3b5494847e', u'name': u'nojha'}, u'changed': False, u'name': u'nojha', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'nojha', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1163, u'groups': u'sudo', u'home': u'/home/ifed01', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1163, '_ansible_item_label': {u'ovpn': u'ifed01@LocalSUSE1 6g6hX1bzTGBTCnDevAn0+w f18c9354f6de3f371c3b51521b62375e474802ac21adb3d71e09d8d5bf9d0c43', u'name': u'ifed01'}, 'item': {u'ovpn': u'ifed01@LocalSUSE1 6g6hX1bzTGBTCnDevAn0+w f18c9354f6de3f371c3b51521b62375e474802ac21adb3d71e09d8d5bf9d0c43', u'name': u'ifed01'}, u'changed': False, u'name': u'ifed01', 'failed': False, u'state': 
u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'ifed01', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1164, u'groups': u'sudo', u'home': u'/home/myoungwon', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1164, '_ansible_item_label': {u'ovpn': u'myoungwon@omw-dev 78twXwYRU+MeH+yZ9Rw9Zg 2dd66fa122e5cf3b8bfa835cefac7c6e4e66d70643a3819813104c2057e597e4', u'name': u'myoungwon'}, 'item': {u'ovpn': u'myoungwon@omw-dev 78twXwYRU+MeH+yZ9Rw9Zg 2dd66fa122e5cf3b8bfa835cefac7c6e4e66d70643a3819813104c2057e597e4', u'name': u'myoungwon'}, u'changed': False, u'name': u'myoungwon', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': 
u'myoungwon', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1165, u'groups': u'sudo', u'home': u'/home/jwilliamson', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1165, '_ansible_item_label': {u'ovpn': u'jwilliamson@glaurung +d028NM7xCxkVdxxO+b1Lw fece65125073fdc287af724ee4724ad84d2864e758d50dcb23c07b05c3595fe0', u'name': u'jwilliamson'}, 'item': {u'ovpn': u'jwilliamson@glaurung +d028NM7xCxkVdxxO+b1Lw fece65125073fdc287af724ee4724ad84d2864e758d50dcb23c07b05c3595fe0', u'name': u'jwilliamson'}, u'changed': False, u'name': u'jwilliamson', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'jwilliamson', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1166, u'groups': u'sudo', u'home': u'/home/gabrioux', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1166, '_ansible_item_label': {u'ovpn': u'gabrioux@elisheba 80kx1htp39RsFrlGONcp+A a95579ef6f90694cd6fd390302adf8532237a8ea65bd5544d9b561654d712ba2', u'name': u'gabrioux'}, 'item': {u'ovpn': u'gabrioux@elisheba 80kx1htp39RsFrlGONcp+A a95579ef6f90694cd6fd390302adf8532237a8ea65bd5544d9b561654d712ba2', 
u'name': u'gabrioux'}, u'changed': False, u'name': u'gabrioux', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'gabrioux', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1167, u'groups': u'sudo', u'home': u'/home/leseb', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1167, '_ansible_item_label': {u'ovpn': u'leseb@mbp cCx1v5/FfaQ/IQHujqtG9Q 6121d11f9abfa6b1b36330eafaa2196249a9c92f989be25c9fac1558292c920f', u'name': u'leseb'}, 'item': {u'ovpn': u'leseb@mbp cCx1v5/FfaQ/IQHujqtG9Q 6121d11f9abfa6b1b36330eafaa2196249a9c92f989be25c9fac1558292c920f', u'name': u'leseb'}, u'changed': False, u'name': u'leseb', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': 
[u'sudo'], u'move_home': False, u'password': None, u'name': u'leseb', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1168, u'groups': u'sudo', u'home': u'/home/hchen', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1168, '_ansible_item_label': {u'ovpn': u'hchen@host12 hUr3k0rugStZMjvIxIvCOg 9d57e14d49901f18b24ee4076ae7e6a2f9eb6fd9fbce786660c448486c966fca', u'name': u'hchen'}, 'item': {u'ovpn': u'hchen@host12 hUr3k0rugStZMjvIxIvCOg 9d57e14d49901f18b24ee4076ae7e6a2f9eb6fd9fbce786660c448486c966fca', u'name': u'hchen'}, u'changed': False, u'name': u'hchen', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'hchen', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1169, u'groups': u'sudo', u'home': u'/home/jcollin', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1169, '_ansible_item_label': {u'ovpn': u'jcollin@earth +H4Hk4WcNuqdQj7ch/Nulw 8426545e6457c9e1e8adca2af5ddf836fbcfb433cdc5359fd135afdf4e0f7d2a\njcollin@stratocaster jbjV3FsrsTJwyKUA3Y8VVQ 0439745f795fef1399636bd550040d45445d1607b471284c5c9b9dbccc86a987\n', u'name': u'jcollin'}, 'item': 
{u'ovpn': u'jcollin@earth +H4Hk4WcNuqdQj7ch/Nulw 8426545e6457c9e1e8adca2af5ddf836fbcfb433cdc5359fd135afdf4e0f7d2a\njcollin@stratocaster jbjV3FsrsTJwyKUA3Y8VVQ 0439745f795fef1399636bd550040d45445d1607b471284c5c9b9dbccc86a987\n', u'name': u'jcollin'}, u'changed': False, u'name': u'jcollin', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'jcollin', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1170, u'groups': u'sudo', u'home': u'/home/xxg', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1170, '_ansible_item_label': {u'ovpn': u'xxg@zte Y2d/Ov201XMivDNwo4nUoQ 5e5da8d579793601699af628300430c1e5dd469c8bcff7c3ee11d23ec004bdcc', u'name': u'xxg'}, 'item': {u'ovpn': u'xxg@zte Y2d/Ov201XMivDNwo4nUoQ 5e5da8d579793601699af628300430c1e5dd469c8bcff7c3ee11d23ec004bdcc', u'name': u'xxg'}, u'changed': False, u'name': u'xxg', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, 
u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'xxg', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1171, u'groups': u'sudo', u'home': u'/home/pcuzner', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1171, '_ansible_item_label': {u'ovpn': u'pcuzner@rh460p oK28wU5DSabvEL4VjDRhEg a449ed81d7e2970f418263fb3ce10dd711d03925a0990ddf298f826aae1caa53', u'name': u'pcuzner'}, 'item': {u'ovpn': u'pcuzner@rh460p oK28wU5DSabvEL4VjDRhEg a449ed81d7e2970f418263fb3ce10dd711d03925a0990ddf298f826aae1caa53', u'name': u'pcuzner'}, u'changed': False, u'name': u'pcuzner', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'pcuzner', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1172, u'groups': u'sudo', u'home': u'/home/liupan', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1172, '_ansible_item_label': {u'ovpn': u'liupan@ceph-dev-2 
9+Is4mIZgNkYyJLwHvSNOA 5a8fafc187d52041daf4365125692d4619fc557b75560913130c0596f83bbb77', u'name': u'liupan'}, 'item': {u'ovpn': u'liupan@ceph-dev-2 9+Is4mIZgNkYyJLwHvSNOA 5a8fafc187d52041daf4365125692d4619fc557b75560913130c0596f83bbb77', u'name': u'liupan'}, u'changed': False, u'name': u'liupan', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'liupan', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1173, u'groups': u'sudo', u'home': u'/home/mkogan', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1173, '_ansible_item_label': {u'ovpn': u'mkogan@mkP50 f/LENRJbsyepmvZA23F7Fg b908f1c0237a7ee56b73dc42f2df79b49ca83d6f4573f5229e7cfe6b4ad7b6a2', u'name': u'mkogan'}, 'item': {u'ovpn': u'mkogan@mkP50 f/LENRJbsyepmvZA23F7Fg b908f1c0237a7ee56b73dc42f2df79b49ca83d6f4573f5229e7cfe6b4ad7b6a2', u'name': u'mkogan'}, u'changed': False, u'name': u'mkogan', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on 
smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'mkogan', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1174, u'groups': u'sudo', u'home': u'/home/amarangone', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1174, '_ansible_item_label': {u'ovpn': u'amarangone@macair.local 9Fslt44BqONCYNhf+uhcnQ 12d46ec6815378a12abc5df00e65235ccbc06ffb0fe5d1db75540a4805cb58b6', u'name': u'amarangone'}, 'item': {u'ovpn': u'amarangone@macair.local 9Fslt44BqONCYNhf+uhcnQ 12d46ec6815378a12abc5df00e65235ccbc06ffb0fe5d1db75540a4805cb58b6', u'name': u'amarangone'}, u'changed': False, u'name': u'amarangone', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'amarangone', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1141, u'groups': u'sudo', u'home': u'/home/vumrao', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 
1141, '_ansible_item_label': {u'ovpn': u'vumrao@redhat TMNqzMvbJS8Va/8nT9QUQw ab386c2bd7c6796d5413e4d841a16dda2504cca6d95df831a652a30d2e5655ed', u'name': u'vumrao'}, 'item': {u'ovpn': u'vumrao@redhat TMNqzMvbJS8Va/8nT9QUQw ab386c2bd7c6796d5413e4d841a16dda2504cca6d95df831a652a30d2e5655ed', u'name': u'vumrao'}, u'changed': False, u'name': u'vumrao', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'vumrao', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1175, u'groups': u'sudo', u'home': u'/home/kmroz', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1175, '_ansible_item_label': {u'ovpn': u'kmroz@suse /JbrIs2mKL5exdmcDnhRgg db4d19ab99c7174429d5ae7b6ca3cf4e04e9bf7810e1826d90f4627643628d57', u'name': u'kmroz'}, 'item': {u'ovpn': u'kmroz@suse /JbrIs2mKL5exdmcDnhRgg db4d19ab99c7174429d5ae7b6ca3cf4e04e9bf7810e1826d90f4627643628d57', u'name': u'kmroz'}, u'changed': False, u'name': u'kmroz', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, 
u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'kmroz', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1176, u'groups': u'sudo', u'home': u'/home/henrix', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1176, '_ansible_item_label': {u'ovpn': u'henrix@hermes iPPDBfnLzP5Pe5FuTcJBmw b26aefb8a61451066f984e074f708ea9ca6b2c5d7cca35996c08b0b2bb2c2736', u'name': u'henrix'}, 'item': {u'ovpn': u'henrix@hermes iPPDBfnLzP5Pe5FuTcJBmw b26aefb8a61451066f984e074f708ea9ca6b2c5d7cca35996c08b0b2bb2c2736', u'name': u'henrix'}, u'changed': False, u'name': u'henrix', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'henrix', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1177, u'groups': u'sudo', u'home': u'/home/pbs1108', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 
1177, '_ansible_item_label': {u'ovpn': u'pbs1108@bspark_cli0 nSCINdeTLTLEO5JP/GIwRQ 76372ad6f7ad731556ff13605c3729eacaf59dcf7f9ac82dd9a8501bd95d3b26', u'name': u'pbs1108'}, 'item': {u'ovpn': u'pbs1108@bspark_cli0 nSCINdeTLTLEO5JP/GIwRQ 76372ad6f7ad731556ff13605c3729eacaf59dcf7f9ac82dd9a8501bd95d3b26', u'name': u'pbs1108'}, u'changed': False, u'name': u'pbs1108', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'pbs1108', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1178, u'groups': u'sudo', u'home': u'/home/clacroix', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1178, '_ansible_item_label': {u'ovpn': u'clacroix@redhat.com ZGY9sgvjT0BuJRi9zrULzg c3311aba4025aa42cd78c999dcee4e2c16415a3ac44ac8c95b77838459ef3315', u'name': u'clacroix'}, 'item': {u'ovpn': u'clacroix@redhat.com ZGY9sgvjT0BuJRi9zrULzg c3311aba4025aa42cd78c999dcee4e2c16415a3ac44ac8c95b77838459ef3315', u'name': u'clacroix'}, u'changed': False, u'name': u'clacroix', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, 
u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'clacroix', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1179, u'groups': u'sudo', u'home': u'/home/epuertat', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1179, '_ansible_item_label': {u'ovpn': u'epuertat@private BnUoirwxGRWXtLxulJU5xA da2cfc4995bed82ef85db3633edad0a7eb2c32ba559a48259b10be94a8fdf006', u'name': u'epuertat'}, 'item': {u'ovpn': u'epuertat@private BnUoirwxGRWXtLxulJU5xA da2cfc4995bed82ef85db3633edad0a7eb2c32ba559a48259b10be94a8fdf006', u'name': u'epuertat'}, u'changed': False, u'name': u'epuertat', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'epuertat', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1180, u'groups': u'sudo', u'home': u'/home/tdehler', u'move_home': False, u'append': True, 
'_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1180, '_ansible_item_label': {u'ovpn': u'tdehler@think 7e0WC4Vh86XWZviZ9WBMgw 4dc8477db6e4f40312e6b2b9db293dc009e49e518015ace20431c0fb69025461', u'name': u'tdehler'}, 'item': {u'ovpn': u'tdehler@think 7e0WC4Vh86XWZviZ9WBMgw 4dc8477db6e4f40312e6b2b9db293dc009e49e518015ace20431c0fb69025461', u'name': u'tdehler'}, u'changed': False, u'name': u'tdehler', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'tdehler', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1181, u'groups': u'sudo', u'home': u'/home/laura', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1181, '_ansible_item_label': {u'ovpn': u'laura@flab 2DGHIAxD39eNKOPkn3M67w ab1ae304abed3824a68b5c0ecf4f92fca76a4f8b9fcbcc0ca43388a85b7f9305', u'name': u'laura'}, 'item': {u'ovpn': u'laura@flab 2DGHIAxD39eNKOPkn3M67w ab1ae304abed3824a68b5c0ecf4f92fca76a4f8b9fcbcc0ca43388a85b7f9305', u'name': u'laura'}, u'changed': False, u'name': u'laura', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, 
u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'laura', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1182, u'groups': u'sudo', u'home': u'/home/adamyanova', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1182, '_ansible_item_label': {u'ovpn': u'adamyanova@centos k2nfk8hmGW77nys3R/JkOg 7695b95c2f157d60622e0b0b7ab06fa2cb18661a190d839f7ea587bc44aa0e3c\nadamyanova@ubuntu FmlKgjVzPUxfNDnHeU9vLQ ef7d5524863dfa0787fc5e249873c1a5ea58e7fd5aee27e1d1d33d6f87388a2d\n', u'name': u'adamyanova'}, 'item': {u'ovpn': u'adamyanova@centos k2nfk8hmGW77nys3R/JkOg 7695b95c2f157d60622e0b0b7ab06fa2cb18661a190d839f7ea587bc44aa0e3c\nadamyanova@ubuntu FmlKgjVzPUxfNDnHeU9vLQ ef7d5524863dfa0787fc5e249873c1a5ea58e7fd5aee27e1d1d33d6f87388a2d\n', u'name': u'adamyanova'}, u'changed': False, u'name': u'adamyanova', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': 
u'adamyanova', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1183, u'groups': u'sudo', u'home': u'/home/yaarit', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1183, '_ansible_item_label': {u'ovpn': u'yaarit@centos TBuWjkAsj1GB/V9eWc/R1Q 7bd86a857dec48dc25850ecf0c00486d9a89c2ff5f88b2f28c3e36bdeb139fce', u'name': u'yaarit'}, 'item': {u'ovpn': u'yaarit@centos TBuWjkAsj1GB/V9eWc/R1Q 7bd86a857dec48dc25850ecf0c00486d9a89c2ff5f88b2f28c3e36bdeb139fce', u'name': u'yaarit'}, u'changed': False, u'name': u'yaarit', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'yaarit', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1184, u'groups': u'sudo', u'home': u'/home/rpavani1998', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1184, '_ansible_item_label': {u'ovpn': u'admin1@rajula 31GbDo9d1YnW5BQ8u3utvw a2da13cb840f848846023c85442ba7bcce97dc186056a0ecc036a220d7eb7fc3', u'name': u'rpavani1998'}, 'item': {u'ovpn': u'admin1@rajula 31GbDo9d1YnW5BQ8u3utvw a2da13cb840f848846023c85442ba7bcce97dc186056a0ecc036a220d7eb7fc3', u'name': u'rpavani1998'}, u'changed': 
False, u'name': u'rpavani1998', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'rpavani1998', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1185, u'groups': u'sudo', u'home': u'/home/rishabh', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1185, '_ansible_item_label': {u'ovpn': u'rishabh@p50 zdJ4XsBdVugwMrqJOSBi3Q c78bb28ba5cf2bf9c8edb80fe57814d60cd2ffdbd874cf9a271e5adf171bb0c4', u'name': u'rishabh'}, 'item': {u'ovpn': u'rishabh@p50 zdJ4XsBdVugwMrqJOSBi3Q c78bb28ba5cf2bf9c8edb80fe57814d60cd2ffdbd874cf9a271e5adf171bb0c4', u'name': u'rishabh'}, u'changed': False, u'name': u'rishabh', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], 
u'move_home': False, u'password': None, u'name': u'rishabh', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1186, u'groups': u'sudo', u'home': u'/home/skrah', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1186, '_ansible_item_label': {u'ovpn': u'skrah@thinkpad IboPmnUdsLqqRXlHQ1RT5w bf85db9e916dceaf84a1e6ea33c59eb4adb424cb4e727ce0a903a3498b799ed2', u'name': u'skrah'}, 'item': {u'ovpn': u'skrah@thinkpad IboPmnUdsLqqRXlHQ1RT5w bf85db9e916dceaf84a1e6ea33c59eb4adb424cb4e727ce0a903a3498b799ed2', u'name': u'skrah'}, u'changed': False, u'name': u'skrah', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'skrah', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1187, u'groups': u'sudo', u'home': u'/home/smanjara', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1187, '_ansible_item_label': {u'ovpn': u'smanjara@fedora 5oorMoVYD3sT0nmOTBDh9w 83be007f68694c9463ef46e4ce223221d639d78f11d5b68449598de77e8e0ce8', u'name': u'smanjara'}, 'item': {u'ovpn': u'smanjara@fedora 5oorMoVYD3sT0nmOTBDh9w 83be007f68694c9463ef46e4ce223221d639d78f11d5b68449598de77e8e0ce8', 
u'name': u'smanjara'}, u'changed': False, u'name': u'smanjara', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'smanjara', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1188, u'groups': u'sudo', u'home': u'/home/bengland', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1188, '_ansible_item_label': {u'ovpn': u'bengland@bene-laptop ud1gWgoNggJTS7LQtPTZTA d3ebd084ce385cb450ce2f83c02dc66a1637dedbc7a8b191dab68acfc935af41', u'name': u'bengland'}, 'item': {u'ovpn': u'bengland@bene-laptop ud1gWgoNggJTS7LQtPTZTA d3ebd084ce385cb450ce2f83c02dc66a1637dedbc7a8b191dab68acfc935af41', u'name': u'bengland'}, u'changed': False, u'name': u'bengland', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, 
u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'bengland', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1189, u'groups': u'sudo', u'home': u'/home/pnawracay', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1189, '_ansible_item_label': {u'ovpn': u'pnawracay@flab LIurEXLa7xbr+jzf2wRJVg 04062ba385602b385fd17f14de3a0cad83c685b8078fd2f18cc9ad77a4f4762d', u'name': u'pnawracay'}, 'item': {u'ovpn': u'pnawracay@flab LIurEXLa7xbr+jzf2wRJVg 04062ba385602b385fd17f14de3a0cad83c685b8078fd2f18cc9ad77a4f4762d', u'name': u'pnawracay'}, u'changed': False, u'name': u'pnawracay', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'pnawracay', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1190, u'groups': u'sudo', u'home': u'/home/alfonsomthd', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1190, '_ansible_item_label': {u'ovpn': u'alfonsomthd@localhost eUvx+7UEx5IYGmS0lNIscQ 2f1bfd4874280b9f525a46e93e767504df80a9b09a83a2fea387dcd6e34bc0f8', u'name': u'alfonsomthd'}, 'item': {u'ovpn': u'alfonsomthd@localhost 
eUvx+7UEx5IYGmS0lNIscQ 2f1bfd4874280b9f525a46e93e767504df80a9b09a83a2fea387dcd6e34bc0f8', u'name': u'alfonsomthd'}, u'changed': False, u'name': u'alfonsomthd', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'alfonsomthd', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1191, u'groups': u'sudo', u'home': u'/home/oliveiradan', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1191, '_ansible_item_label': {u'ovpn': u'oliveiradan@opensuse-dev twDqMYwYsdYmbgyCpUnElw ec3ee80ddc747c3ca5e1455a122279f8e1e642c5c09aa9c2ca7fec142f55089e', u'name': u'oliveiradan'}, 'item': {u'ovpn': u'oliveiradan@opensuse-dev twDqMYwYsdYmbgyCpUnElw ec3ee80ddc747c3ca5e1455a122279f8e1e642c5c09aa9c2ca7fec142f55089e', u'name': u'oliveiradan'}, u'changed': False, u'name': u'oliveiradan', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': 
None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'oliveiradan', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1192, u'groups': u'sudo', u'home': u'/home/swagner', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1192, '_ansible_item_label': {u'ovpn': u'swagner@ubuntu 64V1h0Se0FmBQNH7KLibbQ ad7c91e9e2f7f3999492d5e41fbbc993327d37929bd09606227367d75e5556ba', u'name': u'swagner'}, 'item': {u'ovpn': u'swagner@ubuntu 64V1h0Se0FmBQNH7KLibbQ ad7c91e9e2f7f3999492d5e41fbbc993327d37929bd09606227367d75e5556ba', u'name': u'swagner'}, u'changed': False, u'name': u'swagner', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'swagner', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1193, u'groups': u'sudo', u'home': u'/home/yuvalif', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1193, '_ansible_item_label': {u'ovpn': u'ylifshit@localhost dyc2NU2pMz8NF8/uR2kMxA 
3a6f1f9e55b5116f74d01ffbabdc339054088d257a16cf9fafcfe05b27fa678e', u'name': u'yuvalif'}, 'item': {u'ovpn': u'ylifshit@localhost dyc2NU2pMz8NF8/uR2kMxA 3a6f1f9e55b5116f74d01ffbabdc339054088d257a16cf9fafcfe05b27fa678e', u'name': u'yuvalif'}, u'changed': False, u'name': u'yuvalif', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'yuvalif', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1194, u'groups': u'sudo', u'home': u'/home/kkeithle', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1194, '_ansible_item_label': {u'ovpn': u'kkeithle@kkeithle.usersys.redhat.com FPnVevv1sp5hlWoJeDCe/g e5a1fa7ccf678b91ed570983d5420c98f109d507442c8e4dcd50803e0d71c852', u'name': u'kkeithle'}, 'item': {u'ovpn': u'kkeithle@kkeithle.usersys.redhat.com FPnVevv1sp5hlWoJeDCe/g e5a1fa7ccf678b91ed570983d5420c98f109d507442c8e4dcd50803e0d71c852', u'name': u'kkeithle'}, u'changed': False, u'name': u'kkeithle', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, 
u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'kkeithle', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1195, u'groups': u'sudo', u'home': u'/home/emmericp', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1195, '_ansible_item_label': {u'ovpn': u'paul.emmerich@croit.io RN4hOorvA7irUg/3ViM9KQ 3bd06194186d2624cadf255fa1c38ddf7dded0a6d83dc6001cd55fcc0a899130', u'name': u'emmericp'}, 'item': {u'ovpn': u'paul.emmerich@croit.io RN4hOorvA7irUg/3ViM9KQ 3bd06194186d2624cadf255fa1c38ddf7dded0a6d83dc6001cd55fcc0a899130', u'name': u'emmericp'}, u'changed': False, u'name': u'emmericp', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'emmericp', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1196, u'groups': u'sudo', u'home': u'/home/mchangir', u'move_home': False, u'append': True, '_ansible_no_log': False, 
'ansible_loop_var': u'item', u'group': 1196, '_ansible_item_label': {u'ovpn': u'mchangir@indraprastha junlGKNU/xzt4OIaGHKBLA e8c67fd935fca490af3fe17453ccae3176268c4bfe1db4a2a879a2ab7ea6bfa5', u'name': u'mchangir'}, 'item': {u'ovpn': u'mchangir@indraprastha junlGKNU/xzt4OIaGHKBLA e8c67fd935fca490af3fe17453ccae3176268c4bfe1db4a2a879a2ab7ea6bfa5', u'name': u'mchangir'}, u'changed': False, u'name': u'mchangir', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'mchangir', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1197, u'groups': u'sudo', u'home': u'/home/sidharthanup', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1197, '_ansible_item_label': {u'ovpn': u'sidharthanup@strawberryfields IZq91vqA+RG1Rtn3JFZb6Q a2873481cac9b8b4a0bd8bebe0248b3dccb370dd18b56a4dae713ca1fb0c4286', u'name': u'sidharthanup'}, 'item': {u'ovpn': u'sidharthanup@strawberryfields IZq91vqA+RG1Rtn3JFZb6Q a2873481cac9b8b4a0bd8bebe0248b3dccb370dd18b56a4dae713ca1fb0c4286', u'name': u'sidharthanup'}, u'changed': False, u'name': u'sidharthanup', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': 
None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'sidharthanup', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1198, u'groups': u'sudo', u'home': u'/home/varsha', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1198, '_ansible_item_label': {u'ovpn': u'varsha@local q7QjtBqj3duVVKubHLpzjw a358a0d6cd132a451a910abcbcf3070e4144c92638e0487622ae040a3410c07f', u'name': u'varsha'}, 'item': {u'ovpn': u'varsha@local q7QjtBqj3duVVKubHLpzjw a358a0d6cd132a451a910abcbcf3070e4144c92638e0487622ae040a3410c07f', u'name': u'varsha'}, u'changed': False, u'name': u'varsha', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'varsha', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1199, u'groups': 
u'sudo', u'home': u'/home/sjust', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1199, '_ansible_item_label': {u'ovpn': u'sjust@pondermatic sCd6606QpID1PnHn0AxFag 46da7d4c77cb1238f83a34f99e774707110f997d88f3a0fd240aac9b7b7bbc85\nsjust@rex004 YS0in6YtQJHx5aUo7ZHi8Q bdd5977d05171a365539b19fae283ec2e3c7389516664692b9bbbaf98c7b61f4\nsjust@office w19UilyC/xu7uCzv0DnWRg ab20efc114b769bf4c2cf313eb30db09c2e2f8234992f120cfc3d1b8b347ed3c\nsam@deepthought 44sCi+GEfY0zjKo5M/4FiQ ed1eedd14ca68116a2000477fa078f8f736d0a15640723c32204bb30f14cb888\n', u'name': u'sjust'}, 'item': {u'ovpn': u'sjust@pondermatic sCd6606QpID1PnHn0AxFag 46da7d4c77cb1238f83a34f99e774707110f997d88f3a0fd240aac9b7b7bbc85\nsjust@rex004 YS0in6YtQJHx5aUo7ZHi8Q bdd5977d05171a365539b19fae283ec2e3c7389516664692b9bbbaf98c7b61f4\nsjust@office w19UilyC/xu7uCzv0DnWRg ab20efc114b769bf4c2cf313eb30db09c2e2f8234992f120cfc3d1b8b347ed3c\nsam@deepthought 44sCi+GEfY0zjKo5M/4FiQ ed1eedd14ca68116a2000477fa078f8f736d0a15640723c32204bb30f14cb888\n', u'name': u'sjust'}, u'changed': False, u'name': u'sjust', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'sjust', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1200, u'groups': u'sudo', u'home': u'/home/ideepika', 
u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1200, '_ansible_item_label': {u'ovpn': u'deepika@asus hHsL1xugca0LzY52gKqfqQ 312e0f2680f72d9459c707fcd0ccfb777617f00017f0511839e9b7e3167d590f', u'name': u'ideepika'}, 'item': {u'ovpn': u'deepika@asus hHsL1xugca0LzY52gKqfqQ 312e0f2680f72d9459c707fcd0ccfb777617f00017f0511839e9b7e3167d590f', u'name': u'ideepika'}, u'changed': False, u'name': u'ideepika', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'ideepika', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1201, u'groups': u'sudo', u'home': u'/home/gsalomon', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1201, '_ansible_item_label': {u'ovpn': u'gsalomon@home BrlCVxStNlEZGf5yYYAz1w 4b0c8d5b57dae1328c16d3017b59b632ccdfebe4135209fa97748c70ff00cc46', u'name': u'gsalomon'}, 'item': {u'ovpn': u'gsalomon@home BrlCVxStNlEZGf5yYYAz1w 4b0c8d5b57dae1328c16d3017b59b632ccdfebe4135209fa97748c70ff00cc46', u'name': u'gsalomon'}, u'changed': False, u'name': u'gsalomon', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': 
None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'gsalomon', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1202, u'groups': u'sudo', u'home': u'/home/soumyakoduri', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1202, '_ansible_item_label': {u'ovpn': u'skoduri@rgw_vm fAskaxmJYtm4TDcModIkrQ 884b57c3b5d56493da361dc9f1cc4e06e766628fcb0f916090f2096edc5ce7de', u'name': u'soumyakoduri'}, 'item': {u'ovpn': u'skoduri@rgw_vm fAskaxmJYtm4TDcModIkrQ 884b57c3b5d56493da361dc9f1cc4e06e766628fcb0f916090f2096edc5ce7de', u'name': u'soumyakoduri'}, u'changed': False, u'name': u'soumyakoduri', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'soumyakoduri', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', 
u'uid': 1203, u'groups': u'sudo', u'home': u'/home/kyr', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1203, '_ansible_item_label': {u'ovpn': u'kyr@suse Xu4+Yi98Il4ETOAav6okqA 03bc46e5fac6346cd82ff681f756860f98e0c61168633ce23325efde11a1964a', u'name': u'kyr'}, 'item': {u'ovpn': u'kyr@suse Xu4+Yi98Il4ETOAav6okqA 03bc46e5fac6346cd82ff681f756860f98e0c61168633ce23325efde11a1964a', u'name': u'kyr'}, u'changed': False, u'name': u'kyr', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'kyr', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1204, u'groups': u'sudo', u'home': u'/home/sseshasa', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1204, '_ansible_item_label': {u'ovpn': u'sseshasa@thinkpad jyB1pr0I3qsDknkTnJTMjg 72ac1456e344c22fd940d0ba0e035aa3819ef7cd3891e53308aa92ba2dec8849', u'name': u'sseshasa'}, 'item': {u'ovpn': u'sseshasa@thinkpad jyB1pr0I3qsDknkTnJTMjg 72ac1456e344c22fd940d0ba0e035aa3819ef7cd3891e53308aa92ba2dec8849', u'name': u'sseshasa'}, u'changed': False, u'name': u'sseshasa', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': 
False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'sseshasa', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 1205, u'groups': u'sudo', u'home': u'/home/rfriedma', u'move_home': False, u'append': True, '_ansible_no_log': False, 'ansible_loop_var': u'item', u'group': 1205, '_ansible_item_label': {u'ovpn': u'rfriedma@rflap.redhat.com 5+OUPoyz8K0M0kcymdQOjA 40ce705001f31d7156c965228938cd4b02ae1a2c43dac1bbcd1b538e70312189', u'name': u'rfriedma'}, 'item': {u'ovpn': u'rfriedma@rflap.redhat.com 5+OUPoyz8K0M0kcymdQOjA 40ce705001f31d7156c965228938cd4b02ae1a2c43dac1bbcd1b538e70312189', u'name': u'rfriedma'}, u'changed': False, u'name': u'rfriedma', 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'rfriedma', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, 
{u'comment': u'', u'shell': u'/bin/bash', u'uid': 10102, '_ansible_no_log': False, u'create_home': True, u'groups': u'sudo', u'home': u'/home/lmb', '_ansible_item_label': {u'ovpn': u'lmb@hermes LMS8kAikL0iqw2S6IbXa3Q f57a493b31e7ed02a2563dd4295278d4842dc698b4c635d011a8d2b4b1fd5c2b', u'name': u'lmb'}, 'ansible_loop_var': u'item', u'group': 10103, u'name': u'lmb', 'item': {u'ovpn': u'lmb@hermes LMS8kAikL0iqw2S6IbXa3Q f57a493b31e7ed02a2563dd4295278d4842dc698b4c635d011a8d2b4b1fd5c2b', u'name': u'lmb'}, u'changed': True, u'system': False, 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'lmb', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}, {u'comment': u'', u'shell': u'/bin/bash', u'uid': 10103, '_ansible_no_log': False, u'create_home': True, u'groups': u'sudo', u'home': u'/home/ksirivad', '_ansible_item_label': {u'ovpn': u'ksirivad@ksirivad prZzE08FqnY6hFtGk0Q6XQ 2ef0878b0050cf28775813fe0991f9a746c07e61920280ce29ee69088eda5efc', u'name': u'ksirivad'}, 'ansible_loop_var': u'item', u'group': 10104, u'name': u'ksirivad', 'item': {u'ovpn': u'ksirivad@ksirivad prZzE08FqnY6hFtGk0Q6XQ 2ef0878b0050cf28775813fe0991f9a746c07e61920280ce29ee69088eda5efc', u'name': u'ksirivad'}, u'changed': True, u'system': False, 'failed': False, u'state': u'present', u'invocation': {u'module_args': {u'comment': None, u'ssh_key_bits': 
0, u'update_password': u'always', u'non_unique': False, u'force': False, u'skeleton': None, u'create_home': True, u'password_lock': None, u'ssh_key_passphrase': None, u'home': None, u'append': True, u'uid': None, u'ssh_key_comment': u'ansible-generated on smithi142', u'group': None, u'system': False, u'state': u'present', u'role': None, u'hidden': None, u'ssh_key_type': u'rsa', u'authorization': None, u'profile': None, u'shell': u'/bin/bash', u'expires': None, u'ssh_key_file': None, u'groups': [u'sudo'], u'move_home': False, u'password': None, u'name': u'ksirivad', u'local': None, u'seuser': None, u'remove': False, u'login_class': None, u'generate_ssh_key': None}}}]}}Traceback (most recent call last): File "/home/teuthworker/src/git.ceph.com_git_ceph-cm-ansible_master/callback_plugins/failure_log.py", line 44, in log_failure log.error(yaml.safe_dump(failure)) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 309, in safe_dump return dump_all([data], stream, Dumper=SafeDumper, **kwds) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 281, in dump_all dumper.represent(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 29, in represent node = self.represent_data(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 219, in represent_list return self.represent_sequence(u'tag:yaml.org,2002:seq', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 102, in represent_sequence node_item = self.represent_data(item) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 68, in represent_data node = self.yaml_representers[None](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 251, in represent_undefined raise RepresenterError("cannot represent an object", data)RepresenterError: ('cannot represent an object', u'')

pass 4180543 2019-08-04 06:39:51 2019-08-05 00:40:24 2019-08-05 01:48:24 1:08:00 0:52:21 0:15:39 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/valgrind.yaml} 3
pass 4180544 2019-08-04 06:39:52 2019-08-05 00:40:42 2019-08-05 02:02:43 1:22:01 0:31:10 0:50:51 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
fail 4180545 2019-08-04 06:39:52 2019-08-05 00:45:30 2019-08-05 01:17:29 0:31:59 0:11:11 0:20:48 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi169 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180546 2019-08-04 06:39:53 2019-08-05 00:45:38 2019-08-05 01:27:38 0:42:00 0:13:17 0:28:43 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

Command failed on smithi162 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180547 2019-08-04 06:39:54 2019-08-05 00:46:25 2019-08-05 01:44:25 0:58:00 0:19:10 0:38:50 smithi master ubuntu 18.04 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180548 2019-08-04 06:39:55 2019-08-05 00:47:41 2019-08-05 01:37:41 0:50:00 0:26:59 0:23:01 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-05T01:24:12.802527+0000 mon.a (mon.0) 763 : cluster [WRN] Health check failed: 6 daemons have recently crashed (RECENT_CRASH)" in cluster log

pass 4180549 2019-08-04 06:39:56 2019-08-05 00:48:02 2019-08-05 02:10:03 1:22:01 0:36:21 0:45:40 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
fail 4180550 2019-08-04 06:39:57 2019-08-05 00:48:26 2019-08-05 01:26:26 0:38:00 0:13:42 0:24:18 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi172 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180551 2019-08-04 06:39:57 2019-08-05 00:49:12 2019-08-05 01:11:11 0:21:59 0:09:46 0:12:13 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
Failure Reason:

Command failed on smithi077 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180552 2019-08-04 06:39:58 2019-08-05 00:49:34 2019-08-05 01:45:34 0:56:00 0:13:45 0:42:15 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
pass 4180553 2019-08-04 06:39:59 2019-08-05 00:49:51 2019-08-05 01:37:51 0:48:00 0:35:29 0:12:31 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
pass 4180554 2019-08-04 06:40:00 2019-08-05 00:50:39 2019-08-05 01:36:39 0:46:00 0:22:36 0:23:24 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180555 2019-08-04 06:40:01 2019-08-05 00:50:58 2019-08-05 01:22:58 0:32:00 0:16:12 0:15:48 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/lockdep.yaml} 3
fail 4180556 2019-08-04 06:40:01 2019-08-05 00:52:26 2019-08-05 01:40:25 0:47:59 0:10:59 0:37:00 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on smithi148 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180557 2019-08-04 06:40:02 2019-08-05 00:53:02 2019-08-05 01:51:02 0:58:00 0:37:58 0:20:02 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
fail 4180558 2019-08-04 06:40:03 2019-08-05 00:54:53 2019-08-05 01:22:52 0:27:59 0:13:40 0:14:19 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi141 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180559 2019-08-04 06:40:04 2019-08-05 00:56:54 2019-08-05 02:56:55 2:00:01 0:16:12 1:43:49 smithi master centos 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180560 2019-08-04 06:40:05 2019-08-05 00:58:09 2019-08-05 01:20:08 0:21:59 0:12:17 0:09:42 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
Failure Reason:

Command failed on smithi103 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180561 2019-08-04 06:40:06 2019-08-05 00:58:30 2019-08-05 01:24:29 0:25:59 0:11:20 0:14:39 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
dead 4180562 2019-08-04 06:40:06 2019-08-05 00:59:59 2019-08-05 01:17:58 0:17:59 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

SSH connection to smithi079 was lost: 'mkdir -p -m0755 -- /home/ubuntu/cephtest'

fail 4180563 2019-08-04 06:40:07 2019-08-05 01:00:14 2019-08-05 01:42:14 0:42:00 0:10:53 0:31:07 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi003 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180564 2019-08-04 06:40:08 2019-08-05 01:02:42 2019-08-05 01:24:42 0:22:00 0:13:30 0:08:30 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi163 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180565 2019-08-04 06:40:09 2019-08-05 01:02:52 2019-08-05 03:26:53 2:24:01 2:12:38 0:11:23 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
pass 4180566 2019-08-04 06:40:10 2019-08-05 01:03:30 2019-08-05 01:35:30 0:32:00 0:11:20 0:20:40 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180567 2019-08-04 06:40:10 2019-08-05 01:03:35 2019-08-05 02:41:36 1:38:01 0:55:34 0:42:27 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/valgrind.yaml} 3
pass 4180568 2019-08-04 06:40:11 2019-08-05 01:04:08 2019-08-05 02:42:08 1:38:00 0:39:18 0:58:42 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
fail 4180569 2019-08-04 06:40:12 2019-08-05 01:04:39 2019-08-05 01:24:38 0:19:59 0:13:29 0:06:30 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
Failure Reason:

Command failed on smithi196 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180570 2019-08-04 06:40:13 2019-08-05 01:06:21 2019-08-05 02:16:16 1:09:55 0:20:53 0:49:02 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
pass 4180571 2019-08-04 06:40:14 2019-08-05 01:06:17 2019-08-05 01:44:16 0:37:59 0:19:59 0:18:00 smithi master ubuntu 18.04 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180572 2019-08-04 06:40:14 2019-08-05 01:06:22 2019-08-05 03:16:23 2:10:01 1:07:30 1:02:31 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
fail 4180573 2019-08-04 06:40:15 2019-08-05 01:06:32 2019-08-05 01:30:31 0:23:59 0:10:55 0:13:04 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi139 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180574 2019-08-04 06:40:16 2019-08-05 01:06:39 2019-08-05 01:54:38 0:47:59 0:16:03 0:31:56 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180575 2019-08-04 06:40:17 2019-08-05 01:07:04 2019-08-05 01:29:03 0:21:59 0:12:27 0:09:32 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi017 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180576 2019-08-04 06:40:18 2019-08-05 01:08:25 2019-08-05 01:36:24 0:27:59 0:08:56 0:19:03 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
Failure Reason:

Command failed on smithi002 with status 1: "sudo sh -c 'cat > /home/ubuntu/cephtest/valgrind.supp'"

fail 4180577 2019-08-04 06:40:18 2019-08-05 01:11:26 2019-08-05 03:09:27 1:58:01 0:10:30 1:47:31 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi186 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180578 2019-08-04 06:40:19 2019-08-05 01:12:19 2019-08-05 02:08:19 0:56:00 0:12:49 0:43:11 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
pass 4180579 2019-08-04 06:40:20 2019-08-05 01:12:42 2019-08-05 01:58:41 0:45:59 0:21:59 0:24:00 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180580 2019-08-04 06:40:21 2019-08-05 01:14:07 2019-08-05 02:10:07 0:56:00 0:17:21 0:38:39 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/lockdep.yaml} 3
fail 4180581 2019-08-04 06:40:22 2019-08-05 01:14:20 2019-08-05 01:56:19 0:41:59 0:24:00 0:17:59 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-05T01:46:18.530101+0000 mon.a (mon.0) 994 : cluster [WRN] Health check failed: 2 daemons have recently crashed (RECENT_CRASH)" in cluster log

fail 4180582 2019-08-04 06:40:22 2019-08-05 01:16:32 2019-08-05 01:42:31 0:25:59 0:10:55 0:15:04 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Command failed on smithi008 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180583 2019-08-04 06:40:23 2019-08-05 01:17:31 2019-08-05 01:53:30 0:35:59 0:11:34 0:24:25 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi038 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180584 2019-08-04 06:40:24 2019-08-05 01:18:13 2019-08-05 03:10:14 1:52:01 1:14:56 0:37:05 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
pass 4180585 2019-08-04 06:40:25 2019-08-05 01:20:23 2019-08-05 03:10:23 1:50:00 0:16:36 1:33:24 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
fail 4180586 2019-08-04 06:40:26 2019-08-05 01:22:45 2019-08-05 02:16:45 0:54:00 0:10:01 0:43:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi120 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180587 2019-08-04 06:40:27 2019-08-05 01:22:54 2019-08-05 01:46:53 0:23:59 0:10:19 0:13:40 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi139 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180588 2019-08-04 06:40:27 2019-08-05 01:22:59 2019-08-05 02:32:59 1:10:00 0:51:19 0:18:41 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
fail 4180589 2019-08-04 06:40:28 2019-08-05 01:24:44 2019-08-05 02:16:43 0:51:59 0:11:02 0:40:57 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi077 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180590 2019-08-04 06:40:29 2019-08-05 01:24:44 2019-08-05 02:56:44 1:32:00 0:11:41 1:20:19 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'boost-random', u'rc': 0, u'msg': u'', u'changes': {u'removed': [u'boost-random']}, '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_item_label': u'boost-program-options', 'failed': False, u'changed': False, u'results': [u'boost-program-options is not installed'], u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'boost-program-options'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'boost-program-options', u'rc': 0, u'msg': u'', '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_item_label': u'leveldb', 'failed': False, u'changed': True, u'results': [u'Loaded plugins: fastestmirror, langpacks, priorities, product-id, search-\n : disabled-repos, subscription-manager\nResolving Dependencies\n--> Running transaction check\n---> Package leveldb.x86_64 0:1.12.0-11.el7 will be erased\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository Size\n================================================================================\nRemoving:\n leveldb x86_64 1.12.0-11.el7 @epel 439 k\n\nTransaction Summary\n================================================================================\nRemove 1 Package\n\nInstalled size: 439 k\nDownloading packages:\nRunning transaction 
check\nRunning transaction test\nTransaction test succeeded\nRunning transaction\n Erasing : leveldb-1.12.0-11.el7.x86_64 1/1 \nLoading mirror speeds from cached hostfile\n * epel: ftp.linux.ncsu.edu\n Verifying : leveldb-1.12.0-11.el7.x86_64 1/1 \n\nRemoved:\n leveldb.x86_64 0:1.12.0-11.el7 \n\nComplete!\n'], u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'leveldb'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'leveldb', u'rc': 0, u'msg': u'', u'changes': {u'removed': [u'leveldb']}, '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_item_label': u'xmlstarlet', 'failed': False, u'changed': True, u'results': [u'Loaded plugins: fastestmirror, langpacks, priorities, product-id, search-\n : disabled-repos, subscription-manager\nResolving Dependencies\n--> Running transaction check\n---> Package xmlstarlet.x86_64 0:1.6.1-1.el7 will be erased\n--> Finished Dependency Resolution\n\nDependencies Resolved\n\n================================================================================\n Package Arch Version Repository Size\n================================================================================\nRemoving:\n xmlstarlet x86_64 1.6.1-1.el7 @epel 192 k\n\nTransaction Summary\n================================================================================\nRemove 1 Package\n\nInstalled size: 192 k\nDownloading packages:\nRunning transaction check\nRunning transaction test\nTransaction test succeeded\nRunning 
transaction\n Erasing : xmlstarlet-1.6.1-1.el7.x86_64 1/1 \nLoading mirror speeds from cached hostfile\n * epel: ftp.linux.ncsu.edu\n Verifying : xmlstarlet-1.6.1-1.el7.x86_64 1/1 \n\nRemoved:\n xmlstarlet.x86_64 0:1.6.1-1.el7 \n\nComplete!\n'], u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'xmlstarlet'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'xmlstarlet', u'rc': 0, u'msg': u'', u'changes': {u'removed': [u'xmlstarlet']}, '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_item_label': u'python-jinja2', 'failed': False, u'changed': False, u'results': [u'python-jinja2 is not installed'], u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-jinja2'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'python-jinja2', u'rc': 0, u'msg': u'', '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_item_label': u'python-ceph', 'failed': False, u'changed': False, 
u'results': [u'python-ceph is not installed'], u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-ceph'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'python-ceph', u'rc': 0, u'msg': u'', '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_item_label': u'python-flask', 'failed': False, u'changed': False, u'results': [u'python-flask is not installed'], u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-flask'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'python-flask', u'rc': 0, u'msg': u'', '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_item_label': u'python-requests', 'failed': False, u'changed': False, u'results': [u'python-requests is not installed'], u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': 
None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-requests'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'python-requests', u'rc': 0, u'msg': u'', '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_item_label': u'boost-random', 'failed': False, u'changed': False, u'results': [u'boost-random is not installed'], u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'boost-random'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, 'item': u'boost-random', u'rc': 0, u'msg': u'', '_ansible_no_log': False}, {'ansible_loop_var': u'item', '_ansible_no_log': False, 'item': u'python-urllib3', u'failed': True, u'msg': u'yum lockfile is held by another process', 'changed': False, u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-urllib3'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, 
u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, '_ansible_item_label': u'python-urllib3'}, {'ansible_loop_var': u'item', '_ansible_no_log': False, 'item': u'python-babel', u'failed': True, u'msg': u'yum lockfile is held by another process', 'changed': False, u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-babel'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, '_ansible_item_label': u'python-babel'}, {'ansible_loop_var': u'item', '_ansible_no_log': False, 'item': u'hdparm', u'failed': True, u'msg': u'yum lockfile is held by another process', 'changed': False, u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'hdparm'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, '_ansible_item_label': u'hdparm'}, 
{'ansible_loop_var': u'item', '_ansible_no_log': False, 'item': u'python-markupsafe', u'failed': True, u'msg': u'yum lockfile is held by another process', 'changed': False, u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-markupsafe'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, '_ansible_item_label': u'python-markupsafe'}, {'ansible_loop_var': u'item', '_ansible_no_log': False, 'item': u'python-werkzeug', u'failed': True, u'msg': u'yum lockfile is held by another process', 'changed': False, u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-werkzeug'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, '_ansible_item_label': u'python-werkzeug'}, {'ansible_loop_var': u'item', '_ansible_no_log': False, 'item': u'python-itsdangerous', u'failed': True, u'msg': u'yum lockfile is held by another process', 'changed': False, u'invocation': {u'module_args': {u'install_weak_deps': True, u'autoremove': False, 
u'lock_timeout': 0, u'download_dir': None, u'install_repoquery': True, u'update_cache': False, u'conf_file': None, u'exclude': [], u'update_only': False, u'installroot': u'/', u'allow_downgrade': False, u'name': [u'python-itsdangerous'], u'download_only': False, u'bugfix': False, u'list': None, u'disable_gpg_check': False, u'disable_excludes': None, u'use_backend': u'auto', u'validate_certs': True, u'state': u'absent', u'disablerepo': [], u'releasever': None, u'disable_plugin': [], u'enablerepo': [], u'skip_broken': False, u'security': False, u'enable_plugin': []}}, '_ansible_item_label': u'python-itsdangerous'}]}}Traceback (most recent call last): File "/home/teuthworker/src/git.ceph.com_git_ceph-cm-ansible_master/callback_plugins/failure_log.py", line 44, in log_failure log.error(yaml.safe_dump(failure)) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 309, in safe_dump return dump_all([data], stream, Dumper=SafeDumper, **kwds) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 281, in dump_all dumper.represent(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 29, in represent node = self.represent_data(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = 
self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 219, in represent_list return self.represent_sequence(u'tag:yaml.org,2002:seq', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 102, in represent_sequence node_item = self.represent_data(item) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 68, in represent_data node = self.yaml_representers[None](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 251, in represent_undefined raise RepresenterError("cannot represent an object", data)RepresenterError: ('cannot represent an object', u'boost-random')

pass 4180591 2019-08-04 06:40:30 2019-08-05 01:24:44 2019-08-05 02:06:43 0:41:59 0:12:51 0:29:08 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180592 2019-08-04 06:40:31 2019-08-05 01:26:26 2019-08-05 02:44:26 1:18:00 0:56:49 0:21:11 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/valgrind.yaml} 3
pass 4180593 2019-08-04 06:40:31 2019-08-05 01:26:27 2019-08-05 02:22:27 0:56:00 0:33:42 0:22:18 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
fail 4180594 2019-08-04 06:40:32 2019-08-05 01:26:29 2019-08-05 01:48:33 0:22:04 0:11:05 0:10:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi086 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180595 2019-08-04 06:40:33 2019-08-05 01:27:52 2019-08-05 01:57:51 0:29:59 0:10:18 0:19:41 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

Command failed on smithi156 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180596 2019-08-04 06:40:34 2019-08-05 01:29:15 2019-08-05 01:53:15 0:24:00 0:11:19 0:12:41 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi049 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180597 2019-08-04 06:40:35 2019-08-05 01:29:15 2019-08-05 02:09:20 0:40:05 0:22:41 0:17:24 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-05T01:59:23.848344+0000 mon.a (mon.0) 904 : cluster [WRN] Health check failed: 4 daemons have recently crashed (RECENT_CRASH)" in cluster log

pass 4180598 2019-08-04 06:40:36 2019-08-05 01:30:46 2019-08-05 02:22:46 0:52:00 0:38:21 0:13:39 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
fail 4180599 2019-08-04 06:40:37 2019-08-05 01:32:04 2019-08-05 02:28:03 0:55:59 0:09:59 0:46:00 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi032 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180600 2019-08-04 06:40:37 2019-08-05 01:32:52 2019-08-05 01:52:51 0:19:59 0:10:23 0:09:36 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
Failure Reason:

Command failed on smithi106 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180601 2019-08-04 06:40:38 2019-08-05 01:34:00 2019-08-05 02:18:00 0:44:00 0:16:25 0:27:35 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
pass 4180602 2019-08-04 06:40:39 2019-08-05 01:35:44 2019-08-05 02:37:44 1:02:00 0:33:08 0:28:52 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
pass 4180603 2019-08-04 06:40:40 2019-08-05 01:36:25 2019-08-05 02:28:25 0:52:00 0:22:39 0:29:21 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180604 2019-08-04 06:40:41 2019-08-05 01:36:40 2019-08-05 02:14:40 0:38:00 0:18:06 0:19:54 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/lockdep.yaml} 3
fail 4180605 2019-08-04 06:40:42 2019-08-05 01:37:55 2019-08-05 01:57:54 0:19:59 0:10:14 0:09:45 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on smithi196 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180606 2019-08-04 06:40:43 2019-08-05 01:37:55 2019-08-05 02:53:55 1:16:00 0:35:52 0:40:08 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
pass 4180607 2019-08-04 06:40:43 2019-08-05 01:40:20 2019-08-05 02:16:19 0:35:59 0:21:12 0:14:47 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180608 2019-08-04 06:40:44 2019-08-05 01:40:27 2019-08-05 02:22:26 0:41:59 0:10:58 0:31:01 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi172 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180609 2019-08-04 06:40:45 2019-08-05 01:42:28 2019-08-05 03:06:29 1:24:01 0:10:12 1:13:49 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
Failure Reason:

Command failed on smithi180 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180610 2019-08-04 06:40:46 2019-08-05 01:42:32 2019-08-05 02:16:32 0:34:00 0:11:55 0:22:05 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180611 2019-08-04 06:40:47 2019-08-05 01:44:23 2019-08-05 02:10:22 0:25:59 0:13:01 0:12:58 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
fail 4180612 2019-08-04 06:40:48 2019-08-05 01:44:23 2019-08-05 02:08:22 0:23:59 0:09:36 0:14:23 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi197 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180613 2019-08-04 06:40:48 2019-08-05 01:44:26 2019-08-05 02:02:25 0:17:59 0:09:57 0:08:02 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi120 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

dead 4180614 2019-08-04 06:40:49 2019-08-05 01:45:49 2019-08-05 02:39:49 0:54:00 0:10:14 0:43:46 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Traceback (most recent call last): File "/home/teuthworker/src/git.ceph.com_git_ceph-cm-ansible_master/callback_plugins/failure_log.py", line 44, in log_failure log.error(yaml.safe_dump(failure)) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 309, in safe_dump return dump_all([data], stream, Dumper=SafeDumper, **kwds) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 281, in dump_all dumper.represent(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 29, in represent node = self.represent_data(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = 
self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 219, in represent_list return self.represent_sequence(u'tag:yaml.org,2002:seq', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 102, in represent_sequence node_item = self.represent_data(item) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u'tag:yaml.org,2002:map', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 68, in represent_data node = self.yaml_representers[None](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 251, in represent_undefined raise RepresenterError("cannot represent an object", data)RepresenterError: ('cannot represent an object', u'cryptopp-devel')

pass 4180615 2019-08-04 06:40:50 2019-08-05 01:45:49 2019-08-05 03:11:49 1:26:00 0:11:40 1:14:20 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
dead 4180616 2019-08-04 06:40:51 2019-08-05 01:46:03 2019-08-05 04:36:04 2:50:01 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/valgrind.yaml} 3
pass 4180617 2019-08-04 06:40:52 2019-08-05 01:47:08 2019-08-05 03:51:09 2:04:01 1:14:02 0:49:59 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
fail 4180618 2019-08-04 06:40:53 2019-08-05 01:48:38 2019-08-05 02:34:38 0:46:00 0:10:17 0:35:43 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
Failure Reason:

Command failed on smithi096 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180619 2019-08-04 06:40:54 2019-08-05 01:48:39 2019-08-05 02:22:38 0:33:59 0:21:28 0:12:31 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
pass 4180620 2019-08-04 06:40:54 2019-08-05 01:50:42 2019-08-05 02:20:41 0:29:59 0:19:26 0:10:33 smithi master ubuntu 18.04 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180621 2019-08-04 06:40:55 2019-08-05 01:51:03 2019-08-05 03:51:04 2:00:01 0:48:29 1:11:32 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
fail 4180622 2019-08-04 06:40:57 2019-08-05 01:53:06 2019-08-05 02:39:05 0:45:59 0:10:32 0:35:27 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi172 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180623 2019-08-04 06:40:57 2019-08-05 01:53:16 2019-08-05 02:39:16 0:46:00 0:16:19 0:29:41 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180624 2019-08-04 06:40:58 2019-08-05 01:53:32 2019-08-05 02:13:31 0:19:59 0:09:47 0:10:12 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi110 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180625 2019-08-04 06:40:59 2019-08-05 01:54:53 2019-08-05 02:50:53 0:56:00 0:31:10 0:24:50 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
fail 4180626 2019-08-04 06:41:00 2019-08-05 01:56:34 2019-08-05 02:18:33 0:21:59 0:09:59 0:12:00 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi103 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180627 2019-08-04 06:41:01 2019-08-05 01:56:34 2019-08-05 02:30:33 0:33:59 0:12:54 0:21:05 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
pass 4180628 2019-08-04 06:41:02 2019-08-05 01:58:06 2019-08-05 02:36:06 0:38:00 0:16:41 0:21:19 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180629 2019-08-04 06:41:03 2019-08-05 01:58:07 2019-08-05 03:18:07 1:20:00 0:48:14 0:31:46 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
fail 4180630 2019-08-04 06:41:04 2019-08-05 01:58:43 2019-08-05 02:42:42 0:43:59 0:26:26 0:17:33 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-05T02:31:57.879760+0000 mon.a (mon.0) 990 : cluster [WRN] Health check failed: 2 daemons have recently crashed (RECENT_CRASH)" in cluster log

fail 4180631 2019-08-04 06:41:05 2019-08-05 02:02:40 2019-08-05 02:38:39 0:35:59 0:10:37 0:25:22 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Command failed on smithi146 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180632 2019-08-04 06:41:05 2019-08-05 02:02:44 2019-08-05 03:16:44 1:14:00 0:10:37 1:03:23 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi200 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180633 2019-08-04 06:41:06 2019-08-05 02:03:12 2019-08-05 03:11:11 1:07:59 0:52:27 0:15:32 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
fail 4180634 2019-08-04 06:41:07 2019-08-05 02:06:58 2019-08-05 02:40:57 0:33:59 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 2
Failure Reason:

Could not reconnect to ubuntu@smithi080.front.sepia.ceph.com

fail 4180635 2019-08-04 06:41:08 2019-08-05 02:08:19 2019-08-05 02:26:18 0:17:59 0:09:38 0:08:21 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi113 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180636 2019-08-04 06:41:09 2019-08-05 02:08:20 2019-08-05 02:32:19 0:23:59 0:10:07 0:13:52 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi078 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180637 2019-08-04 06:41:10 2019-08-05 02:08:24 2019-08-05 03:24:24 1:16:00 0:51:12 0:24:48 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
fail 4180638 2019-08-04 06:41:11 2019-08-05 02:09:34 2019-08-05 03:09:34 1:00:00 0:11:36 0:48:24 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi104 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180639 2019-08-04 06:41:12 2019-08-05 02:10:04 2019-08-05 02:48:03 0:37:59 0:10:05 0:27:54 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi106 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

dead 4180640 2019-08-04 06:41:13 2019-08-05 02:10:08 2019-08-05 02:20:07 0:09:59 0:02:59 0:07:00 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 2
Failure Reason:

{'smithi134.front.sepia.ceph.com': {'changed': False, 'censored': "the output has been hidden due to the fact that 'no_log: true' was specified for this result"}, 'smithi099.front.sepia.ceph.com': {'changed': False, 'censored': "the output has been hidden due to the fact that 'no_log: true' was specified for this result"}, 'smithi076.front.sepia.ceph.com': {'changed': False, 'censored': "the output has been hidden due to the fact that 'no_log: true' was specified for this result"}}

dead 4180641 2019-08-04 06:41:14 2019-08-05 02:10:24 2019-08-05 04:36:25 2:26:01 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
pass 4180642 2019-08-04 06:41:15 2019-08-05 02:10:27 2019-08-05 02:50:26 0:39:59 0:22:29 0:17:30 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
pass 4180643 2019-08-04 06:41:16 2019-08-05 02:12:47 2019-08-05 02:36:46 0:23:59 0:12:17 0:11:42 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180644 2019-08-04 06:41:16 2019-08-05 02:13:32 2019-08-05 02:37:31 0:23:59 0:08:59 0:15:00 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

{'Failure object was': {'smithi134.front.sepia.ceph.com': {"u'msg'": "u'yum lockfile is held by another process'", 'changed': False, "u'invocation'": {"u'module_args'": {"u'list'": 'None', "u'disable_plugin'": [], "u'install_weak_deps'": True, "u'name'": ["u'yum-utils'"], "u'releasever'": 'None', "u'security'": False, "u'conf_file'": 'None', "u'autoremove'": False, "u'update_cache'": False, "u'disablerepo'": [], "u'skip_broken'": False, "u'disable_gpg_check'": False, "u'lock_timeout'": 0, "u'download_only'": False, "u'enablerepo'": [], "u'disable_excludes'": 'None', "u'download_dir'": 'None', "u'state'": "u'present'", "u'install_repoquery'": True, "u'allow_downgrade'": False, "u'validate_certs'": True, "u'update_only'": False, "u'exclude'": [], "u'bugfix'": False, "u'enable_plugin'": [], "u'use_backend'": "u'auto'", "u'installroot'": "u'/'"}}, '_ansible_no_log': False}}, 'Traceback (most recent call last)': 'File "/home/teuthworker/src/git.ceph.com_git_ceph-cm-ansible_master/callback_plugins/failure_log.py", line 44, in log_failure log.error(yaml.safe_dump(failure)) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 309, in safe_dump return dump_all([data], stream, Dumper=SafeDumper, **kwds) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 281, in dump_all dumper.represent(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 29, in represent node = self.represent_data(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 
227, in represent_dict return self.represent_mapping(u\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return 
self.represent_mapping(u\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 68, in represent_data node = self.yaml_representers[None](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 251, in represent_undefined raise RepresenterError("cannot represent an object", data)', 'RepresenterError': "('cannot represent an object', u'/')"}

fail 4180645 2019-08-04 06:41:17 2019-08-05 02:14:54 2019-08-05 02:38:53 0:23:59 0:11:34 0:12:25 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi074 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180646 2019-08-04 06:41:18 2019-08-05 02:16:31 2019-08-05 02:52:30 0:35:59 0:25:18 0:10:41 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-05T02:41:00.372877+0000 mon.a (mon.0) 758 : cluster [WRN] Health check failed: 2 daemons have recently crashed (RECENT_CRASH)" in cluster log

pass 4180647 2019-08-04 06:41:19 2019-08-05 02:16:31 2019-08-05 03:30:31 1:14:00 0:43:05 0:30:55 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
fail 4180648 2019-08-04 06:41:20 2019-08-05 02:16:33 2019-08-05 02:54:33 0:38:00 0:11:17 0:26:43 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi130 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180649 2019-08-04 06:41:21 2019-08-05 02:16:45 2019-08-05 02:50:44 0:33:59 0:10:29 0:23:30 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
Failure Reason:

Command failed on smithi120 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180650 2019-08-04 06:41:22 2019-08-05 02:16:47 2019-08-05 03:20:46 1:03:59 0:14:56 0:49:03 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
pass 4180651 2019-08-04 06:41:23 2019-08-05 02:18:15 2019-08-05 03:18:15 1:00:00 0:32:39 0:27:21 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
dead 4180652 2019-08-04 06:41:23 2019-08-05 02:18:15 2019-08-05 02:58:15 0:40:00 0:08:05 0:31:55 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

SSH connection to smithi099 was lost: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180653 2019-08-04 06:41:24 2019-08-05 02:18:36 2019-08-05 03:50:36 1:32:00 0:48:09 0:43:51 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
fail 4180654 2019-08-04 06:41:25 2019-08-05 02:20:22 2019-08-05 02:56:21 0:35:59 0:11:12 0:24:47 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on smithi074 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180655 2019-08-04 06:41:26 2019-08-05 02:20:43 2019-08-05 03:36:43 1:16:00 0:44:35 0:31:25 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
fail 4180656 2019-08-04 06:41:27 2019-08-05 02:22:16 2019-08-05 02:58:16 0:36:00 0:08:44 0:27:16 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

{'Failure object was': {'smithi134.front.sepia.ceph.com': {"u'msg'": "u'yum lockfile is held by another process'", 'changed': False, "u'invocation'": {"u'module_args'": {"u'list'": 'None', "u'disable_plugin'": [], "u'install_weak_deps'": True, "u'name'": ["u'http://satellite.front.sepia.ceph.com/pub/katello-ca-consumer-latest.noarch.rpm'"], "u'releasever'": 'None', "u'security'": False, "u'conf_file'": 'None', "u'autoremove'": False, "u'update_cache'": False, "u'disablerepo'": [], "u'skip_broken'": False, "u'disable_gpg_check'": False, "u'lock_timeout'": 0, "u'download_only'": False, "u'enablerepo'": [], "u'disable_excludes'": 'None', "u'download_dir'": 'None', "u'state'": "u'present'", "u'install_repoquery'": True, "u'allow_downgrade'": False, "u'validate_certs'": False, "u'update_only'": False, "u'exclude'": [], "u'bugfix'": False, "u'enable_plugin'": [], "u'use_backend'": "u'auto'", "u'installroot'": "u'/'"}}, '_ansible_no_log': False}}, 'Traceback (most recent call last)': 'File "/home/teuthworker/src/git.ceph.com_git_ceph-cm-ansible_master/callback_plugins/failure_log.py", line 44, in log_failure log.error(yaml.safe_dump(failure)) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 309, in safe_dump return dump_all([data], stream, Dumper=SafeDumper, **kwds) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/__init__.py", line 281, in dump_all dumper.represent(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 29, in represent node = self.represent_data(data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 58, in represent_data node = self.yaml_representers[data_types[0]](self, data) File 
"/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 227, in represent_dict return self.represent_mapping(u\'tag:yaml.org,2002:map\', data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 125, in represent_mapping node_value = self.represent_data(item_value) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 68, in represent_data node = self.yaml_representers[None](self, data) File "/home/teuthworker/src/git.ceph.com_git_teuthology_master/virtualenv/local/lib/python2.7/site-packages/yaml/representer.py", line 251, in represent_undefined raise RepresenterError("cannot represent an object", data)', 'RepresenterError': "('cannot represent an object', u'/')"}

pass 4180657 2019-08-04 06:41:28 2019-08-05 02:22:28 2019-08-05 02:50:27 0:27:59 0:16:08 0:11:51 smithi master centos 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180658 2019-08-04 06:41:29 2019-08-05 02:22:29 2019-08-05 02:32:27 0:09:58 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml}
Failure Reason:

Command failed on smithi129 with status 1: 'sudo package-cleanup -y --oldkernels'

dead 4180659 2019-08-04 06:41:29 2019-08-05 02:22:39 2019-08-05 02:34:38 0:11:59 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 1
Failure Reason:

SSH connection to smithi204 was lost: 'sudo package-cleanup -y --oldkernels'

fail 4180660 2019-08-04 06:41:30 2019-08-05 02:22:48 2019-08-05 02:56:47 0:33:59 0:11:57 0:22:02 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

Command failed on smithi023 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180661 2019-08-04 06:41:31 2019-08-05 02:54:15 2019-08-05 03:12:14 0:17:59 0:10:30 0:07:29 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi065 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180662 2019-08-04 06:41:32 2019-08-05 02:54:34 2019-08-05 03:12:33 0:17:59 0:10:09 0:07:50 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi002 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

dead 4180663 2019-08-04 06:41:33 2019-08-05 02:56:35 2019-08-05 04:36:36 1:40:01 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
pass 4180664 2019-08-04 06:41:34 2019-08-05 02:56:41 2019-08-05 03:58:41 1:02:00 0:11:46 0:50:14 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
dead 4180665 2019-08-04 06:41:34 2019-08-05 02:56:45 2019-08-05 04:36:46 1:40:01 1:14:02 0:25:59 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
Failure Reason:

psutil.NoSuchProcess process no longer exists (pid=17516)

pass 4180666 2019-08-04 06:41:35 2019-08-05 02:56:48 2019-08-05 03:58:48 1:02:00 0:42:26 0:19:34 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
dead 4180667 2019-08-04 06:41:36 2019-08-05 02:56:56 2019-08-05 04:34:56 1:38:00 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml}
pass 4180668 2019-08-04 06:41:37 2019-08-05 02:58:29 2019-08-05 03:46:29 0:48:00 0:21:48 0:26:12 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
fail 4180669 2019-08-04 06:41:38 2019-08-05 02:58:29 2019-08-05 03:54:29 0:56:00 0:10:48 0:45:12 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi145 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180670 2019-08-04 06:41:39 2019-08-05 02:58:32 2019-08-05 03:42:31 0:43:59 0:10:43 0:33:16 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on smithi103 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180671 2019-08-04 06:41:40 2019-08-05 03:02:47 2019-08-05 03:38:46 0:35:59 0:10:08 0:25:51 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
Failure Reason:

Command failed on smithi006 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180672 2019-08-04 06:41:41 2019-08-05 03:06:44 2019-08-05 03:36:43 0:29:59 0:16:19 0:13:40 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
dead 4180673 2019-08-04 06:41:41 2019-08-05 03:09:41 2019-08-05 04:35:42 1:26:01 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
pass 4180674 2019-08-04 06:41:42 2019-08-05 03:09:41 2019-08-05 04:11:41 1:02:00 0:31:46 0:30:14 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
fail 4180675 2019-08-04 06:41:43 2019-08-05 03:10:15 2019-08-05 04:22:15 1:12:00 0:09:35 1:02:25 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi151 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180676 2019-08-04 06:41:44 2019-08-05 03:10:25 2019-08-05 03:36:24 0:25:59 0:12:50 0:13:09 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
pass 4180677 2019-08-04 06:41:44 2019-08-05 03:11:25 2019-08-05 04:33:25 1:22:00 0:16:53 1:05:07 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180678 2019-08-04 06:41:45 2019-08-05 03:11:51 2019-08-05 04:11:51 1:00:00 0:47:11 0:12:49 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
fail 4180679 2019-08-04 06:41:46 2019-08-05 03:12:17 2019-08-05 03:52:17 0:40:00 0:28:20 0:11:40 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-05T03:40:26.229644+0000 mon.a (mon.0) 950 : cluster [WRN] Health check failed: 1 daemons have recently crashed (RECENT_CRASH)" in cluster log

fail 4180680 2019-08-04 06:41:47 2019-08-05 03:12:48 2019-08-05 03:34:47 0:21:59 0:09:51 0:12:08 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Command failed on smithi136 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180681 2019-08-04 06:41:48 2019-08-05 03:13:45 2019-08-05 03:39:44 0:25:59 0:11:03 0:14:56 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi172 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

dead 4180682 2019-08-04 06:41:49 2019-08-05 03:14:27 2019-08-05 04:36:27 1:22:00 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml} 3
dead 4180683 2019-08-04 06:41:49 2019-08-05 03:16:38 2019-08-05 04:36:38 1:20:00 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml}
fail 4180684 2019-08-04 06:41:50 2019-08-05 03:16:45 2019-08-05 04:08:45 0:52:00 0:09:48 0:42:12 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on smithi187 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180685 2019-08-04 06:41:51 2019-08-05 03:18:21 2019-08-05 03:42:20 0:23:59 0:09:51 0:14:08 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi152 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180686 2019-08-04 06:41:52 2019-08-05 03:18:21 2019-08-05 04:20:21 1:02:00 0:49:08 0:12:52 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
dead 4180687 2019-08-04 06:41:53 2019-08-05 03:20:29 2019-08-05 04:36:29 1:16:00 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml}
fail 4180688 2019-08-04 06:41:53 2019-08-05 03:20:48 2019-08-05 04:30:48 1:10:00 0:09:44 1:00:16 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi019 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180689 2019-08-04 06:41:54 2019-08-05 03:24:39 2019-08-05 04:06:39 0:42:00 0:12:48 0:29:12 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
dead 4180690 2019-08-04 06:41:55 2019-08-05 03:27:07 2019-08-05 04:37:07 1:10:00 0:20:53 0:49:07 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
Failure Reason:

psutil.NoSuchProcess process no longer exists (pid=28379)

pass 4180691 2019-08-04 06:41:56 2019-08-05 03:30:45 2019-08-05 04:22:45 0:52:00 0:24:55 0:27:05 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
pass 4180692 2019-08-04 06:41:57 2019-08-05 03:34:45 2019-08-05 04:00:44 0:25:59 0:14:58 0:11:01 smithi master centos 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180693 2019-08-04 06:41:57 2019-08-05 03:34:48 2019-08-05 04:06:48 0:32:00 0:09:52 0:22:08 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

Command failed on smithi116 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180694 2019-08-04 06:41:58 2019-08-05 03:36:38 2019-08-05 04:22:37 0:45:59 0:17:14 0:28:45 smithi master centos 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{centos_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180695 2019-08-04 06:41:59 2019-08-05 03:36:44 2019-08-05 04:12:44 0:36:00 0:22:35 0:13:25 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

"2019-08-05T04:02:45.128843+0000 mon.b (mon.0) 785 : cluster [WRN] Health check failed: 4 daemons have recently crashed (RECENT_CRASH)" in cluster log

fail 4180696 2019-08-04 06:42:00 2019-08-05 03:36:44 2019-08-05 04:00:44 0:24:00 0:11:00 0:13:00 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
Failure Reason:

Command failed on smithi089 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180697 2019-08-04 06:42:01 2019-08-05 03:39:00 2019-08-05 04:23:01 0:44:01 0:09:37 0:34:24 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on smithi182 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

dead 4180698 2019-08-04 06:42:01 2019-08-05 03:39:00 2019-08-05 04:35:00 0:56:00 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml}
pass 4180699 2019-08-04 06:42:02 2019-08-05 03:39:46 2019-08-05 04:09:45 0:29:59 0:16:19 0:13:40 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml} 3
dead 4180700 2019-08-04 06:42:03 2019-08-05 03:42:34 2019-08-05 04:36:33 0:53:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
pass 4180701 2019-08-04 06:42:04 2019-08-05 03:42:34 2019-08-05 04:28:33 0:45:59 0:17:53 0:28:06 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
fail 4180702 2019-08-04 06:42:05 2019-08-05 03:44:48 2019-08-05 15:47:01 12:02:13 11:48:09 0:14:04 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml}
Failure Reason:

psutil.NoSuchProcess process no longer exists (pid=32137)

fail 4180703 2019-08-04 06:42:05 2019-08-05 03:46:44 2019-08-05 04:06:43 0:19:59 0:09:56 0:10:03 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on smithi018 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

dead 4180704 2019-08-04 06:42:06 2019-08-05 03:50:50 2019-08-05 04:36:50 0:46:00 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3
pass 4180705 2019-08-04 06:42:07 2019-08-05 03:51:05 2019-08-05 04:35:05 0:44:00 0:15:05 0:28:55 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
pass 4180706 2019-08-04 06:42:08 2019-08-05 03:51:10 2019-08-05 04:33:10 0:42:00 0:12:58 0:29:02 smithi master ubuntu 18.04 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
fail 4180707 2019-08-04 06:42:09 2019-08-05 03:52:31 2019-08-05 04:36:30 0:43:59 0:08:23 0:35:36 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml} 3
Failure Reason:

psutil.NoSuchProcess process no longer exists (pid=5061)

dead 4180708 2019-08-04 06:42:10 2019-08-05 03:52:54 2019-08-05 04:36:53 0:43:59 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml}
dead 4180709 2019-08-04 06:42:11 2019-08-05 03:54:43 2019-08-05 04:36:42 0:41:59 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml}
fail 4180710 2019-08-04 06:42:11 2019-08-05 03:58:55 2019-08-05 04:14:54 0:15:59 0:09:43 0:06:16 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi019 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

fail 4180711 2019-08-04 06:42:12 2019-08-05 03:58:55 2019-08-05 04:14:54 0:15:59 0:09:38 0:06:21 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml} 3
Failure Reason:

Command failed on smithi174 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

dead 4180712 2019-08-04 06:42:13 2019-08-05 04:00:51 2019-08-05 04:36:51 0:36:00 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml} 3
pass 4180713 2019-08-04 06:42:14 2019-08-05 04:00:51 2019-08-05 04:32:51 0:32:00 0:12:29 0:19:31 smithi master multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml} 3
dead 4180714 2019-08-04 06:42:15 2019-08-05 04:00:51 2019-08-05 04:36:51 0:36:00 smithi master centos multimds/verify/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_fsstress.yaml validater/valgrind.yaml} 3
dead 4180715 2019-08-04 06:42:15 2019-08-05 04:06:53 2019-08-05 04:36:52 0:29:59 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml}
dead 4180716 2019-08-04 06:42:16 2019-08-05 04:06:53 2019-08-05 04:36:53 0:30:00 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml}
dead 4180717 2019-08-04 06:42:17 2019-08-05 04:06:53 2019-08-05 04:36:53 0:30:00 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml}
dead 4180718 2019-08-04 06:42:18 2019-08-05 04:08:59 2019-08-05 04:34:58 0:25:59 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml}
dead 4180719 2019-08-04 06:42:19 2019-08-05 04:09:46 2019-08-05 04:35:46 0:26:00 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
dead 4180720 2019-08-04 06:42:20 2019-08-05 04:11:56 2019-08-05 04:35:55 0:23:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml}
dead 4180721 2019-08-04 06:42:20 2019-08-05 04:11:56 2019-08-05 04:35:55 0:23:59 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsstress.yaml}
dead 4180722 2019-08-04 06:42:21 2019-08-05 04:11:56 2019-08-05 04:35:55 0:23:59 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml}
dead 4180723 2019-08-04 06:42:22 2019-08-05 04:12:43 2019-08-05 04:36:42 0:23:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_fsx.yaml}
dead 4180724 2019-08-04 06:42:23 2019-08-05 04:12:45 2019-08-05 04:36:44 0:23:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_pjd.yaml}
fail 4180725 2019-08-04 06:42:24 2019-08-05 04:15:09 2019-08-05 04:37:08 0:21:59 0:03:22 0:18:37 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_exports.yaml} 3
Failure Reason:

psutil.NoSuchProcess process no longer exists (pid=4616)

dead 4180726 2019-08-04 06:42:25 2019-08-05 04:15:09 2019-08-05 04:35:08 0:19:59 smithi master multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/fuse.yaml msgr-failures/none.yaml objectstore-ec/filestore-xfs.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml}
dead 4180727 2019-08-04 06:42:26 2019-08-05 04:20:35 2019-08-05 04:36:34 0:15:59 smithi master multimds/verify/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{kernel-testing.yaml mount.yaml ms-die-on-skipped.yaml} objectstore-ec/bluestore-ec-root.yaml overrides/{fuse-default-perm-no.yaml verify/{frag_enable.yaml mon-debug.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml}} tasks/cfuse_workunit_suites_dbench.yaml validater/lockdep.yaml} 3
dead 4180728 2019-08-04 06:42:27 2019-08-05 04:22:30 2019-08-05 04:36:29 0:13:59 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cephfs_test_snapshots.yaml}
dead 4180729 2019-08-04 06:42:28 2019-08-05 04:22:39 2019-08-05 04:36:38 0:13:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_kernel_untar_build.yaml}
dead 4180730 2019-08-04 06:42:28 2019-08-05 04:22:46 2019-08-05 04:36:45 0:13:59 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mon.yaml clusters/3-mds-2-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{rhel_7.yaml}} ms-die-on-skipped.yaml}} msgr-failures/osd-mds-delay.yaml objectstore-ec/bluestore-bitmap.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_pjd.yaml}
dead 4180731 2019-08-04 06:42:29 2019-08-05 04:23:02 2019-08-05 04:35:01 0:11:59 smithi master multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/fuse.yaml objectstore-ec/filestore-xfs.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_misc.yaml}
dead 4180732 2019-08-04 06:42:30 2019-08-05 04:28:48 2019-08-05 04:36:47 0:07:59 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-bitmap.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_norstats.yaml}
dead 4180733 2019-08-04 06:42:31 2019-08-05 04:31:02 2019-08-05 04:35:01 0:03:59 smithi master rhel 7.6 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-comp-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_blogbench.yaml}
fail 4180734 2019-08-04 06:42:32 2019-08-05 04:33:05 2019-08-05 04:53:05 0:20:00 0:10:11 0:09:49 smithi master rhel 7.6 multimds/thrash/{begin.yaml ceph-thrash/mds.yaml clusters/9-mds-3-standby.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} mount/kclient/{mount.yaml overrides/{distro/rhel/{k-distro.yaml rhel_7.yaml} ms-die-on-skipped.yaml}} msgr-failures/none.yaml objectstore-ec/bluestore-comp-ec-root.yaml overrides/{fuse-default-perm-no.yaml thrash/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} thrash_debug.yaml} tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on smithi112 with status 1: 'sudo yum -y install ceph-radosgw ceph-test ceph ceph-mgr ceph-mgr-dashboard ceph-mgr-diskprediction-cloud ceph-mgr-diskprediction-local ceph-mgr-rook ceph-mgr-ssh ceph-fuse libcephfs2 libcephfs-devel librados2 librbd1 python-ceph rbd-fuse python36-cephfs bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel bison flex elfutils-libelf-devel openssl-devel'

pass 4180735 2019-08-04 06:42:33 2019-08-05 04:33:11 2019-08-05 05:55:12 1:22:01 0:50:46 0:31:15 smithi master multimds/basic/{begin.yaml clusters/9-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/yes.yaml mount/fuse.yaml objectstore-ec/bluestore-comp.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_dbench.yaml} 3
pass 4180736 2019-08-04 06:42:34 2019-08-05 04:33:27 2019-08-05 08:21:30 3:48:03 1:41:10 2:06:53 smithi master ubuntu 18.04 multimds/basic/{begin.yaml clusters/3-mds.yaml conf/{client.yaml mds.yaml mon.yaml osd.yaml} inline/no.yaml mount/kclient/{mount.yaml overrides/{distro/random/{k-testing.yaml supported$/{ubuntu_latest.yaml}} ms-die-on-skipped.yaml}} objectstore-ec/bluestore-ec-root.yaml overrides/{basic/{frag_enable.yaml whitelist_health.yaml whitelist_wrongly_marked_down.yaml} fuse-default-perm-no.yaml} q_check_counter/check_counter.yaml tasks/cfuse_workunit_suites_ffsb.yaml} 3