ID
Status
Ceph Branch
Suite Branch
Teuthology Branch
Machine
OS
Nodes
Description
Failure Reason
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kernel_cfuse_workunits_dbench_iozone.yaml}
"2018-02-28 05:31:48.961628 mon.b mon.1 158.69.72.62:6789/0 31 : cluster [WRN] Health check failed: 1/3 mons down, quorum b,c (MON_DOWN)" in cluster log
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/auto-repair.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp-ec-root.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/backtrace.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/client-limits.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/client-recovery.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/config-commands.yaml whitelist_health.yaml}
Command failed on ovh022 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 7'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/damage.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-ec-root.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/data-scan.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/failover.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/bluestore-comp.yaml tasks/kernel_cfuse_workunits_untarbuild_blogbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/forward-scrub.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/journal-repair.yaml whitelist_health.yaml}
Command failed on ovh091 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/mds-flush.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/filestore-xfs.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/mds-full.yaml whitelist_health.yaml}
Test failure: test_full_different_file (tasks.cephfs.test_full.TestClusterFull)
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_ffsb.yaml}
Command failed (workunit test suites/ffsb.sh) on ovh085 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=luminous TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/ffsb.sh'
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/pool-perm.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/sessionmap.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp-ec-root.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/strays.yaml whitelist_health.yaml}
Command failed on ovh065 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/volume-client.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kernel_cfuse_workunits_dbench_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/auto-repair.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/backtrace.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/client-limits.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-ec-root.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/client-recovery.yaml whitelist_health.yaml}
Command failed on ovh073 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/config-commands.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/damage.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
Found coredumps on ubuntu@ovh098.front.sepia.ceph.com
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/data-scan.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/failover.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/bluestore.yaml tasks/kernel_cfuse_workunits_untarbuild_blogbench.yaml}
"2018-02-28 05:31:49.091753 mon.b mon.0 158.69.72.67:6789/0 122 : cluster [WRN] Health check failed: 1 osds down (OSD_DOWN)" in cluster log
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/forward-scrub.yaml whitelist_health.yaml}
Command failed on ovh065 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/filestore-xfs.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
while scanning a plain scalar in "/tmp/teuth_ansible_failures_XOJrhF", line 1, column 251: found unexpected ':' in "/tmp/teuth_ansible_failures_XOJrhF", line 1, column 271. Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/journal-repair.yaml whitelist_health.yaml}
Test failure: test_reset (tasks.cephfs.test_journal_repair.TestJournalRepair)
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_misc.yaml}
while scanning a plain scalar in "/tmp/teuth_ansible_failures_MLcAVf", line 1, column 251: found unexpected ':' in "/tmp/teuth_ansible_failures_MLcAVf", line 1, column 271. Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_o_trunc.yaml}
while scanning a plain scalar in "/tmp/teuth_ansible_failures_swaouq", line 12, column 251: found unexpected ':' in "/tmp/teuth_ansible_failures_swaouq", line 12, column 271. Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/mds-flush.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp-ec-root.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/mds-full.yaml whitelist_health.yaml}
"2018-02-28 09:47:37.430735 mon.a mon.0 158.69.81.185:6789/0 220 : cluster [WRN] Health check failed: 2 osds down (OSD_DOWN)" in cluster log
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/pool-perm.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/sessionmap.yaml whitelist_health.yaml}
Command failed on ovh002 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
Command failed on ovh052 with status 100: 'sudo DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" autoremove'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/strays.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/volume-client.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/filestore-xfs.yaml tasks/kernel_cfuse_workunits_dbench_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/auto-repair.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-ec-root.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/backtrace.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/client-limits.yaml whitelist_health.yaml}
Command failed on ovh053 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/client-recovery.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/config-commands.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/damage.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/filestore-xfs.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
Command failed (workunit test suites/ffsb.sh) on ovh007 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=luminous TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/ffsb.sh'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/data-scan.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/failover.yaml whitelist_health.yaml}
Command failed on ovh003 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kernel_cfuse_workunits_untarbuild_blogbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/forward-scrub.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp-ec-root.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/journal-repair.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/mds-flush.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/mds-full.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/pool-perm.yaml whitelist_health.yaml}
Command failed on ovh036 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/sessionmap.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-ec-root.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/strays.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/volume-client.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/bluestore-comp.yaml tasks/kernel_cfuse_workunits_dbench_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/auto-repair.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/backtrace.yaml whitelist_health.yaml}
"2018-02-28 16:28:10.457020 mon.a mon.0 158.69.64.249:6789/0 129 : cluster [WRN] Health check failed: 1 osds down (OSD_DOWN)" in cluster log
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/client-limits.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/filestore-xfs.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/client-recovery.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_ffsb.yaml}
Command failed (workunit test suites/ffsb.sh) on ovh077 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=luminous TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/ffsb.sh'
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/config-commands.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/damage.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp-ec-root.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/data-scan.yaml whitelist_health.yaml}
Command failed on ovh091 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 7'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/failover.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kernel_cfuse_workunits_untarbuild_blogbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/forward-scrub.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/journal-repair.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/mds-flush.yaml whitelist_health.yaml}
"2018-02-28 21:59:13.407543 mon.a mon.0 158.69.74.241:6789/0 406 : cluster [WRN] daemon mds.c is not responding, replacing it as rank 0 with standby daemon mds.d" in cluster log
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-ec-root.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
Command failed (workunit test suites/ffsb.sh) on ovh041 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=luminous TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/ffsb.sh'
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/mds-full.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/pool-perm.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/sessionmap.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/strays.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/volume-client.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/bluestore.yaml tasks/kernel_cfuse_workunits_dbench_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/auto-repair.yaml whitelist_health.yaml}
Command failed on ovh072 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/filestore-xfs.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
Command failed (workunit test suites/ffsb.sh) on ovh059 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=luminous TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/ffsb.sh'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/backtrace.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/client-limits.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp-ec-root.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/client-recovery.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/config-commands.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/damage.yaml whitelist_health.yaml}
Command failed on ovh072 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-comp.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/data-scan.yaml whitelist_health.yaml}
"2018-02-28 22:37:59.318041 mon.a mon.0 158.69.75.125:6789/0 2641 : cluster [WRN] daemon mds.c is not responding, replacing it as rank 0 with standby daemon mds.b" in cluster log
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/failover.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_trivial_sync.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_direct_io.yaml}
luminous
luminous
master
ovh
 
kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml objectstore-ec/filestore-xfs.yaml tasks/kernel_cfuse_workunits_untarbuild_blogbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/forward-scrub.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore-ec-root.yaml thrashers/default.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_kernel_untar_build.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/journal-repair.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_misc.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_o_trunc.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/mds-flush.yaml whitelist_health.yaml}
Command failed on ovh098 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 2'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_snaps.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/bluestore.yaml thrashers/mds.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_ffsb.yaml}
Command failed (workunit test suites/ffsb.sh) on ovh099 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=luminous TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/ffsb.sh'
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/mds-full.yaml whitelist_health.yaml}
"2018-02-28 20:48:30.649031 mon.a mon.0 158.69.72.252:6789/0 217 : cluster [WRN] Health check failed: 2 osds down (OSD_DOWN)" in cluster log
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_dbench.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_ffsb.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-comp.yaml tasks/pool-perm.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_suites_fsstress.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-comp-ec-root.yaml tasks/kclient_workunit_suites_fsx.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore-ec-root.yaml tasks/sessionmap.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml objectstore-ec/filestore-xfs.yaml thrashers/mon.yaml thrashosds-health.yaml whitelist_health.yaml workloads/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore-comp.yaml tasks/kclient_workunit_suites_fsync.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/bluestore.yaml tasks/strays.yaml whitelist_health.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/bluestore-ec-root.yaml tasks/kclient_workunit_suites_iozone.yaml}
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/no.yaml objectstore-ec/bluestore.yaml tasks/kclient_workunit_suites_pjd.yaml}
luminous
luminous
master
ovh
 
kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml objectstore-ec/filestore-xfs.yaml tasks/volume-client.yaml whitelist_health.yaml}
Command failed on ovh026 with status 1: 'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage daemon-helper kill ceph-osd -f --cluster ceph -i 3'
luminous
luminous
master
ovh
 
kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml inline/yes.yaml objectstore-ec/filestore-xfs.yaml tasks/kclient_workunit_trivial_sync.yaml}