| ID | Status | Ceph Branch | Suite Branch | Teuthology Branch | Machine | OS | Nodes | Description | Failure Reason |
|----|--------|-------------|--------------|-------------------|---------|----|-------|-------------|----------------|
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_direct_io.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml fs/xfs.yaml tasks/kernel_cfuse_workunits_dbench_iozone.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/auto-repair.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml thrashers/default.yaml workloads/kclient_workunit_suites_ffsb.yaml} | Command failed (workunit test suites/ffsb.sh) on ovh016 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=kraken TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/ffsb.sh' |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_kernel_untar_build.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_misc.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/backtrace.yaml xfs.yaml} | {'ovh084.front.sepia.ceph.com': {'msg': 'One or more items failed', 'failed': True, 'changed': False}} |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_o_trunc.yaml} | Command failed on ovh039 with status 100: u'sudo DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install ceph=11.2.0-291-gae0eab5-1xenial ceph-mds=11.2.0-291-gae0eab5-1xenial ceph-mgr=11.2.0-291-gae0eab5-1xenial ceph-common=11.2.0-291-gae0eab5-1xenial ceph-fuse=11.2.0-291-gae0eab5-1xenial ceph-test=11.2.0-291-gae0eab5-1xenial radosgw=11.2.0-291-gae0eab5-1xenial python-ceph=11.2.0-291-gae0eab5-1xenial libcephfs2=11.2.0-291-gae0eab5-1xenial libcephfs-dev=11.2.0-291-gae0eab5-1xenial libcephfs-java=11.2.0-291-gae0eab5-1xenial libcephfs-jni=11.2.0-291-gae0eab5-1xenial librados2=11.2.0-291-gae0eab5-1xenial librbd1=11.2.0-291-gae0eab5-1xenial rbd-fuse=11.2.0-291-gae0eab5-1xenial' |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_snaps.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/client-limits.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml thrashers/mds.yaml workloads/kclient_workunit_suites_iozone.yaml} | Command failed on ovh080 with status 100: u'sudo DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install ceph=11.2.0-291-gae0eab5-1xenial ceph-mds=11.2.0-291-gae0eab5-1xenial ceph-mgr=11.2.0-291-gae0eab5-1xenial ceph-common=11.2.0-291-gae0eab5-1xenial ceph-fuse=11.2.0-291-gae0eab5-1xenial ceph-test=11.2.0-291-gae0eab5-1xenial radosgw=11.2.0-291-gae0eab5-1xenial python-ceph=11.2.0-291-gae0eab5-1xenial libcephfs2=11.2.0-291-gae0eab5-1xenial libcephfs-dev=11.2.0-291-gae0eab5-1xenial libcephfs-java=11.2.0-291-gae0eab5-1xenial libcephfs-jni=11.2.0-291-gae0eab5-1xenial librados2=11.2.0-291-gae0eab5-1xenial librbd1=11.2.0-291-gae0eab5-1xenial rbd-fuse=11.2.0-291-gae0eab5-1xenial' |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_suites_dbench.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_suites_ffsb.yaml} | "2017-06-08 19:42:57.567052 osd.2 158.69.65.204:6800/8205 1828 : cluster [WRN] OSD near full (90%)" in cluster log |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/client-recovery.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_suites_fsstress.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_suites_fsx.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/config-commands.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml thrashers/mon.yaml workloads/kclient_workunit_suites_ffsb.yaml} | "2017-06-08 20:23:08.689960 osd.2 158.69.66.58:6800/8191 1 : cluster [WRN] OSD near full (90%)" in cluster log |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_suites_fsync.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_suites_iozone.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/damage.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_suites_pjd.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_trivial_sync.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/data-scan.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_direct_io.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/mixed-clients/{clusters/2-clients.yaml conf.yaml fs/xfs.yaml tasks/kernel_cfuse_workunits_untarbuild_blogbench.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml thrashers/default.yaml workloads/kclient_workunit_suites_iozone.yaml} | "2017-06-08 21:06:57.392313 osd.3 158.69.66.10:6804/10674 3432 : cluster [WRN] OSD near full (90%)" in cluster log |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_kernel_untar_build.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/forward-scrub.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_misc.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_o_trunc.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/journal-repair.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_snaps.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml thrashers/mds.yaml workloads/kclient_workunit_suites_ffsb.yaml} | "2017-06-08 20:57:38.123374 osd.2 158.69.67.37:6800/8206 1 : cluster [WRN] OSD near full (90%)" in cluster log |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_suites_dbench.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/mds-flush.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_suites_ffsb.yaml} | Command failed (workunit test suites/ffsb.sh) on ovh049 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=kraken TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/ffsb.sh' |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_suites_fsstress.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/pool-perm.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_suites_fsx.yaml} | Command failed on ovh043 with status 4: u'rm -f /tmp/linux-image.deb && echo linux-image-4.12.0-rc3-ceph-gf7fd5a7ba1f5_4.12.0-rc3-ceph-gf7fd5a7ba1f5-1_amd64.deb \| wget -nv -O /tmp/linux-image.deb --base=https://1.chacra.ceph.com/r/kernel/testing/f7fd5a7ba1f5cc4545b20c138a3094c0841a7b2a/ubuntu/xenial/flavors/default/pool/main/l/linux-4.12.0-rc3-ceph-gf7fd5a7ba1f5/ --input-file=-' |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/thrash/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml thrashers/mon.yaml workloads/kclient_workunit_suites_iozone.yaml} | Ansible failed on ovh076.front.sepia.ceph.com while installing python-pip: "E: Failed to fetch http://nova.clouds.archive.ubuntu.com/ubuntu/pool/main/p/python-defaults/libpython-all-dev_2.7.11-1_amd64.deb Temporary failure resolving 'nova.clouds.archive.ubuntu.com'"; "E: Unable to fetch some archives, maybe run apt-get update or try with --fix-missing?" |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_suites_fsync.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/sessionmap.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_suites_iozone.yaml} | Command failed on ovh099 with status 100: u'sudo DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install ceph=11.2.0-291-gae0eab5-1xenial ceph-mds=11.2.0-291-gae0eab5-1xenial ceph-mgr=11.2.0-291-gae0eab5-1xenial ceph-common=11.2.0-291-gae0eab5-1xenial ceph-fuse=11.2.0-291-gae0eab5-1xenial ceph-test=11.2.0-291-gae0eab5-1xenial radosgw=11.2.0-291-gae0eab5-1xenial python-ceph=11.2.0-291-gae0eab5-1xenial libcephfs2=11.2.0-291-gae0eab5-1xenial libcephfs-dev=11.2.0-291-gae0eab5-1xenial libcephfs-java=11.2.0-291-gae0eab5-1xenial libcephfs-jni=11.2.0-291-gae0eab5-1xenial librados2=11.2.0-291-gae0eab5-1xenial librbd1=11.2.0-291-gae0eab5-1xenial rbd-fuse=11.2.0-291-gae0eab5-1xenial' |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/no.yaml tasks/kclient_workunit_suites_pjd.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/recovery/{clusters/4-remote-clients.yaml debug/mds_client.yaml dirfrag/frag_enable.yaml mounts/kmounts.yaml tasks/volume-client.yaml xfs.yaml} |  |
|  |  | kraken | kraken | master | ovh |  |  | kcephfs/cephfs/{clusters/fixed-3-cephfs.yaml conf.yaml fs/xfs.yaml inline/yes.yaml tasks/kclient_workunit_trivial_sync.yaml} |  |