Columns: ID | Status | Ceph Branch | Suite Branch | Teuthology Branch | Machine | OS | Nodes | Description | Failure Reason

Job 1
  Ceph Branch:        wip-yuri4-testing-2019-02-05-1539-mimic
  Suite Branch:       wip-yuri4-testing-2019-02-05-1539-mimic
  Teuthology Branch:  master
  Machine:            smithi
  OS:                 ubuntu 16.04
  Description:        rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-install/hammer.yaml backoff/normal.yaml ceph.yaml clusters/{openstack.yaml two-plus-three.yaml} d-balancer/off.yaml distro$/{ubuntu_16.04.yaml} msgr-failures/fastclose.yaml msgr/random.yaml rados.yaml rocksdb.yaml thrashers/morepggrow.yaml thrashosds-health.yaml workloads/radosbench.yaml}
  Failure Reason:     reached maximum tries (500) after waiting for 3000 seconds

Job 2
  Ceph Branch:        wip-yuri4-testing-2019-02-05-1539-mimic
  Suite Branch:       wip-yuri4-testing-2019-02-05-1539-mimic
  Teuthology Branch:  master
  Machine:            smithi
  OS:                 rhel 7.5
  Description:        rados/singleton-nomsgr/{all/ceph-kvstore-tool.yaml rados.yaml supported-random-distro$/{rhel_latest.yaml}}
  Failure Reason:     Command failed (workunit test cephtool/test_kvstore_tool.sh) on smithi196 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=5e6d1de370e8238cc4f2b6c7fad087d0d8f1bbdd TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/cephtool/test_kvstore_tool.sh'

Job 3
  Ceph Branch:        wip-yuri4-testing-2019-02-05-1539-mimic
  Suite Branch:       wip-yuri4-testing-2019-02-05-1539-mimic
  Teuthology Branch:  master
  Machine:            smithi
  OS:                 ubuntu 18.04
  Description:        rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{more-active-recovery.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/fastclose.yaml msgr/random.yaml objectstore/filestore-xfs.yaml rados.yaml rocksdb.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/radosbench.yaml}

Job 4
  Ceph Branch:        wip-yuri4-testing-2019-02-05-1539-mimic
  Suite Branch:       wip-yuri4-testing-2019-02-05-1539-mimic
  Teuthology Branch:  master
  Machine:            smithi
  OS:                 ubuntu 18.04
  Description:        rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{default.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/few.yaml msgr/simple.yaml objectstore/filestore-xfs.yaml rados.yaml rocksdb.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/set-chunks-read.yaml}

Job 5
  Ceph Branch:        wip-yuri4-testing-2019-02-05-1539-mimic
  Suite Branch:       wip-yuri4-testing-2019-02-05-1539-mimic
  Teuthology Branch:  master
  Machine:            smithi
  OS:                 centos 7.4
  Description:        rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-install/hammer.yaml backoff/normal.yaml ceph.yaml clusters/{openstack.yaml two-plus-three.yaml} d-balancer/crush-compat.yaml distro$/{centos_latest.yaml} msgr-failures/fastclose.yaml msgr/random.yaml rados.yaml rocksdb.yaml thrashers/default.yaml thrashosds-health.yaml workloads/test_rbd_api.yaml}
  Failure Reason:     Command failed on smithi076 with status 1: '\n sudo yum -y install ceph-radosgw\n '

Job 6
  Ceph Branch:        wip-yuri4-testing-2019-02-05-1539-mimic
  Suite Branch:       wip-yuri4-testing-2019-02-05-1539-mimic
  Teuthology Branch:  master
  Machine:            smithi
  OS:                 centos 7.4
  Description:        rados/thrash-erasure-code-shec/{ceph.yaml clusters/{fixed-4.yaml openstack.yaml} leveldb.yaml msgr-failures/few.yaml objectstore/bluestore.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{centos_latest.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-rados-plugin=shec-k=4-m=3-c=2.yaml}

Job 7
  Ceph Branch:        wip-yuri4-testing-2019-02-05-1539-mimic
  Suite Branch:       wip-yuri4-testing-2019-02-05-1539-mimic
  Teuthology Branch:  master
  Machine:            smithi
  OS:                 ubuntu 16.04
  Description:        rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-install/luminous.yaml backoff/peering_and_degraded.yaml ceph.yaml clusters/{openstack.yaml two-plus-three.yaml} d-balancer/crush-compat.yaml distro$/{ubuntu_16.04.yaml} msgr-failures/osd-delay.yaml msgr/async.yaml rados.yaml rocksdb.yaml thrashers/mapgap.yaml thrashosds-health.yaml workloads/radosbench.yaml}
  Failure Reason:     reached maximum tries (500) after waiting for 3000 seconds