| ID | Status | Ceph Branch | Suite Branch | Teuthology Branch | Machine | OS | Nodes | Description | Failure Reason |
|----|--------|-------------|--------------|-------------------|---------|----|-------|-------------|----------------|
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-low-osd-mem-target openstack settings/optimized ubuntu_latest workloads/fio_4M_rand_read} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-stupid openstack settings/optimized ubuntu_latest workloads/fio_4M_rand_rw} |  |
|  |  | octopus | octopus | master | smithi | centos 7.6 |  | rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size 1-install/jewel backoff/normal ceph clusters/{openstack three-plus-one} d-balancer/crush-compat distro$/{centos_7.6} msgr-failures/fastclose rados thrashers/careful thrashosds-health workloads/rbd_cls} | Command failed on smithi001 with status 5: 'sudo systemctl stop ceph-95fb623e-f1f4-11ea-a080-001a4aab830c@mon.a' |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-basic-min-osd-mem-target openstack settings/optimized ubuntu_latest workloads/fio_4M_rand_write} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-bitmap openstack settings/optimized ubuntu_latest workloads/radosbench_4K_rand_read} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-comp openstack settings/optimized ubuntu_latest workloads/radosbench_4K_seq_read} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-low-osd-mem-target openstack settings/optimized ubuntu_latest workloads/radosbench_4M_rand_read} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-stupid openstack settings/optimized ubuntu_latest workloads/radosbench_4M_seq_read} |  |
|  |  | octopus | octopus | master | smithi | centos 7.6 |  | rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size 1-install/mimic-v1only backoff/normal ceph clusters/{openstack three-plus-one} d-balancer/off distro$/{centos_7.6} msgr-failures/fastclose rados thrashers/morepggrow thrashosds-health workloads/cache-snaps} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-basic-min-osd-mem-target openstack settings/optimized ubuntu_latest workloads/radosbench_4M_write} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-bitmap openstack settings/optimized ubuntu_latest workloads/radosbench_omap_write} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-comp openstack settings/optimized ubuntu_latest workloads/sample_fio} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-low-osd-mem-target openstack settings/optimized ubuntu_latest workloads/sample_radosbench} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/standalone/{supported-random-distro$/{ubuntu_latest} workloads/mon} | Command failed (workunit test mon/mon-last-epoch-clean.sh) on smithi074 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=65d8836383deeacfc8343f8d6563548a4c44c835 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/standalone/mon/mon-last-epoch-clean.sh' |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-low-osd-mem-target openstack settings/optimized ubuntu_latest workloads/cosbench_64K_read_write} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-stupid openstack settings/optimized ubuntu_latest workloads/cosbench_64K_write} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-basic-min-osd-mem-target openstack settings/optimized ubuntu_latest workloads/fio_4K_rand_read} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-bitmap openstack settings/optimized ubuntu_latest workloads/fio_4K_rand_rw} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-comp openstack settings/optimized ubuntu_latest workloads/fio_4M_rand_read} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-low-osd-mem-target openstack settings/optimized ubuntu_latest workloads/fio_4M_rand_rw} |  |
|  |  | octopus | octopus | master | smithi | centos 7.6 |  | rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size 1-install/jewel-v1only backoff/normal ceph clusters/{openstack three-plus-one} d-balancer/off distro$/{centos_7.6} msgr-failures/fastclose rados thrashers/morepggrow thrashosds-health workloads/radosbench} | Command failed on smithi198 with status 1: 'sudo /home/ubuntu/cephtest/cephadm --image quay.ceph.io/ceph-ci/ceph:65d8836383deeacfc8343f8d6563548a4c44c835 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 0efb780a-f1f4-11ea-a080-001a4aab830c -- ceph orch host add smithi198' |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-stupid openstack settings/optimized ubuntu_latest workloads/fio_4M_rand_write} |  |
|  |  | octopus | octopus | master | smithi | ubuntu 18.04 |  | rados/perf/{ceph objectstore/bluestore-basic-min-osd-mem-target openstack settings/optimized ubuntu_latest workloads/radosbench_4K_rand_read} |  |
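Each failure above is a single shell command that exited non-zero on a test node. For reference only, a run like this is usually re-queued rather than re-scheduled from scratch; the sketch below assumes the `teuthology-suite` CLI's rerun options (`--rerun`, `--rerun-statuses`) and uses a placeholder run name, since the run ID is not preserved in this table.

```sh
# Hedged sketch: re-queue only the failed/dead jobs from a previous run.
# "$RUN_NAME" is a placeholder; the actual run name is not recorded in the
# table above, so substitute the name shown on pulpito for this run.
teuthology-suite --rerun "$RUN_NAME" --rerun-statuses fail,dead \
    --ceph octopus --suite-branch octopus --machine-type smithi
```

The branch and machine-type values mirror the Ceph Branch, Suite Branch, and Machine columns of the table; everything else about the invocation is an assumption, not something recorded in this run summary.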