Status Job ID Links Posted Started Updated Runtime Duration In Waiting Machine Teuthology Branch OS Type OS Version Description Nodes
pass 5861430 2021-02-06 13:15:45 2021-02-06 14:13:16 2021-02-06 16:00:35 1:47:19 1:33:43 0:13:36 gibba master rhel 8.3 fs/mixed-clients/{begin clusters/1a3s-mds-2c-client conf/{client mds mon osd} kclient-overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped} objectstore-ec/bluestore-bitmap overrides/{frag_enable osd-asserts whitelist_health whitelist_wrongly_marked_down} tasks/kernel_cfuse_workunits_dbench_iozone} 2
fail 5861431 2021-02-06 13:15:46 2021-02-06 14:15:46 2021-02-06 15:33:31 1:17:45 0:41:13 0:36:32 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{rhel_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-comp-ec-root omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/3 scrub/yes tasks/{0-check-counter workunit/fs/misc}} 3
Failure Reason:

"2021-02-06T15:02:15.812436+0000 mds.e (mds.0) 19 : cluster [WRN] Scrub error on inode 0x10000000262 (/client.0/tmp/testdir/dir1) see mds.e log and `damage ls` output for details" in cluster log

pass 5861432 2021-02-06 13:15:47 2021-02-06 14:38:39 2021-02-06 15:36:51 0:58:12 0:40:11 0:18:01 gibba master rhel 8.3 fs/thrash/workloads/{begin clusters/1a5s-mds-1c-client conf/{client mds mon osd} distro/{rhel_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout thrashosds-health whitelist_health whitelist_wrongly_marked_down} ranks/3 tasks/{1-thrash/mon 2-workunit/suites/ffsb}} 2
pass 5861433 2021-02-06 13:15:48 2021-02-06 14:46:00 2021-02-06 16:36:17 1:50:17 0:49:23 1:00:54 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{rhel_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-comp-ec-root omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/5 scrub/yes tasks/{0-check-counter workunit/suites/ffsb}} 3
pass 5861434 2021-02-06 13:15:49 2021-02-06 15:33:37 2021-02-06 16:03:46 0:30:09 0:15:20 0:14:49 gibba master rhel 8.3 fs/thrash/workloads/{begin clusters/1a5s-mds-1c-client conf/{client mds mon osd} distro/{ubuntu_latest} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout thrashosds-health whitelist_health whitelist_wrongly_marked_down} ranks/5 tasks/{1-thrash/osd 2-workunit/suites/pjd}} 2
pass 5861435 2021-02-06 13:15:50 2021-02-06 15:36:59 2021-02-06 16:25:29 0:48:30 0:12:27 0:36:03 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{centos_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-comp-ec-root omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/1 scrub/yes tasks/{0-check-counter workunit/suites/fsync-tester}} 3
pass 5861436 2021-02-06 13:15:51 2021-02-06 16:00:42 2021-02-06 16:42:23 0:41:41 0:27:06 0:14:35 gibba master rhel 8.3 fs/functional/{begin clusters/1a3s-mds-4c-client conf/{client mds mon osd} distro/{centos_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore/bluestore-ec-root overrides/{frag_enable no_client_pidfile whitelist_health whitelist_wrongly_marked_down} tasks/admin} 2
pass 5861437 2021-02-06 13:15:52 2021-02-06 16:03:52 2021-02-06 16:52:22 0:48:30 0:13:53 0:34:37 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{centos_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-comp-ec-root omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/3 scrub/yes tasks/{0-check-counter workunit/suites/pjd}} 3
pass 5861438 2021-02-06 13:15:53 2021-02-06 16:25:35 2021-02-06 16:59:13 0:33:38 0:12:09 0:21:29 gibba master rhel 8.3 fs/thrash/workloads/{begin clusters/1a5s-mds-1c-client conf/{client mds mon osd} distro/{centos_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout thrashosds-health whitelist_health whitelist_wrongly_marked_down} ranks/1 tasks/{1-thrash/osd 2-workunit/fs/trivial_sync}} 2
fail 5861439 2021-02-06 13:15:53 2021-02-06 16:36:27 2021-02-06 18:53:17 2:16:50 1:58:49 0:18:01 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{centos_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-comp-ec-root omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/5 scrub/yes tasks/{0-check-counter workunit/suites/blogbench}} 3
Failure Reason:

"2021-02-06T17:05:31.950056+0000 mds.f (mds.0) 14 : cluster [WRN] Scrub error on inode 0x10000000aef (/client.0/tmp/blogbench-1.0/src/blogtest_in/blog-13) see mds.f log and `damage ls` output for details" in cluster log

pass 5861440 2021-02-06 13:15:55 2021-02-06 16:42:28 2021-02-06 17:51:16 1:08:48 0:48:05 0:20:43 gibba master rhel 8.3 fs/thrash/workloads/{begin clusters/1a5s-mds-1c-client conf/{client mds mon osd} distro/{centos_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout thrashosds-health whitelist_health whitelist_wrongly_marked_down} ranks/3 tasks/{1-thrash/osd 2-workunit/suites/ffsb}} 2
pass 5861441 2021-02-06 13:15:56 2021-02-06 16:52:29 2021-02-06 17:26:07 0:33:38 0:15:02 0:18:36 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{centos_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-comp-ec-root omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/1 scrub/yes tasks/{0-check-counter workunit/suites/fsstress}} 3
fail 5861442 2021-02-06 13:15:58 2021-02-06 16:59:21 2021-02-06 18:49:05 1:49:44 1:10:42 0:39:02 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{rhel_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-comp-ec-root omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/3 scrub/yes tasks/{0-check-counter workunit/suites/iogen}} 3
Failure Reason:

SELinux denials found on ubuntu@gibba004.front.sepia.ceph.com: ['type=AVC msg=audit(1612634409.975:8472): avc: denied { read write open } for pid=15643 comm="cachefilesd" path="/home/ubuntu/cephtest/archive/coredump/1612634407.15643.core" dev="sda1" ino=524499 scontext=system_u:system_r:cachefilesd_t:s0 tcontext=system_u:object_r:user_home_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1612634408.842:8468): avc: denied { add_name } for pid=15643 comm="cachefilesd" name="1612634407.15643.core" scontext=system_u:system_r:cachefilesd_t:s0 tcontext=unconfined_u:object_r:user_home_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1612634407.838:8463): avc: denied { search } for pid=15643 comm="cachefilesd" name="ubuntu" dev="sda1" ino=2387 scontext=system_u:system_r:cachefilesd_t:s0 tcontext=unconfined_u:object_r:user_home_dir_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1612634408.842:8469): avc: denied { create } for pid=15643 comm="cachefilesd" name="1612634407.15643.core" scontext=system_u:system_r:cachefilesd_t:s0 tcontext=system_u:object_r:user_home_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1612634408.842:8467): avc: denied { write } for pid=15643 comm="cachefilesd" name="coredump" dev="sda1" ino=524299 scontext=system_u:system_r:cachefilesd_t:s0 tcontext=unconfined_u:object_r:user_home_t:s0 tclass=dir permissive=1']

pass 5861443 2021-02-06 13:16:00 2021-02-06 17:26:14 2021-02-06 18:16:03 0:49:49 0:13:34 0:36:15 gibba master rhel 8.3 fs/thrash/workloads/{begin clusters/1a5s-mds-1c-client conf/{client mds mon osd} distro/{rhel_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout thrashosds-health whitelist_health whitelist_wrongly_marked_down} ranks/5 tasks/{1-thrash/mds 2-workunit/suites/pjd}} 2
pass 5861444 2021-02-06 13:16:02 2021-02-06 17:51:17 2021-02-06 18:46:56 0:55:39 0:18:41 0:36:58 gibba master rhel 8.3 fs/thrash/multifs/{begin clusters/1a3s-mds-2c-client conf/{client mds mon osd} distro/{rhel_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} msgr-failures/osd-mds-delay objectstore/bluestore-bitmap overrides/{frag_enable multifs session_timeout thrashosds-health whitelist_health whitelist_wrongly_marked_down} tasks/{1-thrash/mon 2-workunit/cfuse_workunit_suites_fsstress}} 2
fail 5861445 2021-02-06 13:16:03 2021-02-06 18:16:10 2021-02-06 20:21:55 2:05:45 1:18:36 0:47:09 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{centos_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-bitmap omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/5 scrub/yes tasks/{0-check-counter workunit/fs/misc}} 3
Failure Reason:

Command failed on gibba004 with status 1: 'sudo rm -rf -- /home/ubuntu/cephtest/mnt.0/client.0/tmp'

pass 5861446 2021-02-06 13:16:05 2021-02-06 18:49:15 2021-02-06 19:34:05 0:44:50 0:28:13 0:16:37 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{rhel_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-bitmap omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/1 scrub/yes tasks/{0-check-counter workunit/suites/ffsb}} 3
pass 5861447 2021-02-06 13:16:07 2021-02-06 18:53:26 2021-02-06 19:16:13 0:22:47 0:10:30 0:12:17 gibba master rhel 8.3 fs/thrash/workloads/{begin clusters/1a5s-mds-1c-client conf/{client mds mon osd} distro/{rhel_8} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout thrashosds-health whitelist_health whitelist_wrongly_marked_down} ranks/1 tasks/{1-thrash/mds 2-workunit/fs/trivial_sync}} 2
pass 5861448 2021-02-06 13:16:08 2021-02-06 18:53:27 2021-02-06 19:58:58 1:05:31 0:11:54 0:53:37 gibba master rhel 8.3 fs/workload/{begin clusters/1a5s-mds-1c-client-3node conf/{client mds mon osd} distro/{ubuntu_latest} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} objectstore-ec/bluestore-bitmap omap_limit/10000 overrides/{frag_enable osd-asserts session_timeout whitelist_health whitelist_wrongly_marked_down} ranks/3 scrub/yes tasks/{0-check-counter workunit/suites/fsync-tester}} 3
pass 5861449 2021-02-06 13:16:09 2021-02-06 19:34:11 2021-02-06 20:24:58 0:50:47 0:39:13 0:11:34 gibba master rhel 8.3 fs/thrash/workloads/{begin clusters/1a5s-mds-1c-client conf/{client mds mon osd} distro/{ubuntu_latest} mount/kclient/{mount overrides/{distro/stock/{k-stock rhel_8} ms-die-on-skipped}} msgr-failures/osd-mds-delay objectstore-ec/bluestore-comp-ec-root overrides/{frag_enable session_timeout thrashosds-health whitelist_health whitelist_wrongly_marked_down} ranks/3 tasks/{1-thrash/mds 2-workunit/suites/ffsb}} 2