Status Job ID Links Posted Started Updated Runtime Duration In Waiting Machine Teuthology Branch OS Type OS Version Description Nodes
pass 4615849 2019-12-19 22:51:33 2019-12-19 22:53:04 2019-12-19 23:13:01 0:19:57 0:13:54 0:06:03 smithi master ubuntu 18.04 rados/singleton/{all/osd-backfill.yaml msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4615850 2019-12-19 22:51:34 2019-12-19 22:53:04 2019-12-19 23:15:01 0:21:57 0:14:09 0:07:48 smithi master rhel 8.0 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-bitmap.yaml supported-random-distro$/{rhel_8.yaml} tasks/failover.yaml} 2
pass 4615851 2019-12-19 22:51:35 2019-12-19 22:53:13 2019-12-19 23:29:12 0:35:59 0:27:50 0:08:09 smithi master ubuntu 18.04 rados/singleton-bluestore/{all/cephtool.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4615852 2019-12-19 22:51:37 2019-12-19 22:54:29 2019-12-19 23:28:29 0:34:00 0:25:51 0:08:09 smithi master rhel 8.0 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/normal.yaml msgr-failures/fastclose.yaml objectstore/bluestore-bitmap.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/fastread.yaml thrashosds-health.yaml workloads/ec-rados-plugin=jerasure-k=2-m=1.yaml} 2
pass 4615853 2019-12-19 22:51:38 2019-12-19 22:54:29 2019-12-19 23:22:30 0:28:01 0:17:58 0:10:03 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-async-recovery.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/upmap.yaml msgr-failures/fastclose.yaml msgr/async-v1only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/mapgap.yaml thrashosds-health.yaml workloads/cache-agent-big.yaml} 2
pass 4615854 2019-12-19 22:51:39 2019-12-19 22:54:29 2019-12-19 23:16:30 0:22:01 0:13:57 0:08:04 smithi master ubuntu 18.04 rados/singleton/{all/osd-recovery-incomplete.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4615855 2019-12-19 22:51:40 2019-12-19 22:54:49 2019-12-19 23:10:48 0:15:59 0:09:11 0:06:48 smithi master rhel 8.0 rados/multimon/{clusters/3.yaml msgr-failures/few.yaml msgr/async-v1only.yaml no_pools.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/mon_clock_with_skews.yaml} 2
fail 4615856 2019-12-19 22:51:42 2019-12-19 22:55:02 2019-12-19 23:14:59 0:19:57 0:12:11 0:07:46 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-bitmap.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_4M_rand_read.yaml} 1
Failure Reason:

Command failed on smithi039 with status 1: '/home/ubuntu/cephtest/cbt/cbt.py -a /home/ubuntu/cephtest/archive/cbt /home/ubuntu/cephtest/archive/cbt/cbt_config.yaml'

pass 4615857 2019-12-19 22:51:43 2019-12-19 22:56:15 2019-12-19 23:28:14 0:31:59 0:25:07 0:06:52 smithi master rhel 8.0 rados/objectstore/{backends/objectcacher-stress.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4615858 2019-12-19 22:51:44 2019-12-19 22:56:23 2019-12-19 23:22:23 0:26:00 0:18:44 0:07:16 smithi master ubuntu 18.04 rados/singleton-nomsgr/{all/osd_stale_reads.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4615859 2019-12-19 22:51:46 2019-12-19 22:56:26 2019-12-19 23:14:25 0:17:59 0:10:53 0:07:06 smithi master ubuntu 18.04 rados/singleton/{all/osd-recovery.yaml msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
fail 4615860 2019-12-19 22:51:47 2019-12-19 22:56:32 2019-12-19 23:22:32 0:26:00 0:18:20 0:07:40 smithi master rhel 8.0 rados/monthrash/{ceph.yaml clusters/9-mons.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/force-sync-many.yaml workloads/pool-create-delete.yaml} 2
Failure Reason:

SELinux denials found on ubuntu@smithi124.front.sepia.ceph.com: ['type=AVC msg=audit(1576797473.347:6901): avc: denied { open } for pid=34029 comm="rhsmcertd-worke" path="/var/cache/dnf/ceph-b62bef3614ddca06/repodata/repomd.xml" dev="sda1" ino=262154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.711:6905): avc: denied { read } for pid=34130 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.164:6895): avc: denied { open } for pid=34029 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.711:6906): avc: denied { lock } for pid=34130 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.238:6900): avc: denied { add_name } for pid=34029 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576797473.711:6905): avc: denied { open } for pid=34130 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.164:6895): avc: denied { read write } for pid=34029 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.238:6900): avc: denied { create } for pid=34029 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.164:6897): avc: denied { getattr } for pid=34029 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.238:6899): avc: denied { open } for pid=34029 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.164:6898): avc: denied { map } for pid=34029 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.501:6903): avc: denied { unlink } for pid=34029 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=58949 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.238:6900): avc: denied { open } for pid=34029 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=58949 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC 
msg=audit(1576797473.711:6907): avc: denied { map } for pid=34130 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.581:6904): avc: denied { read } for pid=34029 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.238:6900): avc: denied { write } for pid=34029 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576797473.581:6904): avc: denied { open } for pid=34029 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.501:6903): avc: denied { remove_name } for pid=34029 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=58949 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576797473.164:6896): avc: denied { lock } for pid=34029 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797473.347:6902): avc: denied { setattr } for pid=34029 comm="rhsmcertd-worke" name="2c35ae2a211c917c8720c3cf79187f2d3049d82f8e613c2dab44a57fa73c97e7-primary.xml.gz" dev="sda1" ino=262180 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1']

pass 4615861 2019-12-19 22:51:48 2019-12-19 22:56:46 2019-12-19 23:32:45 0:35:59 0:29:36 0:06:23 smithi master ubuntu 18.04 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/rados_workunit_loadgen_big.yaml} 2
fail 4615862 2019-12-19 22:51:50 2019-12-19 22:56:46 2019-12-19 23:14:45 0:17:59 0:03:34 0:14:25 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-install/jewel-v1only.yaml backoff/normal.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/crush-compat.yaml distro$/{ubuntu_latest.yaml} msgr-failures/fastclose.yaml rados.yaml thrashers/careful.yaml thrashosds-health.yaml workloads/test_rbd_api.yaml} 4
Failure Reason:

Command failed on smithi095 with status 100: u'sudo DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install ceph=10.2.11-18-g115560f-1bionic ceph-mds=10.2.11-18-g115560f-1bionic ceph-common=10.2.11-18-g115560f-1bionic ceph-fuse=10.2.11-18-g115560f-1bionic ceph-test=10.2.11-18-g115560f-1bionic radosgw=10.2.11-18-g115560f-1bionic python3-rados=10.2.11-18-g115560f-1bionic python3-rgw=10.2.11-18-g115560f-1bionic python3-cephfs=10.2.11-18-g115560f-1bionic python3-rbd=10.2.11-18-g115560f-1bionic librados2=10.2.11-18-g115560f-1bionic librbd1=10.2.11-18-g115560f-1bionic rbd-fuse=10.2.11-18-g115560f-1bionic librados2=10.2.11-18-g115560f-1bionic'

pass 4615863 2019-12-19 22:51:51 2019-12-19 22:56:49 2019-12-19 23:20:48 0:23:59 0:12:54 0:11:05 smithi master ubuntu 18.04 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{more-async-partial-recovery.yaml} backoff/peering_and_degraded.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/morepggrow.yaml thrashosds-health.yaml workloads/cache-agent-small.yaml} 2
pass 4615864 2019-12-19 22:51:53 2019-12-19 22:58:12 2019-12-19 23:18:11 0:19:59 0:08:56 0:11:03 smithi master centos 8.0 rados/singleton/{all/peer.yaml msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615865 2019-12-19 22:51:54 2019-12-19 22:58:12 2019-12-19 23:18:11 0:19:59 0:11:43 0:08:16 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-comp.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_4M_seq_read.yaml} 1
fail 4615866 2019-12-19 22:51:56 2019-12-19 22:58:12 2019-12-19 23:46:12 0:48:00 0:38:00 0:10:00 smithi master rhel 8.0 rados/dashboard/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/filestore-xfs.yaml supported-random-distro$/{rhel_8.yaml} tasks/dashboard.yaml} 2
Failure Reason:

Test failure: test_all (tasks.mgr.dashboard.test_rgw.RgwBucketTest)

pass 4615867 2019-12-19 22:51:57 2019-12-19 22:58:29 2019-12-19 23:14:30 0:16:01 0:08:09 0:07:52 smithi master centos 8.0 rados/singleton-nomsgr/{all/pool-access.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615868 2019-12-19 22:51:58 2019-12-19 23:00:42 2019-12-19 23:22:41 0:21:59 0:13:47 0:08:12 smithi master rhel 8.0 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-comp.yaml supported-random-distro$/{rhel_8.yaml} tasks/insights.yaml} 2
pass 4615869 2019-12-19 22:52:00 2019-12-19 23:00:42 2019-12-19 23:28:41 0:27:59 0:19:30 0:08:29 smithi master ubuntu 18.04 rados/thrash-erasure-code-overwrites/{bluestore-bitmap.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/fastclose.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/fastread.yaml thrashosds-health.yaml workloads/ec-small-objects-overwrites.yaml} 2
pass 4615870 2019-12-19 22:52:01 2019-12-19 23:00:46 2019-12-19 23:28:45 0:27:59 0:11:54 0:16:05 smithi master centos 8.0 rados/thrash-erasure-code-shec/{ceph.yaml clusters/{fixed-4.yaml openstack.yaml} msgr-failures/fastclose.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{centos_8.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-rados-plugin=shec-k=4-m=3-c=2.yaml} 4
pass 4615871 2019-12-19 22:52:03 2019-12-19 23:02:52 2019-12-19 23:38:51 0:35:59 0:28:12 0:07:47 smithi master rhel 8.0 rados/thrash-erasure-code-isa/{arch/x86_64.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-rados-plugin=isa-k=2-m=1.yaml} 2
pass 4615872 2019-12-19 22:52:04 2019-12-19 23:02:52 2019-12-19 23:20:51 0:17:59 0:10:16 0:07:43 smithi master ubuntu 18.04 rados/thrash-erasure-code-big/{ceph.yaml cluster/{12-osds.yaml openstack.yaml} msgr-failures/few.yaml objectstore/bluestore-bitmap.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-rados-plugin=lrc-k=4-m=2-l=3.yaml} 3
fail 4615873 2019-12-19 22:52:05 2019-12-19 23:02:52 2019-12-19 23:18:51 0:15:59 0:06:02 0:09:57 smithi master centos 8.0 rados/verify/{centos_latest.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-thrash/none.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-avl.yaml rados.yaml tasks/rados_api_tests.yaml validater/valgrind.yaml} 2
Failure Reason:

Command failed on smithi084 with status 1: 'sudo yum -y install ceph-debuginfo'

pass 4615874 2019-12-19 22:52:06 2019-12-19 23:02:52 2019-12-19 23:20:51 0:17:59 0:09:57 0:08:02 smithi master ubuntu 18.04 rados/singleton/{all/pg-autoscaler.yaml msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 2
pass 4615875 2019-12-19 22:52:08 2019-12-19 23:04:41 2019-12-19 23:30:40 0:25:59 0:20:13 0:05:46 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-async-partial-recovery.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/osd-delay.yaml msgr/async.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/none.yaml thrashosds-health.yaml workloads/cache-pool-snaps-readproxy.yaml} 2
fail 4615876 2019-12-19 22:52:09 2019-12-19 23:04:41 2019-12-20 00:16:41 1:12:00 1:05:00 0:07:00 smithi master rhel 8.0 rados/standalone/{supported-random-distro$/{rhel_8.yaml} workloads/scrub.yaml} 1
Failure Reason:

Command failed (workunit test scrub/osd-scrub-test.sh) on smithi001 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/standalone/scrub/osd-scrub-test.sh'

pass 4615877 2019-12-19 22:52:11 2019-12-19 23:04:41 2019-12-19 23:20:40 0:15:59 0:10:15 0:05:44 smithi master rhel 8.0 rados/singleton/{all/pg-removal-interruption.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4615878 2019-12-19 22:52:12 2019-12-19 23:04:42 2019-12-19 23:32:40 0:27:58 0:21:04 0:06:54 smithi master rhel 8.0 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/rados_workunit_loadgen_mix.yaml} 2
pass 4615879 2019-12-19 22:52:13 2019-12-19 23:04:41 2019-12-19 23:22:40 0:17:59 0:09:58 0:08:01 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-low-osd-mem-target.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_4M_write.yaml} 1
pass 4615880 2019-12-19 22:52:14 2019-12-19 23:04:44 2019-12-19 23:36:43 0:31:59 0:23:27 0:08:32 smithi master ubuntu 18.04 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/few.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/minsize_recovery.yaml thrashosds-health.yaml workloads/ec-rados-plugin=jerasure-k=3-m=1.yaml} 2
fail 4615881 2019-12-19 22:52:16 2019-12-19 23:04:46 2019-12-19 23:24:45 0:19:59 0:03:33 0:16:26 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-install/jewel.yaml backoff/peering.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/off.yaml distro$/{ubuntu_latest.yaml} msgr-failures/few.yaml rados.yaml thrashers/default.yaml thrashosds-health.yaml workloads/cache-agent-big.yaml} 4
Failure Reason:

Command failed on smithi097 with status 100: u'sudo DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install ceph=10.2.11-18-g115560f-1bionic ceph-mds=10.2.11-18-g115560f-1bionic ceph-common=10.2.11-18-g115560f-1bionic ceph-fuse=10.2.11-18-g115560f-1bionic ceph-test=10.2.11-18-g115560f-1bionic radosgw=10.2.11-18-g115560f-1bionic python3-rados=10.2.11-18-g115560f-1bionic python3-rgw=10.2.11-18-g115560f-1bionic python3-cephfs=10.2.11-18-g115560f-1bionic python3-rbd=10.2.11-18-g115560f-1bionic librados2=10.2.11-18-g115560f-1bionic librbd1=10.2.11-18-g115560f-1bionic rbd-fuse=10.2.11-18-g115560f-1bionic librados2=10.2.11-18-g115560f-1bionic'

pass 4615882 2019-12-19 22:52:17 2019-12-19 23:04:46 2019-12-20 03:32:54 4:28:08 4:16:04 0:12:04 smithi master ubuntu 18.04 rados/objectstore/{backends/objectstore.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
fail 4615883 2019-12-19 22:52:18 2019-12-19 23:04:49 2019-12-19 23:54:48 0:49:59 0:41:16 0:08:43 smithi master centos 8.0 rados/singleton-nomsgr/{all/recovery-unfound-found.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
Failure Reason:

Exiting scrub checking -- not all pgs scrubbed.

pass 4615884 2019-12-19 22:52:20 2019-12-19 23:04:50 2019-12-19 23:34:48 0:29:58 0:22:45 0:07:13 smithi master ubuntu 18.04 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{default.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/upmap.yaml msgr-failures/fastclose.yaml msgr/async-v1only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/cache-pool-snaps.yaml} 2
fail 4615885 2019-12-19 22:52:21 2019-12-19 23:06:51 2019-12-19 23:48:50 0:41:59 0:34:56 0:07:03 smithi master rhel 8.0 rados/singleton/{all/radostool.yaml msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
Failure Reason:

"2019-12-19T23:44:05.459159+0000 mon.a (mon.0) 231 : cluster [WRN] Health check failed: 1 osds down (OSD_DOWN)" in cluster log

pass 4615886 2019-12-19 22:52:22 2019-12-19 23:06:51 2019-12-19 23:28:50 0:21:59 0:14:46 0:07:13 smithi master rhel 8.0 rados/multimon/{clusters/6.yaml msgr-failures/many.yaml msgr/async-v2only.yaml no_pools.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/mon_recovery.yaml} 2
pass 4615887 2019-12-19 22:52:23 2019-12-19 23:06:51 2019-12-19 23:44:52 0:38:01 0:27:04 0:10:57 smithi master rhel 8.0 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-low-osd-mem-target.yaml supported-random-distro$/{rhel_8.yaml} tasks/module_selftest.yaml} 2
pass 4615888 2019-12-19 22:52:24 2019-12-19 23:06:51 2019-12-19 23:40:51 0:34:00 0:24:37 0:09:23 smithi master rhel 8.0 rados/singleton/{all/random-eio.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 2
pass 4615889 2019-12-19 22:52:25 2019-12-19 23:06:51 2019-12-19 23:32:50 0:25:59 0:20:37 0:05:22 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-stupid.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_omap_write.yaml} 1
pass 4615890 2019-12-19 22:52:26 2019-12-19 23:06:51 2019-12-19 23:28:50 0:21:59 0:08:47 0:13:12 smithi master ubuntu 18.04 rados/monthrash/{ceph.yaml clusters/3-mons.yaml msgr-failures/mon-delay.yaml msgr/async-v1only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/many.yaml workloads/rados_5925.yaml} 2
dead 4615891 2019-12-19 22:52:28 2019-12-19 23:07:14 2019-12-20 11:09:46 12:02:32 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-async-recovery.yaml} backoff/peering_and_degraded.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/cache-snaps.yaml} 2
pass 4615892 2019-12-19 22:52:29 2019-12-19 23:08:38 2019-12-19 23:24:37 0:15:59 0:07:59 0:08:00 smithi master ubuntu 18.04 rados/singleton-nomsgr/{all/version-number-sanity.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
fail 4615893 2019-12-19 22:52:30 2019-12-19 23:08:38 2019-12-19 23:40:37 0:31:59 0:24:15 0:07:44 smithi master rhel 8.0 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/rados_workunit_loadgen_mostlyread.yaml} 2
Failure Reason:

SELinux denials found on ubuntu@smithi153.front.sepia.ceph.com: ['type=AVC msg=audit(1576797530.042:4555): avc: denied { open } for pid=20294 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.658:4552): avc: denied { setattr } for pid=20193 comm="rhsmcertd-worke" name="6e2fe611f78ac434c2918bac1eec468dbd24c9b4cdb65bf6a744d10f764f3284-primary.xml.gz" dev="sda1" ino=262274 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.529:4549): avc: denied { open } for pid=20193 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.529:4550): avc: denied { create } for pid=20193 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.529:4550): avc: denied { open } for pid=20193 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=10703 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.632:4551): avc: denied { open } for pid=20193 comm="rhsmcertd-worke" path="/var/cache/dnf/copr:copr.fedorainfracloud.org:ktdreyer:ceph-el8-ac801414381f5e61/repodata/repomd.xml" dev="sda1" ino=262184 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.529:4550): avc: denied { write } for pid=20193 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576797530.042:4557): avc: denied { map } for pid=20294 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.446:4548): avc: denied { map } for pid=20193 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.783:4553): avc: denied { remove_name } for pid=20193 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=10703 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576797529.446:4545): avc: denied { read write } for pid=20193 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.446:4546): avc: denied { lock } for pid=20193 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.529:4550): avc: denied { add_name } for pid=20193 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 
tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576797529.865:4554): avc: denied { open } for pid=20193 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.446:4545): avc: denied { open } for pid=20193 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797530.042:4556): avc: denied { lock } for pid=20294 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.865:4554): avc: denied { read } for pid=20193 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.783:4553): avc: denied { unlink } for pid=20193 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=10703 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797530.042:4555): avc: denied { read } for pid=20294 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576797529.446:4547): avc: denied { getattr } for pid=20193 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1']

pass 4615894 2019-12-19 22:52:31 2019-12-19 23:08:38 2019-12-19 23:28:37 0:19:59 0:12:17 0:07:42 smithi master ubuntu 18.04 rados/singleton/{all/rebuild-mondb.yaml msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4615895 2019-12-19 22:52:32 2019-12-19 23:08:47 2019-12-19 23:46:46 0:37:59 0:24:51 0:13:08 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-install/luminous-v1only.yaml backoff/peering_and_degraded.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/crush-compat.yaml distro$/{ubuntu_latest.yaml} msgr-failures/osd-delay.yaml rados.yaml thrashers/mapgap.yaml thrashosds-health.yaml workloads/cache-snaps.yaml} 4
pass 4615896 2019-12-19 22:52:33 2019-12-19 23:10:37 2019-12-19 23:40:36 0:29:59 0:23:42 0:06:17 smithi master ubuntu 18.04 rados/thrash-erasure-code-overwrites/{bluestore-bitmap.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/normal.yaml msgr-failures/few.yaml rados.yaml recovery-overrides/{more-async-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/minsize_recovery.yaml thrashosds-health.yaml workloads/ec-snaps-few-objects-overwrites.yaml} 2
pass 4615897 2019-12-19 22:52:34 2019-12-19 23:10:37 2019-12-19 23:34:36 0:23:59 0:13:35 0:10:24 smithi master rhel 8.0 rados/thrash-erasure-code-shec/{ceph.yaml clusters/{fixed-4.yaml openstack.yaml} msgr-failures/few.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml recovery-overrides/{more-async-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-rados-plugin=shec-k=4-m=3-c=2.yaml} 4
dead 4615898 2019-12-19 22:52:35 2019-12-19 23:10:44 2019-12-20 11:13:22 12:02:38 smithi master rhel 8.0 rados/thrash-erasure-code-isa/{arch/x86_64.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/osd-delay.yaml objectstore/bluestore-stupid.yaml rados.yaml recovery-overrides/{more-async-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-rados-plugin=isa-k=2-m=1.yaml} 2
pass 4615899 2019-12-19 22:52:36 2019-12-19 23:10:49 2019-12-19 23:46:49 0:36:00 0:27:59 0:08:01 smithi master rhel 8.0 rados/thrash-erasure-code-big/{ceph.yaml cluster/{12-osds.yaml openstack.yaml} msgr-failures/osd-delay.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{more-partial-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-rados-plugin=jerasure-k=4-m=2.yaml} 3
pass 4615900 2019-12-19 22:52:38 2019-12-19 23:12:37 2019-12-19 23:34:36 0:21:59 0:12:54 0:09:05 smithi master centos 8.0 rados/verify/{centos_latest.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-thrash/default/{default.yaml thrashosds-health.yaml} msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-bitmap.yaml rados.yaml tasks/rados_cls_all.yaml validater/lockdep.yaml} 2
pass 4615901 2019-12-19 22:52:39 2019-12-19 23:12:37 2019-12-19 23:28:36 0:15:59 0:09:17 0:06:42 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-basic-min-osd-mem-target.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/sample_fio.yaml} 1
fail 4615902 2019-12-19 22:52:40 2019-12-19 23:12:46 2019-12-19 23:34:45 0:21:59 0:14:41 0:07:18 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{default.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/osd-delay.yaml msgr/async.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/cache.yaml} 2
Failure Reason:

SELinux denials found on ubuntu@smithi107.front.sepia.ceph.com: ['type=AVC msg=audit(1576798303.271:10301): avc: denied { create } for pid=38623 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.631:10305): avc: denied { open } for pid=38623 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.196:10296): avc: denied { open } for pid=38623 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.385:10303): avc: denied { setattr } for pid=38623 comm="rhsmcertd-worke" name="2c35ae2a211c917c8720c3cf79187f2d3049d82f8e613c2dab44a57fa73c97e7-primary.xml.gz" dev="sda1" ino=262180 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.271:10301): avc: denied { open } for pid=38623 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=56740 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.549:10304): avc: denied { unlink } for pid=38623 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=56740 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.197:10299): avc: denied { map } for pid=38623 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.631:10305): avc: denied { read } for pid=38623 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.196:10296): avc: denied { read write } for pid=38623 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.385:10302): avc: denied { open } for pid=38623 comm="rhsmcertd-worke" path="/var/cache/dnf/ceph-b62bef3614ddca06/repodata/repomd.xml" dev="sda1" ino=262154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.271:10300): avc: denied { open } for pid=38623 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.754:10306): avc: denied { open } for pid=38677 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.754:10306): avc: denied { read } for pid=38677 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 
scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.754:10307): avc: denied { lock } for pid=38677 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.197:10298): avc: denied { getattr } for pid=38623 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.754:10308): avc: denied { map } for pid=38677 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.196:10297): avc: denied { lock } for pid=38623 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798303.549:10304): avc: denied { remove_name } for pid=38623 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=56740 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576798303.271:10301): avc: denied { add_name } for pid=38623 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576798303.271:10301): avc: denied { write } for pid=38623 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1']

pass 4615903 2019-12-19 22:52:41 2019-12-19 23:13:03 2019-12-19 23:35:02 0:21:59 0:15:51 0:06:08 smithi master ubuntu 18.04 rados/singleton/{all/recovery-preemption.yaml msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
fail 4615904 2019-12-19 22:52:42 2019-12-19 23:14:36 2019-12-20 00:00:37 0:46:01 0:36:17 0:09:44 smithi master centos 8.0 rados/dashboard/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-avl.yaml supported-random-distro$/{centos_8.yaml} tasks/dashboard.yaml} 2
Failure Reason:

Test failure: test_all (tasks.mgr.dashboard.test_rgw.RgwBucketTest)

pass 4615905 2019-12-19 22:52:43 2019-12-19 23:14:36 2019-12-19 23:30:35 0:15:59 0:10:49 0:05:10 smithi master rhel 8.0 rados/objectstore/{backends/alloc-hint.yaml supported-random-distro$/{rhel_8.yaml}} 1
fail 4615906 2019-12-19 22:52:44 2019-12-19 23:14:36 2019-12-19 23:34:35 0:19:59 0:11:44 0:08:15 smithi master centos 8.0 rados/rest/{mgr-restful.yaml supported-random-distro$/{centos_8.yaml}} 1
Failure Reason:

Command failed (workunit test rest/test-restful.sh) on smithi137 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.a/client.a/tmp && cd -- /home/ubuntu/cephtest/mnt.a/client.a/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="a" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.a CEPH_ROOT=/home/ubuntu/cephtest/clone.client.a adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.a/qa/workunits/rest/test-restful.sh'

pass 4615907 2019-12-19 22:52:45 2019-12-19 23:14:36 2019-12-19 23:52:36 0:38:00 0:28:01 0:09:59 smithi master ubuntu 18.04 rados/singleton-bluestore/{all/cephtool.yaml msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
fail 4615908 2019-12-19 22:52:47 2019-12-19 23:14:36 2019-12-19 23:16:35 0:01:59 0 smithi master centos 8.0 rados/singleton-flat/valgrind-leaks/centos_latest.yaml
Failure Reason:

list index out of range

fail 4615909 2019-12-19 22:52:48 2019-12-19 23:14:36 2019-12-19 23:36:35 0:21:59 0:13:21 0:08:38 smithi master centos 8.0 rados/singleton-nomsgr/{all/admin_socket_output.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
Failure Reason:

"2019-12-19T23:28:53.938351+0000 mon.a (mon.0) 179 : cluster [WRN] Health check failed: 1 filesystem is degraded (FS_DEGRADED)" in cluster log

pass 4615910 2019-12-19 22:52:49 2019-12-19 23:14:36 2019-12-19 23:32:35 0:17:59 0:09:33 0:08:26 smithi master ubuntu 18.04 rados/standalone/{supported-random-distro$/{ubuntu_latest.yaml} workloads/crush.yaml} 1
pass 4615911 2019-12-19 22:52:50 2019-12-19 23:14:36 2019-12-20 00:52:38 1:38:02 1:29:23 0:08:39 smithi master ubuntu 18.04 rados/upgrade/mimic-x-singleton/{0-cluster/{openstack.yaml start.yaml} 1-install/mimic.yaml 2-partial-upgrade/firsthalf.yaml 3-thrash/default.yaml 4-workload/{rbd-cls.yaml rbd-import-export.yaml readwrite.yaml snaps-few-objects.yaml} 5-workload/{radosbench.yaml rbd_api.yaml} 6-finish-upgrade.yaml 7-nautilus.yaml 8-workload/{rbd-python.yaml snaps-many-objects.yaml} bluestore-bitmap.yaml thrashosds-health.yaml ubuntu_latest.yaml} 4
dead 4615912 2019-12-19 22:52:51 2019-12-19 23:14:39 2019-12-20 11:17:15 12:02:36 smithi master rhel 8.0 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/normal.yaml msgr-failures/osd-delay.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml recovery-overrides/{more-active-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/morepggrow.yaml thrashosds-health.yaml workloads/ec-radosbench.yaml} 2
pass 4615913 2019-12-19 22:52:53 2019-12-19 23:14:47 2019-12-19 23:32:46 0:17:59 0:08:18 0:09:41 smithi master ubuntu 18.04 rados/singleton/{all/resolve_stuck_peering.yaml msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 2
fail 4615914 2019-12-19 22:52:54 2019-12-19 23:15:01 2019-12-19 23:35:00 0:19:59 0:08:13 0:11:46 smithi master ubuntu 18.04 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-stupid.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/orchestrator_cli.yaml} 2
Failure Reason:

Test failure: test_device_ls (tasks.mgr.test_orchestrator_cli.TestOrchestratorCli)

pass 4615915 2019-12-19 22:52:55 2019-12-19 23:15:03 2019-12-19 23:37:02 0:21:59 0:11:53 0:10:06 smithi master centos 8.0 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{centos_8.yaml} tasks/readwrite.yaml} 2
pass 4615916 2019-12-19 22:52:56 2019-12-19 23:15:06 2019-12-19 23:37:06 0:22:00 0:13:21 0:08:39 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-async-recovery.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/upmap.yaml msgr-failures/fastclose.yaml msgr/async-v1only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/mapgap.yaml thrashosds-health.yaml workloads/dedup_tier.yaml} 2
pass 4615917 2019-12-19 22:52:58 2019-12-19 23:15:06 2019-12-19 23:33:06 0:18:00 0:10:04 0:07:56 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-bitmap.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/sample_radosbench.yaml} 1
fail 4615918 2019-12-19 22:52:59 2019-12-19 23:16:39 2019-12-19 23:46:38 0:29:59 0:21:35 0:08:24 smithi master ubuntu 18.04 rados/singleton/{all/test-crash.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
Failure Reason:

Command failed (workunit test rados/test_crash.sh) on smithi113 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/rados/test_crash.sh'

pass 4615919 2019-12-19 22:53:00 2019-12-19 23:16:38 2019-12-19 23:34:38 0:18:00 0:07:37 0:10:23 smithi master ubuntu 18.04 rados/multimon/{clusters/9.yaml msgr-failures/few.yaml msgr/async.yaml no_pools.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/mon_clock_no_skews.yaml} 3
pass 4615920 2019-12-19 22:53:02 2019-12-19 23:16:39 2019-12-19 23:36:38 0:19:59 0:09:48 0:10:11 smithi master centos 8.0 rados/singleton-nomsgr/{all/balancer.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615921 2019-12-19 22:53:03 2019-12-19 23:16:51 2019-12-19 23:38:50 0:21:59 0:14:47 0:07:12 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-install/luminous.yaml backoff/normal.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/off.yaml distro$/{ubuntu_latest.yaml} msgr-failures/fastclose.yaml rados.yaml thrashers/morepggrow.yaml thrashosds-health.yaml workloads/radosbench.yaml} 4
fail 4615922 2019-12-19 22:53:05 2019-12-19 23:17:14 2019-12-19 23:35:13 0:17:59 0:08:45 0:09:14 smithi master centos 8.0 rados/singleton/{all/test_envlibrados_for_rocksdb.yaml msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
Failure Reason:

Command failed (workunit test rados/test_envlibrados_for_rocksdb.sh) on smithi006 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/rados/test_envlibrados_for_rocksdb.sh'

fail 4615923 2019-12-19 22:53:06 2019-12-19 23:18:32 2019-12-20 02:50:36 3:32:04 3:23:35 0:08:29 smithi master centos 8.0 rados/monthrash/{ceph.yaml clusters/9-mons.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/one.yaml workloads/rados_api_tests.yaml} 2
Failure Reason:

Command failed (workunit test rados/test.sh) on smithi131 with status 124: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/rados/test.sh'

pass 4615924 2019-12-19 22:53:08 2019-12-19 23:18:33 2019-12-19 23:54:32 0:35:59 0:25:37 0:10:22 smithi master ubuntu 18.04 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{more-partial-recovery.yaml} backoff/peering_and_degraded.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/morepggrow.yaml thrashosds-health.yaml workloads/pool-snaps-few-objects.yaml} 2
pass 4615925 2019-12-19 22:53:09 2019-12-19 23:18:32 2019-12-19 23:44:32 0:26:00 0:16:33 0:09:27 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-bitmap.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/cosbench_64K_read_write.yaml} 1
fail 4615926 2019-12-19 22:53:10 2019-12-19 23:18:33 2019-12-20 00:40:33 1:22:00 1:13:51 0:08:09 smithi master rhel 8.0 rados/singleton/{all/thrash-backfill-full.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 2
Failure Reason:

SELinux denials found on ubuntu@smithi124.front.sepia.ceph.com: ['type=AVC msg=audit(1576802198.252:36095): avc: denied { lock } for pid=103218 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802203.611:36106): avc: denied { lock } for pid=103333 comm="rpm" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802205.004:36108): avc: denied { write } for pid=103218 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576802205.161:36109): avc: denied { open } for pid=103218 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802199.699:36098): avc: denied { read } for pid=103319 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802198.252:36096): avc: denied { getattr } for pid=103218 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802198.252:36097): avc: denied { map } for pid=103218 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802202.997:36102): avc: denied { write } for pid=103218 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576802205.161:36109): avc: denied { read } for pid=103218 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802203.611:36105): avc: denied { read } for pid=103333 comm="rpm" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802198.252:36094): avc: denied { open } for pid=103218 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802199.699:36098): avc: denied { open } for pid=103319 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802203.146:36103): avc: denied { open } for pid=103218 comm="rhsmcertd-worke" path="/var/cache/dnf/ceph-b62bef3614ddca06/repodata/repomd.xml" dev="sda1" ino=262158 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 
tclass=file permissive=1', 'type=AVC msg=audit(1576802199.699:36100): avc: denied { map } for pid=103319 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802202.997:36102): avc: denied { open } for pid=103218 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=60286 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802203.611:36105): avc: denied { open } for pid=103333 comm="rpm" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802205.004:36108): avc: denied { remove_name } for pid=103218 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=60286 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576802202.997:36102): avc: denied { add_name } for pid=103218 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576802202.984:36101): avc: denied { open } for pid=103218 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802203.611:36107): avc: denied { map } for pid=103333 comm="rpm" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802203.220:36104): avc: denied { setattr } for pid=103218 comm="rhsmcertd-worke" name="2c35ae2a211c917c8720c3cf79187f2d3049d82f8e613c2dab44a57fa73c97e7-primary.xml.gz" dev="sda1" ino=262180 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802205.004:36108): avc: denied { unlink } for pid=103218 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=60286 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802199.699:36099): avc: denied { lock } for pid=103319 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802198.252:36094): avc: denied { read write } for pid=103218 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802202.997:36102): avc: denied { create } for pid=103218 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1']

pass 4615927 2019-12-19 22:53:11 2019-12-19 23:18:47 2019-12-19 23:42:46 0:23:59 0:15:37 0:08:22 smithi master ubuntu 18.04 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/repair_test.yaml} 2
fail 4615928 2019-12-19 22:53:13 2019-12-19 23:18:52 2019-12-19 23:38:51 0:19:59 0:13:28 0:06:31 smithi master rhel 8.0 rados/objectstore/{backends/ceph_objectstore_tool.yaml supported-random-distro$/{rhel_8.yaml}} 1
Failure Reason:

SELinux denials found on ubuntu@smithi181.front.sepia.ceph.com: ['type=AVC msg=audit(1576798169.371:5213): avc: denied { create } for pid=21824 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.294:5208): avc: denied { read write } for pid=21824 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.294:5209): avc: denied { lock } for pid=21824 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.601:5215): avc: denied { unlink } for pid=21824 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=59849 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.681:5216): avc: denied { read } for pid=21824 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.371:5213): avc: denied { open } for pid=21824 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=59849 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798170.242:5247): avc: denied { lock } for pid=22599 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.371:5212): avc: denied { open } for pid=21824 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.476:5214): avc: denied { open } for pid=21824 comm="rhsmcertd-worke" path="/var/cache/dnf/ceph-b62bef3614ddca06/repodata/repomd.xml" dev="sda1" ino=262174 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.295:5210): avc: denied { getattr } for pid=21824 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.371:5213): avc: denied { add_name } for pid=21824 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576798169.295:5211): avc: denied { map } for pid=21824 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.681:5216): avc: denied { open } for pid=21824 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC 
msg=audit(1576798170.242:5246): avc: denied { read } for pid=22599 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.294:5208): avc: denied { open } for pid=21824 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.371:5213): avc: denied { write } for pid=21824 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576798170.242:5246): avc: denied { open } for pid=22599 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798169.601:5215): avc: denied { remove_name } for pid=21824 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=59849 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576798170.242:5248): avc: denied { map } for pid=22599 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1']
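
Note: these AVC denials (and the near-identical ones in several jobs below) come from the host's rhsmcertd-worke and setroubleshootd daemons touching /var/lib/rpm and /var/cache/dnf, not from Ceph processes, but teuthology marks the job failed whenever it finds unexpected denials in the audit log. A minimal manual triage sketch, assuming auditd and audit2allow (policycoreutils) are present on the affected smithi node; these commands are not part of the job itself:

    # list the recent AVC denials straight from the node's audit log
    sudo ausearch -m AVC -ts recent
    # summarize every logged denial as candidate allow rules (review only; do not install blindly)
    sudo audit2allow -a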

pass 4615929 2019-12-19 22:53:14 2019-12-19 23:20:41 2019-12-19 23:36:40 0:15:59 0:10:12 0:05:47 smithi master rhel 8.0 rados/singleton-nomsgr/{all/cache-fs-trunc.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4615930 2019-12-19 22:53:15 2019-12-19 23:20:41 2019-12-19 23:38:41 0:18:00 0:11:20 0:06:40 smithi master ubuntu 18.04 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/filestore-xfs.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/progress.yaml} 2
fail 4615931 2019-12-19 22:53:16 2019-12-19 23:20:49 2019-12-20 00:06:49 0:46:00 0:31:37 0:14:23 smithi master rhel 8.0 rados/thrash-erasure-code-overwrites/{bluestore-bitmap.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/osd-delay.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/morepggrow.yaml thrashosds-health.yaml workloads/ec-pool-snaps-few-objects-overwrites.yaml} 2
Failure Reason:

Command failed on smithi166 with status 11: u'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg deep-scrub 1.54'
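
Note: exit status 11 here maps to errno EAGAIN, which the ceph CLI typically returns when the deep-scrub request cannot be queued, for example because the PG's primary OSD is down or restarting while the thrasher is active (an assumption based on the thrashing workload; the log does not confirm the cause). A minimal manual retry sketch using the same command quoted above:

    # retry the deep-scrub request a few times with a back-off, as a manual follow-up
    for i in 1 2 3 4 5; do
        sudo ceph --cluster ceph pg deep-scrub 1.54 && break
        sleep 30
    done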

pass 4615932 2019-12-19 22:53:17 2019-12-19 23:20:52 2019-12-19 23:46:52 0:26:00 0:11:52 0:14:08 smithi master ubuntu 18.04 rados/thrash-erasure-code-shec/{ceph.yaml clusters/{fixed-4.yaml openstack.yaml} msgr-failures/osd-delay.yaml objectstore/bluestore-stupid.yaml rados.yaml recovery-overrides/{more-active-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-rados-plugin=shec-k=4-m=3-c=2.yaml} 4
dead 4615933 2019-12-19 22:53:18 2019-12-19 23:20:52 2019-12-20 11:23:32 12:02:40 smithi master rhel 8.0 rados/thrash-erasure-code-isa/{arch/x86_64.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/fastclose.yaml objectstore/filestore-xfs.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/mapgap.yaml thrashosds-health.yaml workloads/ec-rados-plugin=isa-k=2-m=1.yaml} 2
pass 4615934 2019-12-19 22:53:19 2019-12-19 23:22:32 2019-12-19 23:42:32 0:20:00 0:12:45 0:07:15 smithi master rhel 8.0 rados/thrash-erasure-code-big/{ceph.yaml cluster/{12-osds.yaml openstack.yaml} msgr-failures/fastclose.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml recovery-overrides/{more-partial-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/fastread.yaml thrashosds-health.yaml workloads/ec-rados-plugin=lrc-k=4-m=2-l=3.yaml} 3
fail 4615935 2019-12-19 22:53:21 2019-12-19 23:22:34 2019-12-19 23:40:32 0:17:58 0:05:58 0:12:00 smithi master centos 8.0 rados/verify/{centos_latest.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-thrash/none.yaml msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-comp.yaml rados.yaml tasks/mon_recovery.yaml validater/valgrind.yaml} 2
Failure Reason:

Command failed on smithi117 with status 1: 'sudo yum -y install ceph-debuginfo'
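
Note: the valgrind validater (validater/valgrind.yaml in the description) installs ceph-debuginfo so valgrind reports carry symbols; a status-1 failure from yum here usually means no matching ceph-debuginfo package was published for this build in the repos configured on the node (an assumption, since only the failed install is logged). A quick manual check sketch on the affected machine:

    # see which ceph-debuginfo builds, if any, yum can see
    sudo yum --showduplicates list ceph-debuginfo
    # confirm which repositories are enabled
    sudo yum repolist enabled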

fail 4615936 2019-12-19 22:53:22 2019-12-19 23:22:34 2019-12-20 02:46:36 3:24:02 3:14:13 0:09:49 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-active-recovery.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/osd-delay.yaml msgr/async.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/none.yaml thrashosds-health.yaml workloads/rados_api_tests.yaml} 2
Failure Reason:

Command failed (workunit test rados/test.sh) on smithi190 with status 124: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/rados/test.sh'
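
Note: status 124 is the exit code GNU timeout uses when it kills a command at its deadline, so this rados/test.sh run hit the 3h limit of the wrapper quoted above rather than failing on an assertion; the erasure-code standalone job further down (test-erasure-eio.sh) fails the same way. A one-line illustration of the convention:

    # timeout exits 124 when the wrapped command is still running at the deadline
    timeout 2s sleep 10; echo "exit=$?"    # prints exit=124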

pass 4615937 2019-12-19 22:53:23 2019-12-19 23:22:34 2019-12-20 00:02:33 0:39:59 0:29:10 0:10:49 smithi master rhel 8.0 rados/singleton/{all/thrash-eio.yaml msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 2
pass 4615938 2019-12-19 22:53:24 2019-12-19 23:22:42 2019-12-19 23:50:41 0:27:59 0:20:09 0:07:50 smithi master ubuntu 18.04 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/fastclose.yaml objectstore/bluestore-stupid.yaml rados.yaml recovery-overrides/{more-async-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/ec-small-objects-fast-read.yaml} 2
pass 4615939 2019-12-19 22:53:25 2019-12-19 23:22:43 2019-12-20 00:06:42 0:43:59 0:13:36 0:30:23 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-install/mimic-v1only.yaml backoff/peering.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/crush-compat.yaml distro$/{ubuntu_latest.yaml} msgr-failures/few.yaml rados.yaml thrashers/none.yaml thrashosds-health.yaml workloads/rbd_cls.yaml} 4
pass 4615940 2019-12-19 22:53:27 2019-12-19 23:24:51 2019-12-19 23:50:50 0:25:59 0:18:06 0:07:53 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-comp.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/cosbench_64K_write.yaml} 1
fail 4615941 2019-12-19 22:53:28 2019-12-19 23:24:51 2019-12-20 02:46:58 3:22:07 3:14:18 0:07:49 smithi master rhel 8.0 rados/standalone/{supported-random-distro$/{rhel_8.yaml} workloads/erasure-code.yaml} 1
Failure Reason:

Command failed (workunit test erasure-code/test-erasure-eio.sh) on smithi123 with status 124: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/standalone/erasure-code/test-erasure-eio.sh'

pass 4615942 2019-12-19 22:53:29 2019-12-19 23:24:51 2019-12-19 23:54:52 0:30:01 0:21:20 0:08:41 smithi master rhel 8.0 rados/singleton/{all/thrash-rados/{thrash-rados.yaml thrashosds-health.yaml} msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 2
dead 4615943 2019-12-19 22:53:30 2019-12-19 23:26:49 2019-12-20 11:29:22 12:02:33 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{more-async-partial-recovery.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/upmap.yaml msgr-failures/fastclose.yaml msgr/async-v1only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/radosbench.yaml} 2
fail 4615944 2019-12-19 22:53:31 2019-12-19 23:26:49 2019-12-20 00:12:49 0:46:00 0:36:32 0:09:28 smithi master centos 8.0 rados/dashboard/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-bitmap.yaml supported-random-distro$/{centos_8.yaml} tasks/dashboard.yaml} 2
Failure Reason:

Test failure: test_all (tasks.mgr.dashboard.test_rgw.RgwBucketTest)

pass 4615945 2019-12-19 22:53:32 2019-12-19 23:28:34 2019-12-19 23:44:33 0:15:59 0:10:11 0:05:48 smithi master rhel 8.0 rados/singleton-nomsgr/{all/ceph-kvstore-tool.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4615946 2019-12-19 22:53:33 2019-12-19 23:28:34 2019-12-19 23:58:35 0:30:01 0:22:19 0:07:42 smithi master rhel 8.0 rados/singleton/{all/thrash_cache_writeback_proxy_none.yaml msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 2
pass 4615947 2019-12-19 22:53:34 2019-12-19 23:28:37 2019-12-19 23:56:37 0:28:00 0:18:10 0:09:50 smithi master ubuntu 18.04 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/rgw_snaps.yaml} 2
pass 4615948 2019-12-19 22:53:35 2019-12-19 23:28:38 2019-12-19 23:50:38 0:22:00 0:08:02 0:13:58 smithi master centos 8.0 rados/multimon/{clusters/21.yaml msgr-failures/many.yaml msgr/async-v1only.yaml no_pools.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{centos_8.yaml} tasks/mon_clock_with_skews.yaml} 3
pass 4615949 2019-12-19 22:53:36 2019-12-19 23:28:46 2019-12-19 23:46:46 0:18:00 0:09:16 0:08:44 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-low-osd-mem-target.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/fio_4K_rand_read.yaml} 1
pass 4615950 2019-12-19 22:53:37 2019-12-19 23:28:47 2019-12-19 23:48:46 0:19:59 0:09:59 0:10:00 smithi master ubuntu 18.04 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-avl.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/prometheus.yaml} 2
pass 4615951 2019-12-19 22:53:38 2019-12-19 23:28:51 2019-12-19 23:46:52 0:18:01 0:08:38 0:09:23 smithi master centos 8.0 rados/singleton/{all/watch-notify-same-primary.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615952 2019-12-19 22:53:40 2019-12-19 23:28:52 2019-12-19 23:52:52 0:24:00 0:12:15 0:11:45 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-async-partial-recovery.yaml} backoff/peering_and_degraded.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/redirect.yaml} 2
pass 4615953 2019-12-19 22:53:41 2019-12-19 23:29:14 2019-12-20 00:19:14 0:50:00 0:40:25 0:09:35 smithi master centos 8.0 rados/monthrash/{ceph.yaml clusters/3-mons.yaml msgr-failures/mon-delay.yaml msgr/async.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/sync-many.yaml workloads/rados_mon_osdmap_prune.yaml} 2
fail 4615954 2019-12-19 22:53:42 2019-12-19 23:30:53 2019-12-19 23:48:53 0:18:00 0:10:03 0:07:57 smithi master rhel 8.0 rados/objectstore/{backends/filejournal.yaml supported-random-distro$/{rhel_8.yaml}} 1
Failure Reason:

SELinux denials found on ubuntu@smithi035.front.sepia.ceph.com: ['type=AVC msg=audit(1576798866.951:4561): avc: denied { getattr } for pid=20226 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.224:4567): avc: denied { unlink } for pid=20226 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=23570 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.884:4555): avc: denied { open } for pid=20226 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.956:4564): avc: denied { create } for pid=20226 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.309:4568): avc: denied { open } for pid=20226 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.441:4569): avc: denied { read } for pid=20301 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.441:4570): avc: denied { lock } for pid=20301 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.937:4559): avc: denied { read write } for pid=20226 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.956:4564): avc: denied { open } for pid=20226 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=23570 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.956:4564): avc: denied { add_name } for pid=20226 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576798866.884:4558): avc: denied { map } for pid=20226 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.956:4564): avc: denied { write } for pid=20226 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576798867.062:4565): avc: denied { open } for pid=20226 comm="rhsmcertd-worke" path="/var/cache/dnf/copr:copr.fedorainfracloud.org:ktdreyer:ceph-el8-ac801414381f5e61/repodata/repomd.xml" dev="sda1" ino=262184 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 
tclass=file permissive=1', 'type=AVC msg=audit(1576798866.937:4560): avc: denied { lock } for pid=20226 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.884:4556): avc: denied { lock } for pid=20226 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.441:4569): avc: denied { open } for pid=20301 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.224:4567): avc: denied { remove_name } for pid=20226 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=23570 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576798866.951:4562): avc: denied { map } for pid=20226 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.884:4557): avc: denied { getattr } for pid=20226 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.089:4566): avc: denied { setattr } for pid=20226 comm="rhsmcertd-worke" name="6e2fe611f78ac434c2918bac1eec468dbd24c9b4cdb65bf6a744d10f764f3284-primary.xml.gz" dev="sda1" ino=262274 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.441:4571): avc: denied { map } for pid=20301 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.956:4563): avc: denied { open } for pid=20226 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798867.309:4568): avc: denied { read } for pid=20226 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.937:4559): avc: denied { open } for pid=20226 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576798866.884:4555): avc: denied { read write } for pid=20226 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1']

pass 4615955 2019-12-19 22:53:43 2019-12-19 23:30:53 2019-12-19 23:46:53 0:16:00 0:07:18 0:08:42 smithi master centos 8.0 rados/singleton-nomsgr/{all/ceph-post-file.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615956 2019-12-19 22:53:44 2019-12-19 23:32:44 2019-12-20 00:12:43 0:39:59 0:30:24 0:09:35 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-install/mimic.yaml backoff/peering_and_degraded.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/off.yaml distro$/{ubuntu_latest.yaml} msgr-failures/osd-delay.yaml rados.yaml thrashers/pggrow.yaml thrashosds-health.yaml workloads/snaps-few-objects.yaml} 4
pass 4615957 2019-12-19 22:53:45 2019-12-19 23:32:44 2019-12-19 23:46:43 0:13:59 0:07:00 0:06:59 smithi master ubuntu 18.04 rados/singleton/{all/admin-socket.yaml msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4615958 2019-12-19 22:53:47 2019-12-19 23:32:44 2019-12-19 23:48:43 0:15:59 0:09:17 0:06:42 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-stupid.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/fio_4K_rand_rw.yaml} 1
fail 4615959 2019-12-19 22:53:48 2019-12-19 23:32:46 2019-12-20 00:16:46 0:44:00 0:36:54 0:07:06 smithi master rhel 8.0 rados/singleton-bluestore/{all/cephtool.yaml msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
Failure Reason:

Command failed (workunit test cephtool/test.sh) on smithi085 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/cephtool/test.sh'

fail 4615960 2019-12-19 22:53:49 2019-12-19 23:32:47 2019-12-20 00:02:47 0:30:00 0:22:45 0:07:15 smithi master rhel 8.0 rados/thrash-erasure-code-overwrites/{bluestore-bitmap.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/normal.yaml msgr-failures/fastclose.yaml rados.yaml recovery-overrides/{more-active-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/ec-small-objects-fast-read-overwrites.yaml} 2
Failure Reason:

SELinux denials found on ubuntu@smithi046.front.sepia.ceph.com: ['type=AVC msg=audit(1576799652.657:17911): avc: denied { read write } for pid=70853 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.734:17916): avc: denied { create } for pid=70853 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.734:17916): avc: denied { open } for pid=70853 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=59780 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799653.189:17928): avc: denied { map } for pid=71027 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.850:17917): avc: denied { open } for pid=70853 comm="rhsmcertd-worke" path="/var/cache/dnf/ceph-69b5c88b61d71540/repodata/repomd.xml" dev="sda1" ino=262154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.657:17911): avc: denied { open } for pid=70853 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.657:17914): avc: denied { map } for pid=70853 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.734:17916): avc: denied { add_name } for pid=70853 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576799653.189:17927): avc: denied { lock } for pid=71027 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.657:17913): avc: denied { getattr } for pid=70853 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.657:17912): avc: denied { lock } for pid=70853 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.851:17918): avc: denied { setattr } for pid=70853 comm="rhsmcertd-worke" name="77950341a02eb4603ce116071aa18013c1f9e114406df1694cfccc21155717b9-primary.xml.gz" dev="sda1" ino=262180 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799652.734:17915): avc: denied { open } for pid=70853 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 
tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799653.017:17924): avc: denied { remove_name } for pid=70853 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=59780 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576799652.734:17916): avc: denied { write } for pid=70853 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576799653.189:17926): avc: denied { open } for pid=71027 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799653.017:17924): avc: denied { unlink } for pid=70853 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=59780 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799653.102:17925): avc: denied { read } for pid=70853 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799653.189:17926): avc: denied { read } for pid=71027 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576799653.102:17925): avc: denied { open } for pid=70853 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1']

pass 4615961 2019-12-19 22:53:50 2019-12-19 23:32:52 2019-12-19 23:52:52 0:20:00 0:13:01 0:06:59 smithi master rhel 8.0 rados/thrash-erasure-code-shec/{ceph.yaml clusters/{fixed-4.yaml openstack.yaml} msgr-failures/fastclose.yaml objectstore/filestore-xfs.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-rados-plugin=shec-k=4-m=3-c=2.yaml} 4
dead 4615962 2019-12-19 22:53:51 2019-12-19 23:33:07 2019-12-20 11:35:45 12:02:38 smithi master rhel 8.0 rados/thrash-erasure-code-isa/{arch/x86_64.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml objectstore/bluestore-avl.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/morepggrow.yaml thrashosds-health.yaml workloads/ec-rados-plugin=isa-k=2-m=1.yaml} 2
pass 4615963 2019-12-19 22:53:52 2019-12-19 23:34:55 2019-12-19 23:56:54 0:21:59 0:15:36 0:06:23 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{more-active-recovery.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/osd-delay.yaml msgr/async.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/redirect_promote_tests.yaml} 2
pass 4615964 2019-12-19 22:53:53 2019-12-19 23:34:55 2019-12-20 00:04:54 0:29:59 0:19:35 0:10:24 smithi master ubuntu 18.04 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/normal.yaml msgr-failures/few.yaml objectstore/filestore-xfs.yaml rados.yaml recovery-overrides/{more-partial-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-small-objects-many-deletes.yaml} 2
pass 4615965 2019-12-19 22:53:54 2019-12-19 23:34:55 2019-12-19 23:56:54 0:21:59 0:12:30 0:09:29 smithi master rhel 8.0 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/scrub_test.yaml} 2
pass 4615966 2019-12-19 22:53:56 2019-12-19 23:34:55 2019-12-20 00:12:54 0:37:59 0:26:55 0:11:04 smithi master centos 8.0 rados/thrash-erasure-code-big/{ceph.yaml cluster/{12-osds.yaml openstack.yaml} msgr-failures/few.yaml objectstore/bluestore-stupid.yaml rados.yaml recovery-overrides/{more-async-recovery.yaml} supported-random-distro$/{centos_8.yaml} thrashers/mapgap.yaml thrashosds-health.yaml workloads/ec-rados-plugin=jerasure-k=4-m=2.yaml} 3
pass 4615967 2019-12-19 22:53:57 2019-12-19 23:34:55 2019-12-20 00:02:54 0:27:59 0:20:42 0:07:17 smithi master centos 8.0 rados/verify/{centos_latest.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-thrash/default/{default.yaml thrashosds-health.yaml} msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml tasks/rados_api_tests.yaml validater/lockdep.yaml} 2
pass 4615968 2019-12-19 22:53:59 2019-12-19 23:34:55 2019-12-19 23:50:54 0:15:59 0:10:30 0:05:29 smithi master rhel 8.0 rados/singleton/{all/deduptool.yaml msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4615969 2019-12-19 22:54:00 2019-12-19 23:35:01 2019-12-19 23:55:01 0:20:00 0:13:00 0:07:00 smithi master rhel 8.0 rados/standalone/{supported-random-distro$/{rhel_8.yaml} workloads/mgr.yaml} 1
pass 4615970 2019-12-19 22:54:01 2019-12-19 23:35:03 2019-12-19 23:51:02 0:15:59 0:09:23 0:06:36 smithi master centos 8.0 rados/singleton/{all/divergent_priors.yaml msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615971 2019-12-19 22:54:03 2019-12-19 23:35:15 2019-12-19 23:53:14 0:17:59 0:08:40 0:09:19 smithi master centos 8.0 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-bitmap.yaml supported-random-distro$/{centos_8.yaml} tasks/workunits.yaml} 2
pass 4615972 2019-12-19 22:54:04 2019-12-19 23:36:56 2019-12-20 00:00:55 0:23:59 0:14:47 0:09:12 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-async-recovery.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/upmap.yaml msgr-failures/fastclose.yaml msgr/async-v1only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/mapgap.yaml thrashosds-health.yaml workloads/redirect_set_object.yaml} 2
pass 4615973 2019-12-19 22:54:05 2019-12-19 23:36:56 2019-12-19 23:52:55 0:15:59 0:09:19 0:06:40 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-basic-min-osd-mem-target.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/fio_4M_rand_read.yaml} 1
pass 4615974 2019-12-19 22:54:07 2019-12-19 23:36:56 2019-12-20 00:06:55 0:29:59 0:13:31 0:16:28 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-install/nautilus-v1only.yaml backoff/normal.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/crush-compat.yaml distro$/{ubuntu_latest.yaml} msgr-failures/fastclose.yaml rados.yaml thrashers/careful.yaml thrashosds-health.yaml workloads/test_rbd_api.yaml} 4
pass 4615975 2019-12-19 22:54:08 2019-12-19 23:36:56 2019-12-19 23:52:55 0:15:59 0:08:43 0:07:16 smithi master ubuntu 18.04 rados/singleton/{all/divergent_priors2.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4615976 2019-12-19 22:54:09 2019-12-19 23:37:04 2019-12-19 23:55:03 0:17:59 0:12:25 0:05:34 smithi master rhel 8.0 rados/multimon/{clusters/3.yaml msgr-failures/few.yaml msgr/async-v2only.yaml no_pools.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/mon_recovery.yaml} 2
pass 4615977 2019-12-19 22:54:10 2019-12-19 23:37:08 2019-12-20 00:11:07 0:33:59 0:24:15 0:09:44 smithi master rhel 8.0 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/rados_api_tests.yaml} 2
fail 4615978 2019-12-19 22:54:11 2019-12-19 23:39:01 2019-12-20 00:27:00 0:47:59 0:38:09 0:09:50 smithi master rhel 8.0 rados/dashboard/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-comp.yaml supported-random-distro$/{rhel_8.yaml} tasks/dashboard.yaml} 2
Failure Reason:

Test failure: test_all (tasks.mgr.dashboard.test_rgw.RgwBucketTest)

pass 4615979 2019-12-19 22:54:13 2019-12-19 23:39:01 2019-12-20 01:55:04 2:16:03 2:10:29 0:05:34 smithi master rhel 8.0 rados/objectstore/{backends/filestore-idempotent-aio-journal.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4615980 2019-12-19 22:54:14 2019-12-19 23:39:01 2019-12-19 23:55:00 0:15:59 0:08:14 0:07:45 smithi master centos 8.0 rados/singleton-nomsgr/{all/export-after-evict.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615981 2019-12-19 22:54:15 2019-12-19 23:39:01 2019-12-19 23:57:00 0:17:59 0:12:10 0:05:49 smithi master rhel 8.0 rados/singleton/{all/dump-stuck.yaml msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4615982 2019-12-19 22:54:17 2019-12-19 23:40:53 2019-12-20 00:02:52 0:21:59 0:11:42 0:10:17 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{default.yaml} backoff/peering_and_degraded.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/morepggrow.yaml thrashosds-health.yaml workloads/set-chunk-promote-flush.yaml} 2
pass 4615983 2019-12-19 22:54:18 2019-12-19 23:40:53 2019-12-20 00:16:54 0:36:01 0:26:06 0:09:55 smithi master ubuntu 18.04 rados/monthrash/{ceph.yaml clusters/9-mons.yaml msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/sync.yaml workloads/rados_mon_workunits.yaml} 2
pass 4615984 2019-12-19 22:54:19 2019-12-19 23:40:53 2019-12-19 23:58:52 0:17:59 0:09:17 0:08:42 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-bitmap.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/fio_4M_rand_rw.yaml} 1
fail 4615985 2019-12-19 22:54:20 2019-12-19 23:40:54 2019-12-20 00:26:54 0:46:00 0:39:35 0:06:25 smithi master centos 8.0 rados/singleton/{all/ec-lost-unfound.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
Failure Reason:

Exiting scrub checking -- not all pgs scrubbed.

pass 4615986 2019-12-19 22:54:21 2019-12-19 23:42:51 2019-12-20 00:10:54 0:28:03 0:19:54 0:08:09 smithi master centos 8.0 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/osd-delay.yaml objectstore/bluestore-avl.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{centos_8.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-small-objects.yaml} 2
pass 4615987 2019-12-19 22:54:22 2019-12-19 23:42:51 2019-12-20 00:02:52 0:20:01 0:08:42 0:11:19 smithi master centos 8.0 rados/singleton-nomsgr/{all/full-tiering.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615988 2019-12-19 22:54:23 2019-12-19 23:44:51 2019-12-20 00:12:54 0:28:03 0:20:27 0:07:36 smithi master ubuntu 18.04 rados/thrash-erasure-code-overwrites/{bluestore-bitmap.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/few.yaml rados.yaml recovery-overrides/{more-async-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-small-objects-overwrites.yaml} 2
pass 4615989 2019-12-19 22:54:24 2019-12-19 23:44:51 2019-12-20 00:04:52 0:20:01 0:11:23 0:08:38 smithi master ubuntu 18.04 rados/thrash-erasure-code-shec/{ceph.yaml clusters/{fixed-4.yaml openstack.yaml} msgr-failures/fastclose.yaml objectstore/bluestore-avl.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-rados-plugin=shec-k=4-m=3-c=2.yaml} 4
pass 4615990 2019-12-19 22:54:25 2019-12-19 23:44:53 2019-12-20 00:06:53 0:22:00 0:15:39 0:06:21 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-active-recovery.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/osd-delay.yaml msgr/async.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/none.yaml thrashosds-health.yaml workloads/set-chunks-read.yaml} 2
pass 4615991 2019-12-19 22:54:26 2019-12-19 23:45:50 2019-12-20 00:15:49 0:29:59 0:23:42 0:06:17 smithi master rhel 8.0 rados/thrash-erasure-code-isa/{arch/x86_64.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/osd-delay.yaml objectstore/bluestore-bitmap.yaml rados.yaml recovery-overrides/{more-active-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/none.yaml thrashosds-health.yaml workloads/ec-rados-plugin=isa-k=2-m=1.yaml} 2
pass 4615992 2019-12-19 22:54:28 2019-12-19 23:46:32 2019-12-20 00:10:31 0:23:59 0:09:56 0:14:03 smithi master ubuntu 18.04 rados/thrash-erasure-code-big/{ceph.yaml cluster/{12-osds.yaml openstack.yaml} msgr-failures/osd-delay.yaml objectstore/filestore-xfs.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/morepggrow.yaml thrashosds-health.yaml workloads/ec-rados-plugin=lrc-k=4-m=2-l=3.yaml} 3
fail 4615993 2019-12-19 22:54:29 2019-12-19 23:46:39 2019-12-20 00:04:39 0:18:00 0:05:52 0:12:08 smithi master centos 8.0 rados/verify/{centos_latest.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-thrash/none.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml tasks/rados_cls_all.yaml validater/valgrind.yaml} 2
Failure Reason:

Command failed on smithi169 with status 1: 'sudo yum -y install ceph-debuginfo'

pass 4615994 2019-12-19 22:54:30 2019-12-19 23:46:44 2019-12-20 00:28:44 0:42:00 0:19:53 0:22:07 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-install/nautilus-v2only.yaml backoff/peering.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/off.yaml distro$/{ubuntu_latest.yaml} msgr-failures/few.yaml rados.yaml thrashers/default.yaml thrashosds-health.yaml workloads/cache-agent-big.yaml} 4
pass 4615995 2019-12-19 22:54:31 2019-12-19 23:46:47 2019-12-20 00:00:46 0:13:59 0:08:51 0:05:08 smithi master rhel 8.0 rados/singleton/{all/erasure-code-nonregression.yaml msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4615996 2019-12-19 22:54:33 2019-12-19 23:46:47 2019-12-20 00:02:46 0:15:59 0:09:12 0:06:47 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-comp.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/fio_4M_rand_write.yaml} 1
pass 4615997 2019-12-19 22:54:34 2019-12-19 23:46:50 2019-12-20 00:06:49 0:19:59 0:10:51 0:09:08 smithi master ubuntu 18.04 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/rados_cls_all.yaml} 2
pass 4615998 2019-12-19 22:54:35 2019-12-19 23:46:53 2019-12-20 00:20:54 0:34:01 0:26:00 0:08:01 smithi master centos 8.0 rados/singleton/{all/lost-unfound-delete.yaml msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4615999 2019-12-19 22:54:36 2019-12-19 23:46:54 2019-12-20 00:18:54 0:32:00 0:24:15 0:07:45 smithi master ubuntu 18.04 rados/standalone/{supported-random-distro$/{ubuntu_latest.yaml} workloads/misc.yaml} 1
fail 4616000 2019-12-19 22:54:37 2019-12-19 23:46:54 2019-12-20 00:20:55 0:34:01 0:24:37 0:09:24 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{more-partial-recovery.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/upmap.yaml msgr-failures/fastclose.yaml msgr/async-v1only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/small-objects.yaml} 2
Failure Reason:

Command failed on smithi189 with status 11: u'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg deep-scrub 1.26'

pass 4616001 2019-12-19 22:54:38 2019-12-19 23:49:02 2019-12-20 02:17:06 2:28:04 2:18:52 0:09:12 smithi master centos 8.0 rados/objectstore/{backends/filestore-idempotent.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4616002 2019-12-19 22:54:39 2019-12-19 23:49:02 2019-12-20 00:05:01 0:15:59 0:08:58 0:07:01 smithi master ubuntu 18.04 rados/singleton-nomsgr/{all/health-warnings.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4616003 2019-12-19 22:54:40 2019-12-19 23:49:02 2019-12-20 00:19:02 0:30:00 0:21:45 0:08:15 smithi master ubuntu 18.04 rados/singleton/{all/lost-unfound.yaml msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4616004 2019-12-19 22:54:41 2019-12-19 23:49:02 2019-12-20 00:09:01 0:19:59 0:12:23 0:07:36 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-low-osd-mem-target.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_4K_rand_read.yaml} 1
pass 4616005 2019-12-19 22:54:42 2019-12-19 23:50:57 2019-12-20 00:06:56 0:15:59 0:07:14 0:08:45 smithi master ubuntu 18.04 rados/multimon/{clusters/6.yaml msgr-failures/many.yaml msgr/async.yaml no_pools.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/mon_clock_no_skews.yaml} 2
pass 4616006 2019-12-19 22:54:44 2019-12-19 23:50:58 2019-12-20 00:29:00 0:38:02 0:30:28 0:07:34 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-partial-recovery.yaml} backoff/peering_and_degraded.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/snaps-few-objects.yaml} 2
pass 4616007 2019-12-19 22:54:45 2019-12-19 23:50:58 2019-12-20 00:09:00 0:18:02 0:09:18 0:08:44 smithi master ubuntu 18.04 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-low-osd-mem-target.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/crash.yaml} 2
pass 4616008 2019-12-19 22:54:46 2019-12-19 23:50:58 2019-12-20 00:09:00 0:18:02 0:07:53 0:10:09 smithi master centos 8.0 rados/singleton/{all/max-pg-per-osd.from-mon.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4616009 2019-12-19 22:54:47 2019-12-19 23:51:04 2019-12-20 00:27:06 0:36:02 0:24:03 0:11:59 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-install/nautilus.yaml backoff/peering_and_degraded.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/crush-compat.yaml distro$/{ubuntu_latest.yaml} msgr-failures/osd-delay.yaml rados.yaml thrashers/mapgap.yaml thrashosds-health.yaml workloads/cache-snaps.yaml} 4
pass 4616010 2019-12-19 22:54:48 2019-12-19 23:52:34 2019-12-20 00:16:33 0:23:59 0:14:00 0:09:59 smithi master centos 8.0 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{centos_8.yaml} tasks/rados_python.yaml} 2
dead 4616011 2019-12-19 22:54:50 2019-12-19 23:52:39 2019-12-20 11:55:09 12:02:30 smithi master centos 8.0 rados/monthrash/{ceph.yaml clusters/3-mons.yaml msgr-failures/mon-delay.yaml msgr/async-v2only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/force-sync-many.yaml workloads/snaps-few-objects.yaml} 2
fail 4616012 2019-12-19 22:54:51 2019-12-19 23:52:54 2019-12-20 00:38:55 0:46:01 0:37:51 0:08:10 smithi master rhel 8.0 rados/dashboard/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-low-osd-mem-target.yaml supported-random-distro$/{rhel_8.yaml} tasks/dashboard.yaml} 2
Failure Reason:

Test failure: test_all (tasks.mgr.dashboard.test_rgw.RgwBucketTest)

pass 4616013 2019-12-19 22:54:52 2019-12-19 23:52:54 2019-12-20 00:28:54 0:36:00 0:28:15 0:07:45 smithi master ubuntu 18.04 rados/singleton-bluestore/{all/cephtool.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
fail 4616014 2019-12-19 22:54:53 2019-12-19 23:52:56 2019-12-20 00:08:55 0:15:59 0:06:23 0:09:36 smithi master centos rados/singleton-flat/valgrind-leaks/valgrind-leaks.yaml 1
Failure Reason:

Command failed on smithi133 with status 1: 'sudo yum -y install ceph'

fail 4616015 2019-12-19 22:54:54 2019-12-19 23:52:56 2019-12-20 00:08:57 0:16:01 0:07:59 0:08:02 smithi master ubuntu 18.04 rados/singleton-nomsgr/{all/large-omap-object-warnings.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
Failure Reason:

Command failed (workunit test rados/test_large_omap_detection.py) on smithi106 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/rados/test_large_omap_detection.py'

fail 4616016 2019-12-19 22:54:56 2019-12-19 23:53:18 2019-12-20 00:27:18 0:34:00 0:13:47 0:20:13 smithi master ubuntu 18.04 rados/upgrade/nautilus-x-singleton/{0-cluster/{openstack.yaml start.yaml} 1-install/nautilus.yaml 2-partial-upgrade/firsthalf.yaml 3-thrash/default.yaml 4-workload/{rbd-cls.yaml rbd-import-export.yaml readwrite.yaml snaps-few-objects.yaml} 5-workload/{radosbench.yaml rbd_api.yaml} 6-finish-upgrade.yaml 7-octopus.yaml 8-workload/{rbd-python.yaml snaps-many-objects.yaml} bluestore-bitmap.yaml thrashosds-health.yaml ubuntu_latest.yaml} 4
Failure Reason:

Command failed on smithi084 with status 128: 'rm -rf /home/ubuntu/cephtest/clone.client.0 && git clone --depth 1 --branch nautilus https://github.com/liewegas/ceph /home/ubuntu/cephtest/clone.client.0 && cd /home/ubuntu/cephtest/clone.client.0'
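
Note: git reports fatal clone errors with exit status 128; with '--branch nautilus' against the liewegas fork this is most commonly a missing remote branch or a transient network/GitHub error (an assumption, since the fatal message itself is not captured here). A minimal pre-check sketch before re-running the clone:

    # verify the branch actually exists on that remote
    git ls-remote --heads https://github.com/liewegas/ceph nautilus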

pass 4616017 2019-12-19 22:54:57 2019-12-19 23:54:56 2019-12-20 00:30:55 0:35:59 0:26:43 0:09:16 smithi master ubuntu 18.04 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/normal.yaml msgr-failures/fastclose.yaml objectstore/bluestore-bitmap.yaml rados.yaml recovery-overrides/{more-partial-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/fastread.yaml thrashosds-health.yaml workloads/ec-rados-plugin=clay-k=4-m=2.yaml} 2
pass 4616018 2019-12-19 22:54:58 2019-12-19 23:54:56 2019-12-20 00:18:55 0:23:59 0:18:29 0:05:30 smithi master rhel 8.0 rados/singleton/{all/max-pg-per-osd.from-primary.yaml msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4616019 2019-12-19 22:54:59 2019-12-19 23:54:56 2019-12-20 00:12:55 0:17:59 0:10:27 0:07:32 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-stupid.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_4K_seq_read.yaml} 1
pass 4616020 2019-12-19 22:55:01 2019-12-19 23:55:01 2019-12-20 00:15:00 0:19:59 0:13:23 0:06:36 smithi master ubuntu 18.04 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{more-partial-recovery.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/osd-delay.yaml msgr/async.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/write_fadvise_dontneed.yaml} 2
pass 4616021 2019-12-19 22:55:03 2019-12-19 23:55:02 2019-12-20 00:29:02 0:34:00 0:25:10 0:08:50 smithi master ubuntu 18.04 rados/thrash-erasure-code-overwrites/{bluestore-bitmap.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/normal.yaml msgr-failures/osd-delay.yaml rados.yaml recovery-overrides/{more-async-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-snaps-few-objects-overwrites.yaml} 2
pass 4616022 2019-12-19 22:55:04 2019-12-19 23:55:05 2019-12-20 00:23:06 0:28:01 0:13:18 0:14:43 smithi master rhel 8.0 rados/thrash-erasure-code-shec/{ceph.yaml clusters/{fixed-4.yaml openstack.yaml} msgr-failures/few.yaml objectstore/bluestore-bitmap.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-rados-plugin=shec-k=4-m=3-c=2.yaml} 4
pass 4616023 2019-12-19 22:55:06 2019-12-19 23:56:57 2019-12-20 00:31:00 0:34:03 0:25:58 0:08:05 smithi master ubuntu 18.04 rados/thrash-erasure-code-isa/{arch/x86_64.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/fastclose.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{more-active-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/ec-rados-plugin=isa-k=2-m=1.yaml} 2
fail 4616024 2019-12-19 22:55:07 2019-12-19 23:56:57 2019-12-20 00:41:01 0:44:04 0:27:29 0:16:35 smithi master rhel 8.0 rados/thrash-erasure-code-big/{ceph.yaml cluster/{12-osds.yaml openstack.yaml} msgr-failures/fastclose.yaml objectstore/bluestore-avl.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/ec-rados-plugin=jerasure-k=4-m=2.yaml} 3
Failure Reason:

SELinux denials found on ubuntu@smithi063.front.sepia.ceph.com: ['type=AVC msg=audit(1576801253.082:6945): avc: denied { lock } for pid=29628 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.716:6933): avc: denied { setattr } for pid=29365 comm="rhsmcertd-worke" name="6e2fe611f78ac434c2918bac1eec468dbd24c9b4cdb65bf6a744d10f764f3284-primary.xml.gz" dev="sda1" ino=264533 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801253.082:6944): avc: denied { read } for pid=29628 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.563:6921): avc: denied { map } for pid=29365 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.563:6920): avc: denied { getattr } for pid=29365 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.569:6922): avc: denied { open } for pid=29365 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.569:6923): avc: denied { open } for pid=29365 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=59781 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.489:6914): avc: denied { open } for pid=29365 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801253.083:6946): avc: denied { map } for pid=29628 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.489:6916): avc: denied { getattr } for pid=29365 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.940:6937): avc: denied { open } for pid=29365 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.569:6923): avc: denied { create } for pid=29365 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.489:6915): avc: denied { lock } for pid=29365 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 
tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.687:6932): avc: denied { open } for pid=29365 comm="rhsmcertd-worke" path="/var/cache/dnf/ceph-69b5c88b61d71540/repodata/repomd.xml" dev="sda1" ino=262158 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801253.082:6944): avc: denied { open } for pid=29628 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.854:6936): avc: denied { remove_name } for pid=29365 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=59781 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576801252.940:6937): avc: denied { read } for pid=29365 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.569:6923): avc: denied { add_name } for pid=29365 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576801252.559:6918): avc: denied { read write } for pid=29365 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.559:6919): avc: denied { lock } for pid=29365 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.854:6936): avc: denied { unlink } for pid=29365 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=59781 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.489:6917): avc: denied { map } for pid=29365 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.569:6923): avc: denied { write } for pid=29365 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576801252.489:6914): avc: denied { read write } for pid=29365 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576801252.559:6918): avc: denied { open } for pid=29365 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1']
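
These denials all come from rhsmcertd-worker and setroubleshootd touching the rpm database and dnf caches (note permissive=1 on every record), not from Ceph daemons; the job is flagged because teuthology treats unexpected AVCs in the audit log as a failure. A minimal triage sketch for such a node follows; it assumes the audit and policycoreutils tooling present on a RHEL 8 test node, and "teuthology-avc" is only an illustrative module name.

# Show recent AVC denials, then narrow to the two processes named above
# (comm fields are truncated to 15 characters, hence "rhsmcertd-worke").
sudo ausearch -m avc -ts recent
sudo ausearch -m avc -c rhsmcertd-worke
sudo ausearch -m avc -c setroubleshootd

# If the denials are judged benign, a local policy module can be generated
# and loaded; the module name here is an example, not a project convention.
sudo ausearch -m avc -ts recent | audit2allow -M teuthology-avc
sudo semodule -i teuthology-avc.pp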

pass 4616025 2019-12-19 22:55:08 2019-12-19 23:56:57 2019-12-20 00:21:00 0:24:03 0:11:07 0:12:56 smithi master centos 8.0 rados/verify/{centos_latest.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-thrash/default/{default.yaml thrashosds-health.yaml} msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/filestore-xfs.yaml rados.yaml tasks/mon_recovery.yaml validater/lockdep.yaml} 2
pass 4616026 2019-12-19 22:55:09 2019-12-19 23:57:01 2019-12-20 00:15:00 0:17:59 0:11:21 0:06:38 smithi master ubuntu 18.04 rados/singleton/{all/max-pg-per-osd.from-replica.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4616027 2019-12-19 22:55:11 2019-12-19 23:58:53 2019-12-20 00:14:54 0:16:01 0:09:19 0:06:42 smithi master rhel 8.0 rados/objectstore/{backends/fusestore.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4616028 2019-12-19 22:55:12 2019-12-19 23:58:54 2019-12-20 00:18:54 0:20:00 0:12:08 0:07:52 smithi master rhel 8.0 rados/singleton-nomsgr/{all/lazy_omap_stats_output.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
fail 4616029 2019-12-19 22:55:13 2019-12-20 00:00:57 2019-12-20 00:24:57 0:24:00 0:16:08 0:07:52 smithi master rhel 8.0 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/rados_stress_watch.yaml} 2
Failure Reason:

SELinux denials found on ubuntu@smithi175.front.sepia.ceph.com: ['type=AVC msg=audit(1576800574.857:3285): avc: denied { write } for pid=14442 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576800575.354:3289): avc: denied { lock } for pid=14503 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=61046 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.857:3285): avc: denied { add_name } for pid=14442 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576800574.840:3281): avc: denied { lock } for pid=14442 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=61154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.792:3279): avc: denied { getattr } for pid=14442 comm="rhsmcertd-worke" path="/var/lib/rpm/Packages" dev="sda1" ino=61046 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.791:3277): avc: denied { open } for pid=14442 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=61154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800575.354:3288): avc: denied { open } for pid=14503 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=61046 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.854:3283): avc: denied { map } for pid=14442 comm="rhsmcertd-worke" path="/var/lib/rpm/Name" dev="sda1" ino=61070 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.791:3278): avc: denied { lock } for pid=14442 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=61154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.954:3286): avc: denied { open } for pid=14442 comm="rhsmcertd-worke" path="/var/cache/dnf/epel-fafd94c310c51e1e/metalink.xml" dev="sda1" ino=262151 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.854:3282): avc: denied { getattr } for pid=14442 comm="rhsmcertd-worke" path="/var/lib/rpm/Packages" dev="sda1" ino=61046 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800575.354:3290): avc: denied { map } for pid=14503 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=61070 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.840:3280): avc: denied { open } for pid=14442 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=61154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC 
msg=audit(1576800574.840:3280): avc: denied { read write } for pid=14442 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=61154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.857:3285): avc: denied { create } for pid=14442 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.857:3284): avc: denied { open } for pid=14442 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.791:3277): avc: denied { read write } for pid=14442 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=61154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.973:3287): avc: denied { remove_name } for pid=14442 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=60967 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576800574.973:3287): avc: denied { unlink } for pid=14442 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=60967 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800574.857:3285): avc: denied { open } for pid=14442 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=60967 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800575.354:3288): avc: denied { read } for pid=14503 comm="setroubleshootd" name="Packages" dev="sda1" ino=61046 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1']

pass 4616030 2019-12-19 22:55:14 2019-12-20 00:00:57 2019-12-20 00:17:00 0:16:03 0:11:00 0:05:03 smithi master rhel 8.0 rados/singleton/{all/mon-auth-caps.yaml msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4616031 2019-12-19 22:55:15 2019-12-20 00:00:57 2019-12-20 00:20:57 0:20:00 0:11:35 0:08:25 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-basic-min-osd-mem-target.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_4M_rand_read.yaml} 1
fail 4616032 2019-12-19 22:55:16 2019-12-20 00:02:52 2019-12-20 00:34:54 0:32:02 0:24:03 0:07:59 smithi master rhel 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{more-async-recovery.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/upmap.yaml msgr-failures/fastclose.yaml msgr/async-v1only.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} thrashers/mapgap.yaml thrashosds-health.yaml workloads/admin_socket_objecter_requests.yaml} 2
Failure Reason:

Command failed on smithi062 with status 127: '/home/ubuntu/cephtest/admin_socket_client.0/objecter_requests'
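
Exit status 127 conventionally means the shell could not find /home/ubuntu/cephtest/admin_socket_client.0/objecter_requests at all, i.e. the generated admin-socket wrapper was missing on smithi062, rather than the socket query itself failing. The same information can be pulled straight from a daemon's admin socket; a sketch with an illustrative socket path and daemon id that are not taken from this job:

# Query outstanding Objecter requests through the admin socket directly.
sudo ceph --admin-daemon /var/run/ceph/ceph-client.admin.asok objecter_requests

# Or, for a daemon known to the cluster:
sudo ceph daemon osd.0 objecter_requests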

fail 4616033 2019-12-19 22:55:18 2019-12-20 00:02:52 2019-12-20 00:18:54 0:16:02 0:03:14 0:12:48 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-install/hammer.yaml backoff/normal.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/off.yaml distro$/{ubuntu_latest.yaml} msgr-failures/fastclose.yaml rados.yaml thrashers/morepggrow.yaml thrashosds-health.yaml workloads/radosbench.yaml} 4
Failure Reason:

Failed to fetch package version from https://shaman.ceph.com/api/search/?status=ready&project=ceph&flavor=default&distros=ubuntu%2F18.04%2Fx86_64&ref=hammer
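
This thrash-old-clients job installs hammer-era client packages, and the lookup failing this way typically means shaman has no ready hammer builds for ubuntu 18.04 (hammer long predates bionic), so the job dies before any Ceph code runs. The failed query can be reproduced directly against the shaman API:

# Same search the installer performed, with the URL-escaping undone.
curl -s 'https://shaman.ceph.com/api/search/?status=ready&project=ceph&flavor=default&distros=ubuntu/18.04/x86_64&ref=hammer'

# An empty JSON list in the response would mean no ready build matches that distro/ref pair.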

fail 4616034 2019-12-19 22:55:19 2019-12-20 00:02:52 2019-12-20 00:44:55 0:42:03 0:34:01 0:08:02 smithi master ubuntu 18.04 rados/standalone/{supported-random-distro$/{ubuntu_latest.yaml} workloads/mon.yaml} 1
Failure Reason:

Command failed (workunit test mon/osd-pool-create.sh) on smithi068 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/standalone/mon/osd-pool-create.sh'
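
CEPH_REF points at the sha1 under test, and the workunit is simply qa/standalone/mon/osd-pool-create.sh run under a 3h timeout. A hedged sketch of re-running just that script from a source checkout; it assumes a built tree and that qa/run-standalone.sh accepts the script name as an argument, which should be confirmed against the branch in question:

# From the build directory of a ceph checkout at the same sha1 (illustrative paths).
cd ~/ceph/build
../qa/run-standalone.sh mon/osd-pool-create.sh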

pass 4616035 2019-12-19 22:55:20 2019-12-20 00:02:52 2019-12-20 00:22:54 0:20:02 0:11:51 0:08:11 smithi master centos 8.0 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-stupid.yaml supported-random-distro$/{centos_8.yaml} tasks/failover.yaml} 2
pass 4616036 2019-12-19 22:55:21 2019-12-20 00:02:54 2019-12-20 00:20:54 0:18:00 0:12:02 0:05:58 smithi master rhel 8.0 rados/singleton/{all/mon-config-key-caps.yaml msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-bitmap.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4616037 2019-12-19 22:55:22 2019-12-20 00:02:54 2019-12-20 00:20:54 0:18:00 0:11:35 0:06:25 smithi master rhel 8.0 rados/singleton-nomsgr/{all/librados_hello_world.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4616038 2019-12-19 22:55:23 2019-12-20 00:02:55 2019-12-20 00:20:55 0:18:00 0:08:04 0:09:56 smithi master centos 8.0 rados/multimon/{clusters/21.yaml msgr-failures/few.yaml msgr/async.yaml no_pools.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{centos_8.yaml} tasks/mon_clock_no_skews.yaml} 3
fail 4616039 2019-12-19 22:55:24 2019-12-20 00:04:58 2019-12-20 00:31:00 0:26:02 0:17:01 0:09:01 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{more-async-partial-recovery.yaml} backoff/peering_and_degraded.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/crush-compat.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/morepggrow.yaml thrashosds-health.yaml workloads/cache-agent-big.yaml} 2
Failure Reason:

SELinux denials found on ubuntu@smithi009.front.sepia.ceph.com: ['type=AVC msg=audit(1576800844.592:3662): avc: denied { lock } for pid=15176 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=657 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800844.592:3663): avc: denied { map } for pid=15176 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=658 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800844.592:3661): avc: denied { read } for pid=15176 comm="setroubleshootd" name="Packages" dev="sda1" ino=657 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576800844.592:3661): avc: denied { open } for pid=15176 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=657 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1']

pass 4616040 2019-12-19 22:55:25 2019-12-20 00:04:58 2019-12-20 00:31:00 0:26:02 0:18:48 0:07:14 smithi master centos 8.0 rados/singleton/{all/mon-config-keys.yaml msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
pass 4616041 2019-12-19 22:55:26 2019-12-20 00:04:59 2019-12-20 00:23:00 0:18:01 0:10:37 0:07:24 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-bitmap.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_4M_seq_read.yaml} 1
pass 4616042 2019-12-19 22:55:28 2019-12-20 00:05:03 2019-12-20 00:35:02 0:29:59 0:22:48 0:07:11 smithi master ubuntu 18.04 rados/thrash-erasure-code/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/few.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{more-partial-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/minsize_recovery.yaml thrashosds-health.yaml workloads/ec-rados-plugin=jerasure-k=2-m=1.yaml} 2
pass 4616043 2019-12-19 22:55:29 2019-12-20 00:07:07 2019-12-20 00:35:07 0:28:00 0:20:42 0:07:18 smithi master centos 8.0 rados/monthrash/{ceph.yaml clusters/9-mons.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/many.yaml workloads/pool-create-delete.yaml} 2
pass 4616044 2019-12-19 22:55:30 2019-12-20 00:07:07 2019-12-20 00:21:07 0:14:00 0:08:21 0:05:39 smithi master ubuntu 18.04 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/many.yaml msgr/async-v2only.yaml objectstore/bluestore-comp.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml} tasks/rados_striper.yaml} 2
pass 4616045 2019-12-19 22:55:31 2019-12-20 00:07:08 2019-12-20 00:23:07 0:15:59 0:08:56 0:07:03 smithi master centos 8.0 rados/singleton/{all/mon-config.yaml msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
fail 4616046 2019-12-19 22:55:32 2019-12-20 00:07:18 2019-12-20 00:31:18 0:24:00 0:03:29 0:20:31 smithi master ubuntu 18.04 rados/thrash-old-clients/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-install/jewel-v1only.yaml backoff/peering.yaml ceph.yaml clusters/{openstack.yaml three-plus-one.yaml} d-balancer/crush-compat.yaml distro$/{ubuntu_latest.yaml} msgr-failures/few.yaml rados.yaml thrashers/none.yaml thrashosds-health.yaml workloads/rbd_cls.yaml} 4
Failure Reason:

Command failed on smithi161 with status 100: u'sudo DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install ceph=10.2.11-18-g115560f-1bionic ceph-mds=10.2.11-18-g115560f-1bionic ceph-common=10.2.11-18-g115560f-1bionic ceph-fuse=10.2.11-18-g115560f-1bionic ceph-test=10.2.11-18-g115560f-1bionic radosgw=10.2.11-18-g115560f-1bionic python3-rados=10.2.11-18-g115560f-1bionic python3-rgw=10.2.11-18-g115560f-1bionic python3-cephfs=10.2.11-18-g115560f-1bionic python3-rbd=10.2.11-18-g115560f-1bionic librados2=10.2.11-18-g115560f-1bionic librbd1=10.2.11-18-g115560f-1bionic rbd-fuse=10.2.11-18-g115560f-1bionic librados2=10.2.11-18-g115560f-1bionic'
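
apt-get exits with status 100 when the requested package set cannot be installed; here every package is pinned to 10.2.11-18-g115560f-1bionic (a jewel build), and that exact version is evidently not available from the repositories configured on smithi161. A quick way to see what the node's sources actually offer, using package names taken from the failing command:

# List candidate versions known to apt for the pinned packages.
apt-cache policy ceph librados2
apt-cache madison ceph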

pass 4616047 2019-12-19 22:55:33 2019-12-20 00:07:19 2019-12-20 00:29:18 0:21:59 0:11:25 0:10:34 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/2-size-2-min-size.yaml 1-pg-log-overrides/normal_pg_log.yaml 2-recovery-overrides/{default.yaml} backoff/normal.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/off.yaml msgr-failures/osd-delay.yaml msgr/async.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/none.yaml thrashosds-health.yaml workloads/cache-agent-small.yaml} 2
fail 4616048 2019-12-19 22:55:34 2019-12-20 00:09:24 2019-12-20 00:55:24 0:46:00 0:36:23 0:09:37 smithi master centos 8.0 rados/dashboard/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/bluestore-stupid.yaml supported-random-distro$/{centos_8.yaml} tasks/dashboard.yaml} 2
Failure Reason:

Test failure: test_all (tasks.mgr.dashboard.test_rgw.RgwBucketTest)
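
RgwBucketTest lives in qa/tasks/mgr/dashboard/test_rgw.py and drives the dashboard's REST API against an RGW instance. A sketch of running only that class outside teuthology; it assumes the dashboard's run-backend-api-tests.sh helper exists in the checkout and accepts a dotted test path, both of which should be verified against this branch:

# Illustrative: run the failing class against a local vstart cluster.
cd ~/ceph/src/pybind/mgr/dashboard
./run-backend-api-tests.sh tasks.mgr.dashboard.test_rgw.RgwBucketTest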

pass 4616049 2019-12-19 22:55:35 2019-12-20 00:09:24 2019-12-20 00:25:23 0:15:59 0:07:32 0:08:27 smithi master ubuntu 18.04 rados/objectstore/{backends/keyvaluedb.yaml supported-random-distro$/{ubuntu_latest.yaml}} 1
pass 4616050 2019-12-19 22:55:37 2019-12-20 00:09:24 2019-12-20 00:33:24 0:24:00 0:14:58 0:09:02 smithi master centos 8.0 rados/singleton-nomsgr/{all/msgr.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1
fail 4616051 2019-12-19 22:55:38 2019-12-20 00:09:25 2019-12-20 00:29:24 0:19:59 0:12:18 0:07:41 smithi master rhel 8.0 rados/mgr/{clusters/{2-node-mgr.yaml} debug/mgr.yaml objectstore/filestore-xfs.yaml supported-random-distro$/{rhel_8.yaml} tasks/insights.yaml} 2
Failure Reason:

Test failure: test_health_history (tasks.mgr.test_insights.TestInsights)
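
test_health_history exercises the mgr insights module's rolling record of cluster health. The same data can be inspected by hand on a running cluster, assuming this branch ships the insights module and its CLI (both assumptions, not taken from the log):

# Enable the module and dump its report, which embeds recent health history.
ceph mgr module enable insights
ceph insights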

pass 4616052 2019-12-19 22:55:39 2019-12-20 00:09:24 2019-12-20 00:43:24 0:34:00 0:26:19 0:07:41 smithi master ubuntu 18.04 rados/thrash-erasure-code-overwrites/{bluestore-bitmap.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} fast/fast.yaml msgr-failures/fastclose.yaml rados.yaml recovery-overrides/{more-async-partial-recovery.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/fastread.yaml thrashosds-health.yaml workloads/ec-pool-snaps-few-objects-overwrites.yaml} 2
pass 4616053 2019-12-19 22:55:40 2019-12-20 00:10:50 2019-12-20 00:38:49 0:27:59 0:11:09 0:16:50 smithi master ubuntu 18.04 rados/thrash-erasure-code-shec/{ceph.yaml clusters/{fixed-4.yaml openstack.yaml} msgr-failures/osd-delay.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{default.yaml} supported-random-distro$/{ubuntu_latest.yaml} thrashers/default.yaml thrashosds-health.yaml workloads/ec-rados-plugin=shec-k=4-m=3-c=2.yaml} 4
dead 4616054 2019-12-19 22:55:41 2019-12-20 00:10:55 2019-12-20 12:13:29 12:02:34 smithi master rhel 8.0 rados/thrash-erasure-code-isa/{arch/x86_64.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml objectstore/bluestore-comp.yaml rados.yaml recovery-overrides/{more-active-recovery.yaml} supported-random-distro$/{rhel_8.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-rados-plugin=isa-k=2-m=1.yaml} 2
pass 4616055 2019-12-19 22:55:42 2019-12-20 00:11:09 2019-12-20 00:47:08 0:35:59 0:25:55 0:10:04 smithi master centos 8.0 rados/thrash-erasure-code-big/{ceph.yaml cluster/{12-osds.yaml openstack.yaml} msgr-failures/fastclose.yaml objectstore/bluestore-avl.yaml rados.yaml recovery-overrides/{more-partial-recovery.yaml} supported-random-distro$/{centos_8.yaml} thrashers/careful.yaml thrashosds-health.yaml workloads/ec-rados-plugin=jerasure-k=4-m=2.yaml} 3
pass 4616056 2019-12-19 22:55:43 2019-12-20 00:13:03 2019-12-20 00:35:03 0:22:00 0:11:44 0:10:16 smithi master centos 8.0 rados/verify/{centos_latest.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-thrash/default/{default.yaml thrashosds-health.yaml} msgr-failures/few.yaml msgr/async-v1only.yaml objectstore/bluestore-avl.yaml rados.yaml tasks/mon_recovery.yaml validater/lockdep.yaml} 2
pass 4616057 2019-12-19 22:55:45 2019-12-20 00:13:03 2019-12-20 00:31:03 0:18:00 0:10:18 0:07:42 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-comp.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_4M_write.yaml} 1
pass 4616058 2019-12-19 22:55:46 2019-12-20 00:13:04 2019-12-20 00:37:03 0:23:59 0:17:34 0:06:25 smithi master rhel 8.0 rados/singleton/{all/osd-backfill.yaml msgr-failures/many.yaml msgr/async-v1only.yaml objectstore/bluestore-stupid.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
pass 4616059 2019-12-19 22:55:47 2019-12-20 00:13:04 2019-12-20 00:37:03 0:23:59 0:16:40 0:07:19 smithi master rhel 8.0 rados/singleton/{all/osd-recovery-incomplete.yaml msgr-failures/few.yaml msgr/async-v2only.yaml objectstore/filestore-xfs.yaml rados.yaml supported-random-distro$/{rhel_8.yaml}} 1
fail 4616060 2019-12-19 22:55:48 2019-12-20 00:13:04 2019-12-20 00:49:04 0:36:00 0:24:32 0:11:28 smithi master centos 8.0 rados/thrash/{0-size-min-size-overrides/3-size-2-min-size.yaml 1-pg-log-overrides/short_pg_log.yaml 2-recovery-overrides/{default.yaml} backoff/peering.yaml ceph.yaml clusters/{fixed-2.yaml openstack.yaml} d-balancer/upmap.yaml msgr-failures/fastclose.yaml msgr/async-v1only.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{centos_8.yaml} thrashers/pggrow.yaml thrashosds-health.yaml workloads/cache-pool-snaps-readproxy.yaml} 2
Failure Reason:

Command failed on smithi085 with status 11: u'sudo adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 120 ceph --cluster ceph pg deep-scrub 1.64'
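
The ceph CLI exits with the errno of a failed command, so status 11 here corresponds to EAGAIN: the monitor accepted the request but the deep scrub could not be initiated, commonly because the PG's primary OSD was down or being thrashed at that moment. A manual follow-up sketch, with the pg id taken from the failure message:

# Retry the scrub and inspect the PG and OSD state.
ceph pg deep-scrub 1.64
ceph pg 1.64 query | head -n 40
ceph osd tree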

fail 4616061 2019-12-19 22:55:49 2019-12-20 00:15:13 2019-12-20 05:19:24 5:04:11 4:57:00 0:07:11 smithi master rhel 8.0 rados/standalone/{supported-random-distro$/{rhel_8.yaml} workloads/osd.yaml} 1
Failure Reason:

Command failed (workunit test osd/osd-rep-recov-eio.sh) on smithi001 with status 124: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=6bc46754bbd0a66b353695c3f38e3ef0742c60d2 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/standalone/osd/osd-rep-recov-eio.sh'

fail 4616062 2019-12-19 22:55:50 2019-12-20 00:15:14 2019-12-20 00:53:13 0:37:59 0:31:19 0:06:40 smithi master rhel 8.0 rados/basic/{ceph.yaml clusters/{fixed-2.yaml openstack.yaml} msgr-failures/few.yaml msgr/async.yaml objectstore/bluestore-low-osd-mem-target.yaml rados.yaml supported-random-distro$/{rhel_8.yaml} tasks/rados_workunit_loadgen_big.yaml} 2
Failure Reason:

SELinux denials found on ubuntu@smithi101.front.sepia.ceph.com: ['type=AVC msg=audit(1576802806.641:6877): avc: denied { unlink } for pid=30577 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=59864 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.380:6874): avc: denied { write } for pid=30577 comm="rhsmcertd-worke" name="dnf" dev="sda1" ino=60792 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576802806.307:6871): avc: denied { getattr } for pid=30577 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.380:6873): avc: denied { open } for pid=30577 comm="rhsmcertd-worke" path="/var/log/hawkey.log" dev="sda1" ino=60817 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:var_log_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.837:6879): avc: denied { open } for pid=30631 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.307:6872): avc: denied { map } for pid=30577 comm="rhsmcertd-worke" path="/var/lib/rpm/__db.001" dev="sda1" ino=262271 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.723:6878): avc: denied { read } for pid=30577 comm="rhsmcertd-worke" name="satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.641:6877): avc: denied { remove_name } for pid=30577 comm="rhsmcertd-worke" name="metadata_lock.pid" dev="sda1" ino=59864 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576802806.380:6874): avc: denied { create } for pid=30577 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.838:6880): avc: denied { lock } for pid=30631 comm="setroubleshootd" path="/var/lib/rpm/Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.723:6878): avc: denied { open } for pid=30577 comm="rhsmcertd-worke" path="/etc/dnf/modules.d/satellite-5-client.module" dev="sda1" ino=57237 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:root_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.380:6874): avc: denied { open } for pid=30577 comm="rhsmcertd-worke" path="/var/cache/dnf/metadata_lock.pid" dev="sda1" ino=59864 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.307:6869): avc: denied { read write } for pid=30577 comm="rhsmcertd-worke" name=".dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.838:6881): 
avc: denied { map } for pid=30631 comm="setroubleshootd" path="/var/lib/rpm/Name" dev="sda1" ino=262251 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.489:6876): avc: denied { setattr } for pid=30577 comm="rhsmcertd-worke" name="77950341a02eb4603ce116071aa18013c1f9e114406df1694cfccc21155717b9-primary.xml.gz" dev="sda1" ino=262180 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.307:6870): avc: denied { lock } for pid=30577 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.489:6875): avc: denied { open } for pid=30577 comm="rhsmcertd-worke" path="/var/cache/dnf/ceph-69b5c88b61d71540/repodata/repomd.xml" dev="sda1" ino=262154 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:rpm_var_cache_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.380:6874): avc: denied { add_name } for pid=30577 comm="rhsmcertd-worke" name="metadata_lock.pid" scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=system_u:object_r:rpm_var_cache_t:s0 tclass=dir permissive=1', 'type=AVC msg=audit(1576802806.837:6879): avc: denied { read } for pid=30631 comm="setroubleshootd" name="Packages" dev="sda1" ino=262250 scontext=system_u:system_r:setroubleshootd_t:s0-s0:c0.c1023 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1', 'type=AVC msg=audit(1576802806.307:6869): avc: denied { open } for pid=30577 comm="rhsmcertd-worke" path="/var/lib/rpm/.dbenv.lock" dev="sda1" ino=262270 scontext=system_u:system_r:rhsmcertd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1']

pass 4616063 2019-12-19 22:55:51 2019-12-20 00:15:13 2019-12-20 00:47:13 0:32:00 0:21:34 0:10:26 smithi master ubuntu 18.04 rados/singleton-nomsgr/{all/multi-backfill-reject.yaml rados.yaml supported-random-distro$/{ubuntu_latest.yaml}} 2
pass 4616064 2019-12-19 22:55:52 2019-12-20 00:15:50 2019-12-20 00:43:50 0:28:00 0:20:13 0:07:47 smithi master ubuntu 18.04 rados/perf/{ceph.yaml objectstore/bluestore-low-osd-mem-target.yaml openstack.yaml settings/optimized.yaml ubuntu_latest.yaml workloads/radosbench_omap_write.yaml} 1
pass 4616065 2019-12-19 22:55:53 2019-12-20 00:16:56 2019-12-20 00:36:55 0:19:59 0:12:07 0:07:52 smithi master centos 8.0 rados/singleton/{all/osd-recovery.yaml msgr-failures/many.yaml msgr/async.yaml objectstore/bluestore-avl.yaml rados.yaml supported-random-distro$/{centos_8.yaml}} 1