Status | Job ID | Posted | Started | Updated | Runtime | Duration | In Waiting | Machine | Teuthology Branch | OS Type | OS Version | Description | Nodes
fail | 7141708 | 2023-01-27 14:19:23 | 2023-01-27 14:20:53 | 2023-01-27 14:49:50 | 0:28:57 | 0:19:24 | 0:09:33 | smithi | main | centos | 8.stream | orch:cephadm/workunits/{0-distro/centos_8.stream_container_tools agent/on mon_election/connectivity task/test_set_mon_crush_locations} | 3
Failure Reason:

Command failed on smithi098 with status 4: 'sudo /home/ubuntu/cephtest/cephadm --image quay-quay-quay.apps.os.sepia.ceph.com/ceph-ci/ceph:50df78f86bdf5142d7f299e04e05fa72ef8696ba shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 7ffb2a90-9e50-11ed-9e56-001a4aab830c -- bash -c \'ceph mon dump --format json | jq -e \'"\'"\'.mons | .[] | select(.name == "host.a") | .crush_location == "{datacenter=a}"\'"\'"\'\''
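
For context on the exit status: with -e, jq exits 0 when the last output is truthy, 1 when it is false or null, and 4 when the filter produces no output at all. Status 4 therefore suggests the select() matched no mon named "host.a" in the dump, rather than a mon with the wrong crush_location. A minimal sketch against canned input (the JSON is illustrative, not from this run):

    # Mon dump with host.a present but no crush_location set.
    echo '{"mons":[{"name":"host.a","crush_location":"{}"}]}' \
      | jq -e '.mons | .[] | select(.name == "host.a") | .crush_location == "{datacenter=a}"'
    echo "exit=$?"    # 1: the mon matched but the comparison is false
    # Remove host.a from the dump and the same filter emits nothing,
    # so jq -e exits 4 -- the status reported above.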

fail | 7141709 | 2023-01-27 14:19:24 | 2023-01-27 14:20:54 | 2023-01-27 14:35:44 | 0:14:50 | 0:09:05 | 0:05:45 | smithi | main | rhel | 8.6 | orch:cephadm/workunits/{0-distro/rhel_8.6_container_tools_rhel8 agent/off mon_election/classic task/test_cephadm_repos} | 1
Failure Reason:

Command failed (workunit test cephadm/test_repos.sh) on smithi086 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=50df78f86bdf5142d7f299e04e05fa72ef8696ba TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 CEPH_MNT=/home/ubuntu/cephtest/mnt.0 timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/cephadm/test_repos.sh'
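
For reference, a workunit is a script from qa/workunits/ in the ceph tree, checked out at CEPH_REF and run under a 3-hour timeout with the environment shown above; status 1 is the script's own exit code. A rough local approximation (the paths and clone location are illustrative assumptions, not the teuthology-managed ones):

    # Sketch: run the same workunit outside teuthology.
    TESTDIR=/tmp/cephtest
    git clone https://github.com/ceph/ceph.git "$TESTDIR/clone.client.0"
    git -C "$TESTDIR/clone.client.0" checkout 50df78f86bdf5142d7f299e04e05fa72ef8696ba
    mkdir -p -- "$TESTDIR/mnt.0/client.0/tmp" && cd -- "$TESTDIR/mnt.0/client.0/tmp"
    CEPH_REF=50df78f86bdf5142d7f299e04e05fa72ef8696ba TESTDIR="$TESTDIR" CEPH_ID=0 \
      timeout 3h "$TESTDIR/clone.client.0/qa/workunits/cephadm/test_repos.sh"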

fail | 7141710 | 2023-01-27 14:19:25 | 2023-01-27 14:20:54 | 2023-01-27 15:24:15 | 1:03:21 | 0:55:00 | 0:08:21 | smithi | main | centos | 8.stream | orch:cephadm/upgrade/{1-start-distro/1-start-centos_8.stream_container-tools 2-repo_digest/defaut 3-upgrade/staggered 4-wait 5-upgrade-ls agent/off mon_election/classic} | 2
Failure Reason:

Command failed on smithi080 with status 1: 'sudo /home/ubuntu/cephtest/cephadm --image quay.io/ceph/ceph:v16.2.0 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 060deb5a-9e50-11ed-9e56-001a4aab830c -e sha1=50df78f86bdf5142d7f299e04e05fa72ef8696ba -- bash -c \'ceph versions | jq -e \'"\'"\'.osd | length == 2\'"\'"\'\''
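
This check polls `ceph versions`, which reports, per daemon type, a map of version string to daemon count; during this staggered upgrade the test expects the OSDs to be split across exactly two distinct versions. Exit status 1 is jq -e reporting that the expression evaluated to false, i.e. the OSD map never reached two entries. A sketch with canned output (sample data, not from the run):

    # All OSDs still on one version: `.osd | length == 2` is false.
    echo '{"osd":{"ceph version 16.2.0 (...) pacific (stable)":2}}' \
      | jq -e '.osd | length == 2'
    echo "exit=$?"    # 1, matching the status reported above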

fail | 7141711 | 2023-01-27 14:19:26 | 2023-01-27 14:21:24 | 2023-01-27 14:48:30 | 0:27:06 | 0:17:52 | 0:09:14 | smithi | main | centos | 8.stream | orch:cephadm/workunits/{0-distro/centos_8.stream_container_tools_crun agent/on mon_election/connectivity task/test_orch_cli} | 1
Failure Reason:

SELinux denials found on ubuntu@smithi067.front.sepia.ceph.com:
  type=AVC msg=audit(1674830180.105:17023): avc: denied { read } for pid=94165 comm="rpm" name="Packages" dev="sda1" ino=525269 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674829796.086:215): avc: denied { lock } for pid=6357 comm="rpm" path="/var/lib/rpm/Packages" dev="sda1" ino=98534 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830052.644:14040): avc: denied { read } for pid=77402 comm="rpm" name="Packages" dev="sda1" ino=98534 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830052.644:14041): avc: denied { lock } for pid=77402 comm="rpm" path="/var/lib/rpm/Packages" dev="sda1" ino=98534 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830179.750:16992): avc: denied { read } for pid=715 comm="sssd" name="resolv.conf" dev="sda1" ino=525304 scontext=system_u:system_r:sssd_t:s0 tcontext=unconfined_u:object_r:user_home_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830180.105:17024): avc: denied { lock } for pid=94165 comm="rpm" path="/var/lib/rpm/Packages" dev="sda1" ino=525269 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674829796.066:214): avc: denied { open } for pid=6357 comm="rpm" path="/var/lib/rpm/Packages" dev="sda1" ino=98534 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830052.644:14042): avc: denied { map } for pid=77402 comm="rpm" path="/var/lib/rpm/Name" dev="sda1" ino=98535 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830180.105:17023): avc: denied { open } for pid=94165 comm="rpm" path="/var/lib/rpm/Packages" dev="sda1" ino=525269 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830180.105:17025): avc: denied { map } for pid=94165 comm="rpm" path="/var/lib/rpm/Name" dev="sda1" ino=525270 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830051.131:14037): avc: denied { search } for pid=6244 comm=72733A6D61696E20513A526567 name="cephtest" dev="sda1" ino=393230 scontext=system_u:system_r:syslogd_t:s0 tcontext=unconfined_u:object_r:user_home_t:s0 tclass=dir permissive=1
  type=AVC msg=audit(1674829796.093:216): avc: denied { map } for pid=6357 comm="rpm" path="/var/lib/rpm/Name" dev="sda1" ino=98535 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674829796.066:214): avc: denied { read } for pid=6357 comm="rpm" name="Packages" dev="sda1" ino=98534 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
  type=AVC msg=audit(1674830052.644:14040): avc: denied { open } for pid=77402 comm="rpm" path="/var/lib/rpm/Packages" dev="sda1" ino=98534 scontext=system_u:system_r:setroubleshootd_t:s0 tcontext=unconfined_u:object_r:var_lib_t:s0 tclass=file permissive=1
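
These denials come from setroubleshootd, sssd, and rsyslog touching rpm and home-directory files while the node runs in permissive mode; teuthology flags any recorded AVC as a failure regardless of the test outcome. A minimal triage sketch using standard audit tooling (nothing here is specific to Ceph):

    # Summarize and explain the AVC denials recorded by auditd.
    sudo ausearch -m AVC -ts today | audit2why
    # comm= values printed without quotes are hex-encoded; decode them:
    echo 72733A6D61696E20513A526567 | xxd -r -p; echo    # -> rs:main Q:Reg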

fail | 7141712 | 2023-01-27 14:19:28 | 2023-01-27 14:21:25 | 2023-01-27 14:47:44 | 0:26:19 | 0:20:56 | 0:05:23 | smithi | main | rhel | 8.6 | orch:cephadm/workunits/{0-distro/rhel_8.6_container_tools_rhel8 agent/on mon_election/connectivity task/test_set_mon_crush_locations} | 3
Failure Reason:

Command failed on smithi012 with status 4: 'sudo /home/ubuntu/cephtest/cephadm --image quay-quay-quay.apps.os.sepia.ceph.com/ceph-ci/ceph:50df78f86bdf5142d7f299e04e05fa72ef8696ba shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 4ca45a7c-9e50-11ed-9e56-001a4aab830c -- bash -c \'ceph mon dump --format json | jq -e \'"\'"\'.mons | .[] | select(.name == "host.a") | .crush_location == "{datacenter=a}"\'"\'"\'\''

fail | 7141713 | 2023-01-27 14:19:29 | 2023-01-27 14:21:25 | 2023-01-27 14:38:35 | 0:17:10 | 0:08:47 | 0:08:23 | smithi | main | centos | 8.stream | orch:cephadm/workunits/{0-distro/centos_8.stream_container_tools_crun agent/off mon_election/classic task/test_cephadm_repos} | 1
Failure Reason:

Command failed (workunit test cephadm/test_repos.sh) on smithi119 with status 1: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=50df78f86bdf5142d7f299e04e05fa72ef8696ba TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 CEPH_MNT=/home/ubuntu/cephtest/mnt.0 timeout 3h /home/ubuntu/cephtest/clone.client.0/qa/workunits/cephadm/test_repos.sh'

fail | 7141714 | 2023-01-27 14:19:30 | 2023-01-27 14:21:45 | 2023-01-27 14:37:56 | 0:16:11 | 0:06:48 | 0:09:23 | smithi | main | centos | 8.stream | orch:cephadm/workunits/{0-distro/rhel_8.6_container_tools_3.0 agent/on mon_election/connectivity task/test_iscsi_pids_limit/{centos_8.stream_container_tools test_iscsi_pids_limit}} | 1
Failure Reason:

Command failed on smithi026 with status 1: 'TESTDIR=/home/ubuntu/cephtest bash -s'
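
The `bash -s` form means the test script was fed to the shell over stdin, so the failure reason shows only the environment, not the script body; the actual commands (the iSCSI PIDs-limit checks) are in the teuthology log for this job. The pattern itself, in miniature:

    # `bash -s` reads the script from stdin; TESTDIR is passed in the environment.
    echo 'echo "running with TESTDIR=$TESTDIR"' | TESTDIR=/home/ubuntu/cephtest bash -s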

fail | 7141715 | 2023-01-27 14:19:31 | 2023-01-27 14:23:36 | 2023-01-27 15:30:35 | 1:06:59 | 0:55:22 | 0:11:37 | smithi | main | centos | 8.stream | orch:cephadm/upgrade/{1-start-distro/1-start-centos_8.stream_container-tools 2-repo_digest/defaut 3-upgrade/staggered 4-wait 5-upgrade-ls agent/on mon_election/classic} | 2
Failure Reason:

Command failed on smithi090 with status 1: 'sudo /home/ubuntu/cephtest/cephadm --image quay.io/ceph/ceph:v16.2.0 shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid ce555008-9e50-11ed-9e56-001a4aab830c -e sha1=50df78f86bdf5142d7f299e04e05fa72ef8696ba -- bash -c \'ceph versions | jq -e \'"\'"\'.osd | length == 2\'"\'"\'\''

fail | 7141716 | 2023-01-27 14:19:32 | 2023-01-27 14:24:56 | 2023-01-27 14:54:45 | 0:29:49 | 0:19:23 | 0:10:26 | smithi | main | centos | 8.stream | orch:cephadm/workunits/{0-distro/centos_8.stream_container_tools_crun agent/on mon_election/connectivity task/test_set_mon_crush_locations} | 3
Failure Reason:

Command failed on smithi016 with status 4: 'sudo /home/ubuntu/cephtest/cephadm --image quay-quay-quay.apps.os.sepia.ceph.com/ceph-ci/ceph:50df78f86bdf5142d7f299e04e05fa72ef8696ba shell -c /etc/ceph/ceph.conf -k /etc/ceph/ceph.client.admin.keyring --fsid 4aa14a9a-9e51-11ed-9e56-001a4aab830c -- bash -c \'ceph mon dump --format json | jq -e \'"\'"\'.mons | .[] | select(.name == "host.a") | .crush_location == "{datacenter=a}"\'"\'"\'\''