---
# Teuthology job record for run
# yuriw-2020-06-15_18:26:21-rados-wip-yuri2-testing-2020-06-15-1645-octopus-distro-basic-smithi,
# job 5151109. Reconstructed into valid YAML: values re-indented under their
# keys, the overrides/tasks nesting restored, booleans canonicalized to
# true/false, and type-ambiguous scalars (durations, versions, timestamps,
# numeric IDs) quoted so YAML 1.1 loaders do not retype them.
log_href: http://qa-proxy.ceph.com/teuthology/yuriw-2020-06-15_18:26:21-rados-wip-yuri2-testing-2020-06-15-1645-octopus-distro-basic-smithi/5151109/teuthology.log
archive_path: /home/teuthworker/archive/yuriw-2020-06-15_18:26:21-rados-wip-yuri2-testing-2020-06-15-1645-octopus-distro-basic-smithi/5151109
# Quoted: contains '{' and a space, which can confuse plain-scalar parsing.
description: 'rados/cephadm/smoke-roleless/{distro/centos_8.0 start}'
# Quoted: digits-and-colons would parse as a sexagesimal integer in YAML 1.1.
duration: '0:03:40'
email: ceph-qa@ceph.io
failure_reason: "No module named 'ceph_manager'"
flavor: null
# Quoted: all-digit job ID is a string identifier, not an integer.
job_id: '5151109'
kernel: null
last_in_suite: false
machine_type: smithi
name: yuriw-2020-06-15_18:26:21-rados-wip-yuri2-testing-2020-06-15-1645-octopus-distro-basic-smithi
nuke_on_error: true
os_type: centos
# Quoted: bare 8.0 would load as the float 8.0, losing "8.0" vs "8.00".
os_version: '8.0'
overrides:
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon: null
  selinux:
    whitelist:
      - 'scontext=system_u:system_r:logrotate_t:s0'
  workunit:
    sha1: 32f62798ee9a2145abdafd82203a5740d87c5dbd
    branch: wip-yuri2-testing-2020-06-15-1645-octopus
  ceph:
    log-whitelist:
      # Single-quoted so the regex backslashes stay literal.
      - '\(MDS_ALL_DOWN\)'
      - '\(MDS_UP_LESS_THAN_MAX\)'
    conf:
      mgr:
        debug ms: 1
        debug mgr: 20
      mon:
        debug paxos: 20
        debug mon: 20
        debug ms: 1
      osd:
        debug osd: 25
        debug filestore: 20
        debug journal: 20
        debug ms: 20
        osd shutdown pgref assert: true
    sha1: 32f62798ee9a2145abdafd82203a5740d87c5dbd
  install:
    ceph:
      sha1: 32f62798ee9a2145abdafd82203a5740d87c5dbd
  admin_socket:
    branch: wip-yuri2-testing-2020-06-15-1645-octopus
owner: scheduled_yuriw@teuthology
pid: null
roles:
  - ['host.a', 'osd.0', 'osd.1', 'osd.2', 'osd.3', 'client.0']
  - ['host.b', 'osd.4', 'osd.5', 'osd.6', 'osd.7', 'client.1']
sentry_event: http://sentry.ceph.com/sepia/teuthology/?q=06777057168d46afbd7a7a6231111d07
status: fail
success: false
branch: wip-yuri2-testing-2020-06-15-1645-octopus
seed: null
sha1: 32f62798ee9a2145abdafd82203a5740d87c5dbd
subset: null
suite: null
suite_branch: wip-yuri2-testing-2020-06-15-1645-octopus
suite_path: null
suite_relpath: null
suite_repo: null
suite_sha1: 32f62798ee9a2145abdafd82203a5740d87c5dbd
targets:
  smithi192.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDdBHH8kFMrot5qRf+r/XsItS8TucPiaW82AYHoJEyPJMl3vuQg7giudrbKPfOd9TxzgrscK0qSeAXm+GEF3g1rdYbjUZBhBmdXVxva6e6yCkgrM6CmA3WnFRpyx3p0CalPQQNFhBOg5mfomGmJAH4bhlEoxwtU/NiLeyEWXGkMgEnvFUC8R9h5wXWKwySteqbo+CyHb+tOmpcwVWgauQJANtPXurjnzmteT7fG8H56G+MdXXfVMVI9MoAKllnwFIC89f1HGKRZfmNRryAPX7vhQI0GUGhdmDWFS/gqjRLaODSls3h7GLBIQgtcBeBKyCWXcxWHGlGbD9Un08BLdOn2gerNe/a1mBJ2lvOFcVv28GYJ9rWOsKYsF2OQer8uErKK/LIPGCWyl7CSoWoUHDoFSQZeTy2DEdfJYav8xtAsPPUsakelJ6UlKHFwsUDCTIzbr2K1RXRnLs8amg9LEEGXFBS2wzOcAnEVhIS4mdn/oWwQ5hvQvGUBRGcwUxTFomk=
  smithi107.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDPQXZJ/86l41E+NV+WFXevv3+ZTqGuZ/8kIMmKlcIisg1Eyoz1FacEkmNWEg5gzN4ZF2WwxoyaFCG9LkwZ2hih7NVAF5LmSouFPlKq+15A0sXQe5XUbR6XPR6Tg/im5Sdmt/dEdP40RO9NknYXfLlTwAwDTrFrkAxwwRGgWDmos55xIbel3YMhmfTs8Fc+ng/F1aw7ImwRpcIdbeCD99Eqopj1lOlL7dnxLR5JLWLmG1VLR9D21OEPvYavlFL2bJGgVt+C8HVMghpz0A1005iWyzxPIHAgs1H76q4qYC35JZjT+LTWcs0DRcp42uU5yN2owdMRX7mK3aUYihq55Au3IDYpuQZ60FTgWp2beCYiKjUNSDrd25MGzlzjq+hJ43So0DV4HsWABq+S25NAO591LQ5iWEaPJw8dp0AyIG4lhv6hQfjNf03cNfu0mVBuVPfkdzIJYpz4preipuxP64SmzcMkGq/dnh5zgS3KnhsfM2B7ITmlvujClQ459y/NtFs=
# Each task is a single-key mapping; null means "run with default config".
tasks:
  - internal.buildpackages_prep: null
  - internal.lock_machines: null
  - internal.save_config: null
  - internal.check_lock: null
  - internal.add_remotes: null
  - console_log: null
  - internal.connect: null
  - internal.push_inventory: null
  - internal.serialize_remote_roles: null
  - internal.check_conflict: null
  - internal.check_ceph_data: null
  - internal.vm_setup: null
  - kernel: null
  - internal.base: null
  - internal.archive_upload: null
  - internal.archive: null
  - internal.coredump: null
  - internal.sudo: null
  - internal.syslog: null
  - internal.timer: null
  - pcp: null
  - selinux: null
  - ansible.cephlab: null
  - clock: null
  - cephadm: null
  - cephadm.shell:
      host.a:
        - ceph orch status
        - ceph orch ps
        - ceph orch ls
        - ceph orch host ls
        - ceph orch device ls
teuthology_branch: master
verbose: true
pcp_grafana_url: null
priority: null
user: null
queue: null
# Timestamps quoted so loaders keep them as strings rather than datetime
# objects (YAML 1.1 resolves space-separated date-times as timestamps).
posted: '2020-06-15 18:30:44'
started: '2020-06-15 22:29:41'
updated: '2020-06-15 22:39:40'
status_class: danger
# Quoted: digits-and-colons would parse as sexagesimal integers in YAML 1.1.
runtime: '0:09:59'
wait_time: '0:06:19'