---
# Teuthology job record 433907 (suite: teuthology, branch: jewel, run on smithi).
# NOTE(review): the source file had every mapping value on its own un-indented
# line plus spurious leading "- " markers, which is not parseable YAML; this is
# the reconstructed flat job mapping in source key order. Keys that carried no
# value in the source are written as an explicit `null` — confirm against the
# canonical paddles/pulpito record for this job.
log_href: http://qa-proxy.ceph.com/teuthology/zack-2016-09-23_16:16:49-teuthology-jewel-distro-basic-smithi/433907/teuthology.log
archive_path: /home/teuthworker/archive/zack-2016-09-23_16:16:49-teuthology-jewel-distro-basic-smithi/433907
description: teuthology/multi-cluster/{all/ceph.yaml fs/xfs.yaml}
# Durations and timestamps below are quoted defensively so no YAML 1.1 parser
# retypes them (sexagesimal ints / datetime objects); consumers expect strings.
duration: '0:09:11'
email: zack@redhat.com
failure_reason: null
flavor: basic
# All-digit ID quoted so it stays a string rather than becoming an integer.
job_id: '433907'
kernel: null
last_in_suite: false
machine_type: smithi
name: zack-2016-09-23_16:16:49-teuthology-jewel-distro-basic-smithi
nuke_on_error: true
os_type: null
os_version: null
overrides:
  ceph:
    log-whitelist: null
    fs: xfs
    conf:
      mon:
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug osd: 25
        debug filestore: 20
        debug journal: 20
        debug ms: 1
        osd sloppy crc: true
    sha1: ecc23778eb545d8dd55e2e4735b53cc93f92e65b
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon:
        osd default pool size: 2
        debug mon: 1
        debug paxos: 20
        debug ms: 20
  workunit:
    sha1: ecc23778eb545d8dd55e2e4735b53cc93f92e65b
  install:
    ceph:
      sha1: ecc23778eb545d8dd55e2e4735b53cc93f92e65b
  # NOTE(review): placing admin_socket under overrides follows the usual
  # teuthology layout; the flattened source does not show its nesting — confirm.
  admin_socket: null
owner: scheduled_zack@zwork.local
pid: null
roles:
  - ['ceph.mon.a', 'ceph.mon.b', 'backup.osd.0', 'backup.osd.1', 'backup.osd.2', 'backup.client.0']
  - ['backup.mon.a', 'ceph.osd.0', 'ceph.osd.1', 'ceph.osd.2', 'ceph.client.0', 'client.1', 'osd.3']
sentry_event: null
status: pass
success: true
branch: jewel
seed: null
sha1: ecc23778eb545d8dd55e2e4735b53cc93f92e65b
subset: null
suite: null
suite_branch: jewel
suite_path: null
suite_relpath: null
suite_repo: null
suite_sha1: 9d2af1dc4d9c812774170824b71fbf4bd824e91d
targets:
  ubuntu@smithi103.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDCHuOoreIqynCn5atBFAeO0VIFk3C0Vah4pR0Nct0FfQa8gpshd2kWv+TVrVcTAz+PNQavFDeX2q7Rk7r3ofYp5oyd0OH00NOsUjiXwRztw5BXCosFsKn2WVIKzNCXQ5ceSKh7NuD58yRnkUXo5gbuSAf6zS07anyZPdWYS2UnIiAOnVTeaGrOpIKvyo+HzJFi0ubJ3vZFesWrELKSgZmiaDtCVkkv3z+5xWi3DmEsLFxyM5CQ8pTNDnjgRhi5Ylm3z13Y7V4WoKleEj0HLoluzF6hwBO26wg5BQBEEGGYq54KRbtWeCMxfLe4NzmzEveNGuJFkoGNUqHDhjNDJYW3
  ubuntu@smithi056.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCxNSB/Pg0q32Pi2E5u2fpshLRT4N6RXGiOV1XnaY23AZOASrTZwjB3FO3N4U+DHzBK0vJ/yDhs7IkNtzyreN55hmMZBGIBItLs7gRQw5ozrLt/h+9YoeSDJ6/nr3YbZlp7Ffd7d5gHQPhHp7GaJGyb3QCjUZfZ/YK+vdcr+ZS6Vl8TCK7pslFF2biSz8XhpNzvD8WTrbe9JVQQlJM/T0AaQR05A/e1UXA1bdfwBlxRM5aCk1Od+Mzj/oo40PytOEZhXdE8qspYH1ivJxP0bJTQi0Igu8CxSIRKDtJGjLwrLkMu6ImHRnuEK1BykFJgUrHTRIQN+tDo/BT/qQwagpe7
# tasks is an ordered list of single-key maps; the duplicate "ceph" entries are
# distinct list items (two task invocations), not duplicate mapping keys.
tasks:
  - internal.buildpackages_prep: null
  - internal.lock_machines: null
  - internal.save_config: null
  - internal.check_lock: null
  - internal.add_remotes: null
  - console_log: null
  - internal.connect: null
  - internal.push_inventory: null
  - internal.serialize_remote_roles: null
  - internal.check_conflict: null
  - internal.check_ceph_data: null
  - internal.vm_setup: null
  - kernel: null
  - internal.base: null
  - internal.archive_upload: null
  - internal.archive: null
  - internal.coredump: null
  - internal.sudo: null
  - internal.syslog: null
  - internal.timer: null
  - pcp: null
  - selinux: null
  - ansible.cephlab: null
  - clock.check: null
  - install: null
  - ceph:
      cluster: backup
      log-whitelist: null
      conf:
        mon:
          debug mon: 20
          debug paxos: 20
          debug ms: 1
        osd:
          debug osd: 25
          debug filestore: 20
          debug journal: 20
          debug ms: 1
          osd sloppy crc: true
      fs: xfs
      sha1: ecc23778eb545d8dd55e2e4735b53cc93f92e65b
  - ceph: null
  - workunit:
      clients:
        ceph.client.0: null
        backup.client.0: null
      sha1: ecc23778eb545d8dd55e2e4735b53cc93f92e65b
teuthology_branch: wip-shaman-2
verbose: true
# Quoted: the URL contains '#' and percent-encoded characters.
pcp_grafana_url: 'http://pcp.front.sepia.ceph.com:44323/grafana/index.html#/dashboard/script/index.js?time_to=2016-09-23T22%3A27%3A18&time_from=2016-09-23T22%3A18%3A48&hosts=smithi103%2Csmithi056'
priority: null
user: null
queue: null
posted: '2016-09-23 22:17:38'
started: '2016-09-23 22:18:31'
updated: '2016-09-23 22:28:30'
status_class: success
runtime: '0:09:59'
wait_time: '0:00:48'