---
log_href: http://qa-proxy.ceph.com/teuthology/zack-2016-09-20_10:22:33-teuthology-master-distro-basic-smithi/425958/teuthology.log
archive_path: /home/teuthworker/archive/zack-2016-09-20_10:22:33-teuthology-master-distro-basic-smithi/425958
description: teuthology/multi-cluster/{all/ceph.yaml fs/xfs.yaml}
# Quoted: bare 0:12:22 is a sexagesimal integer under YAML 1.1 parsers.
duration: '0:12:22'
email: zack@redhat.com
failure_reason: null
flavor: basic
# Quoted so the all-digit job id stays a string, not an int.
job_id: '425958'
kernel: null
last_in_suite: false
machine_type: smithi
name: zack-2016-09-20_10:22:33-teuthology-master-distro-basic-smithi
nuke_on_error: true
os_type: centos
os_version: null
overrides:
  ceph:
    log-whitelist: null
    fs: xfs
    conf:
      mon:
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug osd: 25
        debug filestore: 20
        debug journal: 20
        debug ms: 1
        osd sloppy crc: true
    sha1: c76824f86f4c2c7293261442c19e0bd01aaa3d3d
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon:
        osd default pool size: 2
        debug mon: 1
        debug paxos: 20
        debug ms: 20
  workunit:
    sha1: c76824f86f4c2c7293261442c19e0bd01aaa3d3d
  install:
    ceph:
      sha1: c76824f86f4c2c7293261442c19e0bd01aaa3d3d
  admin_socket: null
owner: scheduled_zack@zwork.local
pid: null
roles:
- ['ceph.mon.a', 'ceph.mon.b', 'backup.osd.0', 'backup.osd.1', 'backup.osd.2', 'backup.client.0']
- ['backup.mon.a', 'ceph.osd.0', 'ceph.osd.1', 'ceph.osd.2', 'ceph.client.0', 'client.1', 'osd.3']
sentry_event: null
status: pass
success: true
branch: master
seed: null
sha1: c76824f86f4c2c7293261442c19e0bd01aaa3d3d
subset: null
suite: null
suite_branch: master
suite_path: null
suite_relpath: null
suite_repo: null
suite_sha1: null
targets:
  ubuntu@smithi095.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCsdDNEVxRmCKqca0lGpgIlXg8CRW5QgT9xgH37GWeGdKdEH40GhNK6e4I0e/OkaJ28OuWh23vNhDPaAM/3pWIYysPRa5nhJiXLJwg4Q0wVdtNwqf34I+nuCjm+v9fTaaFOhHUqARdDd9o9WH2qTLK8ysl9e2gjtGPldOGVyq/CZXtRB9OYX8LlmUMhpfPa8DsPk3iuMGoc+NaQv5iN66CJW4Fy2gpzx0UvHs2Nb1f18GUYFXDoDsRL8qEDCZ2E4uAik7eG7i30rX2VHTMCmHeOx/CUefO34S5XnZu/D5VJwHfRWgXEPZG5Xo2VAJpo2fqejv37fyqWc3YKFjDMt1P7
  ubuntu@smithi074.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDdb5FCPPwEFkOCaDEKox0DHk+oFhiKvq4WQzcp2T5LfEQmZsPzEb7HQ2XS3lDGrMhUX/5UNggl8RkGC1BUuMCpMA5bHbiucMPPihvj/Uh12pJhSsFYWWg1RBVJJnLBPaWEKLa4shvTkdHbjf7C7yM6Bw9CXLv8DpZMPQqZvfkDwBkOgFihmx36spD/2QCFMo5bLd6fqzOYF7hLgoTw5UeEC48hRf9k5n7mnEBX3ZwgKKKf7OP7IvHU5euPTQu/VMRa0odM7/ElmGDWCzJOnotpGtZKP5sfIiD0Aq8Tu0DJCcDSX834ebAdJnapuFhNMrLOm2KkK9lm4H7MwrftkdH9
tasks:
- internal.buildpackages_prep: null
- internal.lock_machines: null
- internal.save_config: null
- internal.check_lock: null
- internal.add_remotes: null
- console_log: null
- internal.connect: null
- internal.push_inventory: null
- internal.serialize_remote_roles: null
- internal.check_conflict: null
- internal.check_ceph_data: null
- internal.vm_setup: null
- kernel: null
- internal.base: null
- internal.archive_upload: null
- internal.archive: null
- internal.coredump: null
- internal.sudo: null
- internal.syslog: null
- internal.timer: null
- pcp: null
- selinux: null
- ansible.cephlab: null
- clock.check: null
- install: null
- ceph:
    cluster: backup
    log-whitelist: null
    conf:
      mon:
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug osd: 25
        debug filestore: 20
        debug journal: 20
        debug ms: 1
        osd sloppy crc: true
    fs: xfs
    sha1: c76824f86f4c2c7293261442c19e0bd01aaa3d3d
- ceph: null
- workunit:
    clients:
      ceph.client.0: null
      backup.client.0: null
    sha1: c76824f86f4c2c7293261442c19e0bd01aaa3d3d
teuthology_branch: wip-nodeless
verbose: true
pcp_grafana_url: http://pcp.front.sepia.ceph.com:44323/grafana/index.html#/dashboard/script/index.js?time_to=2016-09-20T16%3A34%3A28&time_from=2016-09-20T16%3A23%3A27&hosts=smithi095%2Csmithi074
priority: null
user: null
queue: null
# Timestamps quoted so generic YAML loaders keep them as strings rather
# than resolving them to native datetime objects.
posted: '2016-09-20 16:22:57'
started: '2016-09-20 16:23:04'
updated: '2016-09-20 16:37:04'
status_class: success
# Quoted: H:MM:SS durations are sexagesimal integers under YAML 1.1.
runtime: '0:14:00'
wait_time: '0:01:38'