---
# Teuthology job record 267116 — rados/hammer suite, scheduled 2016-06-20,
# run on vps machines. Reconstructed into valid YAML: the original dump had
# every key rendered as a dedented "- key:" line with its value flush-left
# on the following line, which does not parse as the intended mapping.
log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2016-06-20_03:00:02-rados-hammer-distro-basic-vps/267116/teuthology.log
archive_path: /var/lib/teuthworker/archive/teuthology-2016-06-20_03:00:02-rados-hammer-distro-basic-vps/267116
description: rados/basic/{clusters/fixed-2.yaml fs/btrfs.yaml msgr-failures/few.yaml tasks/repair_test.yaml}
# Quoted: digits-and-colons values parse as sexagesimal integers in YAML 1.1.
duration: '0:27:31'
email: ceph-qa@ceph.com
failure_reason: null
flavor: basic
# Quoted so the all-digit job id stays a string, not an int.
job_id: '267116'
kernel: null
last_in_suite: false
machine_type: vps
name: teuthology-2016-06-20_03:00:02-rados-hammer-distro-basic-vps
nuke_on_error: true
os_type: ubuntu
os_version: null
overrides:
  ceph:
    log-whitelist:
      - slow request
      - candidate had a read error
      - deep-scrub 0 missing, 1 inconsistent objects
      - deep-scrub 0 missing, 4 inconsistent objects
      - deep-scrub 1 errors
      - deep-scrub 4 errors
      # Entries beginning with "!" must be quoted ("!" is the YAML tag indicator).
      - '!= known omap_digest'
      - '!= known data_digest'
      - repair 0 missing, 1 inconsistent objects
      - repair 0 missing, 4 inconsistent objects
      - repair 1 errors, 1 fixed
      - repair 4 errors, 4 fixed
      - scrub 0 missing, 1 inconsistent
      - scrub 1 errors
      - size 1 != known size
      - '!= best guess'
    fs: btrfs
    conf:
      global:
        ms inject socket failures: 5000
      mon:
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug filestore: 20
        debug journal: 20
        debug osd: 25
        osd sloppy crc: true
        osd op thread timeout: 60
        filestore debug inject read err: true
        debug ms: 1
    sha1: 2e156d7ad4b9f4ffd6028df3a460b50b30c8b0d3
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon:
        osd default pool size: 2
        debug mon: 1
        debug paxos: 20
        debug ms: 20
    branch:
      dev-commit: 2e156d7ad4b9f4ffd6028df3a460b50b30c8b0d3
  workunit:
    sha1: 2e156d7ad4b9f4ffd6028df3a460b50b30c8b0d3
  install:
    ceph:
      sha1: 2e156d7ad4b9f4ffd6028df3a460b50b30c8b0d3
  # NOTE(review): the flat dump showed admin_socket with no value; in the
  # teuthology schema it sits under overrides while owner/pid are top-level.
  # Confirm against the original job record if this boundary matters.
  admin_socket: null
owner: scheduled_teuthology@teuthology
pid: null
roles:
  - ['mon.a', 'mon.c', 'osd.0', 'osd.1', 'osd.2', 'client.0']
  - ['mon.b', 'osd.3', 'osd.4', 'osd.5', 'client.1']
sentry_event: http://sentry.ceph.com/sepia/teuthology/?q=fc1174b218f945d4a6f741ce60219680
status: fail
success: false
branch: hammer
seed: null
sha1: 2e156d7ad4b9f4ffd6028df3a460b50b30c8b0d3
subset: null
suite: null
suite_branch: hammer
suite_path: null
suite_relpath: null
suite_repo: null
suite_sha1: null
targets:
  ubuntu@vpm161.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDd3GRkKzdHNKjXka6MbdH2Pq7XSyWn9DgoH2lWhR9mddbnXYe4URiWcUVhYGpLdVLiH7oqoKNGxpqwSu2LQ73RYkL2Bfadb6LV6hryheJ4Q0DzPPuZt8mwdz/PCNh7oK4+fDafYIOmD/jEaHRTCrhALgFNmKO30ZIkAbL74PvE6Ty3AJ2l1OnFwxvP3krvrruyimUKYqVqRgFDqVHtj6bSFSLA//xLOS0MGfK9OFjHunTtk9k+ZZy4KKFWtYX56pnKla7K5nEIFBpZbGxl8Q1M5rjx1/6JCBYKmb9od9oTItRC4hqy9Hn8+o95xKbFCcPbFsZO3/1/Ml+tFOk+RpHR
  ubuntu@vpm087.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDIZFh4MTnm2DjCgd1rDR53u4Cl1siYc4eRS4VF/8NYA27AhQ77+z+uc3fMQPzTCGCc9kHgRq730u15Y1YVkM3hElon4i9EnDlnM4fOpxsgEe99hIssaqsjBjWTf+ABfL2ug425ozX3dMy7fD/zQipUVUcw/fMGQ5xAivO/OMjw3L4xLuPV8gaOngivcOyTk0uDwPEmO+Ga7oQllz3rPzOBirv7NCUZKX4gs20uZptTjY4kj3e+jObi93k7kQ7VC07EiAs62cc722tsZdZediz4psvKomIetWXWENYlBah92fkcwQ4faBu691Pz4+UgmAn06FnKsWgOp5VxhPq3eKoN
# Each task is a single-key mapping; null means the task ran with no arguments.
tasks:
  - internal.buildpackages_prep: null
  - internal.lock_machines: null
  - internal.save_config: null
  - internal.check_lock: null
  - internal.add_remotes: null
  - internal.connect: null
  - internal.push_inventory: null
  - internal.serialize_remote_roles: null
  - internal.check_conflict: null
  - internal.check_ceph_data: null
  - internal.vm_setup: null
  - kernel: null
  - internal.base: null
  - internal.archive_upload: null
  - internal.archive: null
  - internal.coredump: null
  - internal.sudo: null
  - internal.syslog: null
  - internal.timer: null
  - pcp: null
  - selinux: null
  - ansible.cephlab: null
  - clock.check: null
  - install: null
  - ceph: null
  - repair_test: null
teuthology_branch: master
verbose: true
pcp_grafana_url: http://pcp.front.sepia.ceph.com:44323/grafana/index.html#/dashboard/script/index.js?time_to=2016-06-21T09%3A01%3A15&time_from=2016-06-21T08%3A33%3A56&hosts=vpm161%2Cvpm087
priority: null
user: null
queue: null
# Timestamps quoted so they load as strings, not datetime objects.
posted: '2016-06-20 10:02:59'
started: '2016-06-21 07:55:28'
updated: '2016-06-21 09:03:30'
status_class: danger
# Quoted for the same sexagesimal-integer reason as duration above.
runtime: '1:08:02'
wait_time: '0:40:31'