log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2016-07-18_23:09:02-teuthology-master---basic-vps/322330/teuthology.log
archive_path: /var/lib/teuthworker/archive/teuthology-2016-07-18_23:09:02-teuthology-master---basic-vps/322330
description: teuthology/multi-cluster/{all/ceph.yaml fs/xfs.yaml}
duration:
email: ceph-qa@ceph.com
failure_reason:
flavor:
# quoted so the all-digit job id stays a string, not an int
job_id: '322330'
kernel:
last_in_suite: false
machine_type: vps
name: teuthology-2016-07-18_23:09:02-teuthology-master---basic-vps
nuke_on_error: true
os_type:
os_version:
# NOTE(review): nesting below was reconstructed from a flattened dump using the
# conventional teuthology info.yaml layout — verify against the original archive.
overrides:
  s3tests:
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon:
        osd default pool size: 2
        debug mon: 1
        debug paxos: 20
        debug ms: 20
  rgw:
    default_idle_timeout: 1200
  ceph-fuse:
    client.0:
      mount_wait: 60
      mount_timeout: 120
  workunit:
    sha1: d1f681a2741ac805d1a21087023147dddd218940
  ceph:
    log-whitelist:
    fs: xfs
    conf:
      global:
        osd heartbeat grace: 100
        mon lease ack timeout: 25
        mon lease: 15
      mon:
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug osd: 25
        debug filestore: 20
        debug journal: 20
        debug ms: 1
        osd sloppy crc: true
    sha1: d1f681a2741ac805d1a21087023147dddd218940
  install:
    ceph:
      sha1: d1f681a2741ac805d1a21087023147dddd218940
  admin_socket:
owner: scheduled_teuthology@teuthology
pid:
roles:
- ['ceph.mon.a', 'ceph.mon.b', 'backup.osd.0', 'backup.osd.1', 'backup.osd.2', 'backup.client.0']
- ['backup.mon.a', 'ceph.osd.0', 'ceph.osd.1', 'ceph.osd.2', 'ceph.client.0', 'client.1', 'osd.3']
sentry_event:
status: dead
success:
branch: master
seed:
sha1: d1f681a2741ac805d1a21087023147dddd218940
subset:
suite:
suite_branch: master
suite_path:
suite_relpath:
suite_repo:
suite_sha1:
targets:
  ubuntu@vpm177.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCjzuftSsxBdDYT4k8g1bx/s8xgNWAYcszMc+D/wBdkz0jU/umRehTSZxYkaabBDEWHEURGNCDipup5giWXcUxRt1a//JTlPNao2LjndNB6XblWqYrDjOJpF8Djqh+O6hV5+KNfrM0/WgjOaFst1IpFQbtjJZJttKkzqfAVnWjF/Lg3pes3K3YQjo/7DXgDvyqe77XLgQ0G83yvyFKCHNbRu7V7LjD0mIGo5sDA+zmEmyCnJdD1qbUsc6EhTfQbZbdfS6U59m00nG4KR08PeydIKkzDYpjv7LeFvJ583XgTCzFUIxz71jaArH6VZy5bA/1bmwCgtBkhC3gulWWYNqsh
  ubuntu@vpm137.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDEqjXcxq5E7vpbTBFeHOwXmsxM0rxlDEJk4qSh4YJCt27rPcc3CxvA/2p7md0uK+xGL8pLrqJKXviyu6s5GTBmN4uzica5pacJiTl2lr9EXOx3EDtBwulFHxi5rPQTMTY0+l3lnLQEaMWFImbZOw+GsKl3gc5HDG7moQS9Buob1gbR4lgc/ncb7MMBbo9bwWBK1giDGNzizSaVAOkr0DJnLFMCuBqSu//YrGI1niDNcZDz0FtEnhx6AGS05gM/Oxfe8G6nc0zJjAk2uIenDun5Diyjl7DAOFZ00vbcxgio4e0EI9X2TdHqUehmTz7cAg+MJ1IsXwoC9BuILQ45+iDB
# NOTE(review): any per-task arguments were lost in the flattened dump; each
# entry below is a single-key mapping with a null value, as teuthology accepts.
tasks:
- internal.buildpackages_prep:
- internal.lock_machines:
- internal.save_config:
- internal.check_lock:
- internal.add_remotes:
- internal.connect:
- internal.push_inventory:
- internal.serialize_remote_roles:
- internal.check_conflict:
- internal.check_ceph_data:
- internal.vm_setup:
- internal.base:
- internal.archive_upload:
- internal.archive:
- internal.coredump:
- internal.sudo:
- internal.syslog:
- internal.timer:
- pcp:
- selinux:
- ansible.cephlab:
- clock.check:
- install:
- ceph:
- ceph:
- workunit:
    clients:
      ceph.client.0:
      backup.client.0:
teuthology_branch: master
verbose: true
pcp_grafana_url:
priority:
user:
queue:
# timestamps and runtime quoted: unquoted they hit YAML 1.1 implicit typing
# (datetime for the dates, sexagesimal integer for 12:02:25)
posted: '2016-07-19 06:09:39'
started: '2016-07-19 06:48:18'
updated: '2016-07-19 18:50:43'
status_class: danger
runtime: '12:02:25'