---
# Teuthology job record: nojha-2019-01-11 upgrade:jewel-x suite, job 3445008.
# Reconstructed from a formatting-mangled dump: keys had been emitted as
# null-valued "- key:" sequence items with values stranded on following
# lines and all nesting indentation lost. Structure below follows the
# standard teuthology job-info schema implied by the key names.
log_href: http://qa-proxy.ceph.com/teuthology/nojha-2019-01-11_02:50:48-upgrade:jewel-x-wip-36686-luminous-2019-01-10-distro-basic-smithi/3445008/teuthology.log
archive_path: /home/teuthworker/archive/nojha-2019-01-11_02:50:48-upgrade:jewel-x-wip-36686-luminous-2019-01-10-distro-basic-smithi/3445008
description: 'upgrade:jewel-x/ceph-deploy/{distros/ubuntu_latest.yaml jewel-luminous.yaml slow_requests.yaml}'
# Quoted: an unquoted H:MM:SS is a sexagesimal integer in YAML 1.1.
duration: '0:43:33'
email: nojha@redhat.com
failure_reason: null
flavor: null
# Quoted so the all-digit ID stays a string, not an int.
job_id: '3445008'
kernel: null
last_in_suite: false
machine_type: ovh
name: nojha-2019-01-11_02:50:48-upgrade:jewel-x-wip-36686-luminous-2019-01-10-distro-basic-smithi
nuke_on_error: true
os_type: ubuntu
# Quoted: 16.04 would otherwise parse as the float 16.04 (and 16.10 as 16.1).
os_version: '16.04'
overrides:
  ceph:
    log-whitelist:
      - '\(MDS_ALL_DOWN\)'
      - '\(MDS_UP_LESS_THAN_MAX\)'
      - slow request
    conf:
      mon:
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug osd: 25
        debug filestore: 20
        debug journal: 20
        debug ms: 1
    sha1: d1481470d05f341e49cc467314b2dfa477aa5520
  ceph-deploy:
    fs: xfs
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
        rbd default features: 5
      global:
        mon pg warn min per osd: 2
      osd:
        osd pool default size: 2
        osd objectstore: filestore
        osd sloppy crc: true
      # NOTE(review): the 'mon' section was empty in the captured data.
      mon: null
  workunit:
    sha1: f75db9b030f5f7a2eeba92948bfb098f521c89f6
    branch: wip-36686-luminous
  install:
    ceph:
      sha1: d1481470d05f341e49cc467314b2dfa477aa5520
  admin_socket:
    branch: wip-36686-luminous-2019-01-10
owner: scheduled_nojha@teuthology
pid: null
# Each role list maps to one target host, in order.
roles:
  - ['mon.a', 'mds.a', 'osd.0', 'osd.1', 'osd.2', 'mgr.x']
  - ['mon.b', 'mgr.y']
  - ['mon.c', 'osd.3', 'osd.4', 'osd.5']
  - ['osd.6', 'osd.7', 'osd.8', 'client.0']
sentry_event: null
status: pass
success: true
branch: wip-36686-luminous-2019-01-10
seed: null
sha1: d1481470d05f341e49cc467314b2dfa477aa5520
subset: null
suite: null
suite_branch: wip-36686-luminous
suite_path: null
suite_relpath: null
suite_repo: null
suite_sha1: f75db9b030f5f7a2eeba92948bfb098f521c89f6
# Host -> SSH public host key.
targets:
  ovh099.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDdfwVr9iHllfTYIDXq9eP/m4XeAo9/pSjZznP3GAhc17omxHufwejRYmkpPi9YFcB2kDJ5SSeuEtOTfnnixHoEzQYClYj9LArHeU4598C/lRr/mFuhI1m2fgzFX5DSZzo9CFKYFXBmuBmg2xs4K2TW7wmBasur/WQLCo0afTaIuXMNi8Ldl/ZDWn07jBUPgJ5CaRmeP/6iFOARnN4KDflXWGCs79L6kJcrMQN//qRx47/xErgnAGylpel6wDD2dR+C0pkd3SDs3AxuBE2fdXPqgIGOfixv/wLi/zMJHh6SklVLr9u04pIOXlMy8hYZlMZf4BM/DqpE3gZ0UzH5UeAb
  ovh012.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCrJm+O1ylm0wc7EoPkwdcLziINiJyx4JF+qiv28pP98JWIgSb7ApKY/R7hmcWinyWNCC3DugVoS8f/KkJkXonmiVEywad83uEBxc23xIPN8MNrASJiNm7Qq0Of2lFqPoEcdMBMM1qXqO160oeZIk0Vo/IsS7jMBVEgre204eUL3ZZykxq3CBO42si9/rrrmG1ISBnhwE+AoXFuDxYZwrPGZu9hB0L7EveY3xVPwyRljAka+pHODD4bB45+vgAaANrszlD2P9/olNlcVJsvgWjgleO3r1r25Z/DMF+tVlyVaHGqwzvbjorHYdkpGaN3oMW9LB4phwrxxGkUCejA3lAj
  ovh060.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCxHOTwDLlBreHPI7fMB6Ai0wPfp9bEmrOBZY0OfNU0Nnx292GSJ6tXYQ8eWcbzhdge3Km9O9rubPSMwbNcjQ1hjG+u3NQpuu7bZpuzYzK6J8r3/fam+GqU4DjE1tPRwgWOB6LScrHM1NJEZlZ5d6zH575tK16OHtrDdEnxMZOx9GF6imavp/zSHO+E+k9buvdsecGB2PV58S89nLX7CQr8MuKlncIR89Gq8S+GrxcXGTpzbRxU7hefP4R7rrqxZToC3Dpx7RziJrCMdsckO8f3qf0kEn+F9Q9jDGf7XkPQZZu+iJSDaRzbtHnCSYvifqWnZDuwJs45sJ0TjX6M4IP3
  ovh015.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCkk2wM1bpQnUe24eyZ/6ir2xzBmGQy2kZEewI5Dff/s4iQkhwX0pgdmx5cnbkBInc6sN8Olw4C6v+J/+8Uw9RfipvyXjOIrFCsK/WZiJz37xqBxia7QauSDIO3jmqXVD1Dv944OdaQEVSnp/GiuIiw0IwLwDObdzD4DmIUGS/qZ3PtoKxBzL6XQGQU8Erw5zhPkH7iB0PTj3ZHYdFVrfKkFxNBbgFnmLMYYkR2DKgwQTuFLKJ84UrOSHaDS42HXIBYQWp9Wob0dW9Tr4bxjBUGd/BafZnrJT7hOKQQQyxE6OhYLrIQ529zaWzjr+gfRgGVN0eNs4UhcXLHzMeIGsrF
# Ordered task list; each entry is a single-key mapping (task name -> config).
tasks:
  - internal.buildpackages_prep: null
  - internal.lock_machines: null
  - internal.save_config: null
  - internal.check_lock: null
  - internal.add_remotes: null
  - console_log: null
  - internal.connect: null
  - internal.push_inventory: null
  - internal.serialize_remote_roles: null
  - internal.check_conflict: null
  - internal.check_ceph_data: null
  - internal.vm_setup: null
  - kernel: null
  - internal.base: null
  - internal.archive_upload: null
  - internal.archive: null
  - internal.coredump: null
  - internal.sudo: null
  - internal.syslog: null
  - internal.timer: null
  - pcp: null
  - selinux: null
  - ansible.cephlab: null
  - clock: null
  - ssh-keys: null
  # '****' messages are quoted: '*' opens an alias node in a plain scalar.
  - print: '**** done ssh-keys'
  - ceph-deploy:
      skip-mgr: true
      fs: xfs
      branch: null
      conf:
        client:
          log file: /var/log/ceph/ceph-$name.$pid.log
          rbd default features: 5
        global:
          mon pg warn min per osd: 2
        osd:
          osd pool default size: 2
          osd objectstore: filestore
          osd sloppy crc: true
        # NOTE(review): the 'mon' section was empty in the captured data.
        mon: null
      ceph-deploy-branch: 1.5.39-stable
  - print: '**** done initial ceph-deploy'
  - ceph-deploy.upgrade:
      setup-mgr-node: true
      # NOTE(review): 'roles' had no value in the captured data — the role
      # list this upgrade step applied to appears lost; verify upstream.
      roles: null
      check-for-healthy: true
  - print: '**** done ceph-deploy upgrade'
  - exec:
      osd.0:
        - ceph osd require-osd-release luminous
        - ceph osd set-require-min-compat-client luminous
  - print: '**** done `ceph osd require-osd-release luminous`'
  - workunit:
      # NOTE(review): 'clients' had no value in the captured data — the
      # client -> script-list mapping (kernel_untar_build.sh, per the print
      # below) appears lost; verify against the original job.
      clients: null
      branch: wip-36686-luminous
      sha1: f75db9b030f5f7a2eeba92948bfb098f521c89f6
  - print: '**** done kernel_untar_build.sh'
  - systemd: null
  - print: '**** done systemd'
  - workunit:
      # NOTE(review): 'clients' had no value in the captured data — the
      # client -> script-list mapping (rados/load-gen-mix.sh, per the print
      # below) appears lost; verify against the original job.
      clients: null
      branch: wip-36686-luminous
      sha1: f75db9b030f5f7a2eeba92948bfb098f521c89f6
  - print: '**** done rados/load-gen-mix.sh'
teuthology_branch: master
verbose: true
pcp_grafana_url: null
priority: null
user: null
queue: null
# Timestamps quoted to stay strings rather than parser-dependent datetimes.
posted: '2019-01-11 02:51:32'
started: '2019-01-11 03:28:57'
updated: '2019-01-11 04:56:58'
status_class: success
# Durations quoted: unquoted H:MM:SS is a sexagesimal integer in YAML 1.1.
runtime: '1:28:01'
wait_time: '0:44:28'