---
# Teuthology job record (reconstructed: the captured dump had lost all
# indentation and sprouted stray "- " sequence markers on every key).
# Ambiguous scalars are quoted so YAML 1.1 parsers do not retype them
# (H:MM:SS would parse as sexagesimal int, 16.04 as float, IDs as int).
log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2018-03-28_04:23:01-upgrade:jewel-x-luminous-distro-basic-smithi/2330039/teuthology.log
archive_path: /home/teuthworker/archive/teuthology-2018-03-28_04:23:01-upgrade:jewel-x-luminous-distro-basic-smithi/2330039
description: 'upgrade:jewel-x/ceph-deploy/{distros/ubuntu_latest.yaml jewel-luminous.yaml}'
duration: '0:43:57'
email: ceph-qa@ceph.com
# Empty in the captured record — job passed, so no failure reason.
failure_reason: null
flavor: null
job_id: '2330039'
kernel: null
last_in_suite: false
machine_type: ovh
name: 'teuthology-2018-03-28_04:23:01-upgrade:jewel-x-luminous-distro-basic-smithi'
nuke_on_error: true
os_type: ubuntu
os_version: '16.04'
# Nesting below is reconstructed from the standard teuthology overrides
# schema (overrides -> task -> conf -> daemon section -> option); the dump
# had flattened all indentation. NOTE(review): verify against the original
# run config.
overrides:
  ceph:
    # Empty in the captured record; normally a list of ignorable log
    # patterns — TODO confirm.
    log-whitelist: null
    conf:
      mon:
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug osd: 25
        debug filestore: 20
        debug journal: 20
        debug ms: 1
    sha1: 19e457355b99415a66565bbecd55d8deed159c99
  ceph-deploy:
    fs: xfs
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
        rbd default features: 5
      global:
        mon pg warn min per osd: 2
      osd:
        osd pool default size: 2
        osd objectstore: filestore
        osd sloppy crc: true
      # Present as a key with no options in the captured record.
      mon: null
  workunit:
    sha1: 19e457355b99415a66565bbecd55d8deed159c99
    branch: luminous
  install:
    ceph:
      sha1: 19e457355b99415a66565bbecd55d8deed159c99
  # Empty in the captured record — TODO confirm whether it carried a branch.
  admin_socket: null
owner: scheduled_teuthology@teuthology
pid: null
# One inner list of daemon roles per target host, in target order.
roles:
  - ['mon.a', 'mds.a', 'osd.0', 'osd.1', 'osd.2', 'mgr.x']
  - ['mon.b', 'mgr.y']
  - ['mon.c', 'osd.3', 'osd.4', 'osd.5']
  - ['osd.6', 'osd.7', 'osd.8', 'client.0']
sentry_event: null
status: pass
success: true
branch: luminous
seed: null
sha1: 19e457355b99415a66565bbecd55d8deed159c99
subset: null
suite: null
suite_branch: luminous
suite_path: null
suite_relpath: null
suite_repo: null
suite_sha1: 19e457355b99415a66565bbecd55d8deed159c99
# Hostname -> SSH host public key of each allocated machine.
targets:
  ovh039.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDJOXvsTyDDVwwshTzEgiXFKVJ7/6nQnjBVV1K+85A7w4/rl5ohvCo99MbDkoyRsFUcvoORvd7KhvqUkYxl/Sqfjdk7ZsQjahVcuJHrVyQ0eG0hRgrguJhpkc9gFi41kWOB3q7sg5iJ8mm0L2XhIBxHd+XNnmn7jLzgxXQiB7phrVrqd1ZQ4VVWjSWZFUxmYfs/Wpw91a/sjoPRJ+E13wUfjMv+o5unxnFTKN8UZvdQ+oT/QoS4OvNdHY1WjkNrUELGFnjihZBSgoG52GMe3wNDV/B7pamzNqRU+2zs+kCmwLIUYyj3NzGuYcBrGxTZ3eVBl1NwmPQgocazRIlz0l4T
  ovh020.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDKNbpQXXzENAhqOJle7MlPp+SsBarozFv6RGpsdlg+Y240gSU+00jRQKSFg2HeKQ4VDE5XlsUX7AEmGbk0UVWBAtzZe1I3SMPLgOEwxRQSzPztVMaaxkaYwFidVHeo92fzq3OF7VvUX7CqZA+sBTDXRqISgrGvLyuudD61Mxy8YUDt4UW9s8th8y7iWtx7ivar49iotCCBfAYHwDn6UShtL1HBrlM+FRdEv5fqf4pQ2Btrr4PB1QSAtFwXgnXixgJbeOk6FnbCnPi7NLzsdYhiGoYyyoOWAjPaUu6L/uYSjvz3bt2fpMlz8+kt5GUWq3IDk9aYEGdxNJS7Yj5RFMfx
  ovh080.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCk/7+CToTGCh+L2tRHpGHUbIqIsqyCMyNNJpUTimR1uUeWHMY8kXJKORQy/2FpK4+euTFsCITKjy/59e3jxWeVpvB/fWQt/ABi3t17CUcy7u1Z2Vjz+1CP+uNLUU5PBIsUmdUUKP4eE455nnG8N+yFtCAAnkTIjI4X2Ceo0HGK7nb5RPmoVbYCSMXC8afhqGV98H5Wqv1ElxxLln9r2rAzMdFoQCnDhvjS1LbmM8HjA+Twww1dpTk7FJiQk8e6GZ+68tGifw7yaNxw049YvQfRQeHWujPut+hgRZIMMUzVqE+jGVXLn9+07mJ1hBQBNOa5kLbNpKhL/1Y9omhoD2yj
  ovh024.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCsltbmKcIankcNo0y32idEG9xSzEOb9StfwY3R8VArysMvFrrcneZp4knnv7C7DBr4i1Gvry8lGqYAK+J9VIVPoW/1zCtUC/CudjtK9/HHy6qWARDTNntZx06LI0GT5Is/+vc2OJhk1snAA/8P5GoiUWJ33BHF4cQwkvWENDrhTvf4ZVw6+8NXUnFfv3Luj/WcpLwtoQIzWxtUbNxE1m3pT9PaNa6dBGEZKimXIJNh2qBIMj4gY8ICaqpGD7Kw6WS3cLnZVt+geRpmAjtgVlj61Viy1SSvFGdHLWhMTYjw6QJHnnmsaAHUbaYSWbEdwoL5k8nvjCVqrfvB9stOd0bN
# Ordered task list; each entry is a single-key mapping of task name to its
# arguments (null when the task takes none). Reconstructed from a dump that
# had split each "- task:" item across two lines. The quoted '****' banners
# must stay quoted: a plain scalar may not begin with '*' (alias sigil).
tasks:
  - internal.buildpackages_prep: null
  - internal.lock_machines: null
  - internal.save_config: null
  - internal.check_lock: null
  - internal.add_remotes: null
  - console_log: null
  - internal.connect: null
  - internal.push_inventory: null
  - internal.serialize_remote_roles: null
  - internal.check_conflict: null
  - internal.check_ceph_data: null
  - internal.vm_setup: null
  - kernel: null
  - internal.base: null
  - internal.archive_upload: null
  - internal.archive: null
  - internal.coredump: null
  - internal.sudo: null
  - internal.syslog: null
  - internal.timer: null
  - pcp: null
  - selinux: null
  - ansible.cephlab: null
  - clock: null
  - ssh-keys: null
  - print: '**** done ssh-keys'
  - ceph-deploy:
      skip-mgr: true
      fs: xfs
      branch: null
      conf:
        client:
          log file: /var/log/ceph/ceph-$name.$pid.log
          rbd default features: 5
        global:
          mon pg warn min per osd: 2
        osd:
          osd pool default size: 2
          osd objectstore: filestore
          osd sloppy crc: true
        mon: null
      # Quoted: 1.5.39-stable is a version string, not a number.
      ceph-deploy-branch: '1.5.39-stable'
  - print: '**** done initial ceph-deploy'
  - ceph-deploy.upgrade:
      setup-mgr-node: true
      # Empty in the captured record; normally lists the roles to upgrade —
      # TODO confirm.
      roles: null
      check-for-healthy: true
  - print: '**** done ceph-deploy upgrade'
  - exec:
      osd.0:
        - ceph osd require-osd-release luminous
        - ceph osd set-require-min-compat-client luminous
  - print: '**** done `ceph osd require-osd-release luminous`'
  - workunit:
      # Empty in the captured record; normally maps client roles to the
      # scripts to run — TODO confirm.
      clients: null
      branch: luminous
      sha1: 19e457355b99415a66565bbecd55d8deed159c99
  - print: '**** done kernel_untar_build.sh'
  - systemd: null
  - print: '**** done systemd'
  - workunit:
      clients: null
      branch: luminous
      sha1: 19e457355b99415a66565bbecd55d8deed159c99
  - print: '**** done rados/load-gen-mix.sh'
teuthology_branch: master
verbose: true
# Bare-empty values made explicit null (they already parsed as null).
pcp_grafana_url: null
priority: null
user: null
queue: null
# Timestamps quoted so they stay strings instead of parsing as datetimes;
# H:MM:SS durations quoted to dodge YAML 1.1 sexagesimal integers.
posted: '2018-03-28 04:23:56'
started: '2018-03-28 06:08:33'
updated: '2018-03-28 14:12:47'
status_class: success
runtime: '8:04:14'
wait_time: '7:20:17'