log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2019-06-16_05:30:03-upgrade:luminous-p2p-luminous-distro-basic-smithi/4039917/teuthology.log
archive_path: /home/teuthworker/archive/teuthology-2019-06-16_05:30:03-upgrade:luminous-p2p-luminous-distro-basic-smithi/4039917
description: upgrade:luminous-p2p/luminous-p2p-parallel/{point-to-point-upgrade.yaml supported/ubuntu_latest.yaml}
duration: '2:56:44'
email: ceph-qa@lists.ceph.com
failure_reason:
flavor: basic
job_id: 4039917
kernel:
last_in_suite: False
machine_type: smithi
name: teuthology-2019-06-16_05:30:03-upgrade:luminous-p2p-luminous-distro-basic-smithi
nuke_on_error: True
os_type: ubuntu
os_version: '16.04'
overrides:
  ceph:
    log-whitelist:
    - \(MDS_ALL_DOWN\)
    - \(MDS_UP_LESS_THAN_MAX\)
    - reached quota
    - scrub
    - osd_map_max_advance
    - wrongly marked
    - FS_DEGRADED
    - POOL_APP_NOT_ENABLED
    - CACHE_POOL_NO_HIT_SET
    - POOL_FULL
    - SMALLER_PG
    - pool\(s\) full
    - OSD_DOWN
    - missing hit_sets
    - CACHE_POOL_NEAR_FULL
    - PG_AVAILABILITY
    - PG_DEGRADED
    - application not enabled
    - overall HEALTH_
    fs: xfs
    conf:
      client:
        rgw_crypt_require_ssl: False
        rgw crypt s3 kms encryption keys: testkey-1=YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo= testkey-2=aWIKTWFrZWZpbGUKbWFuCm91dApzcmMKVGVzdGluZwo=
      mon:
        mon debug unsafe allow tier with nonempty snaps: True
        mon warn on pool no app: False
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug ms: 1
        debug journal: 20
        osd_class_load_list: cephfs hello journal lock log numops rbd refcount replica_log rgw sdk statelog timeindex user version
        debug osd: 25
        osd_class_default_list: cephfs hello journal lock log numops rbd refcount replica_log rgw sdk statelog timeindex user version
        osd map max advance: 1000
        debug filestore: 20
    sha1: da246edbc62460c349adb76877b9ca1f6611b9b6
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon:
  workunit:
    sha1: da246edbc62460c349adb76877b9ca1f6611b9b6
    branch: luminous
  install:
    ceph:
      sha1: da246edbc62460c349adb76877b9ca1f6611b9b6
  admin_socket:
owner: scheduled_teuthology@teuthology
pid:
roles:
- ['mon.a', 'mds.a', 'osd.0', 'osd.1', 'osd.2', 'mgr.x']
- ['mon.b', 'mon.c', 'osd.3', 'osd.4', 'osd.5', 'client.0']
- ['client.1']
sentry_event:
status: pass
success: True
branch: luminous
seed:
sha1: da246edbc62460c349adb76877b9ca1f6611b9b6
subset:
suite:
suite_branch: luminous
suite_path:
suite_relpath:
suite_repo:
suite_sha1: da246edbc62460c349adb76877b9ca1f6611b9b6
targets:
  smithi181.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDIZItZ6zl9fXHoi9KZ+qDoszq5E5dx51A+fi1njQQYFksH8sw45SsNQ4QuT5NdiHc9s7jGZoHRLv9sFtSwNYMxLvZfPf5ESoES6nM+wImv0M5Uc/SDksw5RaWBUCnJ9tHCit6/MLtC/skYjxtAOZ2P2nPHGjIsn8Kemjf/wIphu/A/tg/ekhiZ4lCmDcdxz9+XkuM435JqMVPCAFuP2b/HtPxUud2CARdTRsUsUPcMSXYfGsPKji7rxifAJEsXn8TOEhNRVeoETkY+M2ENqVomIwgjKFQXuzSSkJWcB7dCLHf1bufAQOAzFb8R8HwCUGBbOItNhot6OFTu8wmONcn/
  smithi178.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDc+KF9FRmZJmVbWq6gsGp74qA5hVdh0S2Uw8qMh+HslXhLcLj0m6Vyag+8XLjnHZ1gaTRMTUfd5MdCneH7JZ1G3HuAuWieMi/cNUgzwjzQYD+18BC9waY439iBuvTSmmRLh8IO0MWsxQWM7NZR5DIqlJDeu6W6Qw7gQrJD6WX/83NeL7eS/BE7cO+BKa/4h9xT+pcZpVWUkFoz4Y7z7efJTNTLZXjaBP6QVxbcA8CunC/5CY7BR0b+Era5XYuBdH9727i6M7R6POLqVa203zb0Fy//lKatXuvECBBC0qWgZ5CzInjJ3uZju3VvIYbLXk+EG+FekTFggp3AlLHmu4ox
  smithi087.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQClz70YRjd9ULVOMbQvfXg5QvydSG9tGQMfJenKWiS2XKC5QzswgEaH14LhrkNPrrCUCzOzeV6fNkMFGRFN/xnknwyB4cDwHIziDNnYheiZzcyNR15jCsQof0Ak9awrV9dhduGt9hS9DiKZ8uL7HFFmUhlT5b7Ukdzfqerfbss7ZhV+NNh3P6fG3SDRaDoggt5p7kMuO53AZsv9JaaE35rY2ehwwFcV8Z+oQ6WGlZzVwhOqlI+xC9LQTV+h1iSIx6RlErsU8Bxz357wsmIohKiiTUQdoIyRyjC1dahDU6SbEXwEkQlGlPd9XXw/BwqGa5rqQotYJDoUiTvt7u3nJ+5j
tasks:
- internal.buildpackages_prep:
- internal.lock_machines:
- internal.save_config:
- internal.check_lock:
- internal.add_remotes:
- console_log:
- internal.connect:
- internal.push_inventory:
- internal.serialize_remote_roles:
- internal.check_conflict:
- internal.check_ceph_data:
- internal.vm_setup:
- kernel:
- internal.base:
- internal.archive_upload:
- internal.archive:
- internal.coredump:
- internal.sudo:
- internal.syslog:
- internal.timer:
- pcp:
- selinux:
- ansible.cephlab:
- clock:
- print: '**** v12.2.2 about to install'
- install:
    tag: v12.2.2
    sha1: da246edbc62460c349adb76877b9ca1f6611b9b6
- print: '**** done v12.2.2 install'
- ceph:
    fs: xfs
    sha1: da246edbc62460c349adb76877b9ca1f6611b9b6
    cluster: ceph
    log-whitelist:
    - \(MDS_ALL_DOWN\)
    - \(MDS_UP_LESS_THAN_MAX\)
    - reached quota
    - scrub
    - osd_map_max_advance
    - wrongly marked
    - FS_DEGRADED
    - POOL_APP_NOT_ENABLED
    - CACHE_POOL_NO_HIT_SET
    - POOL_FULL
    - SMALLER_PG
    - pool\(s\) full
    - OSD_DOWN
    - missing hit_sets
    - CACHE_POOL_NEAR_FULL
    - PG_AVAILABILITY
    - PG_DEGRADED
    - application not enabled
    - overall HEALTH_
    conf:
      client:
        rgw_crypt_require_ssl: False
        rgw crypt s3 kms encryption keys: testkey-1=YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo= testkey-2=aWIKTWFrZWZpbGUKbWFuCm91dApzcmMKVGVzdGluZwo=
      mon:
        mon debug unsafe allow tier with nonempty snaps: True
        mon warn on pool no app: False
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug ms: 1
        debug journal: 20
        osd_class_load_list: cephfs hello journal lock log numops rbd refcount replica_log rgw sdk statelog timeindex user version
        debug osd: 25
        osd_class_default_list: cephfs hello journal lock log numops rbd refcount replica_log rgw sdk statelog timeindex user version
        osd map max advance: 1000
        debug filestore: 20
    add_osds_to_crush: True
- print: '**** done ceph xfs'
- sequential:
- print: '**** done workload v12.2.2'
- install.upgrade:
    mon.a:
      project: ceph
      tag: v12.2.5
    mon.b:
      project: ceph
      tag: v12.2.5
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous v12.2.5'
- install.upgrade:
    mon.a:
      project: ceph
      tag: v12.2.7
    mon.b:
      project: ceph
      tag: v12.2.7
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous v12.2.7'
- install.upgrade:
    mon.a:
      project: ceph
      tag: v12.2.8
    mon.b:
      project: ceph
      tag: v12.2.8
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous v12.2.8'
- install.upgrade:
    mon.a:
      project: ceph
      tag: v12.2.9
    mon.b:
      project: ceph
      tag: v12.2.9
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous v12.2.9'
- install.upgrade:
    mon.a:
      project: ceph
      tag: v12.2.10
    mon.b:
      project: ceph
      tag: v12.2.10
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous v12.2.10'
- install.upgrade:
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous branch'
teuthology_branch: master
verbose: True
pcp_grafana_url:
priority:
user:
queue:
posted: '2019-06-16 05:30:38'
started: '2019-06-19 10:02:40'
updated: '2019-06-19 13:46:43'
status_class: success
runtime: '3:44:03'
wait_time: '0:47:19'