log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2018-08-08_20:18:57-upgrade:luminous-p2p-luminous-distro-basic-smithi/2883598/teuthology.log
archive_path: /home/teuthworker/archive/teuthology-2018-08-08_20:18:57-upgrade:luminous-p2p-luminous-distro-basic-smithi/2883598
description: upgrade:luminous-p2p/{point-to-point-upgrade.yaml supported/centos_latest.yaml}
duration: 1:16:17
email: ceph-qa@ceph.com
failure_reason:
flavor: basic
job_id: 2883598
kernel:
last_in_suite: False
machine_type: smithi
name: teuthology-2018-08-08_20:18:57-upgrade:luminous-p2p-luminous-distro-basic-smithi
nuke_on_error: True
os_type: centos
os_version: 7.4
overrides:
  ceph:
    log-whitelist:
    - \(MDS_ALL_DOWN\)
    - \(MDS_UP_LESS_THAN_MAX\)
    - reached quota
    - scrub
    - osd_map_max_advance
    - wrongly marked
    - FS_DEGRADED
    - POOL_APP_NOT_ENABLED
    - CACHE_POOL_NO_HIT_SET
    - POOL_FULL
    - SMALLER_PG
    - pool\(s\) full
    - OSD_DOWN
    - missing hit_sets
    - CACHE_POOL_NEAR_FULL
    - PG_AVAILABILITY
    - PG_DEGRADED
    - application not enabled
    - overall HEALTH_
    fs: xfs
    conf:
      client:
        rgw_crypt_require_ssl: False
        rgw crypt s3 kms encryption keys: testkey-1=YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo= testkey-2=aWIKTWFrZWZpbGUKbWFuCm91dApzcmMKVGVzdGluZwo=
      mon:
        mon debug unsafe allow tier with nonempty snaps: True
        mon warn on pool no app: False
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug ms: 1
        debug journal: 20
        osd_class_load_list: cephfs hello journal lock log numops rbd refcount replica_log rgw sdk statelog timeindex user version
        debug osd: 25
        osd_class_default_list: cephfs hello journal lock log numops rbd refcount replica_log rgw sdk statelog timeindex user version
        osd map max advance: 1000
        debug filestore: 20
    sha1: 038739ce44531168e90ae95857d8f2ef657665b3
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon:
  workunit:
    sha1: 038739ce44531168e90ae95857d8f2ef657665b3
    branch: luminous
  install:
    ceph:
      sha1: 038739ce44531168e90ae95857d8f2ef657665b3
  admin_socket:
owner: scheduled_teuthology@teuthology
pid:
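# Note: the overrides block above is not applied on its own; teuthology
# deep-merges each overrides.<task> fragment into every matching entry in the
# tasks list further down, which is why the same conf and log-whitelist
# reappear under the ceph task there. A minimal sketch of that merge, assuming
# teuthology's usual deep-merge behaviour and using two debug settings from
# this job for illustration:
#
#   overrides:
#     ceph:
#       conf:
#         mon:
#           debug mon: 20        # supplied by the override
#   tasks:
#   - ceph:
#       conf:
#         mon:
#           debug paxos: 20      # task-local; the effective conf has both keys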
roles:
- ['mon.a', 'mds.a', 'osd.0', 'osd.1', 'osd.2', 'mgr.x']
- ['mon.b', 'mon.c', 'osd.3', 'osd.4', 'osd.5', 'client.0']
- ['client.1']
sentry_event:
status: pass
success: True
branch: luminous
seed:
sha1: 038739ce44531168e90ae95857d8f2ef657665b3
subset:
suite:
suite_branch: luminous
suite_path:
suite_relpath:
suite_repo:
suite_sha1: 038739ce44531168e90ae95857d8f2ef657665b3
targets:
  smithi200.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCxN8wJJ+a9Rjezk9mVObUGOxcYlfTsd7Od/AkD8waMJbRR6WMGg3gnnHkqJXLGkmWMG4vwTQArSYi2Xv26dpiVV6yOW2mXTnZB/qEyTfDayfSWIPU8wVkUG+aqQ8qv0130EJY2T86+BZ+hWEcfYOs9YMJFfVedPmpaKwiwn/W7NF3Q0I9SDulQIALA/56fRnM1J+2p0xAQLQDfHrlx40/1i8+4bl85A0OFxn2OjCayanajHDiKW6EGGkuYqYiHa6nfSbRpLiXJOOqul81LtFI18GPYvSOQqjwbcYnXkHGIcwsY1AS1bsXlg1kDplhldQwl3Tc1sIKwd9E7w94L3lIh
  smithi068.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDgOLWbKBME0xrjiQrheJ3GRylMuG51E/5irm4ftYyr0cOGv0DHdBgBruV2TuY0DnSWuBtGat5aS49ErepBDAurtgkwU+2OmY2/333fCRaYV0QjHb6MyNzmKNEKJ1E24CCoztCj+AYN6y1BRJOvjg9vf/R2D21G9HEXXMHwsQb8ZOWuTx/TeT2i5Y3um24LhRXJAPfdYQBJvUOPQuKYEylaKGp0MocXQFqlLhTBnYYuAYkftIfPBuda1mTGIPpG16zPcDXjhLHuJIIDOIRlHy+gY87wuuCh5s7Z44aNWGXx6MyO8z7oyxMqi17OzWZpeuD8fC6iEjSsr+abfwH5in6H
  smithi066.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCrf8drtGRNiPOgVkvYNrqjDyinhl+Ok1FikxmHu6NdkteHE72p1r7UmJCgLD5/910UzNIxyksiTKJWkp98rRGyD5GcBNHt2sQCVgmUrTnF7q857V/9WlprTzoSNk8Y1Hk4tIdLq62vvl+TX/sdc5vNCnswLOuirM3xTY/Os0ojn7QFtT36bijnG2rl586SPN0+vt8t5oqJ4iI7bxwKuPuFfWzP+tTu6myPWxjG8IVgLUgncw1u7G3UvQQ3txdlmYJmv2z/2FogJ3qm7Z/cGFNy7huaitptHwhC3Gkh4lfMyuzoPoZLIYmmr6duwOlw+P5qdDwTXbAWydfbRK/i52Ot
tasks:
- internal.buildpackages_prep:
- internal.lock_machines:
- internal.save_config:
- internal.check_lock:
- internal.add_remotes:
- console_log:
- internal.connect:
- internal.push_inventory:
- internal.serialize_remote_roles:
- internal.check_conflict:
- internal.check_ceph_data:
- internal.vm_setup:
- kernel:
- internal.base:
- internal.archive_upload:
- internal.archive:
- internal.coredump:
- internal.sudo:
- internal.syslog:
- internal.timer:
- pcp:
- selinux:
- ansible.cephlab:
- clock:
- print: '**** v12.2.2 about to install'
- install:
    tag: v12.2.2
    sha1: 038739ce44531168e90ae95857d8f2ef657665b3
- print: '**** done v12.2.2 install'
- ceph:
    fs: xfs
    sha1: 038739ce44531168e90ae95857d8f2ef657665b3
    cluster: ceph
    log-whitelist:
    - \(MDS_ALL_DOWN\)
    - \(MDS_UP_LESS_THAN_MAX\)
    - reached quota
    - scrub
    - osd_map_max_advance
    - wrongly marked
    - FS_DEGRADED
    - POOL_APP_NOT_ENABLED
    - CACHE_POOL_NO_HIT_SET
    - POOL_FULL
    - SMALLER_PG
    - pool\(s\) full
    - OSD_DOWN
    - missing hit_sets
    - CACHE_POOL_NEAR_FULL
    - PG_AVAILABILITY
    - PG_DEGRADED
    - application not enabled
    - overall HEALTH_
    conf:
      client:
        rgw_crypt_require_ssl: False
        rgw crypt s3 kms encryption keys: testkey-1=YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo= testkey-2=aWIKTWFrZWZpbGUKbWFuCm91dApzcmMKVGVzdGluZwo=
      mon:
        mon debug unsafe allow tier with nonempty snaps: True
        mon warn on pool no app: False
        debug mon: 20
        debug paxos: 20
        debug ms: 1
      osd:
        debug ms: 1
        debug journal: 20
        osd_class_load_list: cephfs hello journal lock log numops rbd refcount replica_log rgw sdk statelog timeindex user version
        debug osd: 25
        osd_class_default_list: cephfs hello journal lock log numops rbd refcount replica_log rgw sdk statelog timeindex user version
        osd map max advance: 1000
        debug filestore: 20
    add_osds_to_crush: True
- print: '**** done ceph xfs'
- sequential:
  - print: '**** done workload v12.2.2'
- install.upgrade:
    mon.a:
      project: ceph
      tag: v12.2.5
    mon.b:
      project: ceph
      tag: v12.2.5
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous v12.2.5'
- install.upgrade:
    mon.a:
      project: ceph
      tag: v12.2.7
    mon.b:
      project: ceph
      tag: v12.2.7
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous v12.2.7'
- install.upgrade:
- parallel:
  - workload_luminous
  - upgrade-sequence_luminous
- print: '**** done parallel luminous branch'
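# Read end to end, the tasks above implement a point-to-point upgrade path:
# install v12.2.2, bring the cluster up on xfs, step through v12.2.5 and then
# v12.2.7, and finish on the luminous branch under test, re-running the
# luminous workload in parallel with each upgrade step. A condensed sketch of
# that skeleton (tags and task names taken from the tasks above, everything
# else elided):
#
#   tasks:
#   - install:
#       tag: v12.2.2                           # starting release
#   - ceph:
#       fs: xfs
#   - install.upgrade:                         # step 1: v12.2.2 -> v12.2.5
#       mon.a: {project: ceph, tag: v12.2.5}
#       mon.b: {project: ceph, tag: v12.2.5}
#   - parallel: [workload_luminous, upgrade-sequence_luminous]
#   - install.upgrade:                         # step 2: v12.2.5 -> v12.2.7
#       mon.a: {project: ceph, tag: v12.2.7}
#       mon.b: {project: ceph, tag: v12.2.7}
#   - parallel: [workload_luminous, upgrade-sequence_luminous]
#   - install.upgrade:                         # step 3: to the branch under test
#   - parallel: [workload_luminous, upgrade-sequence_luminous]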
teuthology_branch: master
verbose: True
pcp_grafana_url:
priority:
user:
queue:
posted: 2018-08-08 20:19:12
started: 2018-08-08 20:20:26
updated: 2018-08-08 21:48:26
status_class: success
runtime: 1:28:00
wait_time: 0:11:43