- log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2019-07-18_01:25:03-upgrade:nautilus-p2p-nautilus-distro-basic-smithi/4125797/teuthology.log
- archive_path: /home/teuthworker/archive/teuthology-2019-07-18_01:25:03-upgrade:nautilus-p2p-nautilus-distro-basic-smithi/4125797
- description: upgrade:nautilus-p2p/nautilus-p2p-parallel/{point-to-point-upgrade.yaml supported-all-distro/rhel_7.yaml}
- duration: 2:21:33
- email: ceph-qa@ceph.io
- failure_reason:
- flavor: basic
- job_id: 4125797
- kernel:
- last_in_suite: False
- machine_type: smithi
- name: teuthology-2019-07-18_01:25:03-upgrade:nautilus-p2p-nautilus-distro-basic-smithi
- nuke_on_error: True
- os_type: rhel
- os_version: 7.6
- overrides:
  - ceph:
    - log-whitelist:
      - \(MDS_ALL_DOWN\)
      - \(MDS_UP_LESS_THAN_MAX\)
      - reached quota
      - scrub
      - osd_map_max_advance
      - wrongly marked
      - FS_DEGRADED
      - POOL_APP_NOT_ENABLED
      - CACHE_POOL_NO_HIT_SET
      - POOL_FULL
      - SMALLER_PG
      - pool\(s\) full
      - OSD_DOWN
      - missing hit_sets
      - CACHE_POOL_NEAR_FULL
      - PG_AVAILABILITY
      - PG_DEGRADED
      - application not enabled
      - cache pools at or near target size
      - filesystem is degraded
      - OBJECT_MISPLACED
      - failed to encode map
    - fs: xfs
    - conf:
      - client:
        - rgw_crypt_require_ssl: False
        - rgw crypt s3 kms encryption keys: testkey-1=YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo= testkey-2=aWIKTWFrZWZpbGUKbWFuCm91dApzcmMKVGVzdGluZwo=
      - global:
        - mon_warn_on_pool_no_app: False
      - osd:
        - debug ms: 1
        - debug journal: 20
        - osd_class_load_list: '*'
        - debug osd: 25
        - osd_class_default_list: '*'
        - osd map max advance: 1000
        - debug filestore: 20
      - mon:
        - mon debug unsafe allow tier with nonempty snaps: True
        - debug mon: 20
        - debug paxos: 20
        - debug ms: 1
    - sha1: 4f8fa0a0024755aae7d95567c63f11d6862d55be
  - ceph-deploy:
    - conf:
      - client:
        - log file: /var/log/ceph/ceph-$name.$pid.log
      - mon:
  - workunit:
    - sha1: 4f8fa0a0024755aae7d95567c63f11d6862d55be
    - branch: nautilus
  - install:
    - ceph:
      - sha1: 4f8fa0a0024755aae7d95567c63f11d6862d55be
  - admin_socket:
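The log-whitelist entries above are regular expressions rather than literal strings (note the escaped parentheses in \(MDS_ALL_DOWN\)); teuthology searches each pattern against warning and error lines scraped from the cluster log, and a match keeps that line from failing the run. A minimal sketch of that kind of matching, not teuthology's actual implementation, with a made-up sample log line:

```python
import re

# A few whitelist patterns copied from the overrides above; each is
# treated as a regex to search for anywhere in a log line.
whitelist = [r"\(MDS_ALL_DOWN\)", r"\(MDS_UP_LESS_THAN_MAX\)",
             r"reached quota", r"pool\(s\) full"]

def is_whitelisted(line):
    """True if any whitelist pattern occurs somewhere in the line."""
    return any(re.search(pat, line) for pat in whitelist)

# Hypothetical cluster-log line, for illustration only.
line = "cluster [WRN] Health check failed: 1 filesystem is offline (MDS_ALL_DOWN)"
print(is_whitelisted(line))  # True: this warning would be ignored
```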
- owner: scheduled_teuthology@teuthology
- pid:
- roles:
  - ['mon.a', 'mds.a', 'osd.0', 'osd.1', 'osd.2', 'mgr.x']
  - ['mon.b', 'mon.c', 'osd.3', 'osd.4', 'osd.5', 'client.0']
  - ['client.1']
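Each inner list under roles is a group of daemons that shares one machine, so this job locks three nodes. Assuming the lists pair in order with the targets listed further below (an assumption for illustration; the actual pairing is fixed when machines are locked), a quick sketch of building a daemon-to-host index:

```python
# Role lists copied from above; the hostname pairing is hypothetical.
roles = [
    ['mon.a', 'mds.a', 'osd.0', 'osd.1', 'osd.2', 'mgr.x'],
    ['mon.b', 'mon.c', 'osd.3', 'osd.4', 'osd.5', 'client.0'],
    ['client.1'],
]
hosts = ['smithi037', 'smithi153', 'smithi059']

# Map each role to the host its group runs on.
placement = {role: host for host, group in zip(hosts, roles) for role in group}
print(placement['osd.4'])     # smithi153
print(placement['client.1'])  # smithi059
```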
- sentry_event:
- status: pass
- success: True
- branch: nautilus
- seed:
- sha1: 4f8fa0a0024755aae7d95567c63f11d6862d55be
- subset:
- suite:
- suite_branch: nautilus
- suite_path:
- suite_relpath:
- suite_repo:
- suite_sha1: 4f8fa0a0024755aae7d95567c63f11d6862d55be
- targets:
  - smithi037.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDDQNiMEZGjnp5D2kt5S31GlJymNXfNonA3V6RJZwZTR3MknfCOktoTZzx8wMYUuXxExOcHWLXU9uBTr0j3MQI88NgGGmw+Logwk8E7i+mhLlOxgIVhev/5FB4LK8InA6feEgFntUhbw8nG5XJRNbQE/Ja6MYr7zlyUy6OvEKfQ0yBCWSP6btRk/l2PgNySQaqtGpQuCZM+NsXP+70yxyuIW62QgRV725H+qaX3qX/h7wFVgGrxE1D32VtNbZ/hgdgxZACFOMKMcBE8zPqmra0tz+WonZketJhRDCp2Dleb8bsOtPFFEmwmUbuk8n3xeU4HdyXnbrYTKRalmylmkUBl
  - smithi153.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDB3oR+UPiRoeN6SxJBWlVvGBz6N5DBnwxcg3A36wn8ONozNbNPI9WPcA7xC0rsNWEHEtaJZFIujuJ1xE61zfdaoU3O2zkMly2tqrJAeUt8tVp0aqNlvz3TcL6llIY7MyNQmyXo3FKeVGSIxxcnCEXgjHcLi4kOLSxZW77aEok8UtnoEk6ScBUDrU6W8vMNICIkE0UPIYBBM3EUWZQB1YwYZaFsnPPMQWWzOEz3nzk/aImaGhrHhBTyxxIyMjW1baV9Q0WRud0kaMPJHQcg/VlNpVhiu3UGnW+AO4lf/nc8PoWACap4szX45JC4kAyOyZcznHct+TdeQBh2taoQnGZN
  - smithi059.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQChu8xtvIIxAcSOBSpFdLCBZbvduu7rC+197pinysMqdP29ZUafm/VAFAqbrJLvo6l0WG5COVhX/c+/epTt03+ix2Ty78WHsNnmUUj8iD3+088T/phQCSLFtNq7mrCM7wGotSkBAa9HE1qQ47MDsqsZnfYqYWeCCqOSgAhY3DvLOU1U/UFy83Kyl+Y82lnLbQHcfsCDLzHFdDVGr3SclpB5ZY9DFc8ZNuXQL8NbtWs/o9gz51BRltsOhQOoyuskQNk6BbOYTPBOLOwGsGzrkaKo4b3yVz4GSu9KIxy4ut/6TlvsboujmYVPVw6O1VLpTmlNom/0d+CI9rz/eNlOo4DJ
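Each targets entry pairs a node's FQDN with its SSH host public key, which is the same "hostname key-type base64-key" shape as an OpenSSH known_hosts line. A small sketch of emitting known_hosts entries from the map (keys shortened here; substitute the full values from above):

```python
# Keys truncated for readability, for illustration only.
targets = {
    "smithi037.front.sepia.ceph.com": "ssh-rsa AAAAB3NzaC1yc2E...",
    "smithi153.front.sepia.ceph.com": "ssh-rsa AAAAB3NzaC1yc2E...",
    "smithi059.front.sepia.ceph.com": "ssh-rsa AAAAB3NzaC1yc2E...",
}

# Write one known_hosts-style line per target.
with open("known_hosts.sample", "w") as f:
    for host, key in targets.items():
        f.write(f"{host} {key}\n")
```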
- tasks:
  - internal.buildpackages_prep:
  - internal.lock_machines:
  - internal.save_config:
  - internal.check_lock:
  - internal.add_remotes:
  - console_log:
  - internal.connect:
  - internal.push_inventory:
  - internal.serialize_remote_roles:
  - internal.check_conflict:
  - internal.check_ceph_data:
  - internal.vm_setup:
  - kernel:
  - internal.base:
  - internal.archive_upload:
  - internal.archive:
  - internal.coredump:
  - internal.sudo:
  - internal.syslog:
  - internal.timer:
  - pcp:
  - selinux:
  - ansible.cephlab:
  - clock:
  - print: '**** done nautilus v14.2.0 about to install'
  - install:
    - tag: v14.2.0
    - sha1: 4f8fa0a0024755aae7d95567c63f11d6862d55be
  - print: '**** done v14.2.0 install'
  - ceph:
    - fs: xfs
    - sha1: 4f8fa0a0024755aae7d95567c63f11d6862d55be
    - cluster: ceph
    - log-whitelist:
      - \(MDS_ALL_DOWN\)
      - \(MDS_UP_LESS_THAN_MAX\)
      - reached quota
      - scrub
      - osd_map_max_advance
      - wrongly marked
      - FS_DEGRADED
      - POOL_APP_NOT_ENABLED
      - CACHE_POOL_NO_HIT_SET
      - POOL_FULL
      - SMALLER_PG
      - pool\(s\) full
      - OSD_DOWN
      - missing hit_sets
      - CACHE_POOL_NEAR_FULL
      - PG_AVAILABILITY
      - PG_DEGRADED
      - application not enabled
      - cache pools at or near target size
      - filesystem is degraded
      - OBJECT_MISPLACED
      - failed to encode map
    - conf:
      - client:
        - rgw_crypt_require_ssl: False
        - rgw crypt s3 kms encryption keys: testkey-1=YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo= testkey-2=aWIKTWFrZWZpbGUKbWFuCm91dApzcmMKVGVzdGluZwo=
      - global:
        - mon_warn_on_pool_no_app: False
      - osd:
        - debug ms: 1
        - debug journal: 20
        - osd_class_load_list: '*'
        - debug osd: 25
        - osd_class_default_list: '*'
        - osd map max advance: 1000
        - debug filestore: 20
      - mon:
        - mon debug unsafe allow tier with nonempty snaps: True
        - debug mon: 20
        - debug paxos: 20
        - debug ms: 1
    - add_osds_to_crush: True
  - print: '**** done ceph xfs'
  - sequential:
  - print: '**** done workload v14.2.0'
  - install.upgrade:
    - mon.a:
      - project: ceph
      - tag: v14.2.1
    - mon.b:
      - project: ceph
      - tag: v14.2.1
  - print: '**** done v14.2.1 install'
  - parallel:
    - workload_nautilus
    - upgrade-sequence_nautilus
  - print: '**** done workload_nautilus v14.2.1'
  - install.upgrade:
  - parallel:
    - workload_nautilus
    - upgrade-sequence_nautilus
  - print: '**** done parallel nautilus branch'
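One detail worth decoding from the conf blocks above: the rgw crypt s3 kms encryption keys values are base64 dummy keys for the SSE-KMS tests, and testkey-1 unpacks to newline-separated filler text (bin, boost, boost-build, ceph.conf) that is exactly 32 bytes long, the length of an AES-256 key. A quick check:

```python
import base64

# testkey-1 copied verbatim from the conf above.
key = base64.b64decode("YmluCmJvb3N0CmJvb3N0LWJ1aWxkCmNlcGguY29uZgo=")
print(key)       # b'bin\nboost\nboost-build\nceph.conf\n'
print(len(key))  # 32, i.e. a valid 256-bit key length
```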
- teuthology_branch: master
- verbose: True
- pcp_grafana_url:
- priority:
- user:
- queue:
- posted: 2019-07-18 01:25:38
- started: 2019-07-18 17:58:33
- updated: 2019-07-18 21:10:35
- status_class: success
- runtime: 3:12:02
- wait_time: 0:50:29
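The trailing timing fields are internally consistent: runtime is updated minus started, and wait_time is runtime minus duration (presumably the overhead around the job proper, such as locking machines and setup/teardown). A quick check with the values from this record:

```python
from datetime import datetime, timedelta

fmt = "%Y-%m-%d %H:%M:%S"
started = datetime.strptime("2019-07-18 17:58:33", fmt)
updated = datetime.strptime("2019-07-18 21:10:35", fmt)
duration = timedelta(hours=2, minutes=21, seconds=33)  # the job's duration field

runtime = updated - started
print(runtime)             # 3:12:02, matching the runtime field
print(runtime - duration)  # 0:50:29, matching the wait_time field
```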