- log_href:
http://qa-proxy.ceph.com/teuthology/yuriw-2019-11-01_14:30:15-rados-wip-yuri8-testing-2019-10-31-1633-luminous-distro-basic-smithi/4463069/teuthology.log
- archive_path:
/home/teuthworker/archive/yuriw-2019-11-01_14:30:15-rados-wip-yuri8-testing-2019-10-31-1633-luminous-distro-basic-smithi/4463069
- description:
rados/mgr/{clusters/2-node-mgr.yaml debug/mgr.yaml objectstore/bluestore-comp.yaml tasks/module_selftest.yaml}
- duration:
'0:10:51'
- email:
ceph-qa@ceph.io
- failure_reason:
- flavor:
basic
- job_id:
4463069
- kernel:
- last_in_suite:
false
- machine_type:
smithi
- name:
yuriw-2019-11-01_14:30:15-rados-wip-yuri8-testing-2019-10-31-1633-luminous-distro-basic-smithi
- nuke_on_error:
true
- os_type:
- os_version:
- overrides:
- ceph-deploy:
- conf:
- client:
- log file:
/var/log/ceph/ceph-$name.$pid.log
- mon:
- workunit:
- sha1:
824835c6d1f830378f0795c54accd74bda44fc94
- branch:
wip-yuri8-testing-2019-10-31-1633-luminous
- ceph:
- log-whitelist:
-
\(MDS_ALL_DOWN\)
-
\(MDS_UP_LESS_THAN_MAX\)
- fs:
xfs
- conf:
- mds:
- client:
- debug ms:
1
- debug mgrc:
20
- debug client:
20
- osd:
- mon osd full ratio:
0.9
- debug ms:
1
- debug mgrc:
20
- bluestore fsck on mount:
true
- debug osd:
25
- bluestore compression mode:
aggressive
- debug bluestore:
20
- debug bluefs:
20
- osd objectstore:
bluestore
- mon osd backfillfull_ratio:
0.85
- bluestore block size:
96636764160
- debug filestore:
20
- debug rocksdb:
10
- mon osd nearfull ratio:
0.8
- osd failsafe full ratio:
0.95
- debug journal:
20
- mon:
- debug mon:
20
- debug paxos:
20
- debug ms:
1
- mgr:
- debug ms:
1
- debug mgr:
20
- sha1:
824835c6d1f830378f0795c54accd74bda44fc94
- install:
- ceph:
- sha1:
824835c6d1f830378f0795c54accd74bda44fc94
- admin_socket:
- branch:
wip-yuri8-testing-2019-10-31-1633-luminous
- thrashosds:
- bdev_inject_crash_probability:
0.5
- bdev_inject_crash:
2
- owner:
scheduled_yuriw@teuthology
- pid:
- roles:
-
['mgr.x', 'mon.a', 'mon.c', 'mds.a', 'mds.c', 'osd.0', 'client.0']
-
['mgr.y', 'mgr.z', 'mon.b', 'mds.b', 'osd.1', 'osd.2', 'client.1']
- sentry_event:
- status:
pass
- success:
true
- branch:
wip-yuri8-testing-2019-10-31-1633-luminous
- seed:
- sha1:
824835c6d1f830378f0795c54accd74bda44fc94
- subset:
- suite:
- suite_branch:
wip-yuri8-testing-2019-10-31-1633-luminous
- suite_path:
- suite_relpath:
- suite_repo:
- suite_sha1:
824835c6d1f830378f0795c54accd74bda44fc94
- targets:
- smithi008.front.sepia.ceph.com:
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDDsyl/ek6lyqLCXt9KrTP8vJk0pQknjJxm+qOSUpWhqbX9vb2858Uzw5XKjiEE09qWFASgJM7qFYEoPh8M1SLNBJRdp2LVHwoUZf/dNuACQH+lgwWMbLeJVWtf61Grro9TARX7Ba1KqIeMRX2qOORDR+dvw5zaPBk6hL1k8IllUsQbUzUhLtGGXzz1KXihwTlmSbwUGXjmbcbK8Z775N8g6NDux+3y4ThJAusYpEXHj8ZHUmM67U8gEJdo909MWyvUcYHxyFV25Ef+Xxa9173pnfVvS7yoiyG2Rro0WkuY9d3uh5V3g64CF50lKkaYnylHPBj35E5ueVJRCIrMLI7x
- smithi069.front.sepia.ceph.com:
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFU3K4+No24tAqqF3OSYFk97GRrmyXZAiELr/qz7P+Rd53bB5pdN3307HRC/Ey3/awI/3p7AxDhr1Blk3igkWSZu/0cv6HI3zo5vTqrNLmi2J8hh2ZZTUoURocIrGOMiclnk3SOS5QrIUCiEpHCa88JrFECrzyoRXR/31Yu2FRMOnu7INK24CBDhXdSU9CLGi7Jicb6BTNMikCOJ5lU52oe0JZtgyQCpjeS77q8BLJzR1HCSGbe4SVDnx0MKVk62O1n7k1bKCy0/HKU8FKldMmwSfFMYbwZ7lXIT//5Y14tfL8QgqPfO9C4tgoPYY+xqiqX4OTDVpT3OXFLwAuq4Vd
- tasks:
-
- internal.buildpackages_prep:
-
- internal.lock_machines:
-
- internal.save_config:
-
- internal.check_lock:
-
- internal.add_remotes:
-
- console_log:
-
- internal.connect:
-
- internal.push_inventory:
-
- internal.serialize_remote_roles:
-
- internal.check_conflict:
-
- internal.check_ceph_data:
-
- internal.vm_setup:
-
- kernel:
-
- internal.base:
-
- internal.archive_upload:
-
- internal.archive:
-
- internal.coredump:
-
- internal.sudo:
-
- internal.syslog:
-
- internal.timer:
-
- pcp:
-
- selinux:
-
- ansible.cephlab:
-
- clock:
-
- install:
-
- ceph:
- fs:
xfs
- wait-for-scrub:
false
- sha1:
824835c6d1f830378f0795c54accd74bda44fc94
- cluster:
ceph
- log-whitelist:
-
overall HEALTH_
-
\(MGR_DOWN\)
-
\(PG_
-
replacing it with standby
-
No standby daemons available
-
Reduced data availability
-
Degraded data redundancy
-
objects misplaced
-
\(MDS_ALL_DOWN\)
-
\(MDS_UP_LESS_THAN_MAX\)
- conf:
- mds:
- client:
- debug ms:
1
- debug mgrc:
20
- debug client:
20
- osd:
- mon osd full ratio:
0.9
- debug ms:
1
- debug mgrc:
20
- bluestore fsck on mount:
true
- debug osd:
25
- bluestore compression mode:
aggressive
- debug bluestore:
20
- debug bluefs:
20
- osd objectstore:
bluestore
- mon osd backfillfull_ratio:
0.85
- bluestore block size:
96636764160
- debug filestore:
20
- debug rocksdb:
10
- mon osd nearfull ratio:
0.8
- osd failsafe full ratio:
0.95
- debug journal:
20
- mon:
- debug mon:
20
- debug paxos:
20
- debug ms:
1
- mgr:
- debug ms:
1
- debug mgr:
20
-
- cephfs_test_runner:
- modules:
-
tasks.mgr.test_module_selftest
teuthology_branch:
master
verbose:
true
pcp_grafana_url:
priority:
user:
queue:
posted:
'2019-11-01 14:32:40'
started:
'2019-11-01 15:48:52'
updated:
'2019-11-01 16:12:51'
status_class:
success
runtime:
'0:23:59'
wait_time:
'0:13:08'