ID | Status | Ceph Branch | Suite Branch | Teuthology Branch | Machine | OS | Nodes | Description | Failure Reason

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: ubuntu 16.04
Description: smoke/1node/{clusters/{fixed-1.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/ceph-deploy.yaml}
Failure Reason:
{'ovh009.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh009', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/cfuse_workunit_suites_blogbench.yaml}
Failure Reason:
Command failed on ovh070 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: centos 7.5
Description: smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/centos_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml}
Failure Reason:
{'ovh080.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'directory_mode': None, 'force': True, 'remote_src': None, 'dest': '/usr/lib64/nagios/plugins/check_mem.sh', 'selevel': None, 'original_basename': 'check_mem.sh', 'regexp': None, 'owner': 'root', 'follow': False, 'validate': None, 'local_follow': None, 'src': '/home/ubuntu/.ansible/tmp/ansible-tmp-1543938402.82-61732398205915/source', 'group': 'root', 'unsafe_writes': None, 'delimiter': None, 'seuser': None, 'serole': None, 'content': None, 'setype': None, 'mode': 493, 'attributes': None, 'backup': False}}, '_ansible_no_log': False, 'diff': [], 'msg': 'Destination directory /usr/lib64/nagios/plugins does not exist', 'checksum': '39df629b10ded370443e2e4c84d690332c95104d', 'changed': False}, 'ovh070.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'directory_mode': None, 'force': True, 'remote_src': None, 'dest': '/usr/lib64/nagios/plugins/check_mem.sh', 'selevel': None, 'original_basename': 'check_mem.sh', 'regexp': None, 'owner': 'root', 'follow': False, 'validate': None, 'local_follow': None, 'src': '/home/ubuntu/.ansible/tmp/ansible-tmp-1543938417.02-117149017718477/source', 'group': 'root', 'unsafe_writes': None, 'delimiter': None, 'seuser': None, 'serole': None, 'content': None, 'setype': None, 'mode': 493, 'attributes': None, 'backup': False}}, '_ansible_no_log': False, 'diff': [], 'msg': 'Destination directory /usr/lib64/nagios/plugins does not exist', 'checksum': '39df629b10ded370443e2e4c84d690332c95104d', 'changed': False}, 'ovh088.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'directory_mode': None, 'force': True, 'remote_src': None, 'dest': '/usr/lib64/nagios/plugins/check_mem.sh', 'selevel': None, 'original_basename': 'check_mem.sh', 'regexp': None, 'owner': 'root', 'follow': False, 'validate': None, 'local_follow': None, 'src': '/home/ubuntu/.ansible/tmp/ansible-tmp-1543938414.66-260308669807373/source', 'group': 'root', 'unsafe_writes': None, 'delimiter': None, 'seuser': None, 'serole': None, 'content': None, 'setype': None, 'mode': 493, 'attributes': None, 'backup': False}}, '_ansible_no_log': False, 'diff': [], 'msg': 'Destination directory /usr/lib64/nagios/plugins does not exist', 'checksum': '39df629b10ded370443e2e4c84d690332c95104d', 'changed': False}, 'ovh061.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'directory_mode': None, 'force': True, 'remote_src': None, 'dest': '/usr/lib64/nagios/plugins/check_mem.sh', 'selevel': None, 'original_basename': 'check_mem.sh', 'regexp': None, 'owner': 'root', 'follow': False, 'validate': None, 'local_follow': None, 'src': '/home/ubuntu/.ansible/tmp/ansible-tmp-1543938426.71-15560571314183/source', 'group': 'root', 'unsafe_writes': None, 'delimiter': None, 'seuser': None, 'serole': None, 'content': None, 'setype': None, 'mode': 493, 'attributes': None, 'backup': False}}, '_ansible_no_log': False, 'diff': [], 'msg': 'Destination directory /usr/lib64/nagios/plugins does not exist', 'checksum': '39df629b10ded370443e2e4c84d690332c95104d', 'changed': False}}

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/cfuse_workunit_suites_fsstress.yaml}
Failure Reason:
Command failed on ovh055 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/cfuse_workunit_suites_iozone.yaml}
Failure Reason:
Command failed on ovh094 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/cfuse_workunit_suites_pjd.yaml}
Failure Reason:
Command failed on ovh075 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/kclient_workunit_direct_io.yaml}
Failure Reason:
Command failed on ovh033 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/kclient_workunit_suites_dbench.yaml}
Failure Reason:
Command failed on ovh003 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/kclient_workunit_suites_fsstress.yaml}
Failure Reason:
Command failed on ovh037 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/kclient_workunit_suites_pjd.yaml}
Failure Reason:
Command failed on ovh073 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/libcephfs_interface_tests.yaml}
Failure Reason:
Command failed on ovh068 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/mon_thrash.yaml}
Failure Reason:
Command failed on ovh065 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_api_tests.yaml}
Failure Reason:
Command failed on ovh002 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_bench.yaml}
Failure Reason:
Command failed on ovh039 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_cache_snaps.yaml}
Failure Reason:
Command failed on ovh070 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: ubuntu 16.04
Description: smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml}
Failure Reason:
{'ovh061.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh061', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh093.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh093', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh014.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh014', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh030.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh030', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_cls_all.yaml}
Failure Reason:
Command failed on ovh012 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_ec_snaps.yaml}
Failure Reason:
Command failed on ovh040 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_python.yaml}
Failure Reason:
Command failed on ovh089 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_workunit_loadgen_mix.yaml}
Failure Reason:
Command failed on ovh022 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_api_tests.yaml}
Failure Reason:
Command failed on ovh058 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_cli_import_export.yaml}
Failure Reason:
Command failed on ovh050 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_fsx.yaml}
Failure Reason:
Command failed on ovh057 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_python_api_tests.yaml}
Failure Reason:
Command failed on ovh045 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_workunit_suites_iozone.yaml}
Failure Reason:
Command failed on ovh016 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rgw_ec_s3tests.yaml}
Failure Reason:
Command failed on ovh075 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rgw_s3tests.yaml}
Failure Reason:
Command failed on ovh051 with status 1: '\n sudo yum -y install ceph-mgr\n '

Ceph Branch: master
Suite Branch: master
Teuthology Branch: master
Machine: ovh
OS: rhel 7.5
Description: smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rgw_swift.yaml}
Failure Reason:
Command failed on ovh064 with status 1: '\n sudo yum -y install ceph-mgr\n '