Status Job ID Links Posted Started Updated Runtime Duration In Waiting Machine Teuthology Branch OS Type OS Version Description Nodes
fail 3458257 2019-01-13 07:00:24 2019-01-13 07:00:28 2019-01-13 07:12:27 0:11:59 0:02:39 0:09:20 ovh master ubuntu 16.04 smoke/1node/{clusters/{fixed-1.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/ceph-deploy.yaml} 1
Failure Reason:

{'ovh030.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh030', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

fail 3458258 2019-01-13 07:00:25 2019-01-13 07:00:28 2019-01-13 08:00:28 1:00:00 0:07:52 0:52:08 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on ovh046 with status 1: '\n sudo yum -y install ceph\n '

fail 3458259 2019-01-13 07:00:26 2019-01-13 07:00:28 2019-01-13 08:38:29 1:38:01 0:18:07 1:19:54 ovh master centos 7.5 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/centos_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

ceph-deploy: Failed to zap osds

fail 3458260 2019-01-13 07:00:26 2019-01-13 07:00:29 2019-01-13 07:58:29 0:58:00 0:08:29 0:49:31 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh042 with status 1: '\n sudo yum -y install ceph\n '

fail 3458261 2019-01-13 07:00:27 2019-01-13 07:00:28 2019-01-13 08:06:29 1:06:01 0:08:20 0:57:41 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh073 with status 1: '\n sudo yum -y install ceph\n '

fail 3458262 2019-01-13 07:00:28 2019-01-13 07:00:29 2019-01-13 08:02:29 1:02:00 0:07:52 0:54:08 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh056 with status 1: '\n sudo yum -y install ceph\n '

fail 3458263 2019-01-13 07:00:29 2019-01-13 07:00:30 2019-01-13 08:00:30 1:00:00 0:08:05 0:51:55 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_direct_io.yaml} 3
Failure Reason:

Command failed on ovh064 with status 1: '\n sudo yum -y install ceph\n '

fail 3458264 2019-01-13 07:00:29 2019-01-13 07:00:30 2019-01-13 08:06:31 1:06:01 0:08:10 0:57:51 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on ovh053 with status 1: '\n sudo yum -y install ceph\n '

fail 3458265 2019-01-13 07:00:30 2019-01-13 07:00:31 2019-01-13 07:58:31 0:58:00 0:08:03 0:49:57 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh011 with status 1: '\n sudo yum -y install ceph\n '

fail 3458266 2019-01-13 07:00:31 2019-01-13 07:00:32 2019-01-13 08:02:32 1:02:00 0:08:00 0:54:00 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh039 with status 1: '\n sudo yum -y install ceph\n '

fail 3458267 2019-01-13 07:00:32 2019-01-13 07:00:33 2019-01-13 08:16:33 1:16:00 0:07:58 1:08:02 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/libcephfs_interface_tests.yaml} 3
Failure Reason:

Command failed on ovh038 with status 1: '\n sudo yum -y install ceph\n '

fail 3458268 2019-01-13 07:00:32 2019-01-13 07:00:34 2019-01-13 08:12:39 1:12:05 0:08:12 1:03:53 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/mon_thrash.yaml} 3
Failure Reason:

Command failed on ovh027 with status 1: '\n sudo yum -y install ceph\n '

fail 3458269 2019-01-13 07:00:33 2019-01-13 07:00:34 2019-01-13 07:58:34 0:58:00 0:08:16 0:49:44 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_api_tests.yaml} 3
Failure Reason:

Command failed on ovh080 with status 1: '\n sudo yum -y install ceph\n '

fail 3458270 2019-01-13 07:00:34 2019-01-13 07:00:35 2019-01-13 08:04:35 1:04:00 0:08:11 0:55:49 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_bench.yaml} 3
Failure Reason:

Command failed on ovh094 with status 1: '\n sudo yum -y install ceph\n '

fail 3458271 2019-01-13 07:00:35 2019-01-13 07:00:36 2019-01-13 08:02:36 1:02:00 0:07:58 0:54:02 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cache_snaps.yaml} 3
Failure Reason:

Command failed on ovh014 with status 1: '\n sudo yum -y install ceph\n '

fail 3458272 2019-01-13 07:00:35 2019-01-13 07:00:37 2019-01-13 07:20:36 0:19:59 0:02:52 0:17:07 ovh master ubuntu 16.04 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

{'ovh041.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh041', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh095.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh095', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh033.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh033', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh074.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh074', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

fail 3458273 2019-01-13 07:00:36 2019-01-13 07:00:37 2019-01-13 08:08:38 1:08:01 0:08:26 0:59:35 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cls_all.yaml} 3
Failure Reason:

Command failed on ovh068 with status 1: '\n sudo yum -y install ceph\n '

fail 3458274 2019-01-13 07:00:37 2019-01-13 07:00:38 2019-01-13 08:02:38 1:02:00 0:07:50 0:54:10 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_ec_snaps.yaml} 3
Failure Reason:

Command failed on ovh089 with status 1: '\n sudo yum -y install ceph\n '

fail 3458275 2019-01-13 07:00:38 2019-01-13 07:00:39 2019-01-13 08:06:39 1:06:00 0:07:53 0:58:07 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_python.yaml} 3
Failure Reason:

Command failed on ovh049 with status 1: '\n sudo yum -y install ceph\n '

fail 3458276 2019-01-13 07:00:38 2019-01-13 07:00:40 2019-01-13 07:58:40 0:58:00 0:07:53 0:50:07 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_workunit_loadgen_mix.yaml} 3
Failure Reason:

Command failed on ovh044 with status 1: '\n sudo yum -y install ceph\n '

fail 3458277 2019-01-13 07:00:39 2019-01-13 07:00:41 2019-01-13 08:10:41 1:10:00 0:08:30 1:01:30 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_api_tests.yaml} 3
Failure Reason:

Command failed on ovh024 with status 1: '\n sudo yum -y install ceph\n '

fail 3458278 2019-01-13 07:00:40 2019-01-13 07:00:41 2019-01-13 08:28:41 1:28:00 0:08:17 1:19:43 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_cli_import_export.yaml} 3
Failure Reason:

Command failed on ovh012 with status 1: '\n sudo yum -y install ceph\n '

fail 3458279 2019-01-13 07:00:41 2019-01-13 07:00:42 2019-01-13 08:16:42 1:16:00 0:07:41 1:08:19 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_fsx.yaml} 3
Failure Reason:

Command failed on ovh095 with status 1: '\n sudo yum -y install ceph\n '

fail 3458280 2019-01-13 07:00:41 2019-01-13 07:00:42 2019-01-13 08:06:43 1:06:01 0:07:46 0:58:15 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_python_api_tests.yaml} 3
Failure Reason:

Command failed on ovh074 with status 1: '\n sudo yum -y install ceph\n '

fail 3458281 2019-01-13 07:00:42 2019-01-13 07:00:43 2019-01-13 08:46:44 1:46:01 0:07:40 1:38:21 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh065 with status 1: '\n sudo yum -y install ceph\n '

fail 3458282 2019-01-13 07:00:43 2019-01-13 07:00:44 2019-01-13 08:46:45 1:46:01 0:08:06 1:37:55 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_ec_s3tests.yaml} 3
Failure Reason:

Command failed on ovh048 with status 1: '\n sudo yum -y install ceph\n '

fail 3458283 2019-01-13 07:00:43 2019-01-13 07:00:55 2019-01-13 09:07:01 2:06:06 0:07:58 1:58:08 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_s3tests.yaml} 3
Failure Reason:

Command failed on ovh100 with status 1: '\n sudo yum -y install ceph\n '

fail 3458284 2019-01-13 07:00:44 2019-01-13 07:00:55 2019-01-13 08:44:56 1:44:01 0:07:35 1:36:26 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_swift.yaml} 3
Failure Reason:

Command failed on ovh073 with status 1: '\n sudo yum -y install ceph\n '