Status  Job ID  Posted  Started  Updated  Runtime  Duration  In Waiting  Machine  Teuthology Branch  OS Type  OS Version  Description  Nodes
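(Reading the three time columns: Runtime = Duration + In Waiting; e.g. job 3478371: 0:02:47 + 0:09:13 = 0:12:00.)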
fail 3478371 2019-01-18 07:00:47 2019-01-18 08:20:50 2019-01-18 08:32:50 0:12:00 0:02:47 0:09:13 ovh master ubuntu 16.04 smoke/1node/{clusters/{fixed-1.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/ceph-deploy.yaml} 1
Failure Reason:

{'ovh089.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh089', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}
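The failure reason here is the raw per-host result dict from the Ansible user module; the field that matters is the trailing msg, 'Group kvm does not exist' (everything before it is just the module invocation echoed back). A minimal, hypothetical Python sketch for pulling host/msg pairs out of such a dump (the function name is illustrative, not part of teuthology):

    import ast

    def summarize_ansible_failure(reason_text):
        """Map each host in an ansible-style failure reason to its error message."""
        # The dump above is a Python-literal dict, so ast.literal_eval can parse it.
        per_host = ast.literal_eval(reason_text)
        return {host: result.get('msg', '<no msg>') for host, result in per_host.items()}

    # For job 3478371 this yields:
    # {'ovh089.front.sepia.ceph.com': 'Group kvm does not exist'}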

fail 3478372 2019-01-18 07:00:48 2019-01-18 08:28:50 2019-01-18 09:28:50 1:00:00 0:07:59 0:52:01 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on ovh081 with status 1: '\n sudo yum -y install ceph\n '

fail 3478373 2019-01-18 07:00:48 2019-01-18 08:32:58 2019-01-18 10:16:58 1:44:00 0:19:23 1:24:37 ovh master centos 7.5 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/centos_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

ceph-deploy: Failed to zap osds

fail 3478374 2019-01-18 07:00:49 2019-01-18 08:45:04 2019-01-18 09:39:04 0:54:00 0:08:08 0:45:52 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh040 with status 1: '\n sudo yum -y install ceph\n '

fail 3478375 2019-01-18 07:00:50 2019-01-18 08:51:17 2019-01-18 09:43:17 0:52:00 0:08:02 0:43:58 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh064 with status 1: '\n sudo yum -y install ceph\n '

fail 3478376 2019-01-18 07:00:50 2019-01-18 09:07:08 2019-01-18 10:01:08 0:54:00 0:08:09 0:45:51 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh067 with status 1: '\n sudo yum -y install ceph\n '

fail 3478377 2019-01-18 07:00:51 2019-01-18 09:09:06 2019-01-18 10:13:06 1:04:00 0:08:24 0:55:36 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_direct_io.yaml} 3
Failure Reason:

Command failed on ovh053 with status 1: '\n sudo yum -y install ceph\n '

fail 3478378 2019-01-18 07:00:52 2019-01-18 09:10:59 2019-01-18 09:58:59 0:48:00 0:09:03 0:38:57 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on ovh037 with status 1: '\n sudo yum -y install ceph\n '

fail 3478379 2019-01-18 07:00:53 2019-01-18 09:28:56 2019-01-18 10:24:56 0:56:00 0:07:43 0:48:17 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh091 with status 1: '\n sudo yum -y install ceph\n '

fail 3478380 2019-01-18 07:00:54 2019-01-18 09:39:07 2019-01-18 10:31:06 0:51:59 0:07:39 0:44:20 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh040 with status 1: '\n sudo yum -y install ceph\n '

fail 3478381 2019-01-18 07:00:54 2019-01-18 09:43:20 2019-01-18 10:39:20 0:56:00 0:08:10 0:47:50 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/libcephfs_interface_tests.yaml} 3
Failure Reason:

Command failed on ovh010 with status 1: '\n sudo yum -y install ceph\n '

fail 3478382 2019-01-18 07:00:55 2019-01-18 09:59:02 2019-01-18 10:45:02 0:46:00 0:07:41 0:38:19 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/mon_thrash.yaml} 3
Failure Reason:

Command failed on ovh036 with status 1: '\n sudo yum -y install ceph\n '

fail 3478383 2019-01-18 07:00:56 2019-01-18 10:01:10 2019-01-18 10:49:10 0:48:00 0:07:32 0:40:28 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_api_tests.yaml} 3
Failure Reason:

Command failed on ovh031 with status 1: '\n sudo yum -y install ceph\n '

fail 3478384 2019-01-18 07:00:56 2019-01-18 10:09:03 2019-01-18 10:55:03 0:46:00 0:07:42 0:38:18 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_bench.yaml} 3
Failure Reason:

Command failed on ovh037 with status 1: '\n sudo yum -y install ceph\n '

fail 3478385 2019-01-18 07:00:57 2019-01-18 10:13:09 2019-01-18 11:13:09 1:00:00 0:07:39 0:52:21 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cache_snaps.yaml} 3
Failure Reason:

Command failed on ovh053 with status 1: '\n sudo yum -y install ceph\n '

fail 3478386 2019-01-18 07:00:58 2019-01-18 10:15:10 2019-01-18 10:41:10 0:26:00 0:03:46 0:22:14 ovh master ubuntu 16.04 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

{'ovh084.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh084', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh028.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh028', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh045.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh045', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh076.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh076', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

fail 3478387 2019-01-18 07:00:58 2019-01-18 10:17:11 2019-01-18 11:23:11 1:06:00 0:07:52 0:58:08 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cls_all.yaml} 3
Failure Reason:

Command failed on ovh089 with status 1: '\n sudo yum -y install ceph\n '

fail 3478388 2019-01-18 07:00:59 2019-01-18 10:24:58 2019-01-18 11:18:58 0:54:00 0:07:52 0:46:08 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_ec_snaps.yaml} 3
Failure Reason:

Command failed on ovh078 with status 1: '\n sudo yum -y install ceph\n '

fail 3478389 2019-01-18 07:01:00 2019-01-18 10:29:09 2019-01-18 11:17:09 0:48:00 0:07:48 0:40:12 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_python.yaml} 3
Failure Reason:

Command failed on ovh002 with status 1: '\n sudo yum -y install ceph\n '

fail 3478390 2019-01-18 07:01:01 2019-01-18 10:31:15 2019-01-18 11:19:15 0:48:00 0:08:03 0:39:57 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_workunit_loadgen_mix.yaml} 3
Failure Reason:

Command failed on ovh076 with status 1: '\n sudo yum -y install ceph\n '

fail 3478391 2019-01-18 07:01:01 2019-01-18 10:39:21 2019-01-18 11:39:21 1:00:00 0:08:05 0:51:55 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_api_tests.yaml} 3
Failure Reason:

Command failed on ovh010 with status 1: '\n sudo yum -y install ceph\n '

fail 3478392 2019-01-18 07:01:02 2019-01-18 10:41:15 2019-01-18 11:37:15 0:56:00 0:07:47 0:48:13 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_cli_import_export.yaml} 3
Failure Reason:

Command failed on ovh097 with status 1: '\n sudo yum -y install ceph\n '

fail 3478393 2019-01-18 07:01:03 2019-01-18 10:45:16 2019-01-18 11:33:16 0:48:00 0:08:14 0:39:46 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_fsx.yaml} 3
Failure Reason:

Command failed on ovh070 with status 1: '\n sudo yum -y install ceph\n '

fail 3478394 2019-01-18 07:01:03 2019-01-18 10:49:23 2019-01-18 11:39:23 0:50:00 0:08:28 0:41:32 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_python_api_tests.yaml} 3
Failure Reason:

Command failed on ovh067 with status 1: '\n sudo yum -y install ceph\n '

fail 3478395 2019-01-18 07:01:04 2019-01-18 10:55:05 2019-01-18 11:43:05 0:48:00 0:07:40 0:40:20 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh037 with status 1: '\n sudo yum -y install ceph\n '

fail 3478396 2019-01-18 07:01:05 2019-01-18 11:13:12 2019-01-18 12:13:12 1:00:00 0:07:50 0:52:10 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_ec_s3tests.yaml} 3
Failure Reason:

Command failed on ovh009 with status 1: '\n sudo yum -y install ceph\n '

fail 3478397 2019-01-18 07:01:06 2019-01-18 11:17:11 2019-01-18 12:05:11 0:48:00 0:08:07 0:39:53 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_s3tests.yaml} 3
Failure Reason:

Command failed on ovh066 with status 1: '\n sudo yum -y install ceph\n '

fail 3478398 2019-01-18 07:01:06 2019-01-18 11:19:11 2019-01-18 12:07:11 0:48:00 0:08:21 0:39:39 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_swift.yaml} 3
Failure Reason:

Command failed on ovh076 with status 1: '\n sudo yum -y install ceph\n '
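
Most of this run fails the same way: every rhel 7.5 job dies on the yum install step, alongside two ubuntu jobs failing in the ansible 'Group kvm does not exist' stage and one centos job failing ceph-deploy osd zap. A hypothetical triage sketch that buckets jobs by failure reason with the ovh host number normalized away (the (job_id, reason) pairs are assumed to have been scraped from the listing above; this is not a teuthology API):

    import re
    from collections import Counter

    def bucket_failures(failures):
        """Count jobs per failure reason, ignoring which ovh node was hit."""
        counts = Counter()
        for job_id, reason in failures:
            counts[re.sub(r'ovh\d+', 'ovhNNN', reason)] += 1
        return counts

    # With the 28 jobs above this would report roughly:
    #   "Command failed on ovhNNN with status 1: '\n sudo yum -y install ceph\n '" -> 25
    #   "ceph-deploy: Failed to zap osds" -> 1
    #   plus the two ansible 'Group kvm does not exist' dumps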