Status  Job ID  Posted  Started  Updated  Runtime  Duration  In Waiting  Machine  Teuthology Branch  OS Type  OS Version  Description  Nodes
fail 3378343 2018-12-19 07:00:26 2018-12-19 07:02:08 2018-12-19 07:32:07 0:29:59 0:04:50 0:25:09 ovh master ubuntu 16.04 smoke/1node/{clusters/{fixed-1.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/ceph-deploy.yaml} 1
Failure Reason:

{'ovh067.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh067', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}
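The failure above comes from ansible's user module being asked to add the ubuntu user to the groups ['fuse', 'kvm', 'disk'] on a node where kvm is not defined. A minimal sketch (assuming shell access to the affected ovh node, e.g. ovh067; the script name and group list are taken from the module_args above, not from teuthology itself) to confirm which of the expected groups are actually missing before re-running the ansible play:

    # check_groups.py - run on the target node to list which expected groups are absent
    # (hypothetical triage helper; group list copied from module_args in the failure reason)
    import grp

    EXPECTED = ["fuse", "kvm", "disk"]

    for name in EXPECTED:
        try:
            grp.getgrnam(name)          # raises KeyError if the group does not exist
            print(f"{name}: present")
        except KeyError:
            print(f"{name}: MISSING")   # corresponds to 'Group kvm does not exist'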

fail 3378344 2018-12-19 07:00:27 2018-12-19 07:02:08 2018-12-19 08:04:08 1:02:00 0:07:57 0:54:03 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on ovh064 with status 1: '\n sudo yum -y install ceph\n '
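Nearly all of the rhel 7.5 jobs below stop at this same point: the package install step exits with status 1 before any test runs. A minimal sketch (assuming you can SSH to the failed node, here ovh064; the command is the one quoted in the failure reason) to re-run the install step by hand and keep the output for triage:

    # reproduce_install.py - re-run the failing package step and capture yum's output
    # (hypothetical triage helper, not part of the teuthology job)
    import subprocess

    result = subprocess.run(
        ["sudo", "yum", "-y", "install", "ceph"],
        capture_output=True,
        text=True,
    )
    print("exit status:", result.returncode)   # teuthology reported status 1
    print(result.stdout[-2000:])               # tail of yum output, where repo/dependency errors usually appear
    print(result.stderr[-2000:])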

fail 3378345 2018-12-19 07:00:28 2018-12-19 07:04:13 2018-12-19 13:40:19 6:36:06 0:20:39 6:15:27 ovh master centos 7.5 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/centos_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

ceph-deploy: Failed during gather keys
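The gather-keys step of ceph-deploy fetches the bootstrap keyrings from the monitor host and fails when the monitors never formed quorum or the keys were not created. A minimal sketch (assuming ceph-deploy is on PATH on the admin node; MON_HOST is a placeholder, not a hostname from this run) to retry just that step and see the reported reason:

    # regather_keys.py - retry the gatherkeys step this systemd job failed on
    # (hypothetical helper; MON_HOST must be replaced with the job's monitor node)
    import subprocess

    MON_HOST = "mon-host"   # placeholder hostname

    proc = subprocess.run(
        ["ceph-deploy", "gatherkeys", MON_HOST],
        capture_output=True,
        text=True,
    )
    print("exit status:", proc.returncode)
    print(proc.stderr)      # ceph-deploy explains here why the keyrings could not be fetched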

fail 3378346 2018-12-19 07:00:29 2018-12-19 07:06:04 2018-12-19 09:22:05 2:16:01 0:09:01 2:07:00 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh067 with status 1: '\n sudo yum -y install ceph\n '

fail 3378347 2018-12-19 07:00:30 2018-12-19 07:06:04 2018-12-19 09:30:06 2:24:02 0:08:42 2:15:20 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh032 with status 1: '\n sudo yum -y install ceph\n '

fail 3378348 2018-12-19 07:00:31 2018-12-19 07:07:00 2018-12-19 08:03:00 0:56:00 0:07:42 0:48:18 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh097 with status 1: '\n sudo yum -y install ceph\n '

fail 3378349 2018-12-19 07:00:31 2018-12-19 07:15:59 2018-12-19 09:24:00 2:08:01 0:08:36 1:59:25 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_direct_io.yaml} 3
Failure Reason:

Command failed on ovh037 with status 1: '\n sudo yum -y install ceph\n '

fail 3378350 2018-12-19 07:00:32 2018-12-19 07:20:05 2018-12-19 10:08:06 2:48:01 0:07:47 2:40:14 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on ovh058 with status 1: '\n sudo yum -y install ceph\n '

fail 3378351 2018-12-19 07:00:33 2018-12-19 07:20:44 2018-12-19 09:00:45 1:40:01 0:08:05 1:31:56 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh083 with status 1: '\n sudo yum -y install ceph\n '

fail 3378352 2018-12-19 07:00:34 2018-12-19 07:21:55 2018-12-19 09:17:56 1:56:01 0:07:50 1:48:11 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh089 with status 1: '\n sudo yum -y install ceph\n '

fail 3378353 2018-12-19 07:00:34 2018-12-19 07:24:19 2018-12-19 08:28:19 1:04:00 0:08:24 0:55:36 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/libcephfs_interface_tests.yaml} 3
Failure Reason:

Command failed on ovh033 with status 1: '\n sudo yum -y install ceph\n '

fail 3378354 2018-12-19 07:00:35 2018-12-19 07:26:08 2018-12-19 08:30:08 1:04:00 0:07:50 0:56:10 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/mon_thrash.yaml} 3
Failure Reason:

Command failed on ovh052 with status 1: '\n sudo yum -y install ceph\n '

fail 3378355 2018-12-19 07:00:36 2018-12-19 07:32:17 2018-12-19 09:10:18 1:38:01 0:07:50 1:30:11 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_api_tests.yaml} 3
Failure Reason:

Command failed on ovh046 with status 1: '\n sudo yum -y install ceph\n '

fail 3378356 2018-12-19 07:00:37 2018-12-19 07:32:17 2018-12-19 08:50:17 1:18:00 0:07:49 1:10:11 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_bench.yaml} 3
Failure Reason:

Command failed on ovh038 with status 1: '\n sudo yum -y install ceph\n '

fail 3378357 2018-12-19 07:00:38 2018-12-19 07:34:09 2018-12-19 08:30:09 0:56:00 0:07:36 0:48:24 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cache_snaps.yaml} 3
Failure Reason:

Command failed on ovh044 with status 1: '\n sudo yum -y install ceph\n '

fail 3378358 2018-12-19 07:00:39 2018-12-19 07:34:43 2018-12-19 11:06:46 3:32:03 0:02:51 3:29:12 ovh master ubuntu 16.04 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

{'ovh037.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh037', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh100.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh100', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh044.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh044', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh023.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh023', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}
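The failure reason above is the same 'Group kvm does not exist' error repeated for four hosts, printed as a single Python dict. A minimal sketch (assuming the dict text has been saved to a file named failure.txt; both the filename and script are hypothetical) that collapses it to one line per host for easier reading:

    # summarize_ansible_failure.py - reduce a per-host ansible failure dict to host: msg pairs
    # (hypothetical helper; expects the literal dict text from the failure reason in failure.txt)
    import ast

    with open("failure.txt") as fh:
        per_host = ast.literal_eval(fh.read())   # the failure reason is a valid Python literal

    for host, result in sorted(per_host.items()):
        print(f"{host}: {result.get('msg', result)}")
    # -> ovh023.front.sepia.ceph.com: Group kvm does not exist   (and so on for each host)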

fail 3378359 2018-12-19 07:00:39 2018-12-19 07:36:04 2018-12-19 09:06:05 1:30:01 0:07:40 1:22:21 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cls_all.yaml} 3
Failure Reason:

Command failed on ovh091 with status 1: '\n sudo yum -y install ceph\n '

fail 3378360 2018-12-19 07:00:40 2018-12-19 07:36:04 2018-12-19 08:56:05 1:20:01 0:07:45 1:12:16 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_ec_snaps.yaml} 3
Failure Reason:

Command failed on ovh048 with status 1: '\n sudo yum -y install ceph\n '

fail 3378361 2018-12-19 07:00:41 2018-12-19 07:38:09 2018-12-19 10:22:16 2:44:07 0:08:28 2:35:39 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_python.yaml} 3
Failure Reason:

Command failed on ovh075 with status 1: '\n sudo yum -y install ceph\n '

fail 3378362 2018-12-19 07:00:42 2018-12-19 07:47:52 2018-12-19 09:19:53 1:32:01 0:08:38 1:23:23 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_workunit_loadgen_mix.yaml} 3
Failure Reason:

Command failed on ovh068 with status 1: '\n sudo yum -y install ceph\n '

fail 3378363 2018-12-19 07:00:42 2018-12-19 07:50:38 2018-12-19 10:42:40 2:52:02 0:08:15 2:43:47 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_api_tests.yaml} 3
Failure Reason:

Command failed on ovh054 with status 1: '\n sudo yum -y install ceph\n '

fail 3378364 2018-12-19 07:00:43 2018-12-19 07:52:16 2018-12-19 09:06:17 1:14:01 0:09:01 1:05:00 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_cli_import_export.yaml} 3
Failure Reason:

Command failed on ovh075 with status 1: '\n sudo yum -y install ceph\n '

fail 3378365 2018-12-19 07:00:44 2018-12-19 07:54:15 2018-12-19 09:32:16 1:38:01 0:08:55 1:29:06 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_fsx.yaml} 3
Failure Reason:

Command failed on ovh028 with status 1: '\n sudo yum -y install ceph\n '

fail 3378366 2018-12-19 07:00:45 2018-12-19 07:56:17 2018-12-19 10:18:23 2:22:06 0:07:49 2:14:17 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_python_api_tests.yaml} 3
Failure Reason:

Command failed on ovh084 with status 1: '\n sudo yum -y install ceph\n '

fail 3378367 2018-12-19 07:00:46 2018-12-19 08:00:59 2018-12-19 09:51:00 1:50:01 0:08:43 1:41:18 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh095 with status 1: '\n sudo yum -y install ceph\n '

fail 3378368 2018-12-19 07:00:47 2018-12-19 08:02:05 2018-12-19 09:40:06 1:38:01 0:07:42 1:30:19 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_ec_s3tests.yaml} 3
Failure Reason:

Command failed on ovh090 with status 1: '\n sudo yum -y install ceph\n '

fail 3378369 2018-12-19 07:00:47 2018-12-19 08:02:05 2018-12-19 10:22:07 2:20:02 0:08:01 2:12:01 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_s3tests.yaml} 3
Failure Reason:

Command failed on ovh092 with status 1: '\n sudo yum -y install ceph\n '

fail 3378370 2018-12-19 07:00:48 2018-12-19 08:03:01 2018-12-19 09:31:02 1:28:01 0:08:44 1:19:17 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_swift.yaml} 3
Failure Reason:

Command failed on ovh020 with status 1: '\n sudo yum -y install ceph\n '