Status Job ID Links Posted Started Updated Runtime Duration In Waiting Machine Teuthology Branch OS Type OS Version Description Nodes
fail 3421689 2019-01-04 07:00:45 2019-01-04 07:03:42 2019-01-04 07:17:41 0:13:59 0:02:43 0:11:16 ovh master ubuntu 16.04 smoke/1node/{clusters/{fixed-1.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/ceph-deploy.yaml} 1
Failure Reason:

{'ovh092.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh092', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}
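
The failure reason above is the raw Ansible result from host provisioning: a Python-style dict keyed by hostname, with the module arguments under 'invocation' and the human-readable error in 'msg' ('Group kvm does not exist'). As a minimal sketch (not part of the run output; the helper name is made up for illustration), the per-host messages can be pulled out of such a blob with the standard library alone:

    import ast

    def summarize_ansible_failure(blob):
        """Map each hostname in an Ansible failure blob to its 'msg' field."""
        # The blob is a valid Python dict literal, so literal_eval can parse it safely.
        hosts = ast.literal_eval(blob)
        return {host: result.get('msg', '<no msg>') for host, result in hosts.items()}

    # Usage with the single-host failure from job 3421689 (trimmed to the relevant keys):
    blob = "{'ovh092.front.sepia.ceph.com': {'_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}"
    print(summarize_ansible_failure(blob))
    # -> {'ovh092.front.sepia.ceph.com': 'Group kvm does not exist'}

The same helper applies unchanged to the multi-host blob reported for job 3421704 further down.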

fail 3421690 2019-01-04 07:00:45 2019-01-04 07:03:54 2019-01-04 08:09:54 1:06:00 0:07:41 0:58:19 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on ovh002 with status 1: '\n sudo yum -y install ceph\n '

fail 3421691 2019-01-04 07:00:46 2019-01-04 07:05:34 2019-01-04 11:57:39 4:52:05 0:19:32 4:32:33 ovh master centos 7.5 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/centos_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

ceph-deploy: Failed during gather keys

fail 3421692 2019-01-04 07:00:47 2019-01-04 07:05:50 2019-01-04 08:07:50 1:02:00 0:08:01 0:53:59 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh023 with status 1: '\n sudo yum -y install ceph\n '

fail 3421693 2019-01-04 07:00:48 2019-01-04 07:12:10 2019-01-04 08:24:10 1:12:00 0:07:51 1:04:09 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh045 with status 1: '\n sudo yum -y install ceph\n '

fail 3421694 2019-01-04 07:00:48 2019-01-04 07:12:10 2019-01-04 08:18:10 1:06:00 0:07:43 0:58:17 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh062 with status 1: '\n sudo yum -y install ceph\n '

fail 3421695 2019-01-04 07:00:49 2019-01-04 07:15:43 2019-01-04 08:19:43 1:04:00 0:07:35 0:56:25 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_direct_io.yaml} 3
Failure Reason:

Command failed on ovh011 with status 1: '\n sudo yum -y install ceph\n '

fail 3421696 2019-01-04 07:00:50 2019-01-04 07:15:50 2019-01-04 08:27:50 1:12:00 0:07:43 1:04:17 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on ovh069 with status 1: '\n sudo yum -y install ceph\n '

fail 3421697 2019-01-04 07:00:51 2019-01-04 07:17:21 2019-01-04 08:49:22 1:32:01 0:07:43 1:24:18 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh010 with status 1: '\n sudo yum -y install ceph\n '

fail 3421698 2019-01-04 07:00:51 2019-01-04 07:17:42 2019-01-04 08:11:42 0:54:00 0:07:36 0:46:24 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh076 with status 1: '\n sudo yum -y install ceph\n '

fail 3421699 2019-01-04 07:00:52 2019-01-04 07:18:10 2019-01-04 08:28:10 1:10:00 0:07:55 1:02:05 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/libcephfs_interface_tests.yaml} 3
Failure Reason:

Command failed on ovh054 with status 1: '\n sudo yum -y install ceph\n '

fail 3421700 2019-01-04 07:00:53 2019-01-04 07:18:19 2019-01-04 08:14:19 0:56:00 0:07:34 0:48:26 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/mon_thrash.yaml} 3
Failure Reason:

Command failed on ovh093 with status 1: '\n sudo yum -y install ceph\n '

fail 3421701 2019-01-04 07:00:53 2019-01-04 07:20:12 2019-01-04 09:14:13 1:54:01 0:07:58 1:46:03 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_api_tests.yaml} 3
Failure Reason:

Command failed on ovh092 with status 1: '\n sudo yum -y install ceph\n '

fail 3421702 2019-01-04 07:00:54 2019-01-04 07:20:12 2019-01-04 08:50:12 1:30:00 0:07:52 1:22:08 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_bench.yaml} 3
Failure Reason:

Command failed on ovh032 with status 1: '\n sudo yum -y install ceph\n '

fail 3421703 2019-01-04 07:00:55 2019-01-04 07:23:42 2019-01-04 08:47:42 1:24:00 0:07:32 1:16:28 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cache_snaps.yaml} 3
Failure Reason:

Command failed on ovh015 with status 1: '\n sudo yum -y install ceph\n '

fail 3421704 2019-01-04 07:00:56 2019-01-04 07:25:55 2019-01-04 10:25:57 3:00:02 0:02:51 2:57:11 ovh master ubuntu 16.04 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

{'ovh067.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh067', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh090.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh090', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh028.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh028', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh001.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh001', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

fail 3421705 2019-01-04 07:00:56 2019-01-04 07:31:50 2019-01-04 08:49:50 1:18:00 0:07:48 1:10:12 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cls_all.yaml} 3
Failure Reason:

Command failed on ovh037 with status 1: '\n sudo yum -y install ceph\n '

fail 3421706 2019-01-04 07:00:57 2019-01-04 07:31:50 2019-01-04 08:25:50 0:54:00 0:07:28 0:46:32 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_ec_snaps.yaml} 3
Failure Reason:

Command failed on ovh003 with status 1: '\n sudo yum -y install ceph\n '

fail 3421707 2019-01-04 07:00:58 2019-01-04 07:34:03 2019-01-04 08:28:03 0:54:00 0:07:21 0:46:39 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_python.yaml} 3
Failure Reason:

Command failed on ovh042 with status 1: '\n sudo yum -y install ceph\n '

fail 3421708 2019-01-04 07:00:59 2019-01-04 07:36:07 2019-01-04 08:32:07 0:56:00 0:07:46 0:48:14 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_workunit_loadgen_mix.yaml} 3
Failure Reason:

Command failed on ovh080 with status 1: '\n sudo yum -y install ceph\n '

fail 3421709 2019-01-04 07:00:59 2019-01-04 07:41:53 2019-01-04 08:35:53 0:54:00 0:07:46 0:46:14 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_api_tests.yaml} 3
Failure Reason:

Command failed on ovh073 with status 1: '\n sudo yum -y install ceph\n '

fail 3421710 2019-01-04 07:01:00 2019-01-04 07:45:57 2019-01-04 09:33:58 1:48:01 0:07:51 1:40:10 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_cli_import_export.yaml} 3
Failure Reason:

Command failed on ovh052 with status 1: '\n sudo yum -y install ceph\n '

fail 3421711 2019-01-04 07:01:01 2019-01-04 07:47:59 2019-01-04 09:18:00 1:30:01 0:08:11 1:21:50 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_fsx.yaml} 3
Failure Reason:

Command failed on ovh063 with status 1: '\n sudo yum -y install ceph\n '

fail 3421712 2019-01-04 07:01:02 2019-01-04 07:47:59 2019-01-04 08:57:59 1:10:00 0:07:57 1:02:03 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_python_api_tests.yaml} 3
Failure Reason:

Command failed on ovh023 with status 1: '\n sudo yum -y install ceph\n '

fail 3421713 2019-01-04 07:01:02 2019-01-04 07:48:06 2019-01-04 09:06:06 1:18:00 0:07:50 1:10:10 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh069 with status 1: '\n sudo yum -y install ceph\n '

fail 3421714 2019-01-04 07:01:03 2019-01-04 07:49:40 2019-01-04 09:21:40 1:32:00 0:07:50 1:24:10 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_ec_s3tests.yaml} 3
Failure Reason:

Command failed on ovh003 with status 1: '\n sudo yum -y install ceph\n '

fail 3421715 2019-01-04 07:01:04 2019-01-04 07:53:42 2019-01-04 08:53:43 1:00:01 0:07:33 0:52:28 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_s3tests.yaml} 3
Failure Reason:

Command failed on ovh076 with status 1: '\n sudo yum -y install ceph\n '

fail 3421716 2019-01-04 07:01:05 2019-01-04 07:54:02 2019-01-04 11:14:05 3:20:03 0:07:55 3:12:08 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_swift.yaml} 3
Failure Reason:

Command failed on ovh069 with status 1: '\n sudo yum -y install ceph\n '