Status  Job ID  Links  Posted  Started  Updated  Runtime  Duration  In Waiting  Machine  Teuthology Branch  OS Type  OS Version  Description  Nodes
fail 3454064 2019-01-12 07:00:26 2019-01-12 07:00:28 2019-01-12 07:06:27 0:05:59 0:02:37 0:03:22 ovh master ubuntu 16.04 smoke/1node/{clusters/{fixed-1.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/ceph-deploy.yaml} 1
Failure Reason:

{'ovh055.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh055', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}
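
The result above is the raw per-host dict that ansible returned for its user module: provisioning of the 'ubuntu' account failed because the supplementary group 'kvm' did not exist on ovh055. As a minimal sketch (the helper and abbreviated dict below are illustrative, not teuthology code), the failing hosts and their messages can be pulled out of such a dict like this:

    # Minimal sketch: walk an ansible-style per-host result dict (as shown
    # above) and report hosts whose module run carries an error message.
    # The abbreviated dict literal is illustrative; keys match the log output.
    def failed_hosts(result):
        for host, info in result.items():
            msg = info.get('msg')
            if msg and not info.get('changed', False):
                yield host, msg

    example = {
        'ovh055.front.sepia.ceph.com': {
            '_ansible_parsed': True,
            'changed': False,
            'msg': 'Group kvm does not exist',
        },
    }

    for host, msg in failed_hosts(example):
        print(host, '-', msg)  # ovh055.front.sepia.ceph.com - Group kvm does not exist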

fail 3454065 2019-01-12 07:00:27 2019-01-12 07:00:50 2019-01-12 07:54:50 0:54:00 0:07:49 0:46:11 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on ovh059 with status 1: '\n sudo yum -y install ceph\n '
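
This and every other rhel 7.5 basic job below fails at the same point: the package install step exits with status 1 before any workload runs, so the quoted 'sudo yum -y install ceph' is the command to re-run when triaging. A hedged reproduction sketch (host name and helper are illustrative; assumes SSH access to the node as the test user):

    # Hedged sketch: re-run the failing install command on a node over SSH and
    # keep the tail of yum's output, which normally names the repo or
    # dependency problem. Host and helper names are illustrative only.
    import subprocess

    def run_install(host, package='ceph'):
        cmd = ['ssh', host, 'sudo', 'yum', '-y', 'install', package]
        proc = subprocess.run(cmd, capture_output=True, text=True)
        if proc.returncode != 0:
            # Matches the "Command failed ... with status 1" reported above.
            print('install failed on %s (status %d)' % (host, proc.returncode))
            print(proc.stdout[-2000:])
            print(proc.stderr[-2000:])
        return proc.returncode

    # Example (host taken from the row above):
    # run_install('ovh059.front.sepia.ceph.com')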

fail 3454066 2019-01-12 07:00:28 2019-01-12 07:02:45 2019-01-12 10:34:48 3:32:03 0:20:23 3:11:40 ovh master centos 7.5 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/centos_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

ceph-deploy: Failed during gather keys
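
This systemd job gets past package install, but ceph-deploy aborts while collecting the monitor keyrings. A hedged sketch (the monitor host name is a placeholder) of running the same gatherkeys step standalone, which usually surfaces the underlying error that the one-line summary hides:

    # Hedged sketch: invoke the gatherkeys step by itself to see the error
    # behind "Failed during gather keys". The monitor host is a placeholder.
    import subprocess

    mon_host = 'mon-a'  # hypothetical monitor hostname
    proc = subprocess.run(
        ['ceph-deploy', 'gatherkeys', mon_host],
        capture_output=True, text=True,
    )
    print(proc.returncode)
    print(proc.stderr)  # typically indicates a missing mon quorum or keyring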

fail 3454067 2019-01-12 07:00:29 2019-01-12 07:02:45 2019-01-12 08:04:55 1:02:10 0:07:52 0:54:18 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh028 with status 1: '\n sudo yum -y install ceph\n '

fail 3454068 2019-01-12 07:00:30 2019-01-12 07:04:12 2019-01-12 08:04:17 1:00:05 0:07:51 0:52:14 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh041 with status 1: '\n sudo yum -y install ceph\n '

fail 3454069 2019-01-12 07:00:31 2019-01-12 07:06:40 2019-01-12 07:56:40 0:50:00 0:07:51 0:42:09 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh093 with status 1: '\n sudo yum -y install ceph\n '

fail 3454070 2019-01-12 07:00:31 2019-01-12 07:10:02 2019-01-12 08:00:02 0:50:00 0:07:44 0:42:16 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_direct_io.yaml} 3
Failure Reason:

Command failed on ovh020 with status 1: '\n sudo yum -y install ceph\n '

fail 3454071 2019-01-12 07:00:32 2019-01-12 07:10:32 2019-01-12 07:58:32 0:48:00 0:07:49 0:40:11 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on ovh085 with status 1: '\n sudo yum -y install ceph\n '

fail 3454072 2019-01-12 07:00:33 2019-01-12 07:11:43 2019-01-12 08:25:44 1:14:01 0:08:10 1:05:51 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh087 with status 1: '\n sudo yum -y install ceph\n '

fail 3454073 2019-01-12 07:00:34 2019-01-12 07:12:48 2019-01-12 08:12:47 0:59:59 0:07:30 0:52:29 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh046 with status 1: '\n sudo yum -y install ceph\n '

fail 3454074 2019-01-12 07:00:34 2019-01-12 07:12:53 2019-01-12 08:20:53 1:08:00 0:07:58 1:00:02 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/libcephfs_interface_tests.yaml} 3
Failure Reason:

Command failed on ovh089 with status 1: '\n sudo yum -y install ceph\n '

fail 3454075 2019-01-12 07:00:35 2019-01-12 07:16:54 2019-01-12 08:16:54 1:00:00 0:07:35 0:52:25 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/mon_thrash.yaml} 3
Failure Reason:

Command failed on ovh075 with status 1: '\n sudo yum -y install ceph\n '

fail 3454076 2019-01-12 07:00:36 2019-01-12 07:18:41 2019-01-12 08:20:41 1:02:00 0:07:43 0:54:17 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_api_tests.yaml} 3
Failure Reason:

Command failed on ovh022 with status 1: '\n sudo yum -y install ceph\n '

fail 3454077 2019-01-12 07:00:37 2019-01-12 07:18:42 2019-01-12 08:10:42 0:52:00 0:07:50 0:44:10 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_bench.yaml} 3
Failure Reason:

Command failed on ovh088 with status 1: '\n sudo yum -y install ceph\n '

fail 3454078 2019-01-12 07:00:37 2019-01-12 07:24:34 2019-01-12 08:20:33 0:55:59 0:07:45 0:48:14 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cache_snaps.yaml} 3
Failure Reason:

Command failed on ovh032 with status 1: '\n sudo yum -y install ceph\n '

fail 3454079 2019-01-12 07:00:38 2019-01-12 07:24:56 2019-01-12 07:50:56 0:26:00 0:02:55 0:23:05 ovh master ubuntu 16.04 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

{'ovh084.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh084', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh034.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh034', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh061.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh061', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh060.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh060', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

fail 3454080 2019-01-12 07:00:39 2019-01-12 07:26:52 2019-01-12 08:20:52 0:54:00 0:07:48 0:46:12 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cls_all.yaml} 3
Failure Reason:

Command failed on ovh051 with status 1: '\n sudo yum -y install ceph\n '

fail 3454081 2019-01-12 07:00:40 2019-01-12 07:26:52 2019-01-12 08:20:52 0:54:00 0:07:30 0:46:30 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_ec_snaps.yaml} 3
Failure Reason:

Command failed on ovh096 with status 1: '\n sudo yum -y install ceph\n '

fail 3454082 2019-01-12 07:00:41 2019-01-12 07:29:07 2019-01-12 08:25:07 0:56:00 0:07:53 0:48:07 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_python.yaml} 3
Failure Reason:

Command failed on ovh095 with status 1: '\n sudo yum -y install ceph\n '

fail 3454083 2019-01-12 07:00:41 2019-01-12 07:30:51 2019-01-12 08:26:51 0:56:00 0:07:39 0:48:21 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_workunit_loadgen_mix.yaml} 3
Failure Reason:

Command failed on ovh099 with status 1: '\n sudo yum -y install ceph\n '

fail 3454084 2019-01-12 07:00:42 2019-01-12 07:30:51 2019-01-12 08:26:51 0:56:00 0:07:31 0:48:29 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_api_tests.yaml} 3
Failure Reason:

Command failed on ovh082 with status 1: '\n sudo yum -y install ceph\n '

fail 3454085 2019-01-12 07:00:43 2019-01-12 07:32:51 2019-01-12 08:30:51 0:58:00 0:07:31 0:50:29 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_cli_import_export.yaml} 3
Failure Reason:

Command failed on ovh081 with status 1: '\n sudo yum -y install ceph\n '

fail 3454086 2019-01-12 07:00:44 2019-01-12 07:34:51 2019-01-12 08:32:51 0:58:00 0:08:26 0:49:34 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_fsx.yaml} 3
Failure Reason:

Command failed on ovh050 with status 1: '\n sudo yum -y install ceph\n '

fail 3454087 2019-01-12 07:00:44 2019-01-12 07:34:51 2019-01-12 08:38:51 1:04:00 0:07:38 0:56:22 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_python_api_tests.yaml} 3
Failure Reason:

Command failed on ovh100 with status 1: '\n sudo yum -y install ceph\n '

fail 3454088 2019-01-12 07:00:45 2019-01-12 07:38:44 2019-01-12 08:36:44 0:58:00 0:08:42 0:49:18 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh034 with status 1: '\n sudo yum -y install ceph\n '

fail 3454089 2019-01-12 07:00:46 2019-01-12 07:44:46 2019-01-12 08:44:46 1:00:00 0:07:59 0:52:01 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_ec_s3tests.yaml} 3
Failure Reason:

Command failed on ovh070 with status 1: '\n sudo yum -y install ceph\n '

fail 3454090 2019-01-12 07:00:47 2019-01-12 07:44:46 2019-01-12 08:38:46 0:54:00 0:07:34 0:46:26 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_s3tests.yaml} 3
Failure Reason:

Command failed on ovh014 with status 1: '\n sudo yum -y install ceph\n '

fail 3454091 2019-01-12 07:00:48 2019-01-12 07:48:44 2019-01-12 09:04:45 1:16:01 0:07:27 1:08:34 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_swift.yaml} 3
Failure Reason:

Command failed on ovh088 with status 1: '\n sudo yum -y install ceph\n '