Status Job ID Links Posted Started Updated Runtime Duration In Waiting Machine Teuthology Branch OS Type OS Version Description Nodes
fail 2528972 2018-05-13 07:00:47 2018-05-13 07:05:58 2018-05-13 07:13:57 0:07:59 0:02:35 0:05:24 ovh master ubuntu 16.04 smoke/1node/{clusters/{fixed-1.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/ceph-deploy.yaml} 1
Failure Reason:

{'ovh028.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh028', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

fail 2528973 2018-05-13 07:00:48 2018-05-13 07:05:58 2018-05-13 09:22:00 2:16:02 0:07:18 2:08:44 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on ovh084 with status 1: "sudo yum -y install '' ceph"

fail 2528974 2018-05-13 07:00:48 2018-05-13 07:08:07 2018-05-13 08:04:08 0:56:01 0:08:04 0:47:57 ovh master centos 7.4 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/centos_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

Command failed on ovh009 with status 5: 'sudo stop ceph-all || sudo service ceph stop || sudo systemctl stop ceph.target'

fail 2528975 2018-05-13 07:00:49 2018-05-13 07:08:45 2018-05-13 07:58:45 0:50:00 0:06:34 0:43:26 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh059 with status 1: "sudo yum -y install '' ceph-radosgw"

fail 2528976 2018-05-13 07:00:50 2018-05-13 07:11:54 2018-05-13 08:05:54 0:54:00 0:06:18 0:47:42 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/cfuse_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh060 with status 1: "sudo yum -y install '' ceph-radosgw"

fail 2528977 2018-05-13 07:00:50 2018-05-13 07:12:37 2018-05-13 07:58:37 0:46:00 0:06:25 0:39:35 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh028 with status 1: "sudo yum -y install '' ceph-radosgw"

fail 2528978 2018-05-13 07:00:51 2018-05-13 07:13:56 2018-05-13 08:17:57 1:04:01 0:07:19 0:56:42 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/kclient_workunit_direct_io.yaml} 3
Failure Reason:

Command failed on ovh080 with status 1: "sudo yum -y install '' ceph"

fail 2528979 2018-05-13 07:00:52 2018-05-13 07:13:58 2018-05-13 08:05:59 0:52:01 0:06:32 0:45:29 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/kclient_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on ovh069 with status 1: "sudo yum -y install '' ceph-radosgw"

fail 2528980 2018-05-13 07:00:52 2018-05-13 07:19:58 2018-05-13 08:05:58 0:46:00 0:07:11 0:38:49 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/kclient_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh006 with status 1: "sudo yum -y install '' ceph"

fail 2528981 2018-05-13 07:00:53 2018-05-13 07:19:58 2018-05-13 09:38:01 2:18:03 0:07:13 2:10:50 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/kclient_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh017 with status 1: "sudo yum -y install '' ceph"

fail 2528982 2018-05-13 07:00:53 2018-05-13 07:21:58 2018-05-13 08:29:59 1:08:01 0:06:44 1:01:17 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/libcephfs_interface_tests.yaml} 3
Failure Reason:

Command failed on ovh034 with status 1: "sudo yum -y install '' ceph"

fail 2528983 2018-05-13 07:00:54 2018-05-13 07:22:26 2018-05-13 08:28:27 1:06:01 0:07:29 0:58:32 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/mon_thrash.yaml} 3
Failure Reason:

Command failed on ovh039 with status 1: "sudo yum -y install '' ceph"

fail 2528984 2018-05-13 07:00:55 2018-05-13 07:26:38 2018-05-13 08:24:39 0:58:01 0:06:01 0:52:00 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_api_tests.yaml} 3
Failure Reason:

Command failed on ovh058 with status 1: "sudo yum -y install '' ceph-radosgw"

fail 2528985 2018-05-13 07:00:55 2018-05-13 07:29:37 2018-05-13 08:25:38 0:56:01 0:07:13 0:48:48 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_bench.yaml} 3
Failure Reason:

Command failed on ovh087 with status 1: "sudo yum -y install '' ceph"

fail 2528986 2018-05-13 07:00:56 2018-05-13 07:31:30 2018-05-13 08:45:31 1:14:01 0:07:29 1:06:32 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_cache_snaps.yaml} 3
Failure Reason:

Command failed on ovh059 with status 1: "sudo yum -y install '' ceph"

fail 2528987 2018-05-13 07:00:57 2018-05-13 07:31:30 2018-05-13 09:11:32 1:40:02 0:02:47 1:37:15 ovh master ubuntu 16.04 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

{'ovh083.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh083', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh020.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh020', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh023.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh023', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh095.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh095', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

fail 2528988 2018-05-13 07:00:57 2018-05-13 07:34:13 2018-05-13 11:58:18 4:24:05 0:07:19 4:16:46 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_cls_all.yaml} 3
Failure Reason:

Command failed on ovh099 with status 1: "sudo yum -y install '' ceph"

fail 2528989 2018-05-13 07:00:58 2018-05-13 07:37:48 2018-05-13 08:25:48 0:48:00 0:06:15 0:41:45 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_ec_snaps.yaml} 3
Failure Reason:

Command failed on ovh052 with status 1: "sudo yum -y install '' ceph-radosgw"

fail 2528990 2018-05-13 07:00:58 2018-05-13 07:37:48 2018-05-13 08:41:48 1:04:00 0:06:45 0:57:15 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_python.yaml} 3
Failure Reason:

Command failed on ovh022 with status 1: "sudo yum -y install '' ceph"

fail 2528991 2018-05-13 07:00:59 2018-05-13 07:44:15 2018-05-13 08:32:15 0:48:00 0:06:56 0:41:04 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rados_workunit_loadgen_mix.yaml} 3
Failure Reason:

Command failed on ovh051 with status 1: "sudo yum -y install '' ceph"

fail 2528992 2018-05-13 07:01:00 2018-05-13 07:45:49 2018-05-13 08:51:50 1:06:01 0:11:47 0:54:14 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_api_tests.yaml} 3
Failure Reason:

Command failed on ovh023 with status 1: "sudo yum -y install '' ceph-radosgw"

fail 2528993 2018-05-13 07:01:00 2018-05-13 07:47:42 2018-05-13 07:59:41 0:11:59 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_cli_import_export.yaml} 3
Failure Reason:

Command failed on ovh033 with status 1: 'sudo yum upgrade -y linux-firmware'

fail 2528994 2018-05-13 07:01:01 2018-05-13 07:49:49 2018-05-13 08:51:49 1:02:00 0:11:04 0:50:56 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_fsx.yaml} 3
Failure Reason:

Command failed on ovh053 with status 1: "sudo yum -y install '' ceph"

fail 2528995 2018-05-13 07:01:01 2018-05-13 07:54:21 2018-05-13 08:52:21 0:58:00 0:11:47 0:46:13 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_python_api_tests.yaml} 3
Failure Reason:

Command failed on ovh095 with status 1: "sudo yum -y install '' ceph"

fail 2528996 2018-05-13 07:01:02 2018-05-13 07:58:54 2018-05-13 09:38:55 1:40:01 0:07:06 1:32:55 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rbd_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh009 with status 1: "sudo yum -y install '' ceph"

fail 2528997 2018-05-13 07:01:03 2018-05-13 07:58:54 2018-05-13 09:38:55 1:40:01 0:05:58 1:34:03 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rgw_ec_s3tests.yaml} 3
Failure Reason:

Command failed on ovh066 with status 1: "sudo yum -y install '' ceph-radosgw"

fail 2528998 2018-05-13 07:01:03 2018-05-13 07:59:42 2018-05-13 08:51:43 0:52:01 0:10:32 0:41:29 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rgw_s3tests.yaml} 3
Failure Reason:

Command failed on ovh091 with status 1: "sudo yum -y install '' ceph"

fail 2528999 2018-05-13 07:01:04 2018-05-13 08:04:26 2018-05-13 09:18:27 1:14:01 0:05:53 1:08:08 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore.yaml tasks/rgw_swift.yaml} 3
Failure Reason:

Command failed on ovh051 with status 1: "sudo yum -y install '' ceph-radosgw"