Status  Job ID  Posted  Started  Updated  Runtime  Duration  In Waiting  Machine  Teuthology Branch  OS Type  OS Version  Description  Nodes
fail 3417464 2019-01-03 07:02:06 2019-01-03 07:05:35 2019-01-03 07:17:35 0:12:00 0:02:33 0:09:27 ovh master ubuntu 16.04 smoke/1node/{clusters/{fixed-1.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/ceph-deploy.yaml} 1
Failure Reason:

{'ovh094.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh094', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}
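
This Ansible error comes from the user module when one of the requested supplementary groups is absent on the node. A minimal manual check, assuming shell access to the affected OVH host (illustrative sketch only, not part of the recorded run):

    getent group kvm || sudo groupadd kvm   # report, or create, the missing 'kvm' group
    getent group fuse kvm disk              # confirm every group the play expects actually exists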

fail 3417465 2019-01-03 07:02:07 2019-01-03 07:05:45 2019-01-03 08:07:45 1:02:00 0:07:50 0:54:10 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_blogbench.yaml} 3
Failure Reason:

Command failed on ovh027 with status 1: '\n sudo yum -y install ceph\n '
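
Every rhel 7.5 job in this run fails at this same package-install step. One way to reproduce it by hand on an affected node, assuming shell access (a sketch, not taken from the run logs):

    sudo yum -y install ceph; echo "yum exit status: $?"   # re-run the failing install step
    yum repolist enabled                                   # check whether a Ceph repository is configured at all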

fail 3417466 2019-01-03 07:02:08 2019-01-03 07:08:02 2019-01-03 12:26:07 5:18:05 0:21:12 4:56:53 ovh master centos 7.5 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/centos_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

ceph-deploy: Failed during gather keys
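
"Failed during gather keys" means ceph-deploy could not collect the cluster keyrings from a monitor. The step can be retried by hand against a monitor host to see which keyring is missing; the hostname below is a placeholder (illustrative sketch only):

    ceph-deploy gatherkeys <mon-hostname>   # re-fetch the client.admin and bootstrap keyrings from the monitor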

fail 3417467 2019-01-03 07:02:08 2019-01-03 07:09:38 2019-01-03 08:01:38 0:52:00 0:07:42 0:44:18 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh072 with status 1: '\n sudo yum -y install ceph\n '

fail 3417468 2019-01-03 07:02:09 2019-01-03 07:13:56 2019-01-03 08:07:56 0:54:00 0:07:37 0:46:23 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh080 with status 1: '\n sudo yum -y install ceph\n '

fail 3417469 2019-01-03 07:02:10 2019-01-03 07:14:06 2019-01-03 08:10:05 0:55:59 0:07:43 0:48:16 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/cfuse_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh094 with status 1: '\n sudo yum -y install ceph\n '

fail 3417470 2019-01-03 07:02:11 2019-01-03 07:17:48 2019-01-03 08:05:47 0:47:59 0:07:47 0:40:12 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_direct_io.yaml} 3
Failure Reason:

Command failed on ovh026 with status 1: '\n sudo yum -y install ceph\n '

fail 3417471 2019-01-03 07:02:11 2019-01-03 07:17:48 2019-01-03 08:11:48 0:54:00 0:07:43 0:46:17 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_dbench.yaml} 3
Failure Reason:

Command failed on ovh029 with status 1: '\n sudo yum -y install ceph\n '

fail 3417472 2019-01-03 07:02:12 2019-01-03 07:21:52 2019-01-03 08:17:51 0:55:59 0:07:27 0:48:32 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_fsstress.yaml} 3
Failure Reason:

Command failed on ovh065 with status 1: '\n sudo yum -y install ceph\n '

fail 3417473 2019-01-03 07:02:13 2019-01-03 07:25:49 2019-01-03 08:17:49 0:52:00 0:07:45 0:44:15 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/kclient_workunit_suites_pjd.yaml} 3
Failure Reason:

Command failed on ovh068 with status 1: '\n sudo yum -y install ceph\n '

fail 3417474 2019-01-03 07:02:14 2019-01-03 07:26:02 2019-01-03 08:22:02 0:56:00 0:07:42 0:48:18 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/libcephfs_interface_tests.yaml} 3
Failure Reason:

Command failed on ovh070 with status 1: '\n sudo yum -y install ceph\n '

fail 3417475 2019-01-03 07:02:14 2019-01-03 07:26:57 2019-01-03 08:22:56 0:55:59 0:07:24 0:48:35 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/mon_thrash.yaml} 3
Failure Reason:

Command failed on ovh010 with status 1: '\n sudo yum -y install ceph\n '

fail 3417476 2019-01-03 07:02:15 2019-01-03 07:30:09 2019-01-03 08:38:09 1:08:00 0:07:41 1:00:19 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_api_tests.yaml} 3
Failure Reason:

Command failed on ovh006 with status 1: '\n sudo yum -y install ceph\n '

fail 3417477 2019-01-03 07:02:16 2019-01-03 07:31:46 2019-01-03 08:51:47 1:20:01 0:07:47 1:12:14 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_bench.yaml} 3
Failure Reason:

Command failed on ovh037 with status 1: '\n sudo yum -y install ceph\n '

fail 3417478 2019-01-03 07:02:17 2019-01-03 07:31:46 2019-01-03 08:31:46 1:00:00 0:07:38 0:52:22 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cache_snaps.yaml} 3
Failure Reason:

Command failed on ovh052 with status 1: '\n sudo yum -y install ceph\n '

fail 3417479 2019-01-03 07:02:17 2019-01-03 07:32:05 2019-01-03 08:30:05 0:58:00 0:02:45 0:55:15 ovh master ubuntu 16.04 smoke/systemd/{clusters/{fixed-4.yaml openstack.yaml} distros/ubuntu_latest.yaml objectstore/filestore-xfs.yaml tasks/systemd.yaml} 4
Failure Reason:

{'ovh031.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh031', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh029.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh029', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh065.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh065', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}, 'ovh098.front.sepia.ceph.com': {'_ansible_parsed': True, 'invocation': {'module_args': {'comment': None, 'ssh_key_bits': 0, 'update_password': 'always', 'non_unique': False, 'force': False, 'skeleton': None, 'expires': None, 'ssh_key_passphrase': None, 'groups': ['fuse', 'kvm', 'disk'], 'createhome': True, 'home': None, 'move_home': False, 'password': None, 'generate_ssh_key': None, 'append': True, 'uid': None, 'ssh_key_comment': 'ansible-generated on ovh098', 'group': None, 'name': 'ubuntu', 'local': None, 'seuser': None, 'system': False, 'remove': False, 'state': 'present', 'ssh_key_file': None, 'login_class': None, 'shell': None, 'ssh_key_type': 'rsa'}}, 'changed': False, '_ansible_no_log': False, 'msg': 'Group kvm does not exist'}}

fail 3417480 2019-01-03 07:02:18 2019-01-03 07:33:55 2019-01-03 08:41:55 1:08:00 0:07:45 1:00:15 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_cls_all.yaml} 3
Failure Reason:

Command failed on ovh057 with status 1: '\n sudo yum -y install ceph\n '

fail 3417481 2019-01-03 07:02:19 2019-01-03 07:34:07 2019-01-03 08:48:07 1:14:00 0:07:32 1:06:28 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_ec_snaps.yaml} 3
Failure Reason:

Command failed on ovh069 with status 1: '\n sudo yum -y install ceph\n '

fail 3417482 2019-01-03 07:02:20 2019-01-03 07:35:07 2019-01-03 08:55:07 1:20:00 0:07:48 1:12:12 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_python.yaml} 3
Failure Reason:

Command failed on ovh038 with status 1: '\n sudo yum -y install ceph\n '

fail 3417483 2019-01-03 07:02:20 2019-01-03 07:43:46 2019-01-03 08:33:46 0:50:00 0:07:31 0:42:29 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rados_workunit_loadgen_mix.yaml} 3
Failure Reason:

Command failed on ovh051 with status 1: '\n sudo yum -y install ceph\n '

fail 3417484 2019-01-03 07:02:21 2019-01-03 07:48:06 2019-01-03 08:48:06 1:00:00 0:07:25 0:52:35 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_api_tests.yaml} 3
Failure Reason:

Command failed on ovh007 with status 1: '\n sudo yum -y install ceph\n '

fail 3417485 2019-01-03 07:02:22 2019-01-03 07:58:51 2019-01-03 09:08:51 1:10:00 0:08:11 1:01:49 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_cli_import_export.yaml} 3
Failure Reason:

Command failed on ovh015 with status 1: '\n sudo yum -y install ceph\n '

fail 3417486 2019-01-03 07:02:23 2019-01-03 08:01:50 2019-01-03 08:55:50 0:54:00 0:07:25 0:46:35 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_fsx.yaml} 3
Failure Reason:

Command failed on ovh080 with status 1: '\n sudo yum -y install ceph\n '

fail 3417487 2019-01-03 07:02:24 2019-01-03 08:03:03 2019-01-03 08:55:03 0:52:00 0:07:49 0:44:11 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_python_api_tests.yaml} 3
Failure Reason:

Command failed on ovh002 with status 1: '\n sudo yum -y install ceph\n '

fail 3417488 2019-01-03 07:02:24 2019-01-03 08:05:50 2019-01-03 09:11:49 1:05:59 0:07:58 0:58:01 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rbd_workunit_suites_iozone.yaml} 3
Failure Reason:

Command failed on ovh064 with status 1: '\n sudo yum -y install ceph\n '

fail 3417489 2019-01-03 07:02:25 2019-01-03 08:05:50 2019-01-03 09:21:50 1:16:00 0:07:51 1:08:09 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_ec_s3tests.yaml} 3
Failure Reason:

Command failed on ovh054 with status 1: '\n sudo yum -y install ceph\n '

fail 3417490 2019-01-03 07:02:26 2019-01-03 08:05:50 2019-01-03 09:19:49 1:13:59 0:07:26 1:06:33 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_s3tests.yaml} 3
Failure Reason:

Command failed on ovh082 with status 1: '\n sudo yum -y install ceph\n '

fail 3417491 2019-01-03 07:02:27 2019-01-03 08:07:48 2019-01-03 09:03:48 0:56:00 0:08:16 0:47:44 ovh master rhel 7.5 smoke/basic/{clusters/{fixed-3-cephfs.yaml openstack.yaml} objectstore/bluestore-bitmap.yaml tasks/rgw_swift.yaml} 3
Failure Reason:

Command failed on ovh094 with status 1: '\n sudo yum -y install ceph\n '