- log_href:
http://qa-proxy.ceph.com/teuthology/vallariag-2024-09-02_11:40:02-nvmeof-main-distro-default-smithi/7886690/teuthology.log
- archive_path:
/home/teuthworker/archive/vallariag-2024-09-02_11:40:02-nvmeof-main-distro-default-smithi/7886690
- description:
nvmeof/basic/{base/install centos_latest clusters/4-gateways-2-initiator conf/{disable-pool-app} workloads/nvmeof_scalability}
- duration:
    '0:25:08'
- email:
- failure_reason:
- flavor:
default
- job_id:
7886690
- kernel:
- last_in_suite:
    false
- machine_type:
smithi
- name:
vallariag-2024-09-02_11:40:02-nvmeof-main-distro-default-smithi
- nuke_on_error:
- os_type:
centos
- os_version:
9.stream
- overrides:
- admin_socket:
- ceph:
- conf:
- global:
- mon warn on pool no app:
    false
- mgr:
- debug mgr:
20
- debug ms:
1
- mon:
- debug mon:
20
- debug ms:
1
- debug paxos:
20
- mon down mkfs grace:
300
- osd:
- debug ms:
1
- debug osd:
20
- flavor:
default
- log-ignorelist:
-
\(MDS_ALL_DOWN\)
-
\(MDS_UP_LESS_THAN_MAX\)
- sha1:
b59673c44bd569f9f3db37f87bced695dec5fcbf
- ceph-deploy:
- conf:
- client:
- log file:
/var/log/ceph/ceph-$name.$pid.log
- mon:
- install:
- ceph:
- flavor:
default
- sha1:
b59673c44bd569f9f3db37f87bced695dec5fcbf
- workunit:
- branch:
wip-nvmeof-teuthology-v6
- sha1:
6bdd3521695a0621868ce5034eaba1fd0b6e3cd1
- owner:
scheduled_vallariag@teuthology
- pid:
- roles:
-
['client.0']
-
['client.1']
-
['host.a', 'mon.a', 'mgr.x', 'osd.0', 'client.2', 'ceph.nvmeof.nvmeof.a']
-
['host.b', 'mon.b', 'osd.1', 'client.3', 'ceph.nvmeof.nvmeof.b']
-
['host.c', 'mon.c', 'osd.2', 'client.4', 'ceph.nvmeof.nvmeof.c']
-
['host.d', 'osd.3', 'client.5', 'ceph.nvmeof.nvmeof.d']
- sentry_event:
- status:
pass
- success:
    true
- branch:
main
- seed:
4612
- sha1:
b59673c44bd569f9f3db37f87bced695dec5fcbf
- subset:
- suite:
nvmeof
- suite_branch:
wip-nvmeof-teuthology-v6
- suite_path:
/home/teuthworker/src/github.com_vallariag_ceph_6bdd3521695a0621868ce5034eaba1fd0b6e3cd1/qa
- suite_relpath:
qa
- suite_repo:
https://github.com/vallariag/ceph.git
- suite_sha1:
6bdd3521695a0621868ce5034eaba1fd0b6e3cd1
- targets:
- smithi026.front.sepia.ceph.com:
ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBA3zL34DzwTpYzZH2nH/ddV7h6OCF+qNMPwK2BUdF6815nNo7Ofgoxsi52vgtzBDFv33vYCKyfyjcS/HfPrKeSg=
- smithi134.front.sepia.ceph.com:
ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLcmeqU/r5yYeu4epJPiZYS0VO2AzVbWOzltYWyPXY0vULeGrdjhutfNT9FSAEcqeJWGM1Ib8GgXup8XsB2jW+Q=
- smithi144.front.sepia.ceph.com:
ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBFV8qORuCM7yG5IHVrKW2fKJ25rRga0WV7ijrZMqa0Fu45xk23HWbt6YkiWlTw+964Zbx7unz8lFFipzDygfNhc=
- smithi145.front.sepia.ceph.com:
ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNdCPtiTe+WSYs2WvMEO657Gwslsnzs8ix7Y40BRQHdl2Xuo49xH+IoBPvaNsoU1x5kcC1vPN+9k0wwU4Wn+gQ4=
- smithi177.front.sepia.ceph.com:
ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBF0drUTdu8m6DtBI0lLklT3SAsNh6ADMYoBZWch7g8Yq86/Fs2+pA8lN2MzaUJ0gcs8sHb2bkzsdqaVZwbGwYqA=
- smithi183.front.sepia.ceph.com:
ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBFScws4j2Qt3YN0OKr4rB/NDVf6WYRy5HImR2UjChhxVYLOJY8XO+ArsVdUI6M2WCSMioGxIrpDFMO9kXZfswTY=
- tasks:
-
- internal.buildpackages_prep:
-
- internal.save_config:
-
- internal.check_lock:
-
- internal.add_remotes:
-
- console_log:
-
- internal.connect:
-
- internal.push_inventory:
-
- internal.serialize_remote_roles:
-
- internal.check_conflict:
-
- internal.check_ceph_data:
-
- internal.vm_setup:
-
- kernel:
-
- internal.base:
-
- internal.archive_upload:
-
- internal.archive:
-
- internal.coredump:
-
- internal.sudo:
-
- internal.syslog:
-
- internal.timer:
-
- pcp:
-
- selinux:
-
- ansible.cephlab:
-
- clock:
-
- install:
- extra_packages:
- flavor:
default
- sha1:
b59673c44bd569f9f3db37f87bced695dec5fcbf
-
- cephadm:
-
- cephadm.shell:
- host.a:
-
ceph orch status
-
ceph orch ps
-
ceph orch host ls
-
ceph orch device ls
-
ceph osd lspools
-
- nvmeof:
- gateway_config:
- cli_image:
quay.io/ceph/nvmeof-cli:1.2
- namespaces_count:
20
- subsystems_count:
3
- gw_image:
quay.io/ceph/nvmeof:1.2
- installer:
host.a
- rbd:
- image_name_prefix:
myimage
- pool_name:
mypool
-
- cephadm.wait_for_service:
- service:
nvmeof.mypool.mygroup0
-
- workunit:
- clients:
- client.0:
-
nvmeof/setup_subsystem.sh
-
nvmeof/basic_tests.sh
-
nvmeof/fio_test.sh --rbd_iostat
- env:
- IOSTAT_INTERVAL:
10
- RBD_IMAGE_PREFIX:
myimage
- RBD_POOL:
mypool
- RUNTIME:
60
- no_coverage_and_limits:
    true
- timeout:
30m
- branch:
wip-nvmeof-teuthology-v6
- sha1:
6bdd3521695a0621868ce5034eaba1fd0b6e3cd1
-
- workunit:
- clients:
- client.0:
-
nvmeof/scalability_test.sh nvmeof.a,nvmeof.b
-
nvmeof/scalability_test.sh nvmeof.b,nvmeof.c,nvmeof.d
- env:
- no_coverage_and_limits:
    true
- timeout:
30m
- branch:
wip-nvmeof-teuthology-v6
- sha1:
6bdd3521695a0621868ce5034eaba1fd0b6e3cd1
teuthology_branch:
main
verbose:
  true
pcp_grafana_url:
priority:
50
user:
vallariag
queue:
posted:
  '2024-09-02 11:40:10'
started:
  '2024-09-02 11:42:52'
updated:
  '2024-09-02 12:16:07'
status_class:
success
runtime:
  '0:33:15'
wait_time:
  '0:08:07'