Description: rbd/nvmeof/{base/install centos_latest cluster/{fixed-4 openstack} conf/{disable-pool-app} workloads/nvmeof_initiator}

Log: http://qa-proxy.ceph.com/teuthology/teuthology-2024-04-23_20:16:13-rbd-main-distro-default-smithi/7670150/teuthology.log

Sentry event: https://sentry.ceph.com/organizations/ceph/?query=5cf0cdb7f3ce41dfadf3e85a9e8b52e9

Failure Reason:

Command failed (workunit test rbd/nvmeof_setup_subsystem.sh) on smithi134 with status 22: 'mkdir -p -- /home/ubuntu/cephtest/mnt.2/client.2/tmp && cd -- /home/ubuntu/cephtest/mnt.2/client.2/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=c37f63b3705f40f1bfcd7d8aeb311499c65cfa1f TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="2" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.2 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.2 CEPH_MNT=/home/ubuntu/cephtest/mnt.2 RBD_IMAGE_PREFIX=myimage RBD_POOL=mypool timeout 3h /home/ubuntu/cephtest/clone.client.2/qa/workunits/rbd/nvmeof_setup_subsystem.sh'
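The workunit exited with status 22 rather than 124, so the failure came from nvmeof_setup_subsystem.sh itself and not from the `timeout 3h` wrapper (GNU timeout returns 124 on expiry); by errno convention 22 is EINVAL, which suggests one of the setup calls was rejected. A minimal reproduction sketch, assuming the run's checkout is still present on smithi134 — every path and environment value below is copied from the failure line above; `bash -x` is added for tracing and is not part of the original invocation:

    # Re-run the failing workunit by hand on the target node.
    mkdir -p -- /home/ubuntu/cephtest/mnt.2/client.2/tmp
    cd -- /home/ubuntu/cephtest/mnt.2/client.2/tmp
    CEPH_CLI_TEST_DUP_COMMAND=1 \
    CEPH_REF=c37f63b3705f40f1bfcd7d8aeb311499c65cfa1f \
    TESTDIR=/home/ubuntu/cephtest \
    CEPH_ARGS='--cluster ceph' \
    CEPH_ID=2 \
    PATH=$PATH:/usr/sbin \
    CEPH_BASE=/home/ubuntu/cephtest/clone.client.2 \
    CEPH_ROOT=/home/ubuntu/cephtest/clone.client.2 \
    CEPH_MNT=/home/ubuntu/cephtest/mnt.2 \
    RBD_IMAGE_PREFIX=myimage \
    RBD_POOL=mypool \
    bash -x /home/ubuntu/cephtest/clone.client.2/qa/workunits/rbd/nvmeof_setup_subsystem.sh

A sketch of the gateway-CLI flow this script drives appears after the job dump below.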

  • archive_path: /home/teuthworker/archive/teuthology-2024-04-23_20:16:13-rbd-main-distro-default-smithi/7670150
  • duration: 0:23:05
  • email: ceph-qa@ceph.com
  • flavor: default
  • job_id: 7670150
  • kernel:
    • kdb: 1
    • sha1: distro
  • last_in_suite: False
  • machine_type: smithi
  • name: teuthology-2024-04-23_20:16:13-rbd-main-distro-default-smithi
  • nuke_on_error: True
  • os_type: centos
  • os_version: 9.stream
  • overrides:
    • admin_socket:
      • branch: main
    • ceph:
      • conf:
        • global:
          • mon warn on pool no app: False
        • mgr:
          • debug mgr: 20
          • debug ms: 1
        • mon:
          • debug mon: 20
          • debug ms: 1
          • debug paxos: 20
        • osd:
          • debug ms: 1
          • debug osd: 20
      • flavor: default
      • log-ignorelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
      • sha1: 8254cb5a247caa8e3d35e534a1fefe2362a0a4b8
    • ceph-deploy:
      • conf:
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
    • install:
      • ceph:
        • flavor: default
        • sha1: 8254cb5a247caa8e3d35e534a1fefe2362a0a4b8
    • selinux:
      • allowlist:
        • scontext=system_u:system_r:getty_t:s0
    • workunit:
      • branch: main
      • sha1: c37f63b3705f40f1bfcd7d8aeb311499c65cfa1f
  • owner: scheduled_teuthology@teuthology
  • pid:
  • roles:
    • ['host.a', 'mon.a', 'mgr.x', 'osd.0', 'osd.1', 'client.0', 'ceph.nvmeof.nvmeof.a']
    • ['host.b', 'mon.b', 'osd.2', 'osd.3', 'osd.4', 'client.1', 'ceph.nvmeof.nvmeof.b']
    • ['client.2']
    • ['client.3']
  • status: fail
  • success: False
  • branch: main
  • seed: 4807
  • sha1: 8254cb5a247caa8e3d35e534a1fefe2362a0a4b8
  • subset: 19/128
  • suite: rbd
  • suite_branch: main
  • suite_path: /home/teuthworker/src/git.ceph.com_ceph_c37f63b3705f40f1bfcd7d8aeb311499c65cfa1f/qa
  • suite_relpath: qa
  • suite_repo: https://git.ceph.com/ceph.git
  • suite_sha1: c37f63b3705f40f1bfcd7d8aeb311499c65cfa1f
  • targets:
    • smithi033.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBH4Q86Tms1hcLl/+DVNVP6F30Pk5bhO8Z7L/f95mBiTeSrn/3S2YzIBrhFLIA8WLkRP5m7ORQeS2gMnFPtje2Xo=
    • smithi067.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBD2mxtGi5eUwopKfJpxfG+i3fHz1lQ6KQHKe31joc8vd9f2B/jt2bJ2hOCvTlq3Al+mKnuB+8trL0vSF7Wuulec=
    • smithi134.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBIcCdJcP2aHvjT6HJVNnsnMwff3EQqVbWgUSvW3YQNlmKTOn5I2j+d/ieEG+9xXm5UNtcRJjjDc8PmOUACHMaa8=
    • smithi155.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEHXRUUnbT4+C0CenwOJ24Y+qINRdQyGvWSUJDAhQy1VQNm0HZXBRHp6YnTot9RXqcVtBpP/olnKt/+GR1o8Ydk=
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • console_log:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • kdb: 1
      • sha1: distro
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock:
    • install:
    • cephadm:
    • cephadm.shell:
      • host.a:
        • ceph orch status
        • ceph orch ps
        • ceph orch host ls
        • ceph orch device ls
        • ceph osd lspools
    • nvmeof:
      • client: client.0
      • gateway_config:
        • cli_version: latest
        • namespaces_count: 128
      • rbd:
        • image_name_prefix: myimage
        • pool_name: mypool
      • version: default
    • cephadm.wait_for_service:
      • service: nvmeof.mypool
    • workunit:
      • clients:
        • client.2:
          • rbd/nvmeof_setup_subsystem.sh
      • env:
        • RBD_IMAGE_PREFIX: myimage
        • RBD_POOL: mypool
      • no_coverage_and_limits: True
      • branch: main
      • sha1: c37f63b3705f40f1bfcd7d8aeb311499c65cfa1f
    • workunit:
      • clients:
        • client.2:
          • rbd/nvmeof_basic_tests.sh
          • rbd/nvmeof_fio_test.sh --start_ns 1 --end_ns 64 --rbd_iostat
        • client.3:
          • rbd/nvmeof_basic_tests.sh
          • rbd/nvmeof_fio_test.sh --start_ns 65 --end_ns 128
      • env:
        • IOSTAT_INTERVAL: 10
        • RBD_POOL: mypool
        • RUNTIME: 600
      • no_coverage_and_limits: True
      • timeout: 30m
  • teuthology_branch: main
  • verbose: False
  • pcp_grafana_url:
  • priority: 950
  • user: teuthology
  • queue:
  • posted: 2024-04-23 20:18:20
  • started: 2024-04-23 20:19:23
  • updated: 2024-04-23 20:52:37
  • status_class: danger
  • runtime: 0:33:14
  • wait_time: 0:10:09
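
For reference, rbd/nvmeof_setup_subsystem.sh drives the NVMe-oF gateway's CLI to create a subsystem and attach one RBD-backed namespace per image; the job config above supplies the inputs (pool_name: mypool, image_name_prefix: myimage, namespaces_count: 128, cli_version: latest). Below is a rough sketch of that flow, assuming the containerized quay.io/ceph/nvmeof-cli syntax of the time — the exact subcommands and flags depend on the cli_version tag, and the gateway address, NQN, and 1..128 image numbering are illustrative placeholders rather than values recorded in this log:

    # Illustrative only; the real logic is in qa/workunits/rbd/nvmeof_setup_subsystem.sh.
    GW_IP=192.0.2.10                       # placeholder address, not from this log
    NQN=nqn.2016-06.io.spdk:cnode1         # hypothetical example subsystem NQN

    # Wrapper for the containerized gateway CLI; 5500 is the gateway's
    # default gRPC control port.
    gwcli() {
        sudo podman run --rm quay.io/ceph/nvmeof-cli:latest \
            --server-address "$GW_IP" --server-port 5500 "$@"
    }

    gwcli subsystem add --subsystem "$NQN"

    # One namespace per RBD image, matching namespaces_count: 128 and the
    # myimage prefix / mypool pool from the job config (numbering assumed).
    for i in $(seq 1 128); do
        gwcli namespace add --subsystem "$NQN" \
            --rbd-pool mypool --rbd-image "myimage${i}"
    done

If the CLI rejects any of these calls (a bad flag, an unknown image, or the gateway not yet reachable despite cephadm.wait_for_service), the script exits non-zero, which is consistent with the status 22 seen in the failure reason.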