Description: powercycle/osd/{clusters/3osd-1per-target ignorelist_health objectstore/bluestore-low-osd-mem-target powercycle/default supported-all-distro/centos_latest tasks/cfuse_workunit_suites_fsx thrashosds-health}

Log: http://qa-proxy.ceph.com/teuthology/yuriw-2024-04-11_20:31:05-powercycle-reef-release-distro-default-smithi/7652608/teuthology.log

Sentry event: https://sentry.ceph.com/organizations/ceph/?query=82ace81e80f24736a5d8251f57258171

Failure Reason:

Command failed (workunit test suites/fsx.sh) on smithi078 with status 2: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=d540ebaca6b131a1dd560e7f69e024b133bbaa42 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 CEPH_MNT=/home/ubuntu/cephtest/mnt.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 6h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/fsx.sh'
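The job ran for only 0:12:07, well under the workunit's 6h timeout, and exited with status 2, pointing at an early failure inside fsx.sh rather than an fsx hang. For a by-hand rerun against a live ceph-fuse mount, a minimal sketch follows (assumptions: the mount path is taken from the command above, fsx is built from xfstests-dev roughly the way the workunit does it, and the exact clone URL and fsx arguments should be checked against qa/workunits/suites/fsx.sh at the SHA1 above):

    # build fsx from the xfstests tree, roughly as the workunit does
    git clone https://git.ceph.com/xfstests-dev.git
    make -C xfstests-dev
    # exercise a file on the FUSE mount: -N caps the number of operations,
    # -p sets the progress-reporting interval, -l caps the file length (1 MiB)
    xfstests-dev/ltp/fsx -N 50000 -p 10000 -l 1048576 \
        /home/ubuntu/cephtest/mnt.0/client.0/tmp/fsx_testfile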

  • log_href: http://qa-proxy.ceph.com/teuthology/yuriw-2024-04-11_20:31:05-powercycle-reef-release-distro-default-smithi/7652608/teuthology.log
  • archive_path: /home/teuthworker/archive/yuriw-2024-04-11_20:31:05-powercycle-reef-release-distro-default-smithi/7652608
  • description: powercycle/osd/{clusters/3osd-1per-target ignorelist_health objectstore/bluestore-low-osd-mem-target powercycle/default supported-all-distro/centos_latest tasks/cfuse_workunit_suites_fsx thrashosds-health}
  • duration: 0:12:07
  • email:
  • failure_reason: Command failed (workunit test suites/fsx.sh) on smithi078 with status 2: 'mkdir -p -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && cd -- /home/ubuntu/cephtest/mnt.0/client.0/tmp && CEPH_CLI_TEST_DUP_COMMAND=1 CEPH_REF=d540ebaca6b131a1dd560e7f69e024b133bbaa42 TESTDIR="/home/ubuntu/cephtest" CEPH_ARGS="--cluster ceph" CEPH_ID="0" PATH=$PATH:/usr/sbin CEPH_BASE=/home/ubuntu/cephtest/clone.client.0 CEPH_ROOT=/home/ubuntu/cephtest/clone.client.0 CEPH_MNT=/home/ubuntu/cephtest/mnt.0 adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage timeout 6h /home/ubuntu/cephtest/clone.client.0/qa/workunits/suites/fsx.sh'
  • flavor: default
  • job_id: 7652608
  • kernel:
    • kdb: 1
    • sha1: distro
  • last_in_suite: False
  • machine_type: smithi
  • name: yuriw-2024-04-11_20:31:05-powercycle-reef-release-distro-default-smithi
  • nuke_on_error: True
  • os_type: centos
  • os_version: 9.stream
  • overrides:
    • admin_socket:
      • branch: reef-release
    • ceph:
      • conf:
        • mgr:
          • debug mgr: 20
          • debug ms: 1
        • mon:
          • debug mon: 20
          • debug ms: 1
          • debug paxos: 20
        • osd:
          • bdev async discard: True
          • bdev enable discard: True
          • bluestore block size: 96636764160
          • bluestore fsck on mount: True
          • debug bluefs: 1/20
          • debug bluestore: 1/20
          • debug ms: 1
          • debug osd: 20
          • debug rocksdb: 4/10
          • mon osd backfillfull_ratio: 0.85
          • mon osd full ratio: 0.9
          • mon osd nearfull ratio: 0.8
          • osd blocked scrub grace period: 3600
          • osd failsafe full ratio: 0.95
          • osd max markdown count: 1000
          • osd memory target: 1610612736
          • osd objectstore: bluestore
      • flavor: default
      • fs: xfs
      • log-ignorelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
        • \(MDS_TRIM\)
        • \(MDS_SLOW_REQUEST\)
        • MDS_SLOW_METADATA_IO
        • Behind on trimming
        • overall HEALTH_
        • \(OSDMAP_FLAGS\)
        • \(OSD_
        • \(PG_
        • \(POOL_
        • \(CACHE_POOL_
        • \(SMALLER_PGP_NUM\)
        • \(OBJECT_
        • \(SLOW_OPS\)
        • \(REQUEST_SLOW\)
        • \(TOO_FEW_PGS\)
        • slow request
        • timeout on replica
        • late reservation from
      • sha1: d540ebaca6b131a1dd560e7f69e024b133bbaa42
    • ceph-deploy:
      • conf:
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
    • install:
      • ceph:
        • flavor: default
        • sha1: d540ebaca6b131a1dd560e7f69e024b133bbaa42
    • thrashosds:
      • bdev_inject_crash: 2
      • bdev_inject_crash_probability: 0.5
    • workunit:
      • branch: reef-release
      • sha1: d540ebaca6b131a1dd560e7f69e024b133bbaa42
  • owner: scheduled_yuriw@teuthology
  • pid:
  • roles:
    • ['mon.a', 'mon.b', 'mon.c', 'mgr.x', 'mgr.y', 'mds.a', 'client.0']
    • ['osd.0']
    • ['osd.1']
    • ['osd.2']
  • sentry_event: https://sentry.ceph.com/organizations/ceph/?query=82ace81e80f24736a5d8251f57258171
  • status: fail
  • success: False
  • branch: reef-release
  • seed: 3563
  • sha1: d540ebaca6b131a1dd560e7f69e024b133bbaa42
  • subset: 111/120000
  • suite: powercycle
  • suite_branch: reef-release
  • suite_path: /home/teuthworker/src/github.com_ceph_ceph-c_d540ebaca6b131a1dd560e7f69e024b133bbaa42/qa
  • suite_relpath: qa
  • suite_repo: https://github.com/ceph/ceph-ci.git
  • suite_sha1: d540ebaca6b131a1dd560e7f69e024b133bbaa42
  • targets:
    • smithi078.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBAwFi0JCG61qH/nDs4l7gwraHCgHfSWxd0kcvW8O5afVGT/a42f/KFCws5HFr8CXFXSzUYYgYlb3vv4E2rPbzEk=
    • smithi080.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBNCnMWvC7qhttkrV+hcanxO2nMLmVliq8D6WutDztz/WwX0Ms5MVj0iPlnKDm7lDb1Op1j8JMII4v13iaDGE9zs=
    • smithi131.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBGik160s+7DpgyfH8pmnvi/QPhm7AcRWjwuiFXyoziWdpxeJjqTIdsnAgTSDBrzQHUOKeUl1fSyUaN+MRACO3sA=
    • smithi187.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBKwJkYL04UbONzr9OvyRwTxEaoRUKaairTIiYfvKNZf5yZ9twGQLLF4peyou5CVrCToDgbux9rFsfHhrF2qYuwE=
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • console_log:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • kdb: 1
      • sha1: distro
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock:
    • install:
      • extra_system_packages:
        • deb:
          • bison
          • flex
          • libelf-dev
          • libssl-dev
          • libaio-dev
          • libtool-bin
          • uuid-dev
          • xfslibs-dev
        • rpm:
          • bison
          • flex
          • elfutils-libelf-devel
          • openssl-devel
          • libaio-devel
          • libtool
          • libuuid-devel
          • xfsprogs-devel
      • flavor: default
      • sha1: d540ebaca6b131a1dd560e7f69e024b133bbaa42
    • ceph:
    • thrashosds:
      • chance_down: 1.0
      • powercycle: True
      • timeout: 600
      • sighup_delay: 0.1
      • optrack_toggle_delay: 2.0
      • dump_ops_enable: true
      • noscrub_toggle_delay: 2.0
      • random_eio: 0.0
      • bdev_inject_crash: 2
      • bdev_inject_crash_probability: 0.5
    • ceph-fuse:
    • workunit:
      • clients:
        • all:
          • suites/fsx.sh
      • timeout: 6h
      • branch: reef-release
      • sha1: d540ebaca6b131a1dd560e7f69e024b133bbaa42
  • teuthology_branch: main
  • verbose: True
  • pcp_grafana_url:
  • priority: 99
  • user: yuriw
  • queue:
  • posted: 2024-04-11 20:31:11
  • started: 2024-04-12 02:48:37
  • updated: 2024-04-12 03:25:52
  • status_class: danger
  • runtime: 0:37:15
  • wait_time: 0:25:08