Nodes: smithi156

Description: fs/nfs/{cluster/{1-node} overrides/ignorelist_health supported-random-distros$/{centos_latest} tasks/nfs}

Log: http://qa-proxy.ceph.com/teuthology/teuthology-2024-04-03_22:24:02-fs-reef-distro-default-smithi/7638936/teuthology.log

  • log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2024-04-03_22:24:02-fs-reef-distro-default-smithi/7638936/teuthology.log
  • archive_path: /home/teuthworker/archive/teuthology-2024-04-03_22:24:02-fs-reef-distro-default-smithi/7638936
  • description: fs/nfs/{cluster/{1-node} overrides/ignorelist_health supported-random-distros$/{centos_latest} tasks/nfs}
  • duration: 0:28:30
  • email: ceph-qa@ceph.com
  • failure_reason:
  • flavor: default
  • job_id: 7638936
  • kernel:
    • kdb: True
    • sha1: distro
  • last_in_suite: False
  • machine_type: smithi
  • name: teuthology-2024-04-03_22:24:02-fs-reef-distro-default-smithi
  • nuke_on_error: True
  • os_type: centos
  • os_version: 9.stream
  • overrides:
    • admin_socket:
      • branch: reef
    • ceph:
      • conf:
        • mgr:
          • debug mgr: 20
          • debug ms: 1
        • mon:
          • debug mon: 20
          • debug ms: 1
          • debug paxos: 20
        • osd:
          • debug ms: 1
          • debug osd: 20
      • flavor: default
      • log-ignorelist (regular expressions; see the matching sketch after this listing):
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
        • FS_DEGRADED
        • FS_INLINE_DATA_DEPRECATED
        • FS_WITH_FAILED_MDS
        • MDS_ALL_DOWN
        • MDS_DAMAGE
        • MDS_DEGRADED
        • MDS_FAILED
        • MDS_INSUFFICIENT_STANDBY
        • MDS_UP_LESS_THAN_MAX
        • POOL_APP_NOT_ENABLED
        • overall HEALTH_
        • Replacing daemon
      • sha1: 052adcadd0534eddb0d73c5afbc083a84cd9dc46
    • ceph-deploy:
      • conf:
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
    • install:
      • ceph:
        • flavor: default
        • sha1: 052adcadd0534eddb0d73c5afbc083a84cd9dc46
    • workunit:
      • branch: reef
      • sha1: 800ec6d64a7fa0648ad53edfc6e50b41fc4cd674
  • owner: scheduled_teuthology@teuthology
  • pid:
  • roles:
    • ['host.a', 'mon.a', 'mgr.x', 'osd.0', 'osd.1', 'osd.2', 'client.0']
  • sentry_event:
  • status: pass
  • success: True
  • branch: reef
  • seed: 3144
  • sha1: 052adcadd0534eddb0d73c5afbc083a84cd9dc46
  • subset: 362/512
  • suite: fs
  • suite_branch: reef
  • suite_path: /home/teuthworker/src/git.ceph.com_ceph_800ec6d64a7fa0648ad53edfc6e50b41fc4cd674/qa
  • suite_relpath: qa
  • suite_repo: https://git.ceph.com/ceph.git
  • suite_sha1: 800ec6d64a7fa0648ad53edfc6e50b41fc4cd674
  • targets:
    • smithi156.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHr6ODBMZiQAMTh9HNzW3ICOLFuEWnv1vrJ9UmHmOWBnixlMP38NIHUBvGKHZu/SKqUgQWyfyI5uuxs0ONZXlIM=
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • console_log:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • kdb: True
      • sha1: distro
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock:
    • install:
    • cephadm:
    • cephadm.shell:
      • host.a:
        • ceph orch apply mds a
    • cephfs_test_runner: (this module can also be run locally; see the note after this listing)
      • modules:
        • tasks.cephfs.test_nfs
  • teuthology_branch: main
  • verbose: False
  • pcp_grafana_url:
  • priority: 930
  • user: teuthology
  • queue:
  • posted: 2024-04-03 22:25:17
  • started: 2024-04-06 11:42:54
  • updated: 2024-04-06 12:21:14
  • status_class: success
  • runtime: 0:38:20
  • wait_time: 0:09:50 (runtime = wait_time + duration; see the sketch below)
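
The three time fields above are mutually consistent: runtime is the wall-clock span from started to updated, and it splits into wait_time (time spent before the job body ran, presumably waiting on machines and setup) plus duration (the job itself). A quick check in Python, using the values copied from this page:

    from datetime import datetime, timedelta

    # Values copied verbatim from the fields above.
    started  = datetime(2024, 4, 6, 11, 42, 54)   # started: 2024-04-06 11:42:54
    updated  = datetime(2024, 4, 6, 12, 21, 14)   # updated: 2024-04-06 12:21:14
    duration = timedelta(minutes=28, seconds=30)  # duration: 0:28:30
    wait     = timedelta(minutes=9, seconds=50)   # wait_time: 0:09:50

    runtime = updated - started
    assert runtime == timedelta(minutes=38, seconds=20)  # runtime: 0:38:20
    assert runtime == wait + duration                    # 0:09:50 + 0:28:30 = 0:38:20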
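
About the log-ignorelist under overrides.ceph: the entries are regular expressions that teuthology matches against cluster log lines, and matching lines are not counted as failures. That is why health warnings expected during this test (MDS restarts, POOL_APP_NOT_ENABLED, and so on) are listed, and why some entries escape their parentheses. A minimal sketch of the matching semantics, in illustrative Python rather than teuthology's actual code:

    import re

    # A few entries copied from the log-ignorelist above. They are regexes,
    # so \(MDS_ALL_DOWN\) escapes its parentheses in order to match the
    # literal text "(MDS_ALL_DOWN)" in a log line.
    IGNORELIST = [
        r"\(MDS_ALL_DOWN\)",
        r"\(MDS_UP_LESS_THAN_MAX\)",
        r"FS_DEGRADED",
        r"POOL_APP_NOT_ENABLED",
        r"overall HEALTH_",
        r"Replacing daemon",
    ]

    def is_ignored(line: str) -> bool:
        """True if a cluster-log line matches any ignorelist pattern."""
        return any(re.search(pattern, line) for pattern in IGNORELIST)

    # Without the ignorelist, this warning would fail the run's health check:
    assert is_ignored("cluster [WRN] overall HEALTH_WARN 1 filesystem is degraded")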
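
The actual test payload is the cephfs_test_runner task, which runs the unittest-style cases in qa/tasks/cephfs/test_nfs.py against the cephadm-deployed cluster. For local iteration, the same module can usually be run against a vstart cluster with the vstart_runner helper from the qa tree (see the Ceph developer guide); roughly, from a build directory with a vstart cluster running:

    python3 ../qa/tasks/vstart_runner.py tasks.cephfs.test_nfs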