Description: orch:cephadm/smb/{0-distro/centos_9.stream tasks/deploy_smb_basic}

Log: http://qa-proxy.ceph.com/teuthology/phlogistonjohn-2024-03-22_03:02:08-orch:cephadm-wip-phlogistonjohn-testing-2024-03-21-1835-distro-default-smithi/7616048/teuthology.log

Failure Reason:

"2024-03-22T05:16:39.591587+0000 mon.a (mon.0) 240 : cluster [WRN] Health check failed: 1 pool(s) do not have an application enabled (POOL_APP_NOT_ENABLED)" in cluster log

  • log_href: http://qa-proxy.ceph.com/teuthology/phlogistonjohn-2024-03-22_03:02:08-orch:cephadm-wip-phlogistonjohn-testing-2024-03-21-1835-distro-default-smithi/7616048/teuthology.log
  • archive_path: /home/teuthworker/archive/phlogistonjohn-2024-03-22_03:02:08-orch:cephadm-wip-phlogistonjohn-testing-2024-03-21-1835-distro-default-smithi/7616048
  • description: orch:cephadm/smb/{0-distro/centos_9.stream tasks/deploy_smb_basic}
  • duration: 0:17:43
  • email: jmulligan@redhat.com
  • failure_reason: "2024-03-22T05:16:39.591587+0000 mon.a (mon.0) 240 : cluster [WRN] Health check failed: 1 pool(s) do not have an application enabled (POOL_APP_NOT_ENABLED)" in cluster log
  • flavor: default
  • job_id: 7616048
  • kernel:
    • kdb: True
    • sha1: distro
  • last_in_suite: False
  • machine_type: smithi
  • name: phlogistonjohn-2024-03-22_03:02:08-orch:cephadm-wip-phlogistonjohn-testing-2024-03-21-1835-distro-default-smithi
  • nuke_on_error: True
  • os_type: centos
  • os_version: 9.stream
  • overrides:
    • admin_socket:
      • branch: wip-phlogistonjohn-testing-2024-03-21-1835
    • ceph:
      • conf:
        • mgr:
          • debug mgr: 20
          • debug ms: 1
        • mon:
          • debug mon: 20
          • debug ms: 1
          • debug paxos: 20
        • osd:
          • debug ms: 1
          • debug osd: 20
      • flavor: default
      • log-ignorelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
      • sha1: ea01217f8a79678e64ef6a5d68a043a4ef21ca37
    • ceph-deploy:
      • conf:
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
    • install:
      • ceph:
        • flavor: default
        • sha1: ea01217f8a79678e64ef6a5d68a043a4ef21ca37
    • selinux:
      • allowlist:
        • scontext=system_u:system_r:logrotate_t:s0
    • workunit:
      • branch: wip-phlogistonjohn-testing-2024-03-21-1835
      • sha1: ea01217f8a79678e64ef6a5d68a043a4ef21ca37
  • owner: scheduled_phlogistonjohn@teuthology
  • pid:
  • roles:
    • ['host.a', 'mon.a', 'mgr.x', 'osd.0', 'osd.1', 'client.0']
    • ['host.b', 'cephadm.exclude']
  • sentry_event:
  • status: fail
  • success: False
  • branch: wip-phlogistonjohn-testing-2024-03-21-1835
  • seed: 5374
  • sha1: ea01217f8a79678e64ef6a5d68a043a4ef21ca37
  • subset: 1/24
  • suite: orch:cephadm
  • suite_branch: wip-phlogistonjohn-testing-2024-03-21-1835
  • suite_path: /home/teuthworker/src/git.ceph.com_ceph-c_ea01217f8a79678e64ef6a5d68a043a4ef21ca37/qa
  • suite_relpath: qa
  • suite_repo: https://git.ceph.com/ceph-ci.git
  • suite_sha1: ea01217f8a79678e64ef6a5d68a043a4ef21ca37
  • targets:
    • smithi018.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBIz2b9g04RbOf3QX+Fh+B3tyabhBn7PYloeHenGZQ4kHzjzv4WhY4DBNV8uFZN0IXHWyUAP4dE8xDfCfUZH8Res=
    • smithi137.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBLlLNoKiZJW/8KL9dMNpvTRoiGuzwt1LpmAeQNAWqT0EbRYtGgNbNf1kYPTEKWcmptgZuraN4dKY8XPpMBHecA8=
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • console_log:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • kdb: True
      • sha1: distro
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock:
    • install:
    • cephadm.configure_samba_client_container:
      • role: host.b
    • cephadm:
    • cephadm.shell:
      • host.a:
        • ceph fs volume create cephfs
    • cephadm.wait_for_service:
      • service: mds.cephfs
    • cephadm.shell:
      • host.a:
        • cmd: ceph fs subvolumegroup create cephfs g1
        • cmd: ceph fs subvolume create cephfs sub1 --group-name=g1 --mode=0777
        • cmd: ceph fs authorize cephfs client.smbdata / rw
        • cmd: ceph osd pool create .smb --yes-i-really-mean-it
        • cmd: ceph osd pool application enable .smb smb
        • cmd: rados --pool=.smb --namespace=saserv1 put conf.toml /dev/stdin
        • stdin:
            samba-container-config = "v0"
            [configs.saserv1]
            shares = ["share1"]
            globals = ["default", "domain"]
            instance_name = "SAMBA"
            [shares.share1.options]
            "vfs objects" = "ceph"
            path = "/"
            "ceph:config_file" = "/etc/ceph/ceph.conf"
            "ceph:user_id" = "smbdata"
            "kernel share modes" = "no"
            "read only" = "no"
            "browseable" = "yes"
            [globals.default.options]
            "server min protocol" = "SMB2"
            "load printers" = "no"
            "printing" = "bsd"
            "printcap name" = "/dev/null"
            "disable spoolss" = "yes"
            "guest ok" = "no"
            [globals.domain.options]
            security = "USER"
            workgroup = "STANDALONE1"
            [[users.all_entries]]
            name = "smbuser1"
            password = "insecure321"
    • cephadm.apply:
      • specs:
        • cluster_id: saserv1
        • config_uri: rados://.smb/saserv1/conf.toml
        • include_ceph_users:
          • client.smbdata
        • placement:
          • count: 1
        • service_id: saserv1
        • service_type: smb
    • cephadm.wait_for_service:
      • service: smb.saserv1
    • cephadm.exec:
      • host.b:
        • sleep 30
        • {{ctx.samba_client_container_cmd|join(' ')}} smbclient -U smbuser1%insecure321 //{{'host.a'|role_to_remote|attr('ip_address')}}/share1 -c ls
  • teuthology_branch: main
  • verbose: False
  • pcp_grafana_url:
  • priority: 95
  • user: phlogistonjohn
  • queue:
  • posted: 2024-03-22 03:02:42
  • started: 2024-03-22 04:51:22
  • updated: 2024-03-22 05:19:46
  • status_class: danger
  • runtime: 0:28:24
  • wait_time: 0:10:41
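
For reference, the cephadm.apply step in the task list corresponds roughly to hand-feeding a service spec to "ceph orch apply -i"; the sketch below is assembled from the fields shown above (cluster_id, config_uri, include_ceph_users, placement, service_id, service_type) rather than captured from the run:

    service_type: smb
    service_id: saserv1
    placement:
      count: 1
    cluster_id: saserv1
    config_uri: rados://.smb/saserv1/conf.toml
    include_ceph_users:
      - client.smbdata

Once smb.saserv1 is up, the final cephadm.exec step verifies the deployment from host.b by listing //host.a-address/share1 with smbclient as smbuser1.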