log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2024-09-16_20:08:15-orch-main-distro-default-smithi/7907861/teuthology.log
archive_path: /home/teuthworker/archive/teuthology-2024-09-16_20:08:15-orch-main-distro-default-smithi/7907861
description: orch/cephadm/smb/{0-distro/ubuntu_22.04 tasks/deploy_smb_mgr_ctdb_res_dom}
duration: 0:27:25
email: ceph-qa@ceph.com
failure_reason:
flavor:
job_id: 7907861
kernel:
last_in_suite: False
machine_type: smithi
name: teuthology-2024-09-16_20:08:15-orch-main-distro-default-smithi
nuke_on_error:
os_type: ubuntu
os_version: 22.04
overrides:
  admin_socket:
  ceph:
    conf:
      mgr:
        debug mgr: 20
        debug ms: 1
      mon:
        debug mon: 20
        debug ms: 1
        debug paxos: 20
      osd:
        debug ms: 1
        debug osd: 20
    flavor: default
    log-ignorelist:
      - \(MDS_ALL_DOWN\)
      - \(MDS_UP_LESS_THAN_MAX\)
    log-only-match:
    sha1: 7498493279982dfe87f00616198a5967475f6169
  ceph-deploy:
    conf:
      client:
        log file: /var/log/ceph/ceph-$name.$pid.log
      mon:
  install:
    ceph:
      flavor: default
      sha1: 7498493279982dfe87f00616198a5967475f6169
  workunit:
    branch: main
    sha1: 2b698643d05f73e79c2d2888228947a84a4ac0bd
owner: scheduled_teuthology@teuthology
pid:
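# Hosts a-c form the Ceph cluster; host.d carries cephadm.exclude, so it stays outside
# the cluster and acts as the SMB client in the checks below.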
roles:
  - ['host.a', 'mon.a', 'mgr.x', 'osd.0', 'osd.1', 'client.0']
  - ['host.b', 'mon.b', 'osd.2', 'osd.3']
  - ['host.c', 'mon.c', 'osd.4', 'osd.5']
  - ['host.d', 'cephadm.exclude']
sentry_event:
status: pass
success: True
branch: main
seed: 3649
sha1: 7498493279982dfe87f00616198a5967475f6169
subset: 42/64
suite: orch
suite_branch: main
suite_path: /home/teuthworker/src/git.ceph.com_ceph_2b698643d05f73e79c2d2888228947a84a4ac0bd/qa
suite_relpath: qa
suite_repo: https://git.ceph.com/ceph.git
suite_sha1: 2b698643d05f73e79c2d2888228947a84a4ac0bd
targets:
  smithi040.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEW8uoTEJjeE2DfH3W8fNfdPbiK7lh+1Swx1vGqgJxS0rWnGbUH8/nnXH9heDeMZ3IdaJepKvKB5EFGLWA4v960=
  smithi044.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBCJPCDqLMpKcZb84trkLtCBtMHf1jb31Pc4h9fG41zWG48jFoHDICYVYulJ9ycnZIGAuwl+Xu2AB0J7nam/YXdw=
  smithi089.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBBKN0li1YkuDjtTdMKenxaufDyk6xQxRBT8S1MShviJFmeexgcWGDq5a5yNDcWph9dirwL5uZTYTqgAFu2QoFnE=
  smithi177.front.sepia.ceph.com: ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBJUjtqr+Xtu56HwTDApUmiiY2oSNUrCOBmCSa0FgZUXoE3B7SA3CdlRQI2uQz8XJ0SISwMHYFDY7Fomyyw9hhPA=
tasks:
  - internal.buildpackages_prep:
  - internal.save_config:
  - internal.check_lock:
  - internal.add_remotes:
  - console_log:
  - internal.connect:
  - internal.push_inventory:
  - internal.serialize_remote_roles:
  - internal.check_conflict:
  - internal.check_ceph_data:
  - internal.vm_setup:
  - kernel:
  - internal.base:
  - internal.archive_upload:
  - internal.archive:
  - internal.coredump:
  - internal.sudo:
  - internal.syslog:
  - internal.timer:
  - pcp:
  - selinux:
  - ansible.cephlab:
  - clock:
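  # Stand up a Samba Active Directory domain controller for the test realm, then
  # bootstrap the Ceph cluster with cephadm.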
  - cephadm.deploy_samba_ad_dc:
  - cephadm:
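  # Create the CephFS volume that will back the SMB shares, then wait for the
  # associated service to come up.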
  - cephadm.shell:
      host.a:
        - ceph fs volume create cephfs
  - cephadm.wait_for_service:
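  # Prepare two CephFS subvolumes for the shares, enable the smb mgr module, and apply
  # the SMB resources declared on stdin: an active-directory cluster 'adctdb1' that
  # joins realm DOMAIN1.SINK.TEST using the 'join1-admin' credentials resource, with a
  # placement count of 3 (a three-node CTDB cluster), plus shares backed by sv1 and sv2.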
  - cephadm.shell:
      host.a:
        - cmd: ceph fs subvolumegroup create cephfs smb
        - cmd: ceph fs subvolume create cephfs sv1 --group-name=smb --mode=0777
        - cmd: ceph fs subvolume create cephfs sv2 --group-name=smb --mode=0777
        - cmd: ceph mgr module enable smb
        - cmd: sleep 30
        - cmd: ceph smb apply -i -
          stdin: |
            # --- Begin Embedded YAML
            - resource_type: ceph.smb.cluster
              cluster_id: adctdb1
              auth_mode: active-directory
              domain_settings:
                realm: DOMAIN1.SINK.TEST
                join_sources:
                  - source_type: resource
                    ref: join1-admin
              custom_dns:
                - "172.21.15.177"
              placement:
                count: 3
            - resource_type: ceph.smb.join.auth
              auth_id: join1-admin
              auth:
                username: Administrator
                password: Passw0rd
            - resource_type: ceph.smb.share
              cluster_id: adctdb1
              share_id: share1
              cephfs:
                volume: cephfs
                subvolumegroup: smb
                subvolume: sv1
                path: /
            - resource_type: ceph.smb.share
              cluster_id: adctdb1
              share_id: share2
              cephfs:
                volume: cephfs
                subvolumegroup: smb
                subvolume: sv2
                path: /
            # --- End Embedded YAML
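  # Wait for the SMB service to deploy, then read cluster.meta.json back from the .smb
  # pool (namespace adctdb1) to show the cluster metadata recorded by the smb module.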
  - cephadm.wait_for_service:
  - cephadm.shell:
      host.a:
        - cmd: rados --pool=.smb -N adctdb1 get cluster.meta.json /dev/stdout
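  # From host.d, outside the Ceph cluster, list both shares over SMB as domain user
  # DOMAIN1\ckent, connecting to the Samba instance on host.a.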
  - cephadm.exec:
      host.d:
        - sleep 30
        - {{ctx.samba_client_container_cmd|join(' ')}} smbclient -U DOMAIN1\\ckent%1115Rose. //{{'host.a'|role_to_remote|attr('ip_address')}}/share1 -c ls
        - {{ctx.samba_client_container_cmd|join(' ')}} smbclient -U DOMAIN1\\ckent%1115Rose. //{{'host.a'|role_to_remote|attr('ip_address')}}/share2 -c ls
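  # Enter one of the smb.adctdb1 daemon containers and check 'ctdb status': nodes
  # pnn:0 through pnn:2 must report OK and the node count must be 3.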
  - cephadm.exec:
      host.a:
        - {{ctx.cephadm}} ls --no-detail | {{ctx.cephadm}} shell jq -r 'map(select(.name | startswith("smb.adctdb1")))[-1].name' > /tmp/svcname
        - {{ctx.cephadm}} enter -n $(cat /tmp/svcname) ctdb status > /tmp/ctdb_status
        - cat /tmp/ctdb_status
        - grep 'pnn:0 .*OK' /tmp/ctdb_status
        - grep 'pnn:1 .*OK' /tmp/ctdb_status
        - grep 'pnn:2 .*OK' /tmp/ctdb_status
        - grep 'Number of nodes:3' /tmp/ctdb_status
        - rm -rf /tmp/svcname /tmp/ctdb_status
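  # Repeat the share listing from host.d, this time against the Samba instance on
  # host.c, exercising a second node of the clustered deployment.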
  - cephadm.exec:
      host.d:
        - sleep 30
        - {{ctx.samba_client_container_cmd|join(' ')}} smbclient -U DOMAIN1\\ckent%1115Rose. //{{'host.c'|role_to_remote|attr('ip_address')}}/share1 -c ls
        - {{ctx.samba_client_container_cmd|join(' ')}} smbclient -U DOMAIN1\\ckent%1115Rose. //{{'host.c'|role_to_remote|attr('ip_address')}}/share2 -c ls
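  # Tear down: re-apply every resource with 'intent: removed' and wait for the smb
  # service to disappear.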
  - cephadm.shell:
      host.a:
        - cmd: ceph smb apply -i -
          stdin: |
            # --- Begin Embedded YAML
            - resource_type: ceph.smb.cluster
              cluster_id: adctdb1
              intent: removed
            - resource_type: ceph.smb.join.auth
              auth_id: join1-admin
              intent: removed
            - resource_type: ceph.smb.share
              cluster_id: adctdb1
              share_id: share1
              intent: removed
            - resource_type: ceph.smb.share
              cluster_id: adctdb1
              share_id: share2
              intent: removed
            # --- End Embedded YAML
  - cephadm.wait_for_service_not_present:
teuthology_branch: main
verbose: False
pcp_grafana_url:
priority: 950
user: teuthology
queue:
posted: 2024-09-16 20:13:36
started: 2024-09-17 12:29:46
updated: 2024-09-17 13:09:19
status_class: success
runtime: 0:39:33
wait_time: 0:12:08