Description: orch:rook/smoke/{0-distro/ubuntu_20.04 0-kubeadm 0-nvme-loop 1-rook 2-workload/none 3-final cluster/3-node k8s/1.21 net/calico rook/1.7.0}

Log: http://qa-proxy.ceph.com/teuthology/sage-2021-11-15_18:25:33-orch:rook-wip-sage2-testing-2021-11-10-1508-distro-basic-smithi/6504189/teuthology.log

Sentry event: https://sentry.ceph.com/organizations/ceph/?query=77dab582dded485fa8b0ef2b522847e9

Failure Reason:

Command failed on smithi059 with status 127: '\'toolbox()\' \'{\n\' \'\' \'\' \'\' kubectl -n rook-ceph exec -it deploy/rook-ceph-tools -- \'\' \'"$@"\n}\ntoolbox\' ceph orch osd rm 0 \'--force\nremoved_pv=""\nwhile\' \'[\' \'"$removed_pv"\' = \'""\' \']\ndo\n\' \'\' \'\' \'\' \'removed_pv=`kubectl\' get pv \'|\' grep Released \'|\' cut -f1 -d \'"\' \'"`\n\' \'\' \'\' \'\' sleep \'3s\ndone\ntarget_path=`kubectl\' get pv \'$removed_pv\' -o \'jsonpath=\'"\'"\'{.spec.local.path}\'"\'"\'`\ntoolbox\' ceph orch device zap \'`hostname`\' \'$target_path\' \'--force\nzap_completion="0"\nwhile\' \'[\' \'"$zap_completion"\' = \'"0"\' \'\' \']\ndo\n\' \'\' \'\' \'\' \'zap_completion=`kubectl\' get job -n rook-ceph rook-ceph-device-zap -o \'jsonpath=\'"\'"\'{.status.succeeded.path}\'"\'"\'`\n\' \'\' \'\' \'\' sleep \'3s\ndone\nkubectl\' patch pv \'$removed_pv\' -p \'\'"\'"\'{"spec":{"claimRef":\' \'null}}\'"\'"\'\ntoolbox\' ceph orch apply osd \'--all-available-devices\nkubectl\' delete job rook-ceph-device-zap -n \'rook-ceph\nnum_osd="0"\nwhile\' \'[\' \'"$num_osd"\' \'!=\' \'"$orig_num_osd"\' \']\ndo\n\' \'\' \'\' \'\' echo \'"waiting\' for osd to come back \'up"\n\' \'\' \'\' \'\' \'num_osd=`toolbox\' ceph osd stat \'|\' cut -f3 -d \'"\' \'"`\n\' \'\' \'\' \'\' sleep \'30s\ndone\n\''
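
Exit status 127 means the remote shell could not find a command to execute. The quoted blob above is the multi-line rook.kubeshell script from the tasks section below, shown with teuthology's shell quoting applied; the embedded \n markers indicate where its line breaks fall. As a readability aid only (the snippet is copied from that script, with comments added), the script drives "ceph orch" through a small wrapper that execs into the Rook toolbox pod:

    # Helper defined at the top of the rook.kubeshell script: run a command
    # inside the rook-ceph-tools deployment in the rook-ceph namespace.
    toolbox() {
        kubectl -n rook-ceph exec -it deploy/rook-ceph-tools -- "$@"
    }

    # First step of the script: remove osd.0 via the orchestrator.
    toolbox ceph orch osd rm 0 --force

The full script, with line breaks restored, is listed under the rook.kubeshell task below.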

  • log_href: http://qa-proxy.ceph.com/teuthology/sage-2021-11-15_18:25:33-orch:rook-wip-sage2-testing-2021-11-10-1508-distro-basic-smithi/6504189/teuthology.log
  • archive_path: /home/teuthworker/archive/sage-2021-11-15_18:25:33-orch:rook-wip-sage2-testing-2021-11-10-1508-distro-basic-smithi/6504189
  • description: orch:rook/smoke/{0-distro/ubuntu_20.04 0-kubeadm 0-nvme-loop 1-rook 2-workload/none 3-final cluster/3-node k8s/1.21 net/calico rook/1.7.0}
  • duration: 0:17:54
  • email:
  • failure_reason: Command failed on smithi059 with status 127: '\'toolbox()\' \'{\n\' \'\' \'\' \'\' kubectl -n rook-ceph exec -it deploy/rook-ceph-tools -- \'\' \'"$@"\n}\ntoolbox\' ceph orch osd rm 0 \'--force\nremoved_pv=""\nwhile\' \'[\' \'"$removed_pv"\' = \'""\' \']\ndo\n\' \'\' \'\' \'\' \'removed_pv=`kubectl\' get pv \'|\' grep Released \'|\' cut -f1 -d \'"\' \'"`\n\' \'\' \'\' \'\' sleep \'3s\ndone\ntarget_path=`kubectl\' get pv \'$removed_pv\' -o \'jsonpath=\'"\'"\'{.spec.local.path}\'"\'"\'`\ntoolbox\' ceph orch device zap \'`hostname`\' \'$target_path\' \'--force\nzap_completion="0"\nwhile\' \'[\' \'"$zap_completion"\' = \'"0"\' \'\' \']\ndo\n\' \'\' \'\' \'\' \'zap_completion=`kubectl\' get job -n rook-ceph rook-ceph-device-zap -o \'jsonpath=\'"\'"\'{.status.succeeded.path}\'"\'"\'`\n\' \'\' \'\' \'\' sleep \'3s\ndone\nkubectl\' patch pv \'$removed_pv\' -p \'\'"\'"\'{"spec":{"claimRef":\' \'null}}\'"\'"\'\ntoolbox\' ceph orch apply osd \'--all-available-devices\nkubectl\' delete job rook-ceph-device-zap -n \'rook-ceph\nnum_osd="0"\nwhile\' \'[\' \'"$num_osd"\' \'!=\' \'"$orig_num_osd"\' \']\ndo\n\' \'\' \'\' \'\' echo \'"waiting\' for osd to come back \'up"\n\' \'\' \'\' \'\' \'num_osd=`toolbox\' ceph osd stat \'|\' cut -f3 -d \'"\' \'"`\n\' \'\' \'\' \'\' sleep \'30s\ndone\n\''
  • flavor:
  • job_id: 6504189
  • kernel:
    • kdb: True
    • sha1: distro
  • last_in_suite: False
  • machine_type: smithi
  • name: sage-2021-11-15_18:25:33-orch:rook-wip-sage2-testing-2021-11-10-1508-distro-basic-smithi
  • nuke_on_error: True
  • os_type: ubuntu
  • os_version: 20.04
  • overrides:
    • admin_socket:
      • branch: wip-sage2-testing-2021-11-10-1508
    • ceph:
      • conf:
        • mgr:
          • debug mgr: 20
          • debug ms: 1
        • mon:
          • debug mon: 20
          • debug ms: 1
          • debug paxos: 20
        • osd:
          • debug ms: 1
          • debug osd: 20
      • log-ignorelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
      • log-whitelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
      • sha1: f51261b810e0a244c0d8f9a37834e5287f5f60c3
    • ceph-deploy:
      • conf:
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
          • osd default pool size: 2
    • install:
      • ceph:
        • sha1: f51261b810e0a244c0d8f9a37834e5287f5f60c3
    • kernel:
      • hwe: True
    • kubeadm:
      • pod_network: calico
      • version: 1.21
    • rook:
      • rook_branch: v1.7.0
      • rook_image: rook/ceph:v1.7.0
    • workunit:
      • branch: wip-sage3-testing-2021-11-15-1324
      • sha1: b59f290ddf72292b1789ad736487acfa97b4da87
  • owner: scheduled_sage@teuthology
  • pid:
  • roles:
    • ['host.a', 'client.a']
    • ['host.b', 'client.b']
    • ['host.c', 'client.c']
  • sentry_event: https://sentry.ceph.com/organizations/ceph/?query=77dab582dded485fa8b0ef2b522847e9
  • status: fail
  • success: False
  • branch: wip-sage2-testing-2021-11-10-1508
  • seed:
  • sha1: f51261b810e0a244c0d8f9a37834e5287f5f60c3
  • subset:
  • suite:
  • suite_branch: wip-sage3-testing-2021-11-15-1324
  • suite_path:
  • suite_relpath:
  • suite_repo:
  • suite_sha1: b59f290ddf72292b1789ad736487acfa97b4da87
  • targets:
    • smithi059.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC6O7dGjWErlvT+7YaOq913GacPw4+PZLsrkxfLH/ZDYK9wSA/cKShtwy1pLEQvkHRbtkIZJE8zpampqzklHJqQs8HKNLIsuexlrYKBp4yGiNmWpT/mVdQfFVRHVrgIkG0FxxAUKPOf1Yh5pzGUkARq9Qpejl5WXb47eqBNwIgB8jcBFsfsHQjRiPHrWkkPSWUR/tOkHanMIokb8+8dTwzMdRww6zRHtfbAsQdDcWFfgtciO0ZvbRfjfHoSN29/byPuClEiBc667dhm6rfTdvfGayJr90B+WSJK522V2RARSBufWGjOaLwCUyV6NB6VL4/Bpay4nURcfIMJ9G9sPOwRh7j1zybioWZ7I8YXH4j7/rWTaYqNunt5KexYoDqpv99jJLY+xKa8UIaiYq+BTux7WeUMFPaNCCwSS17QfjInUmj+NY8n6upZyQfsUv1tnthnVhfD2z+FOyrWt+ZKQu87KDggoSmzfRzug3JcwLERzxzRFPJWN3TyycqTLa7bdOU=
    • smithi102.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDnHeXzsiEqdQcsHuPb59FlwHew+t7Ep+ou0epCY0qJoYkWYo+uiRlDRcZOPLxaLh/Q187CDuX+928jobCkbDNxyIpPK+aCySPqNuObFQEWvLsJz/p8YWYVZW0FqNuat1Uw5vw57emjkWFhH1YjOHlOuIPn013piwDcqgu+uWU/fF/osD2mBKaBGgj2/k8/KoxbYxWR76+jjenDzecw0FRB8mQdTZfRrg4Rr57rdxdR4TFpRNExLJr2FElASrq9vuZwEelcxLDJFiFSNBmuNv+IW7Nb4rBR4ieP0LVUyEphqNXleIroYIcswKGKOn5Xc2JjIU6gJzxm5spBHxVZXYK2/yCwB/svZNnBafP+yQ0LQl5DE4qcUQV12cq30cqORH3U3+9zPLOFvnRBYHbPdn3iZLoqLP7Nb02tcOgt2u+Sq+aj4O0UZsnhWBWuW/bfJ+KrGp9kAcyzEP0xAuZI03130HyOnilv7FQJxZwXsh5joGtkuIZMx8NOnCSAyX3wZSk=
    • smithi140.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQDqlKVjg+fGoAFanAmAZvnkRUFN1Fr9Fl+X/8nbJePJ8UNE8P9SAJTJbGfxxBUilt+oLe/JAI5ExgBqK7dyrqpi6/ZgLgtc0mkSqbyJX5WPdaDZBwQW5zg1WY5vnkhVTabPLhMjPIT8G+YdpsQwLrYXtHXYQBvFY8IUHkibs3I8oNb718nSYmGCIxmvKLJcnF7eDXeuLeC3UFwZzpMwls70s+WBMBvBr1ylhgNMYFeOG7FjbJ6Jj+sy1YQldQRy+gRIgF46XfNzZhLmXacPT10GphKGRVSepMAGcZhWgnyEQS/vOpi9vjzwrtxVT+q69bpqNddO+GNqjM6RI8gv1VNh4y9uQsnBpnbP6fhD2JXR66e/ISkJSX9kajaAmshgGBB0H+goUy9ckOKf7FfAE/OfhemMshq1hVuBKbPs5WYTTqYxl/izNAcmfY6/AFnjibLs0IsuTpI/ThYft3wwt/Mi/32dNUO6oBGCndBZIUkMgNpb3Bxz3gYEuB6HBBWnrxU=
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • console_log:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • kdb: True
      • sha1: distro
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock:
    • kubeadm:
    • nvme_loop:
    • rook:
    • rook.shell:
      • ceph -s
      • ceph orch status
      • ceph orch ps
      • ceph orch ls
      • ceph orch device ls
    • rook.shell:
      • commands:
        • ceph orch status
        • ceph orch ps
        • ceph orch ls
        • ceph orch host ls
        • ceph orch device ls
        • ceph orch apply rgw foo
        • ceph orch apply mds foo
        • ceph orch apply rbd-mirror
        • ceph orch apply nfs foo --port 12777
    • rook.kubeshell:
      • commands:
        • toolbox() {
              kubectl -n rook-ceph exec -it deploy/rook-ceph-tools -- "$@"
          }
          toolbox ceph orch osd rm 0 --force
          removed_pv=""
          while [ "$removed_pv" = "" ]
          do
              removed_pv=`kubectl get pv | grep Released | cut -f1 -d " "`
              sleep 3s
          done
          target_path=`kubectl get pv $removed_pv -o jsonpath='{.spec.local.path}'`
          toolbox ceph orch device zap `hostname` $target_path --force
          zap_completion="0"
          while [ "$zap_completion" = "0" ]
          do
              zap_completion=`kubectl get job -n rook-ceph rook-ceph-device-zap -o jsonpath='{.status.succeeded.path}'`
              sleep 3s
          done
          kubectl patch pv $removed_pv -p '{"spec":{"claimRef": null}}'
          toolbox ceph orch apply osd --all-available-devices
          kubectl delete job rook-ceph-device-zap -n rook-ceph
          num_osd="0"
          while [ "$num_osd" != "$orig_num_osd" ]
          do
              echo "waiting for osd to come back up"
              num_osd=`toolbox ceph osd stat | cut -f3 -d " "`
              sleep 30s
          done
  • teuthology_branch: master
  • verbose: False
  • pcp_grafana_url:
  • priority:
  • user:
  • queue:
  • posted: 2021-11-15 18:25:47
  • started: 2021-11-15 18:28:48
  • updated: 2021-11-15 18:56:45
  • status_class: danger
  • runtime: 0:27:57
  • wait_time: 0:10:03