Description: krbd/unmap/{ceph/ceph clusters/separate-client conf filestore-xfs kernels/single-major-off tasks/unmap}

Log: http://qa-proxy.ceph.com/teuthology/dis-2020-11-17_23:10:22-krbd-master-distro-basic-smithi/5634282/teuthology.log

Sentry event: https://sentry.ceph.com/organizations/ceph/?query=76d91da5589345adb3d86637551425e1

Failure Reason:

Command failed on smithi122 with status 1: 'CEPH_REF=master CEPH_ID="0" PATH=$PATH:/usr/sbin adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage /home/ubuntu/cephtest/virtualenv/bin/cram -v -- /home/ubuntu/cephtest/archive/cram.client.0/*.t'
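
The failing step is the cram task at the end of the job (see the tasks list below). A minimal sketch of rerunning it by hand on the client node, assuming the run's archive and virtualenv are still in place under /home/ubuntu/cephtest; the adjust-ulimits/ceph-coverage wrapping from the failure line is dropped for brevity:

    # Replay the failing cram invocation on smithi122 (client.0).
    # The *.t glob matches the unmap.t test fetched by the cram task.
    CEPH_REF=master CEPH_ID="0" PATH=$PATH:/usr/sbin \
        /home/ubuntu/cephtest/virtualenv/bin/cram -v -- \
        /home/ubuntu/cephtest/archive/cram.client.0/*.t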

  • log_href: http://qa-proxy.ceph.com/teuthology/dis-2020-11-17_23:10:22-krbd-master-distro-basic-smithi/5634282/teuthology.log
  • archive_path: /home/teuthworker/archive/dis-2020-11-17_23:10:22-krbd-master-distro-basic-smithi/5634282
  • description: krbd/unmap/{ceph/ceph clusters/separate-client conf filestore-xfs kernels/single-major-off tasks/unmap}
  • duration: 0:50:52
  • email: idryomov@gmail.com
  • failure_reason: Command failed on smithi122 with status 1: 'CEPH_REF=master CEPH_ID="0" PATH=$PATH:/usr/sbin adjust-ulimits ceph-coverage /home/ubuntu/cephtest/archive/coverage /home/ubuntu/cephtest/virtualenv/bin/cram -v -- /home/ubuntu/cephtest/archive/cram.client.0/*.t'
  • flavor: basic
  • job_id: 5634282
  • kernel:
    • sha1: distro
    • kdb: True
  • last_in_suite: False
  • machine_type: smithi
  • name: dis-2020-11-17_23:10:22-krbd-master-distro-basic-smithi
  • nuke_on_error: True
  • os_type: ubuntu
  • os_version:
  • overrides:
    • kernel:
      • client:
        • deb: /home/dis/linux-image-5.9.0-rc4-ceph-g2de719647172_5.9.0-rc4-ceph-g2de719647172-1_amd64.deb
        • flavor: basic
        • kdb: True
    • ceph-deploy:
      • fs: xfs
      • filestore: True
      • conf:
        • global:
          • osd pool default size: 2
          • osd pool default pg num: 128
          • osd pool default pgp num: 128
          • osd crush chooseleaf type: 0
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
          • osd default pool size: 2
        • osd:
          • osd sloppy crc: True
          • osd objectstore: filestore
    • workunit:
      • sha1: c2b9152eac72e88c2d045dbe16e2e1230edad3a5
      • branch: master
    • ceph:
      • mon_bind_addrvec: False
      • log-whitelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
      • crush_tunables: bobtail
      • sha1: c2b9152eac72e88c2d045dbe16e2e1230edad3a5
      • fs: xfs
      • mon_bind_msgr2: False
      • conf:
        • global:
          • ms bind msgr2: False
        • mgr:
          • debug ms: 1
          • debug mgr: 20
        • client:
          • rbd default features: 1
        • mon:
          • debug paxos: 20
          • debug mon: 20
          • debug ms: 1
        • osd:
          • debug ms: 20
          • debug journal: 20
          • debug osd: 25
          • osd objectstore: filestore
          • osd sloppy crc: True
          • debug filestore: 20
      • log-ignorelist:
        • \(MDS_ALL_DOWN\)
        • \(MDS_UP_LESS_THAN_MAX\)
    • install:
      • ceph:
        • sha1: c2b9152eac72e88c2d045dbe16e2e1230edad3a5
    • admin_socket:
      • branch: master
  • owner: scheduled_dis@teuthology
  • pid:
  • roles:
    • ['mon.a', 'mgr.x', 'osd.0', 'osd.1', 'osd.2']
    • ['client.0']
  • sentry_event: https://sentry.ceph.com/organizations/ceph/?query=76d91da5589345adb3d86637551425e1
  • status: fail
  • success: False
  • branch: master
  • seed:
  • sha1: c2b9152eac72e88c2d045dbe16e2e1230edad3a5
  • subset:
  • suite:
  • suite_branch: master
  • suite_path:
  • suite_relpath:
  • suite_repo:
  • suite_sha1: c2b9152eac72e88c2d045dbe16e2e1230edad3a5
  • targets:
    • smithi117.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC3Xium/Hsd2mbd/wxgl2BE90ZafzvpsmlbbNz0nRSD9qpCwXYk/3H7mR5k6puz626pzHD//rMO6sl0HorT7yIvrr8Njlpn071385xqjeNeR0kkb2zNn6hB/IzEjH/381eIHzjHCouQfZqUicCucScC1fHPNWhAJNv2ZMWThWwxFOacjLP4ck5usTS3yOdxXSS4sGXpxYolzxcMqfTrIYzdbZa54zT9Bcqd4VIPlFtMQmTJUde3b0gptwnkHLvav8dp3Qz+hQYpAA9Y6vK2yjW8K19MwUJ4XLcQHkW2YWLcxmK9TX+3C0BsXMGcaICNE93rUXM+9ZfVNt9YYtLjNV0B
    • smithi122.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDyBs2Kqlq4FjuASGbgKaGgLMFLTbN5x7zlx/J2szLeZFumamHcKYmP1ldT+ls/n3KX4L/tT/CZhR+cWcj0ym6R1rzziLlyEeDVfaThxeR3QbvVsTsaiATZBSAttQu8oTawciHq+gsUSM3GzWHjuxxAUk5rO92Jpe++ctgnvOg5MwGZZSfOsBmeztBKIcjvy6toB+WMySR4zs4j0QNgkLFleBd6hx3jQRk7clGyGkx6BpsIEKZVyy4uo/zYBSefXUSTL7OFvvyqWRVET3PLgCZTaJlZxu3JR/waXRN6ISrb8H9E0YvxpgYNnVcQFK+2EVnW4XeSJWfqxAuO1h5HEpDt
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.lock_machines:
      • 2
      • smithi
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • console_log:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • sha1: distro
      • kdb: True
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock:
    • install:
    • ceph:
    • exec:
      • client.0:
        • ceph osd getcrushmap -o /dev/stdout | crushtool -d - | sed -e 's/alg straw2/alg straw/g' | crushtool -c /dev/stdin -o /dev/stdout | ceph osd setcrushmap -i /dev/stdin
    • exec:
      • client.0:
        • modprobe -r rbd
        • modprobe --first-time rbd single_major=0
        • grep -q N /sys/module/rbd/parameters/single_major
    • cram:
      • clients:
        • client.0:
          • src/test/cli-integration/rbd/unmap.t
      • branch: master
      • sha1: c2b9152eac72e88c2d045dbe16e2e1230edad3a5
  • teuthology_branch: master
  • verbose: True
  • pcp_grafana_url:
  • priority:
  • user:
  • queue:
  • posted: 2020-11-17 23:10:57
  • started: 2020-11-17 23:24:28
  • updated: 2020-11-18 00:24:29
  • status_class: danger
  • runtime: 1:00:01
  • wait_time: 0:09:09
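
Notes on reproducing outside teuthology: the two exec steps in the tasks list above prepare the cluster and the client before cram runs. Below is a sketch of the same commands with comments added, not a new procedure (the job also pins crush_tunables to bobtail and rbd default features to 1, i.e. layering only, per the overrides above):

    # 1. Rewrite every CRUSH bucket from straw2 to straw, matching the
    #    bobtail-era tunables: dump and decompile the crushmap, edit the
    #    bucket algorithm, recompile, and inject it back.
    ceph osd getcrushmap -o /dev/stdout | crushtool -d - \
        | sed -e 's/alg straw2/alg straw/g' \
        | crushtool -c /dev/stdin -o /dev/stdout \
        | ceph osd setcrushmap -i /dev/stdin

    # 2. Reload the rbd kernel module with single-major device numbering
    #    disabled, then assert the parameter reads back as N.
    modprobe -r rbd
    modprobe --first-time rbd single_major=0
    grep -q N /sys/module/rbd/parameters/single_major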
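
The test itself, src/test/cli-integration/rbd/unmap.t, is a cram test: a transcript of shell commands and their expected output, which cram replays and diffs; a non-zero exit (status 1 here) means at least one command's output diverged from the recorded transcript. For illustration only, a hypothetical fragment in cram's format, not the actual contents of unmap.t:

    Commands are indented two spaces and prefixed with "$ ";
    expected output follows at the same indent, optionally with
    a (glob) annotation for wildcard matching:

      $ sudo rbd map img
      /dev/rbd? (glob)
      $ sudo rbd unmap /dev/rbd0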