Nodes: vpm021

Description: rados/singleton-nomsgr/{all/13234.yaml}

Log: http://qa-proxy.ceph.com/teuthology/teuthology-2016-07-13_03:00:03-rados-hammer-distro-basic-vps/312155/teuthology.log

Sentry event: http://sentry.ceph.com/sepia/teuthology/?q=f8bf242b66024d16826f540f48da43e3

Performance graphs: http://pcp.front.sepia.ceph.com:44323/grafana/index.html#/dashboard/script/index.js?time_to=2016-07-13T15%3A48%3A31&time_from=2016-07-13T13%3A39%3A17&hosts=vpm021

Failure Reason:

The Ansible git task failed while refreshing the cached ceph/keys repository on vpm021: a stale index.lock was left behind in /home/teuthworker/.cache/src/keys/.git/, so /usr/bin/git reset --hard origin/master aborted with rc 128 ("Failed to checkout master"). The raw failure record follows:

{'vpm021.front.sepia.ceph.com': {'cmd': '/usr/bin/git reset --hard origin/master', '_ansible_no_log': False, 'stdout': '', 'changed': False, 'invocation': {'module_name': 'git', 'module_args': {'force': True, 'track_submodules': False, 'reference': None, 'dest': '/var/lib/teuthworker/.cache/src/keys', 'verify_commit': False, 'clone': True, 'update': True, 'ssh_opts': None, 'repo': 'https://github.com/ceph/keys', 'bare': False, 'refspec': None, 'executable': None, 'remote': 'origin', 'recursive': True, 'accept_hostkey': False, 'depth': None, 'version': 'master', 'key_file': None}}, 'failed': True, 'stderr': "fatal: Unable to create '/home/teuthworker/.cache/src/keys/.git/index.lock': File exists.\n\nIf no other git process is currently running, this probably means a\ngit process crashed in this repository earlier. Make sure no other git\nprocess is running and remove the file manually to continue.\n", 'rc': 128, '_ansible_delegated_vars': {'ansible_host': 'localhost'}, 'stdout_lines': [], 'msg': 'Failed to checkout master'}}
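
The stderr above spells out the fix: make sure no other git process is running, then remove the lock file by hand. A minimal sketch of that cleanup in Python (the repository path is taken from the error message; availability of pgrep on the node is an assumption):

    #!/usr/bin/env python
    # Sketch only, not part of the teuthology run: clear a stale git index.lock
    # like the one reported above, but only if no git process is still running.
    import os
    import subprocess

    REPO = '/home/teuthworker/.cache/src/keys'   # path from the stderr above
    LOCK = os.path.join(REPO, '.git', 'index.lock')

    def git_running():
        # pgrep exits 0 if at least one process named 'git' exists, 1 otherwise.
        with open(os.devnull, 'wb') as devnull:
            return subprocess.call(['pgrep', '-x', 'git'], stdout=devnull) == 0

    if os.path.exists(LOCK) and not git_running():
        os.remove(LOCK)
        # retry the checkout the ansible task attempted
        subprocess.check_call(['git', 'reset', '--hard', 'origin/master'], cwd=REPO)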

  • log_href: http://qa-proxy.ceph.com/teuthology/teuthology-2016-07-13_03:00:03-rados-hammer-distro-basic-vps/312155/teuthology.log
  • archive_path: /var/lib/teuthworker/archive/teuthology-2016-07-13_03:00:03-rados-hammer-distro-basic-vps/312155
  • description: rados/singleton-nomsgr/{all/13234.yaml}
  • duration: 2:09:25
  • email: ceph-qa@ceph.com
  • failure_reason: {'vpm021.front.sepia.ceph.com': {'cmd': '/usr/bin/git reset --hard origin/master', '_ansible_no_log': False, 'stdout': '', 'changed': False, 'invocation': {'module_name': 'git', 'module_args': {'force': True, 'track_submodules': False, 'reference': None, 'dest': '/var/lib/teuthworker/.cache/src/keys', 'verify_commit': False, 'clone': True, 'update': True, 'ssh_opts': None, 'repo': 'https://github.com/ceph/keys', 'bare': False, 'refspec': None, 'executable': None, 'remote': 'origin', 'recursive': True, 'accept_hostkey': False, 'depth': None, 'version': 'master', 'key_file': None}}, 'failed': True, 'stderr': "fatal: Unable to create '/home/teuthworker/.cache/src/keys/.git/index.lock': File exists.\n\nIf no other git process is currently running, this probably means a\ngit process crashed in this repository earlier. Make sure no other git\nprocess is running and remove the file manually to continue.\n", 'rc': 128, '_ansible_delegated_vars': {'ansible_host': 'localhost'}, 'stdout_lines': [], 'msg': 'Failed to checkout master'}}
  • flavor:
  • job_id: 312155
  • kernel:
    • sha1: distro
    • kdb: True
  • last_in_suite: False
  • machine_type: vps
  • name: teuthology-2016-07-13_03:00:03-rados-hammer-distro-basic-vps
  • nuke_on_error: True
  • os_type: ubuntu
  • os_version:
  • overrides:
    • ceph:
      • log-whitelist:
        • slow request
        • osd_map_cache_size
        • slow request
        • scrub mismatch
        • ScrubResult
        • failed to encode
      • conf:
        • mon:
          • mon warn on legacy crush tunables: False
          • mon min osdmap epochs: 3
          • debug mon: 20
          • debug paxos: 20
          • debug ms: 1
        • osd:
          • debug ms: 1
          • debug journal: 20
          • debug osd: 20
          • osd map cache size: 2
          • osd map max advance: 1
          • debug filestore: 20
      • sha1: 387d5c1ba836833a0cf11ddf9a4fb8690a532878
    • ceph-deploy:
      • conf:
        • client:
          • log file: /var/log/ceph/ceph-$name.$pid.log
        • mon:
          • osd default pool size: 2
          • debug mon: 1
          • debug paxos: 20
          • debug ms: 20
    • workunit:
      • sha1: 387d5c1ba836833a0cf11ddf9a4fb8690a532878
    • install:
      • ceph:
        • sha1: 387d5c1ba836833a0cf11ddf9a4fb8690a532878
    • admin_socket:
      • branch: hammer
  • owner: scheduled_teuthology@teuthology
  • pid:
  • roles:
    • ['mon.a', 'mds.a', 'osd.0', 'osd.1', 'mon.b', 'mon.c', 'osd.2', 'client.0']
  • sentry_event: http://sentry.ceph.com/sepia/teuthology/?q=f8bf242b66024d16826f540f48da43e3
  • status: fail
  • success: False
  • branch: hammer
  • seed:
  • sha1: 387d5c1ba836833a0cf11ddf9a4fb8690a532878
  • subset:
  • suite:
  • suite_branch: hammer
  • suite_path:
  • suite_relpath:
  • suite_repo:
  • suite_sha1:
  • targets:
    • ubuntu@vpm021.front.sepia.ceph.com: ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCbzkcEhVRL8UUswHuwBvqLz0io/v/kflNl0/F4oo2JLCvIEYBbetWhT0gyUSd3DXLvnooTyrGFFbbMiQyUoYZm1r8G9AH9A93sqHAyhh0W6bkLJscW8dKm9OnQ4E4BGuDJr+1xC803KD2aNrWvKzkUo+cB/PALovpbznWUtDJm6+2gQA59+5s9Pxpdz0Zih54IidsSw7yAKUbbqVzLKaeVBwEEncrwZNhGvYjAJHOkN1+Ff8CkfdmvIA5fRb9ai4zC72kOm+lIwLUVa95lIbFm3rHh/wRUgVzXU3EfnhWhijxTBAd5Q6tXn2LVKecr5g+7pKUgF3bNBHUlJfjVeZbP
  • tasks:
    • internal.check_packages:
    • internal.buildpackages_prep:
    • internal.lock_machines:
      • 1
      • vps
    • internal.save_config:
    • internal.check_lock:
    • internal.add_remotes:
    • internal.connect:
    • internal.push_inventory:
    • internal.serialize_remote_roles:
    • internal.check_conflict:
    • internal.check_ceph_data:
    • internal.vm_setup:
    • kernel:
      • sha1: distro
      • kdb: True
    • internal.base:
    • internal.archive_upload:
    • internal.archive:
    • internal.coredump:
    • internal.sudo:
    • internal.syslog:
    • internal.timer:
    • pcp:
    • selinux:
    • ansible.cephlab:
    • clock.check:
    • install:
      • tag: v0.67.10
    • print: **** done installing dumpling
    • ceph:
      • fs: xfs
    • print: **** done ceph
    • full_sequential:
      • ceph_manager.create_pool:
        • args:
          • newpool
        • kwargs:
          • pg_num: 32
      • sleep:
        • duration: 30
      • ceph_manager.wait_for_clean:
      • ceph_manager.kill_osd:
        • kwargs:
          • osd: 0
      • ceph_manager.kill_osd:
        • kwargs:
          • osd: 1
      • ceph_manager.kill_osd:
        • kwargs:
          • osd: 2
      • print: **** done killing osds
      • loop:
        • body:
          • ceph_manager.set_pool_property:
            • args:
              • newpool
              • min_size
              • 2
          • ceph_manager.set_pool_property:
            • args:
              • newpool
              • min_size
              • 1
        • count: 10
      • install.upgrade:
        • mon.a:
          • branch: firefly
      • print: **** done upgrading to firefly
      • ceph.restart:
        • mon.a
        • mon.b
        • mon.c
      • print: **** done upgrading restarting mons
      • loop:
        • body:
          • ceph_manager.set_pool_property:
            • args:
              • newpool
              • min_size
              • 2
          • ceph_manager.set_pool_property:
            • args:
              • newpool
              • min_size
              • 1
        • count: 10
      • sleep:
        • duration: 10
      • install.upgrade:
        • mon.a:
      • print: **** done upgrading to branch
      • ceph.restart:
        • mon.a
        • mon.b
        • mon.c
      • loop:
        • body:
          • ceph_manager.set_pool_property:
            • args:
              • newpool
              • min_size
              • 2
          • ceph_manager.set_pool_property:
            • args:
              • newpool
              • min_size
              • 1
        • count: 10
      • sleep:
        • duration: 10
      • print: **** about to start osds
      • ceph_manager.revive_osd:
        • kwargs:
          • osd: 0
      • ceph_manager.revive_osd:
        • kwargs:
          • osd: 1
      • ceph_manager.revive_osd:
        • kwargs:
          • osd: 2
      • sleep:
        • duration: 30
      • ceph_manager.wait_for_clean:
      • print: **** done!
  • teuthology_branch: master
  • verbose: True
  • pcp_grafana_url: http://pcp.front.sepia.ceph.com:44323/grafana/index.html#/dashboard/script/index.js?time_to=2016-07-13T15%3A48%3A31&time_from=2016-07-13T13%3A39%3A17&hosts=vpm021
  • priority:
  • user:
  • queue:
  • posted: 2016-07-13 10:01:49
  • started: 2016-07-13 13:37:10
  • updated: 2016-07-13 15:53:13
  • status_class: danger
  • runtime: 2:16:03
  • wait_time: 0:06:38
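
For reference, the full_sequential block above creates newpool, kills all three OSDs, and then toggles the pool's min_size between 2 and 1 ten times around each upgrade step (dumpling v0.67.10 -> firefly -> hammer) before reviving the OSDs and waiting for clean. A rough Python approximation of one of those loops, assuming ceph_manager.set_pool_property boils down to the ceph osd pool set CLI (pool name and count are taken from the task list; this is not the teuthology implementation):

    #!/usr/bin/env python
    # Approximation only: toggle min_size on 'newpool' ten times,
    # mirroring the loop body in the task list above.
    import subprocess

    POOL = 'newpool'   # created earlier with pg_num=32

    def set_pool_property(pool, prop, value):
        # assumed equivalent of ceph_manager.set_pool_property
        subprocess.check_call(['ceph', 'osd', 'pool', 'set', pool, prop, str(value)])

    for _ in range(10):   # matches "count: 10" in the task list
        set_pool_property(POOL, 'min_size', 2)
        set_pool_property(POOL, 'min_size', 1)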