From: Brad Hubbard
Date: Mon, 17 Feb 2020 23:14:01 +0000 (+1000)
Subject: task/ceph_ansible: Work with LVM scratch devices
X-Git-Tag: 1.1.0~155^2
X-Git-Url: http://git.apps.os.sepia.ceph.com/?a=commitdiff_plain;h=d015627abe577fcbbc8fb38910e11c5dc672f209;p=teuthology.git

task/ceph_ansible: Work with LVM scratch devices

Signed-off-by: Brad Hubbard
---

diff --git a/teuthology/task/ceph_ansible.py b/teuthology/task/ceph_ansible.py
index d5fe471a82..ef936d6022 100644
--- a/teuthology/task/ceph_ansible.py
+++ b/teuthology/task/ceph_ansible.py
@@ -7,7 +7,7 @@ import yaml
 from teuthology.task import Task
 from tempfile import NamedTemporaryFile
 from teuthology.config import config as teuth_config
-from teuthology.misc import get_scratch_devices
+from teuthology.misc import get_scratch_devices, get_file
 from teuthology import contextutil
 from teuthology.orchestra import run
 from teuthology import misc
@@ -308,7 +308,22 @@ class CephAnsible(Task):
             roles = self.ctx.cluster.remotes[remote]
             dev_needed = len([role for role in roles
                               if role.startswith('osd')])
-            host_vars['devices'] = get_scratch_devices(remote)[0:dev_needed]
+            if teuth_config.get('ceph_ansible') and \
+                    self.ctx.machine_type in teuth_config['ceph_ansible']['has_lvm_scratch_disks']:
+                devices = get_file(remote, "/scratch_devs").split()
+                vols = []
+
+                for dev in devices:
+                    if 'vg_nvme' in dev:
+                        splitpath = dev.split('/')
+                        vol = dict()
+                        vol['data_vg'] = splitpath[2]
+                        vol['data'] = splitpath[3]
+                        vols.append(vol)
+                extra_vars['lvm_volumes'] = vols
+                self.config.update({'vars': extra_vars})
+            else:
+                host_vars['devices'] = get_scratch_devices(remote)[0:dev_needed]
         if 'monitor_interface' not in extra_vars:
             host_vars['monitor_interface'] = remote.interface
         if 'radosgw_interface' not in extra_vars:
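
Note (illustration only, not part of the patch): the new branch maps each LVM scratch device path read from /scratch_devs to a ceph-ansible lvm_volumes entry by splitting the path on '/'. The minimal sketch below shows that mapping in isolation; the /dev/vg_nvme/lv_* paths are assumed sample data, since the real contents of /scratch_devs depend on the test node.

    # Sketch of the path-to-lvm_volumes mapping used in the hunk above.
    # The device paths here are assumed examples, not output of a real run.
    devices = "/dev/vg_nvme/lv_1 /dev/vg_nvme/lv_2".split()

    vols = []
    for dev in devices:
        if 'vg_nvme' in dev:
            # "/dev/vg_nvme/lv_1".split('/') -> ['', 'dev', 'vg_nvme', 'lv_1']
            splitpath = dev.split('/')
            vols.append({'data_vg': splitpath[2],   # volume group, e.g. 'vg_nvme'
                         'data': splitpath[3]})     # logical volume, e.g. 'lv_1'

    print(vols)
    # [{'data_vg': 'vg_nvme', 'data': 'lv_1'}, {'data_vg': 'vg_nvme', 'data': 'lv_2'}]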