
Commit 4b25cd0

Refactored using low-level DB query on volume_metadata
1 parent 58ab6ee commit 4b25cd0

8 files changed: +61 −110 lines changed

cinder/db/api.py

Lines changed: 6 additions & 0 deletions
@@ -281,6 +281,12 @@ def volume_get_all(context, marker=None, limit=None, sort_keys=None,
                                offset=offset)
 
 
+def get_host_by_volume_metadata(key, value, filters=None):
+    """Returns the host with the most volumes matching volume metadata."""
+    return IMPL.get_host_by_volume_metadata(key, value,
+                                            filters=filters)
+
+
 def calculate_resource_count(context, resource_type, filters):
     return IMPL.calculate_resource_count(context, resource_type, filters)
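
The new entry point mirrors the other thin wrappers in cinder/db/api.py: callers go through the IMPL indirection instead of importing the SQLAlchemy layer directly. A minimal usage sketch, assuming a deployment where the Cinder DB is configured; the key, cluster name, and AZ below are the illustrative values used elsewhere in this commit, not required ones:

    from cinder import db

    # Ask the DB layer which backend host currently holds the most volumes
    # tagged with this metadata key/value pair, optionally scoped to an AZ.
    host = db.get_host_by_volume_metadata(
        key='cinder.csi.openstack.org/cluster',
        value='cluster-1',
        filters={'availability_zone': 'az-1'})
    # host is the service-host prefix, e.g. 'volume-vc-a-1', or None if no match.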

cinder/db/sqlalchemy/api.py

Lines changed: 32 additions & 0 deletions
@@ -2176,6 +2176,38 @@ def volume_get_all(context, marker=None, limit=None, sort_keys=None,
     return query.all()
 
 
+def get_host_by_volume_metadata(meta_key, meta_value, filters=None):
+    session = get_session()
+    count_label = func.count().label("n")
+    query = session.query(
+        func.substring_index(models.Volume.host, '@', 1).label("h"),
+        count_label
+    ).join(
+        models.VolumeMetadata,
+        models.VolumeMetadata.volume_id == models.Volume.id
+    ).filter(
+        models.VolumeMetadata.key == meta_key,
+        models.VolumeMetadata.value == meta_value,
+        models.Volume.deleted == 0,
+        models.Volume.host.isnot(None)
+    )
+
+    if filters:
+        az = filters.get('availability_zone')
+        if az:
+            query = query.filter(
+                models.Volume.availability_zone == az)
+
+    query = query.group_by("h")\
+        .order_by(desc(count_label)).limit(1)
+
+    with session.begin():
+        result = query.first()
+        if result:
+            return result[0]
+        return None
+
+
 @require_context
 def get_volume_summary(context, project_only, filters=None):
     """Retrieves all volumes summary.

cinder/objects/volume.py

Lines changed: 0 additions & 13 deletions
@@ -692,16 +692,3 @@ def get_all_active_by_window(cls, context, begin, end):
         expected_attrs = cls._get_expected_attrs(context)
         return base.obj_make_list(context, cls(context), objects.Volume,
                                   volumes, expected_attrs=expected_attrs)
-
-    @classmethod
-    def get_all_by_metadata(cls, context, project_id, metadata, marker=None,
-                            limit=None, sort_keys=None, sort_dirs=None,
-                            filters=None, offset=None):
-        query_filters = {'metadata': metadata}
-        if filters:
-            query_filters.update(filters)
-        volumes = db.volume_get_all_by_project(
-            context, project_id, marker, limit, sort_keys=sort_keys,
-            sort_dirs=sort_dirs, filters=query_filters, offset=offset)
-        return base.obj_make_list(context, cls(context), objects.Volume,
-                                  volumes)

cinder/scheduler/base_filter.py

Lines changed: 0 additions & 6 deletions
@@ -92,7 +92,6 @@ def get_filtered_objects(self, filter_classes, objs,
            each resource.
         """
         list_objs = list(objs)
-        all_objs = list(objs)
         LOG.debug("Starting with %d host(s)", len(list_objs))
         # The 'part_filter_results' list just tracks the number of hosts
         # before and after the filter, unless the filter returns zero
@@ -106,11 +105,6 @@ def get_filtered_objects(self, filter_classes, objs,
             start_count = len(list_objs)
             filter_class = filter_cls()
 
-            # SAP
-            # All available backends are needed in ShardFilter
-            if hasattr(filter_class, 'all_backend_states'):
-                setattr(filter_class, 'all_backend_states', all_objs)
-
             if filter_class.run_filter_for_index(index):
                 objs = filter_class.filter_all(list_objs, filter_properties)
                 if objs is None:

cinder/scheduler/filters/shard_filter.py

Lines changed: 16 additions & 36 deletions
@@ -19,8 +19,7 @@
 from oslo_config import cfg
 from oslo_log import log as logging
 
-from cinder import context as cinder_context
-from cinder.objects.volume import VolumeList
+from cinder import db
 from cinder.scheduler import filters
 from cinder.service_auth import SERVICE_USER_GROUP
 from cinder import utils as cinder_utils
@@ -66,9 +65,6 @@ class ShardFilter(filters.BaseBackendFilter):
     _CAPABILITY_NAME = 'vcenter-shard'
     _ALL_SHARDS = "sharding_enabled"
 
-    # To be populated by the host manager
-    all_backend_states = []
-
     def _get_keystone_adapter(self):
         """Return a keystone adapter
 
@@ -151,11 +147,7 @@ def _get_shards(self, project_id):
         return self._PROJECT_SHARD_CACHE.get(project_id)
 
     def _is_vmware(self, backend_state):
-        # We only need the shard filter for vmware based pools
         if backend_state.vendor_name != 'VMware':
-            LOG.info(
-                "Shard Filter ignoring backend %s as it's not "
-                "vmware based driver", backend_state.backend_id)
             return False
         return True
 
@@ -172,8 +164,10 @@ def _filter_by_k8s_cluster(self, backends, filter_properties):
         project_id = vol_props.get('project_id', None)
         metadata = vol_props.get('metadata', {})
 
+        is_vmware = any(self._is_vmware(b) for b in backends)
         if (not metadata or not project_id
-                or spec.get('snapshot_id')):
+                or spec.get('snapshot_id')
+                or not is_vmware):
             return backends
 
         cluster_name = metadata.get(CSI_CLUSTER_METADATA_KEY)
@@ -186,42 +180,28 @@ def _filter_by_k8s_cluster(self, backends, filter_properties):
         if availability_zone:
             query_filters = {'availability_zone': availability_zone}
 
-        query_metadata = {CSI_CLUSTER_METADATA_KEY: cluster_name}
-        k8s_volumes = VolumeList.get_all_by_metadata(
-            cinder_context.get_admin_context(),
-            project_id, query_metadata,
+        k8s_host = db.get_host_by_volume_metadata(
+            key=CSI_CLUSTER_METADATA_KEY,
+            value=cluster_name,
             filters=query_filters)
 
-        if not k8s_volumes:
-            return backends
-
-        k8s_hosts = set(volume_utils.extract_host(v.host, 'host')
-                        for v in k8s_volumes
-                        if v.id != spec.get('volume_id') and v.host)
-        if not k8s_hosts:
+        if not k8s_host:
             return backends
 
-        def _backend_shards(backend_state):
-            cap = backend_state.capabilities.get(self._CAPABILITY_NAME)
-            return cap.split(',') if cap else []
-
-        hosts_shards_map = {
-            volume_utils.extract_host(bs.host, 'host'):
-                _backend_shards(bs)
-            for bs in self.all_backend_states}
-
-        k8s_shards = set()
-        for host in k8s_hosts:
-            shards = hosts_shards_map[host]
-            k8s_shards.update(shards)
-
         return [
             b for b in backends if
             (not self._is_vmware(b) or
-             set(_backend_shards(b)) & k8s_shards)
+             volume_utils.extract_host(b.host, 'host') == k8s_host)
         ]
 
     def _backend_passes(self, backend_state, filter_properties):
+        # We only need the shard filter for vmware based pools
+        if not self._is_vmware(backend_state):
+            LOG.info(
+                "Shard Filter ignoring backend %s as it's not "
+                "vmware based driver", backend_state.backend_id)
+            return True
+
         spec = filter_properties.get('request_spec', {})
         vol = spec.get('volume_properties', {})
         project_id = vol.get('project_id', None)
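
With the per-volume scan gone, _filter_by_k8s_cluster reduces to one DB call plus a host comparison: non-VMware backends pass through untouched, and VMware backends are kept only when their host (the part before '@') matches the host returned by get_host_by_volume_metadata. A standalone sketch of that comparison, using stand-in classes rather than the real BackendState and volume_utils helpers:

    from dataclasses import dataclass

    @dataclass
    class FakeBackend:          # stand-in for the scheduler's BackendState
        host: str               # e.g. 'volume-vc-a-0@backend#pool1'
        vendor_name: str = 'VMware'

    def extract_host(host):
        """Simplified volume_utils.extract_host(host, 'host')."""
        return host.split('@', 1)[0]

    def filter_by_k8s_host(backends, k8s_host):
        if not k8s_host:
            return backends
        return [b for b in backends
                if b.vendor_name != 'VMware'          # non-VMware: not sharded
                or extract_host(b.host) == k8s_host]  # VMware: must match

    backends = [FakeBackend('volume-vc-a-0@backend#pool1'),
                FakeBackend('volume-vc-a-1@backend#pool3')]
    print(filter_by_k8s_host(backends, 'volume-vc-a-1'))  # keeps only vc-a-1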

cinder/tests/unit/objects/test_volume.py

Lines changed: 0 additions & 21 deletions
@@ -694,24 +694,3 @@ def test_populate_consistencygroup(self, mock_db_grp_create):
         volume.populate_consistencygroup()
         self.assertEqual(volume.group_id, volume.consistencygroup_id)
         self.assertEqual(volume.group.id, volume.consistencygroup.id)
-
-    @mock.patch('cinder.db.volume_get_all_by_project')
-    def test_get_by_metadata(self, get_all_by_project):
-        db_volume = fake_volume.fake_db_volume()
-        get_all_by_project.return_value = [db_volume]
-
-        volumes = objects.VolumeList.get_all_by_metadata(
-            self.context, mock.sentinel.project_id, mock.sentinel.metadata,
-            mock.sentinel.marker, mock.sentinel.limit,
-            mock.sentinel.sorted_keys, mock.sentinel.sorted_dirs)
-
-        self.assertEqual(1, len(volumes))
-        TestVolume._compare(self, db_volume, volumes[0])
-
-        get_all_by_project.assert_called_once_with(
-            self.context, mock.sentinel.project_id,
-            mock.sentinel.marker, mock.sentinel.limit,
-            sort_keys=mock.sentinel.sorted_keys,
-            sort_dirs=mock.sentinel.sorted_dirs,
-            filters={'metadata': mock.sentinel.metadata},
-            offset=None)

cinder/tests/unit/scheduler/test_base_filter.py

Lines changed: 0 additions & 23 deletions
@@ -90,15 +90,6 @@ class FakeFilter5(BaseFakeFilter):
     pass
 
 
-class FakeFilterAllBackends(BaseFakeFilter):
-    """Derives from BaseFakeFilter but has no entry point.
-
-    Should not be included.
-    """
-    all_backend_states = None
-    pass
-
-
 class FilterA(base_filter.BaseFilter):
     def filter_all(self, list_objs, filter_properties):
         # return all but the first object
@@ -181,17 +172,3 @@ def test_get_filtered_objects_with_filter_run_once(self):
         result = self._get_filtered_objects(filter_classes, index=2)
         self.assertEqual(filter_objs_expected, result)
         self.assertEqual(1, fake5_filter_all.call_count)
-
-    @mock.patch.object(FakeFilterAllBackends, 'all_backend_states',
-                       new_callable=mock.PropertyMock)
-    @mock.patch.object(FakeFilterAllBackends, 'filter_all')
-    def test_get_filtered_objects_with_all_backend_states(self, filter_all,
-                                                          all_backends):
-        filter_objs_expected = [1, 2, 3, 4]
-        filter_classes = [FakeFilterAllBackends]
-        filter_all.return_value = filter_objs_expected
-        self._get_filtered_objects(filter_classes)
-        all_backends.assert_has_calls([
-            mock.call(),
-            mock.call(filter_objs_expected)
-        ])

cinder/tests/unit/scheduler/test_shard_filter.py

Lines changed: 7 additions & 11 deletions
@@ -16,7 +16,6 @@
 
 from cinder import context
 from cinder.tests.unit import fake_constants
-from cinder.tests.unit import fake_volume
 from cinder.tests.unit.scheduler import fakes
 from cinder.tests.unit.scheduler.test_host_filters \
     import BackendFiltersTestCase
@@ -229,9 +228,10 @@ def test_noop_for_find_backend_by_connector_without_hint(self):
         self.backend_passes(host, self.props)
 
     @mock.patch('cinder.context.get_admin_context')
-    @mock.patch('cinder.objects.volume.VolumeList.get_all_by_metadata')
-    def test_same_shard_for_k8s_volumes(self, mock_get_all,
+    @mock.patch('cinder.db.get_host_by_volume_metadata')
+    def test_same_shard_for_k8s_volumes(self, mock_get_hosts,
                                         mock_get_context):
+        CSI_KEY = 'cinder.csi.openstack.org/cluster'
         all_backends = [
             fakes.FakeBackendState(
                 'volume-vc-a-0@backend#pool1',
@@ -244,13 +244,9 @@ def test_same_shard_for_k8s_volumes(self, mock_get_all,
         ]
         mock_get_context.return_value = self.context
         fake_meta = {
-            'cinder.csi.openstack.org/cluster': 'cluster-1',
+            CSI_KEY: 'cluster-1',
         }
-        mock_get_all.return_value = [
-            fake_volume.fake_volume_obj(self.context, metadata=fake_meta,
-                                        host='volume-vc-a-1@backend#pool3')
-        ]
-        self.filt_cls.all_backend_states = all_backends
+        mock_get_hosts.return_value = 'volume-vc-a-1'
         self.filt_cls._PROJECT_SHARD_CACHE['baz'] = ['sharding_enabled',
                                                      'vc-a-1']
         filter_props = dict(self.props)
@@ -264,8 +260,8 @@ def test_same_shard_for_k8s_volumes(self, mock_get_all,
 
         filtered = self.filt_cls.filter_all(all_backends, filter_props)
 
-        mock_get_all.assert_called_once_with(
-            self.context, 'baz', fake_meta, filters={
+        mock_get_hosts.assert_called_once_with(
+            key=CSI_KEY, value=fake_meta[CSI_KEY], filters={
                 'availability_zone': 'az-1'
             })
         self.assertEqual(len(filtered), 1)
