git-server-git.apps.pok.os.sepia.ceph.com Git - ceph.git/commitdiff
mgr/dashboard: reduce method cyclomatic complexity
authorVallari Agrawal <val.agl002@gmail.com>
Sun, 13 Feb 2022 07:36:01 +0000 (13:06 +0530)
committerVallari Agrawal <val.agl002@gmail.com>
Wed, 16 Mar 2022 23:18:38 +0000 (04:48 +0530)
resolves https://tracker.ceph.com/issues/41221

Signed-off-by: Vallari Agrawal <val.agl002@gmail.com>
src/pybind/mgr/dashboard/controllers/_base_controller.py
src/pybind/mgr/dashboard/controllers/_task.py
src/pybind/mgr/dashboard/controllers/cephfs.py
src/pybind/mgr/dashboard/controllers/docs.py
src/pybind/mgr/dashboard/controllers/osd.py
src/pybind/mgr/dashboard/controllers/pool.py
src/pybind/mgr/dashboard/controllers/rbd.py
src/pybind/mgr/dashboard/services/ceph_service.py
src/pybind/mgr/dashboard/services/rbd.py
src/pybind/mgr/dashboard/services/rgw_client.py
src/pybind/mgr/dashboard/services/tcmu_service.py

index 4fcc8442f1bc82dcd0cb3047ab1715c224eef58d..ac7bc4a6b022cae8a2c6ffdfb0f36b3eec9343db 100644 (file)
@@ -231,6 +231,16 @@ class BaseController:
                 result.append(cls.Endpoint(cls, func))
         return result
 
+    @staticmethod
+    def get_client_version():
+        try:
+            client_version = APIVersion.from_mime_type(
+                cherrypy.request.headers['Accept'])
+        except Exception:
+            raise cherrypy.HTTPError(
+                415, "Unable to find version in request header")
+        return client_version
+
     @staticmethod
     def _request_wrapper(func, method, json_response, xml,  # pylint: disable=unused-argument
                          version: Optional[APIVersion]):
@@ -247,12 +257,7 @@ class BaseController:
             kwargs.update(params)
 
             if version is not None:
-                try:
-                    client_version = APIVersion.from_mime_type(
-                        cherrypy.request.headers['Accept'])
-                except Exception:
-                    raise cherrypy.HTTPError(
-                        415, "Unable to find version in request header")
+                client_version = BaseController.get_client_version()
 
                 if version.supports(client_version):
                     ret = func(*args, **kwargs)
@@ -265,21 +270,17 @@ class BaseController:
 
             else:
                 ret = func(*args, **kwargs)
+
             if isinstance(ret, bytes):
                 ret = ret.decode('utf-8')
+
             if xml:
-                if version:
-                    cherrypy.response.headers['Content-Type'] = \
-                        'application/vnd.ceph.api.v{}+xml'.format(version)
-                else:
-                    cherrypy.response.headers['Content-Type'] = 'application/xml'
+                cherrypy.response.headers['Content-Type'] = (version.to_mime_type(subtype='xml')
+                                                             if version else 'application/xml')
                 return ret.encode('utf8')
             if json_response:
-                if version:
-                    cherrypy.response.headers['Content-Type'] = \
-                        'application/vnd.ceph.api.v{}+json'.format(version)
-                else:
-                    cherrypy.response.headers['Content-Type'] = 'application/json'
+                cherrypy.response.headers['Content-Type'] = (version.to_mime_type(subtype='json')
+                                                             if version else 'application/json')
                 ret = json.dumps(ret).encode('utf8')
             return ret
         return inner
index 33399e8e0f6edf7dd4f34bd9235d039f19ffbaca..f03a1ff67dc6d247440956cb34f0ee399762b432 100644 (file)
@@ -37,27 +37,32 @@ class Task:
 
         return arg_map
 
+    def _get_metadata(self, arg_map):
+        metadata = {}
+        for k, v in self.metadata.items():
+            if isinstance(v, str) and v and v[0] == '{' and v[-1] == '}':
+                param = v[1:-1]
+                try:
+                    pos = int(param)
+                    metadata[k] = arg_map[pos]
+                except ValueError:
+                    if param.find('.') == -1:
+                        metadata[k] = arg_map[param]
+                    else:
+                        path = param.split('.')
+                        metadata[k] = arg_map[path[0]]
+                        for i in range(1, len(path)):
+                            metadata[k] = metadata[k][path[i]]
+            else:
+                metadata[k] = v
+        return metadata
+
     def __call__(self, func):
         @wraps(func)
         def wrapper(*args, **kwargs):
             arg_map = self._gen_arg_map(func, args, kwargs)
-            metadata = {}
-            for k, v in self.metadata.items():
-                if isinstance(v, str) and v and v[0] == '{' and v[-1] == '}':
-                    param = v[1:-1]
-                    try:
-                        pos = int(param)
-                        metadata[k] = arg_map[pos]
-                    except ValueError:
-                        if param.find('.') == -1:
-                            metadata[k] = arg_map[param]
-                        else:
-                            path = param.split('.')
-                            metadata[k] = arg_map[path[0]]
-                            for i in range(1, len(path)):
-                                metadata[k] = metadata[k][path[i]]
-                else:
-                    metadata[k] = v
+            metadata = self._get_metadata(arg_map)
+
             task = TaskManager.run(self.name, metadata, func, args, kwargs,
                                    exception_handler=self.exception_handler)
             try:
index d32cbe4bcfab6b307b285c5e865126b5f685f884..108be0f4e98577549c989f85549c17b45352a37c 100644 (file)
@@ -130,6 +130,42 @@ class CephFS(RESTController):
             return
         mds_versions[metadata.get('ceph_version', 'unknown')].append(metadata_key)
 
+    def _find_standby_replays(self, mdsmap_info, rank_table):
+        # pylint: disable=unused-variable
+        for gid_str, daemon_info in mdsmap_info.items():
+            if daemon_info['state'] != "up:standby-replay":
+                continue
+
+            inos = mgr.get_latest("mds", daemon_info['name'], "mds_mem.ino")
+            dns = mgr.get_latest("mds", daemon_info['name'], "mds_mem.dn")
+            dirs = mgr.get_latest("mds", daemon_info['name'], "mds_mem.dir")
+            caps = mgr.get_latest("mds", daemon_info['name'], "mds_mem.cap")
+
+            activity = CephService.get_rate(
+                "mds", daemon_info['name'], "mds_log.replay")
+
+            rank_table.append(
+                {
+                    "rank": "{0}-s".format(daemon_info['rank']),
+                    "state": "standby-replay",
+                    "mds": daemon_info['name'],
+                    "activity": activity,
+                    "dns": dns,
+                    "inos": inos,
+                    "dirs": dirs,
+                    "caps": caps
+                }
+            )
+
+    def get_standby_table(self, standbys, mds_versions):
+        standby_table = []
+        for standby in standbys:
+            self._append_mds_metadata(mds_versions, standby['name'])
+            standby_table.append({
+                'name': standby['name']
+            })
+        return standby_table
+
     # pylint: disable=too-many-statements,too-many-branches
     def fs_status(self, fs_id):
         mds_versions: dict = defaultdict(list)
@@ -161,12 +197,9 @@ class CephFS(RESTController):
                 dirs = mgr.get_latest("mds", info['name'], "mds_mem.dir")
                 caps = mgr.get_latest("mds", info['name'], "mds_mem.cap")
 
-                if rank == 0:
-                    client_count = mgr.get_latest("mds", info['name'],
-                                                  "mds_sessions.session_count")
-                elif client_count == 0:
-                    # In case rank 0 was down, look at another rank's
-                    # sessionmap to get an indication of clients.
+                # In case rank 0 was down, look at another rank's
+                # sessionmap to get an indication of clients.
+                if rank == 0 or client_count == 0:
                     client_count = mgr.get_latest("mds", info['name'],
                                                   "mds_sessions.session_count")
 
@@ -214,32 +247,7 @@ class CephFS(RESTController):
                     }
                 )
 
-        # Find the standby replays
-        # pylint: disable=unused-variable
-        for gid_str, daemon_info in mdsmap['info'].items():
-            if daemon_info['state'] != "up:standby-replay":
-                continue
-
-            inos = mgr.get_latest("mds", daemon_info['name'], "mds_mem.ino")
-            dns = mgr.get_latest("mds", daemon_info['name'], "mds_mem.dn")
-            dirs = mgr.get_latest("mds", daemon_info['name'], "mds_mem.dir")
-            caps = mgr.get_latest("mds", daemon_info['name'], "mds_mem.cap")
-
-            activity = CephService.get_rate(
-                "mds", daemon_info['name'], "mds_log.replay")
-
-            rank_table.append(
-                {
-                    "rank": "{0}-s".format(daemon_info['rank']),
-                    "state": "standby-replay",
-                    "mds": daemon_info['name'],
-                    "activity": activity,
-                    "dns": dns,
-                    "inos": inos,
-                    "dirs": dirs,
-                    "caps": caps
-                }
-            )
+        self._find_standby_replays(mdsmap['info'], rank_table)
 
         df = mgr.get("df")
         pool_stats = {p['id']: p['stats'] for p in df['pools']}
@@ -259,12 +267,7 @@ class CephFS(RESTController):
                 "avail": stats['max_avail']
             })
 
-        standby_table = []
-        for standby in fsmap['standbys']:
-            self._append_mds_metadata(mds_versions, standby['name'])
-            standby_table.append({
-                'name': standby['name']
-            })
+        standby_table = self.get_standby_table(fsmap['standbys'], mds_versions)
 
         return {
             "cephfs": {
index ab9f5687b532ba33424d12233d5e19bfde8cf12e..d5c71383e340b3b629c217bec3e319c57e037226 100644 (file)
@@ -267,6 +267,51 @@ class Docs(BaseController):
 
         return parameters
 
+    @staticmethod
+    def _process_func_attr(func):
+        summary = ''
+        version = None
+        response = {}
+        p_info = []
+
+        if hasattr(func, '__method_map_method__'):
+            version = func.__method_map_method__['version']
+        elif hasattr(func, '__resource_method__'):
+            version = func.__resource_method__['version']
+        elif hasattr(func, '__collection_method__'):
+            version = func.__collection_method__['version']
+
+        if hasattr(func, 'doc_info'):
+            if func.doc_info['summary']:
+                summary = func.doc_info['summary']
+            response = func.doc_info['response']
+            p_info = func.doc_info['parameters']
+
+        return summary, version, response, p_info
+
+    @classmethod
+    def _get_params(cls, endpoint, para_info):
+        params = []
+
+        def extend_params(endpoint_params, param_name):
+            if endpoint_params:
+                params.extend(
+                    cls._gen_params(
+                        cls._add_param_info(endpoint_params, para_info), param_name))
+
+        extend_params(endpoint.path_params, 'path')
+        extend_params(endpoint.query_params, 'query')
+        return params
+
+    @classmethod
+    def set_request_body_param(cls, endpoint_param, method, methods, p_info):
+        if endpoint_param:
+            params_info = cls._add_param_info(endpoint_param, p_info)
+            methods[method.lower()]['requestBody'] = {
+                'content': {
+                    'application/json': {
+                        'schema': cls._gen_schema_for_content(params_info)}}}
+
     @classmethod
     def gen_paths(cls, all_endpoints):
         # pylint: disable=R0912
@@ -287,34 +332,8 @@ class Docs(BaseController):
                 method = endpoint.method
                 func = endpoint.func
 
-                summary = ''
-                version = None
-                resp = {}
-                p_info = []
-
-                if hasattr(func, '__method_map_method__'):
-                    version = func.__method_map_method__['version']
-
-                elif hasattr(func, '__resource_method__'):
-                    version = func.__resource_method__['version']
-
-                elif hasattr(func, '__collection_method__'):
-                    version = func.__collection_method__['version']
-
-                if hasattr(func, 'doc_info'):
-                    if func.doc_info['summary']:
-                        summary = func.doc_info['summary']
-                    resp = func.doc_info['response']
-                    p_info = func.doc_info['parameters']
-                params = []
-                if endpoint.path_params:
-                    params.extend(
-                        cls._gen_params(
-                            cls._add_param_info(endpoint.path_params, p_info), 'path'))
-                if endpoint.query_params:
-                    params.extend(
-                        cls._gen_params(
-                            cls._add_param_info(endpoint.query_params, p_info), 'query'))
+                summary, version, resp, p_info = cls._process_func_attr(func)
+                params = cls._get_params(endpoint, p_info)
 
                 methods[method.lower()] = {
                     'tags': [cls._get_tag(endpoint)],
@@ -326,19 +345,8 @@ class Docs(BaseController):
                     methods[method.lower()]['summary'] = summary
 
                 if method.lower() in ['post', 'put']:
-                    if endpoint.body_params:
-                        body_params = cls._add_param_info(endpoint.body_params, p_info)
-                        methods[method.lower()]['requestBody'] = {
-                            'content': {
-                                'application/json': {
-                                    'schema': cls._gen_schema_for_content(body_params)}}}
-
-                    if endpoint.query_params:
-                        query_params = cls._add_param_info(endpoint.query_params, p_info)
-                        methods[method.lower()]['requestBody'] = {
-                            'content': {
-                                'application/json': {
-                                    'schema': cls._gen_schema_for_content(query_params)}}}
+                    cls.set_request_body_param(endpoint.body_params, method, methods, p_info)
+                    cls.set_request_body_param(endpoint.query_params, method, methods, p_info)
 
                 if endpoint.is_secure:
                     methods[method.lower()]['security'] = [{'jwt': []}]
index ceebb5acdafbfde4129831fdbe34ee64deddff6d..85633087c97b0d8fe85227305b252c35ba141c6a 100644 (file)
@@ -83,17 +83,21 @@ class Osd(RESTController):
             osd_spec = str(osd_id)
             if 'osd' not in osd:
                 continue  # pragma: no cover - simple early continue
-            for stat in ['osd.op_w', 'osd.op_in_bytes', 'osd.op_r', 'osd.op_out_bytes']:
-                prop = stat.split('.')[1]
-                rates = CephService.get_rates('osd', osd_spec, stat)
-                osd['stats'][prop] = get_most_recent_rate(rates)
-                osd['stats_history'][prop] = rates
-            # Gauge stats
-            for stat in ['osd.numpg', 'osd.stat_bytes', 'osd.stat_bytes_used']:
-                osd['stats'][stat.split('.')[1]] = mgr.get_latest('osd', osd_spec, stat)
+            self.gauge_stats(osd, osd_spec)
             osd['operational_status'] = self._get_operational_status(osd_id, removing_osd_ids)
         return list(osds.values())
 
+    @staticmethod
+    def gauge_stats(osd, osd_spec):
+        for stat in ['osd.op_w', 'osd.op_in_bytes', 'osd.op_r', 'osd.op_out_bytes']:
+            prop = stat.split('.')[1]
+            rates = CephService.get_rates('osd', osd_spec, stat)
+            osd['stats'][prop] = get_most_recent_rate(rates)
+            osd['stats_history'][prop] = rates
+            # Gauge stats
+        for stat in ['osd.numpg', 'osd.stat_bytes', 'osd.stat_bytes_used']:
+            osd['stats'][stat.split('.')[1]] = mgr.get_latest('osd', osd_spec, stat)
+
     @RESTController.Collection('GET', version=APIVersion.EXPERIMENTAL)
     @ReadPermission
     def settings(self):
index 386d584404c88ce64ab781c043eecfcc7db572cf..269abfbc8ab7f867c49b05a8080e3a67262fe3fb 100644 (file)
@@ -179,7 +179,6 @@ class Pool(RESTController):
         self._wait_for_pgs(pool)
 
     def _set_pool_values(self, pool, application_metadata, flags, update_existing, kwargs):
-        update_name = False
         current_pool = self._get(pool)
         if update_existing and kwargs.get('compression_mode') == 'unset':
             self._prepare_compression_removal(current_pool.get('options'), kwargs)
@@ -187,29 +186,32 @@ class Pool(RESTController):
             CephService.send_command('mon', 'osd pool set', pool=pool, var='allow_ec_overwrites',
                                      val='true')
         if application_metadata is not None:
-            def set_app(what, app):
-                CephService.send_command('mon', 'osd pool application ' + what, pool=pool, app=app,
-                                         yes_i_really_mean_it=True)
+            def set_app(app_metadata, set_app_what):
+                for app in app_metadata:
+                    CephService.send_command('mon', 'osd pool application ' + set_app_what,
+                                             pool=pool, app=app, yes_i_really_mean_it=True)
+
             if update_existing:
                 original_app_metadata = set(
                     cast(Iterable[Any], current_pool.get('application_metadata')))
             else:
                 original_app_metadata = set()
 
-            for app in original_app_metadata - set(application_metadata):
-                set_app('disable', app)
-            for app in set(application_metadata) - original_app_metadata:
-                set_app('enable', app)
-
-        def set_key(key, value):
-            CephService.send_command('mon', 'osd pool set', pool=pool, var=key, val=str(value))
+            set_app(original_app_metadata - set(application_metadata), 'disable')
+            set_app(set(application_metadata) - original_app_metadata, 'enable')
 
         quotas = {}
         quotas['max_objects'] = kwargs.pop('quota_max_objects', None)
         quotas['max_bytes'] = kwargs.pop('quota_max_bytes', None)
         self._set_quotas(pool, quotas)
+        self._set_pool_keys(pool, kwargs)
 
-        for key, value in kwargs.items():
+    def _set_pool_keys(self, pool, pool_items):
+        def set_key(key, value):
+            CephService.send_command('mon', 'osd pool set', pool=pool, var=key, val=str(value))
+
+        update_name = False
+        for key, value in pool_items.items():
             if key == 'pool':
                 update_name = True
                 destpool = value
index 252ddfc61c1ba36845a2c38f5e6ee821027966f6..066b235214721445f21e35a034c0cfe568a1f8e0 100644 (file)
@@ -165,17 +165,17 @@ class Rbd(RESTController):
                 # check disabled features
                 _sort_features(curr_features, enable=False)
                 for feature in curr_features:
-                    if feature not in features and feature in self.ALLOW_DISABLE_FEATURES:
-                        if feature not in format_bitmask(image.features()):
-                            continue
+                    if (feature not in features
+                       and feature in self.ALLOW_DISABLE_FEATURES
+                       and feature in format_bitmask(image.features())):
                         f_bitmask = format_features([feature])
                         image.update_features(f_bitmask, False)
                 # check enabled features
                 _sort_features(features)
                 for feature in features:
-                    if feature not in curr_features and feature in self.ALLOW_ENABLE_FEATURES:
-                        if feature in format_bitmask(image.features()):
-                            continue
+                    if (feature not in curr_features
+                       and feature in self.ALLOW_ENABLE_FEATURES
+                       and feature not in format_bitmask(image.features())):
                         f_bitmask = format_features([feature])
                         image.update_features(f_bitmask, True)
 
index b261dc6b02b0c8d879939500e7933e63a12d7304..233a5736dd845c9c58e4c654a7ecbda159e4cae7 100644 (file)
@@ -270,11 +270,9 @@ class CephService(object):
                         continue
                 else:
                     dev_smart_data = {}
-                for dev_id, dev_data in dev_smart_data.items():
-                    if 'error' in dev_data:
-                        logger.warning(
-                            '[SMART] Error retrieving smartctl data for device ID "%s": %s',
-                            dev_id, dev_data)
+
+                CephService.log_dev_data_error(dev_smart_data)
+
                 break
 
             return dev_smart_data
@@ -282,6 +280,14 @@ class CephService(object):
                        device['devid'])
         return {}
 
+    @staticmethod
+    def log_dev_data_error(dev_smart_data):
+        for dev_id, dev_data in dev_smart_data.items():
+            if 'error' in dev_data:
+                logger.warning(
+                    '[SMART] Error retrieving smartctl data for device ID "%s": %s',
+                    dev_id, dev_data)
+
     @staticmethod
     def get_devices_by_host(hostname):
         # type: (str) -> dict
index 4506938abc358aca1a5f7e14e6367f5c6c1b06fc..039482ddd87256132999860ba4d15070c2891a14 100644 (file)
@@ -281,11 +281,7 @@ class RbdService(object):
                 data_pool_name = None
             stat['data_pool'] = data_pool_name
 
-            try:
-                stat['parent'] = img.get_parent_image_spec()
-            except rbd.ImageNotFound:
-                # no parent image
-                stat['parent'] = None
+            stat['parent'] = cls._rbd_image_stat_parent(img)
 
             # snapshots
             stat['snapshots'] = []
@@ -330,6 +326,16 @@ class RbdService(object):
 
             return stat
 
+    @classmethod
+    def _rbd_image_stat_parent(cls, img):
+        stat_parent = None
+        try:
+            stat_parent = img.get_parent_image_spec()
+        except rbd.ImageNotFound:
+            # no parent image
+            stat_parent = None
+        return stat_parent
+
     @classmethod
     def _rbd_image_refs(cls, ioctx):
         rbd_inst = rbd.RBD()
index 1c1acddd69eb6f5e2dd06cacb9bbe17c99b271d3..d7cff2fa0557eba0ae6ffca571fc59cf3a21e6fa 100644 (file)
@@ -706,27 +706,8 @@ class RgwClient(RestClient):
         """
         # pylint: disable=unused-argument
 
-        # Do some validations.
-        try:
-            retention_period_days = int(retention_period_days) if retention_period_days else 0
-            retention_period_years = int(retention_period_years) if retention_period_years else 0
-            if retention_period_days < 0 or retention_period_years < 0:
-                raise ValueError
-        except (TypeError, ValueError):
-            msg = "Retention period must be a positive integer."
-            raise DashboardException(msg=msg, component='rgw')
-        if retention_period_days and retention_period_years:
-            # https://docs.aws.amazon.com/AmazonS3/latest/API/archive-RESTBucketPUTObjectLockConfiguration.html
-            msg = "Retention period requires either Days or Years. "\
-                "You can't specify both at the same time."
-            raise DashboardException(msg=msg, component='rgw')
-        if not retention_period_days and not retention_period_years:
-            msg = "Retention period requires either Days or Years. "\
-                "You must specify at least one."
-            raise DashboardException(msg=msg, component='rgw')
-        if not isinstance(mode, str) or mode.upper() not in ['COMPLIANCE', 'GOVERNANCE']:
-            msg = "Retention mode must be either COMPLIANCE or GOVERNANCE."
-            raise DashboardException(msg=msg, component='rgw')
+        retention_period_days, retention_period_years = self.perform_validations(
+            retention_period_days, retention_period_years, mode)
 
         # Generate the XML data like this:
         # <ObjectLockConfiguration>
@@ -761,3 +742,26 @@ class RgwClient(RestClient):
             _ = request(data=data)  # type: ignore
         except RequestException as e:
             raise DashboardException(msg=str(e), component='rgw')
+
+    def perform_validations(self, retention_period_days, retention_period_years, mode):
+        try:
+            retention_period_days = int(retention_period_days) if retention_period_days else 0
+            retention_period_years = int(retention_period_years) if retention_period_years else 0
+            if retention_period_days < 0 or retention_period_years < 0:
+                raise ValueError
+        except (TypeError, ValueError):
+            msg = "Retention period must be a positive integer."
+            raise DashboardException(msg=msg, component='rgw')
+        if retention_period_days and retention_period_years:
+            # https://docs.aws.amazon.com/AmazonS3/latest/API/archive-RESTBucketPUTObjectLockConfiguration.html
+            msg = "Retention period requires either Days or Years. "\
+                "You can't specify both at the same time."
+            raise DashboardException(msg=msg, component='rgw')
+        if not retention_period_days and not retention_period_years:
+            msg = "Retention period requires either Days or Years. "\
+                "You must specify at least one."
+            raise DashboardException(msg=msg, component='rgw')
+        if not isinstance(mode, str) or mode.upper() not in ['COMPLIANCE', 'GOVERNANCE']:
+            msg = "Retention mode must be either COMPLIANCE or GOVERNANCE."
+            raise DashboardException(msg=msg, component='rgw')
+        return retention_period_days, retention_period_years
index aceeaff3de042097a8f7fc10d6c477bcaf578ea9..9defa2f18e630a53ce37fa561343125d42829160 100644 (file)
@@ -83,16 +83,7 @@ class TcmuService(object):
 
         # clear up races w/ tcmu-runner clients that haven't detected
         # loss of optimized path
-        for image in images.values():
-            optimized_daemon = image.get('optimized_daemon', None)
-            if optimized_daemon:
-                for daemon_name in image['optimized_paths']:
-                    if daemon_name != optimized_daemon:
-                        daemon = daemons[daemon_name]
-                        daemon['optimized_paths'] -= 1
-                        daemon['non_optimized_paths'] += 1
-                        image['non_optimized_paths'].append(daemon_name)
-                image['optimized_paths'] = [optimized_daemon]
+        TcmuService.remove_undetected_clients(images, daemons, daemon)
 
         return {
             'daemons': sorted(daemons.values(),
@@ -106,3 +97,16 @@ class TcmuService(object):
             if image['pool_name'] == pool_name and image['name'] == image_name:
                 return image
         return None
+
+    @staticmethod
+    def remove_undetected_clients(images, daemons, daemon):
+        for image in images.values():
+            optimized_daemon = image.get('optimized_daemon', None)
+            if optimized_daemon:
+                for daemon_name in image['optimized_paths']:
+                    if daemon_name != optimized_daemon:
+                        daemon = daemons[daemon_name]
+                        daemon['optimized_paths'] -= 1
+                        daemon['non_optimized_paths'] += 1
+                        image['non_optimized_paths'].append(daemon_name)
+                image['optimized_paths'] = [optimized_daemon]