mgr/cephadm: only deploy 3 mons by default when there are 4 hosts
author     Adam King <adking@redhat.com>
           Wed, 30 Sep 2020 13:44:36 +0000 (09:44 -0400)
committer  Adam King <adking@redhat.com>
           Mon, 5 Oct 2020 12:18:59 +0000 (08:18 -0400)
Fixes: https://tracker.ceph.com/issues/47234
Signed-off-by: Adam King <adking@redhat.com>
src/pybind/mgr/cephadm/schedule.py
src/pybind/mgr/cephadm/tests/test_scheduling.py
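
Why an odd count (background sketch, not part of the commit): Ceph monitors form a Paxos quorum that needs a strict majority of mons up, so an even-sized mon cluster tolerates no more failures than the next smaller odd size while adding one more daemon that can fail. A minimal illustration of the arithmetic:

    # A mon cluster of size n needs a strict majority (n // 2 + 1) of
    # mons up to keep quorum, so it tolerates (n - 1) // 2 failures.
    def tolerated_failures(n: int) -> int:
        return (n - 1) // 2

    assert tolerated_failures(3) == 1
    assert tolerated_failures(4) == 1  # 4 mons survive no more failures than 3
    assert tolerated_failures(5) == 2
    assert tolerated_failures(6) == 2  # 6 mons survive no more failures than 5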

diff --git a/src/pybind/mgr/cephadm/schedule.py b/src/pybind/mgr/cephadm/schedule.py
index 4b42099b293a22baf13f7aad32d8264bcedcc1eb..26bee82d9d5d3a4b2af062cfb7e7c3ffb11e5555 100644
--- a/src/pybind/mgr/cephadm/schedule.py
+++ b/src/pybind/mgr/cephadm/schedule.py
@@ -122,8 +122,30 @@ class HostAssignment(object):
         # If we don't have <count> the list of candidates is definitive.
         if count is None:
             logger.debug('Provided hosts: %s' % candidates)
+            # if asked to place an even number of mons, deploy one less
+            if self.spec.service_type == 'mon' and (len(candidates) % 2) == 0:
+                logger.info("deploying %s monitor(s) instead of %s so monitors may achieve consensus" % (
+                    len(candidates) - 1, len(candidates)))
+                return candidates[0:len(candidates)-1]
             return candidates
 
+        # if asked to place an even number of mons, deploy one less
+        if self.spec.service_type == 'mon':
+            # if count >= the number of candidates, the candidate list
+            # determines how many mons will be placed
+            if count >= len(candidates):
+                if (len(candidates) % 2) == 0:
+                    logger.info("deploying %s monitor(s) instead of %s so monitors may achieve consensus" % (
+                        len(candidates) - 1, len(candidates)))
+                    count = len(candidates) - 1
+            # if count < the number of candidates, count determines
+            # how many mons will be placed
+            else:
+                if (count % 2) == 0:
+                    logger.info(
+                        "deploying %s monitor(s) instead of %s so monitors may achieve consensus" % (count - 1, count))
+                    count = count - 1
+
         # prefer hosts that already have services.
         # this avoids re-assigning to _new_ hosts
         # and constant re-distribution of hosts when new nodes are
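
The hunk above, restated as a self-contained sketch (the helper name and shape are illustrative, not from the commit): whichever of count and the candidate list is the limiting factor, the final mon count is forced down to the nearest odd number.

    # Illustrative mirror of the mon-count adjustment in HostAssignment above.
    def effective_mon_count(count, num_candidates):
        n = num_candidates if count is None else min(count, num_candidates)
        return n - 1 if n % 2 == 0 else n

    assert effective_mon_count(5, 4) == 3     # limited by candidates, made odd
    assert effective_mon_count(4, 5) == 3     # limited by count, made odd
    assert effective_mon_count(5, 5) == 5     # already odd, unchanged
    assert effective_mon_count(None, 4) == 3  # no count: candidate list rules
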
diff --git a/src/pybind/mgr/cephadm/tests/test_scheduling.py b/src/pybind/mgr/cephadm/tests/test_scheduling.py
index d57b4c08aae7bf0a4a69951efdfa04458373735a..efbbb9b99d90b902a754ea3626cb0d9c8f681e4d 100644
--- a/src/pybind/mgr/cephadm/tests/test_scheduling.py
+++ b/src/pybind/mgr/cephadm/tests/test_scheduling.py
@@ -756,3 +756,115 @@ def test_active_assignment(service_type, placement, hosts, daemons, expected):
         hosts=[HostSpec(h) for h in hosts],
         get_daemons_func=lambda _: daemons).place()
     assert sorted([h.hostname for h in hosts]) in expected
+
+class OddMonsTest(NamedTuple):
+    service_type: str
+    placement: PlacementSpec
+    hosts: List[str]
+    daemons: List[DaemonDescription]
+    expected_count: int
+
+
+@pytest.mark.parametrize("service_type,placement,hosts,daemons,expected_count",
+                         [
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(count=5),
+                                 'host1 host2 host3 host4'.split(),
+                                 [],
+                                 3
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(count=4),
+                                 'host1 host2 host3 host4 host5'.split(),
+                                 [],
+                                 3
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(count=5),
+                                 'host1 host2 host3 host4 host5'.split(),
+                                 [],
+                                 5
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(hosts='host1 host2 host3 host4'.split()),
+                                 'host1 host2 host3 host4 host5'.split(),
+                                 [],
+                                 3
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(hosts='host1 host2 host3 host4 host5'.split()),
+                                 'host1 host2 host3 host4 host5'.split(),
+                                 [],
+                                 5
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(host_pattern='*'),
+                                 'host1 host2 host3 host4'.split(),
+                                 [],
+                                 3
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(count=5, hosts='host1 host2 host3 host4'.split()),
+                                 'host1 host2 host3 host4 host5'.split(),
+                                 [],
+                                 3
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(count=2, hosts='host1 host2 host3'.split()),
+                                 'host1 host2 host3 host4 host5'.split(),
+                                 [],
+                                 1
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(count=5),
+                                 'host1 host2 host3 host4'.split(),
+                                 [
+                                     DaemonDescription('mon', 'a', 'host1'),
+                                     DaemonDescription('mon', 'b', 'host2'),
+                                     DaemonDescription('mon', 'c', 'host3'),
+                                 ],
+                                 3
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(count=5),
+                                 'host1 host2 host3 host4'.split(),
+                                 [
+                                     DaemonDescription('mon', 'a', 'host1'),
+                                     DaemonDescription('mon', 'b', 'host2'),
+                                 ],
+                                 3
+                             ),
+                             OddMonsTest(
+                                 'mon',
+                                 PlacementSpec(hosts='host1 host2 host3 host4'.split()),
+                                 'host1 host2 host3 host4 host5'.split(),
+                                 [
+                                     DaemonDescription('mon', 'a', 'host1'),
+                                     DaemonDescription('mon', 'b', 'host2'),
+                                     DaemonDescription('mon', 'c', 'host3'),
+                                 ],
+                                 3
+                             ),
+
+                         ])
+def test_odd_mons(service_type, placement, hosts, daemons, expected_count):
+
+    spec = ServiceSpec(service_type=service_type,
+                       service_id=None,
+                       placement=placement)
+
+    hosts = HostAssignment(
+        spec=spec,
+        hosts=[HostSpec(h) for h in hosts],
+        get_daemons_func=lambda _: daemons).place()
+    assert len(hosts) == expected_count
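
The new cases can be run on their own with pytest's -k filter (assuming a ceph checkout with the cephadm mgr test dependencies available; the exact environment setup may vary):

    pytest src/pybind/mgr/cephadm/tests/test_scheduling.py -k test_odd_mons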