bitmap allocator.
Before this fix it was treated merely as a minimum threshold, which allowed
e.g. an allocated extent length equal to min_length + 1.
Signed-off-by: Igor Fedotov <ifedotov@suse.com>
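
For illustration only, a minimal sketch (not part of the patch) of the rounding the
fix relies on: p2align(x, a) rounds x down to a multiple of the power-of-two a, so
candidate lengths are trimmed to a min_length boundary instead of being accepted as
soon as they exceed min_length. The helpers below are simplified stand-ins for the
p2align()/isp2() utilities the allocator uses, assuming a hypothetical 64 KB unit.

// Illustration only: simplified stand-ins for the p2align()/isp2() helpers,
// showing how a candidate length is trimmed down to a min_length boundary.
#include <cassert>
#include <cstdint>
#include <iostream>

static inline bool isp2_sketch(uint64_t v) {
  return v && (v & (v - 1)) == 0;          // power of two?
}
static inline uint64_t p2align_sketch(uint64_t x, uint64_t align) {
  assert(isp2_sketch(align));
  return x & ~(align - 1);                 // round down to a multiple of align
}

int main() {
  const uint64_t min_length = 0x10000;     // hypothetical 64 KB allocation unit
  // Before the fix a candidate of min_length + 1 could be returned verbatim;
  // after the fix the length is aligned down to a min_length multiple.
  std::cout << std::hex
            << p2align_sketch(min_length + 1, min_length) << "\n"      // 10000
            << p2align_sketch(3 * min_length - 1, min_length) << "\n"; // 20000
}
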
if (!ctx->free_count) {
ctx->free_l1_pos = l1_pos;
} else if (l1_pos != next_free_l1_pos){
- break;
+ // check if already found extent fits min_length
+ if (ctx->free_count * l1_granularity >= min_length) {
+ break;
+ }
+ // if not - proceed with the next one
+ ctx->free_l1_pos = l1_pos;
+ ctx->free_count = 0;
}
next_free_l1_pos = l1_pos + 1;
++ctx->free_count;
if (l >= min_length &&
(ctx->min_affordable_len == 0 ||
(l < ctx->min_affordable_len))) {
- ctx->min_affordable_len = l;
+
+ ctx->min_affordable_len = p2align(l, min_length);
ctx->min_affordable_l0_pos_start = p0;
}
if (mode == STOP_ON_PARTIAL) {
interval_t res = { 0, 0 };
uint64_t l0_w = slotset_width * CHILD_PER_SLOT_L0;
- if (length <= l0_granularity) {
+ if (unlikely(length <= l0_granularity)) {
search_ctx_t ctx;
_analyze_partials(pos_start, pos_end, l0_granularity, l0_granularity,
STOP_ON_PARTIAL, &ctx);
res = interval_t(ctx.free_l1_pos * l1_granularity, l);
return res;
}
- } else if (length == l1_granularity) {
+ } else if (unlikely(length == l1_granularity)) {
search_ctx_t ctx;
_analyze_partials(pos_start, pos_end, length, min_length, STOP_ON_EMPTY, &ctx);
- // allocate exactly matched entry if any
+ // allocate using contiguous extent found at l1 if any
if (ctx.free_count) {
+
auto l = std::min(length, ctx.free_count * l1_granularity);
assert((l % l0_granularity) == 0);
auto pos_end = ctx.free_l1_pos * l0_w + l / l0_granularity;
return res;
}
if (ctx.min_affordable_len) {
- assert(ctx.min_affordable_len >= min_length);
- assert((ctx.min_affordable_len % l0_granularity) == 0);
- auto pos_end = ctx.min_affordable_l0_pos_start +
- ctx.min_affordable_len / l0_granularity;
- _mark_alloc_l1_l0(ctx.min_affordable_l0_pos_start, pos_end);
- res = interval_t(ctx.min_affordable_l0_pos_start * l0_granularity,
- ctx.min_affordable_len);
- return res;
+ auto pos0 = ctx.min_affordable_l0_pos_start;
+ _mark_alloc_l1_l0(pos0, pos0 + ctx.min_affordable_len / l0_granularity);
+ return interval_t(pos0 * l0_granularity, ctx.min_affordable_len);
}
} else {
search_ctx_t ctx;
res = interval_t(ctx.affordable_l0_pos_start * l0_granularity, length);
return res;
}
- // allocate exactly matched entry if any
- if (ctx.free_count) {
+ // allocate using contiguous extent found at l1 if affordable
+ if (ctx.free_count && ctx.free_count * l1_granularity >= min_length) {
- auto l = std::min(length, ctx.free_count * l1_granularity);
+ auto l = p2align(std::min(length, ctx.free_count * l1_granularity),
+ min_length);
assert((l % l0_granularity) == 0);
auto pos_end = ctx.free_l1_pos * l0_w + l / l0_granularity;
return res;
}
if (ctx.min_affordable_len) {
- assert(ctx.min_affordable_len >= min_length);
- assert((ctx.min_affordable_len % l0_granularity) == 0);
- auto pos_end = ctx.min_affordable_l0_pos_start +
- ctx.min_affordable_len / l0_granularity;
- _mark_alloc_l1_l0(ctx.min_affordable_l0_pos_start, pos_end);
- res = interval_t(ctx.min_affordable_l0_pos_start * l0_granularity,
- ctx.min_affordable_len);
- return res;
+ auto pos0 = ctx.min_affordable_l0_pos_start;
+ _mark_alloc_l1_l0(pos0, pos0 + ctx.min_affordable_len / l0_granularity);
+ return interval_t(pos0 * l0_granularity, ctx.min_affordable_len);
}
}
return res;
typedef std::vector<slot_t> slot_vector_t;
#else
#include "include/assert.h"
+#include "common/likely.h"
#include "os/bluestore/bluestore_types.h"
#include "include/mempool.h"
uint64_t len,
interval_vector_t* res)
{
+ auto it = res->rbegin();
+
if (max_length) {
+ if (it != res->rend() && it->offset + it->length == offset) {
+ auto l = max_length - it->length;
+ if (l >= len) {
+ it->length += len;
+ return;
+ } else {
+ offset += l;
+ len -= l;
+ it->length += l;
+ }
+ }
+
while (len > max_length) {
res->emplace_back(offset, max_length);
offset += max_length;
len -= max_length;
}
+ res->emplace_back(offset, len);
+ return;
+ }
+
+ if (it != res->rend() && it->offset + it->length == offset) {
+ it->length += len;
+ } else {
+ res->emplace_back(offset, len);
}
- res->emplace_back(offset, len);
}
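
For illustration only, a self-contained sketch (with a simplified Interval type
standing in for the allocator's interval_t, and emplace_extent as a hypothetical
name) of the merge-then-split behaviour the hunk above adds: a new extent first
extends the result vector's tail when it is contiguous and max_length permits, and
whatever remains is emplaced in max_length-sized pieces.

// Illustration only: simplified model of the merge-then-split logic above.
#include <cstdint>
#include <iostream>
#include <vector>

struct Interval {
  uint64_t offset, length;
  Interval(uint64_t o, uint64_t l) : offset(o), length(l) {}
};
using Intervals = std::vector<Interval>;

void emplace_extent(uint64_t max_length, uint64_t offset, uint64_t len,
                    Intervals* res) {
  auto it = res->rbegin();
  if (max_length) {
    // Grow the tail interval first, but never past max_length.
    if (it != res->rend() && it->offset + it->length == offset) {
      uint64_t room = max_length - it->length;
      if (room >= len) {
        it->length += len;
        return;
      }
      offset += room;
      len -= room;
      it->length += room;
    }
    // Split the remainder into max_length-sized pieces.
    while (len > max_length) {
      res->emplace_back(offset, max_length);
      offset += max_length;
      len -= max_length;
    }
    res->emplace_back(offset, len);
    return;
  }
  // No cap: merge with the tail when contiguous, otherwise append.
  if (it != res->rend() && it->offset + it->length == offset) {
    it->length += len;
  } else {
    res->emplace_back(offset, len);
  }
}

int main() {
  Intervals res;
  emplace_extent(0x2000, 0x0000, 0x1000, &res);  // -> 0~0x1000
  emplace_extent(0x2000, 0x1000, 0x3000, &res);  // tail grows to 0~0x2000,
                                                 // remainder appended as 0x2000~0x2000
  for (auto& i : res)
    std::cout << std::hex << i.offset << "~" << i.length << "\n";
}
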
bool _allocate_l0(uint64_t length,
void _init(uint64_t capacity, uint64_t _alloc_unit, bool mark_as_free = true)
{
+ assert(isp2(_alloc_unit));
l1._init(capacity, _alloc_unit, mark_as_free);
l2_granularity =
{
uint64_t prev_allocated = *allocated;
uint64_t d = CHILD_PER_SLOT;
- assert(min_length <= l2_granularity);
+ assert(isp2(min_length));
+ assert(min_length <= l1._level_granularity());
assert(max_length == 0 || max_length >= min_length);
assert(max_length == 0 || (max_length % min_length) == 0);
assert(length >= min_length);
uint64_t allocated4 = 0;
interval_vector_t a4;
al2.allocate_l2(0x1000, 0x1000, &allocated4, &a4);
- ASSERT_TRUE(a4.size() == 1);
- ASSERT_TRUE(allocated4 == 0x1000);
- ASSERT_TRUE(a4[0].offset == i);
- ASSERT_TRUE(a4[0].length == 0x1000);
+ ASSERT_EQ(a4.size(), 1);
+ ASSERT_EQ(allocated4, 0x1000);
+ ASSERT_EQ(a4[0].offset, i);
+ ASSERT_EQ(a4[0].length, 0x1000);
allocated4 = 0;
a4.clear();
- al2.allocate_l2(_1m - 0x1000, _1m - 0x1000, &allocated4, &a4);
- ASSERT_TRUE(a4.size() == 1);
- ASSERT_TRUE(allocated4 == _1m - 0x1000);
- ASSERT_TRUE(a4[0].offset == i + 0x1000);
- ASSERT_TRUE(a4[0].length == _1m - 0x1000);
+ al2.allocate_l2(_1m - 0x1000, 0x1000, &allocated4, &a4);
+ ASSERT_EQ(a4.size(), 1);
+ ASSERT_EQ(allocated4, _1m - 0x1000);
+ ASSERT_EQ(a4[0].offset, i + 0x1000);
+ ASSERT_EQ(a4[0].length, _1m - 0x1000);
if (0 == (i % (1 * 1024 * _1m))) {
std::cout << "allocH " << i / 1024 / 1024 << " mb of "
<< capacity / 1024 / 1024 << std::endl;
uint64_t allocated = 0;
interval_vector_t a;
al2.allocate_l2(0x2000, 0x2000, &allocated, &a);
- ASSERT_TRUE(a.size() == 0);
+ ASSERT_EQ(a.size(), 0);
}
std::cout << "End try in " << time(NULL) - t << " seconds" << std::endl;
}
uint64_t allocated = 0;
interval_vector_t a;
al2.allocate_l2(_2m, _2m, &allocated, &a);
- ASSERT_TRUE(a.size() == 0);
+ ASSERT_EQ(a.size(), 0);
}
std::cout << "End try in " << time(NULL) - t << " seconds" << std::endl;
}
- ASSERT_TRUE((capacity / _1m) * 0x1000 == al2.debug_get_free());
+ ASSERT_EQ((capacity / _1m) * 0x1000, al2.debug_get_free());
std::cout << "Done L2 Huge" << std::endl;
}