f->dump_string("length", len_hex);
f->close_section();
};
- alloc->dump(iterated_allocation);
+ alloc->foreach(iterated_allocation);
f->close_section();
f->close_section();
} else if (command == "bluestore allocator score " + name) {
score_sum += get_score(len);
sum += len;
};
- dump(iterated_allocation);
+ foreach(iterated_allocation);
double ideal = get_score(sum);
void release(const PExtentVector& release_set);
virtual void dump() = 0;
- virtual void dump(std::function<void(uint64_t offset, uint64_t length)> notify) = 0;
+ virtual void foreach(
+ std::function<void(uint64_t offset, uint64_t length)> notify) = 0;
virtual void init_add_free(uint64_t offset, uint64_t length) = 0;
virtual void init_rm_free(uint64_t offset, uint64_t length) = 0;
<< std::dec
<< dendl;
}
-
ldout(cct, 0) << __func__ << " range_size_tree: " << dendl;
for (auto& rs : range_size_tree) {
ldout(cct, 0) << std::hex
}
}
-void AvlAllocator::dump(std::function<void(uint64_t offset, uint64_t length)> notify)
+// Public iteration entry point: invokes notify(offset, length) for every
+// free range tracked by this allocator.  Takes the allocator lock once,
+// then delegates to the unlocked _foreach() helper so callers that already
+// hold the lock (e.g. subclasses) can reuse the same walk without
+// re-locking.
+void AvlAllocator::foreach(
+  std::function<void(uint64_t offset, uint64_t length)> notify)
+{
+  std::lock_guard l(lock);
+  _foreach(notify);
+}
+
+void AvlAllocator::_foreach(
+ std::function<void(uint64_t offset, uint64_t length)> notify) const
{
for (auto& rs : range_tree) {
notify(rs.start, rs.end - rs.start);
double get_fragmentation() override;
void dump() override;
- void dump(std::function<void(uint64_t offset, uint64_t length)> notify) override;
+ void foreach(
+ std::function<void(uint64_t offset, uint64_t length)> notify) override;
void init_add_free(uint64_t offset, uint64_t length) override;
void init_rm_free(uint64_t offset, uint64_t length) override;
void shutdown() override;
return (static_cast<double>(range_tree.size() - 1) / (free_blocks - 1));
}
void _dump() const;
+ void _foreach(std::function<void(uint64_t offset, uint64_t length)>) const;
uint64_t _lowest_size_available() {
auto rs = range_size_tree.begin();
auto it = bins_overall.begin();
while (it != bins_overall.end()) {
ldout(cct, 0) << __func__
- << " bin " << it->first
- << "(< " << byte_u_t((1 << (it->first + 1)) * get_min_alloc_size()) << ")"
- << " : " << it->second << " extents"
- << dendl;
+ << " bin " << it->first
+ << "(< " << byte_u_t((1 << (it->first + 1)) * get_min_alloc_size()) << ")"
+ << " : " << it->second << " extents"
+ << dendl;
++it;
}
}
-
-void BitmapAllocator::dump(std::function<void(uint64_t offset, uint64_t length)> notify)
-{
- size_t alloc_size = get_min_alloc_size();
- auto multiply_by_alloc_size = [alloc_size, notify](size_t off, size_t len) {
- notify(off * alloc_size, len * alloc_size);
- };
- std::lock_guard lck(lock);
- l1.dump(multiply_by_alloc_size);
-}
}
void dump() override;
- void dump(std::function<void(uint64_t offset, uint64_t length)> notify) override;
+ void foreach(
+ std::function<void(uint64_t offset, uint64_t length)> notify) override
+ {
+ foreach_internal(notify);
+ }
double get_fragmentation() override
{
- return _get_fragmentation();
+ return get_fragmentation_internal();
}
void init_add_free(uint64_t offset, uint64_t length) override;
total += p2align(len, alloc_size);
};
if (alloc[dev]) {
- alloc[dev]->dump(iterated_allocation);
+ alloc[dev]->foreach(iterated_allocation);
}
return total;
}
auto count_entries = [&](uint64_t extent_offset, uint64_t extent_length) {
(*p_num_entries)++;
};
- src_alloc->dump(count_entries);
+ src_alloc->foreach(count_entries);
dout(5) << "count num_entries=" << *p_num_entries << dendl;
derr << "zero length extent!!! offset=" << extent_offset << ", index=" << idx << dendl;
}
};
- src_alloc->dump(copy_entries);
+ src_alloc->foreach(copy_entries);
dout(5) << "copy num_entries=" << idx << dendl;
if (idx > *p_num_entries) {
p_curr = buffer; // recycle the buffer
}
};
- allocator->dump(iterated_allocation);
+ allocator->foreach(iterated_allocation);
// if got null extent -> fail the operation
if (ret != 0) {
derr << "Illegal extent, fail store operation" << dendl;
}
};
- alloc1->dump(iterated_mapper1);
- alloc2->dump(iterated_mapper2);
+ alloc1->foreach(iterated_mapper1);
+ alloc2->foreach(iterated_mapper2);
qsort(arr1.get(), std::min(idx1, extent_count), sizeof(extent_t), cmpfunc);
qsort(arr2.get(), std::min(idx2, extent_count), sizeof(extent_t), cmpfunc);
auto count_entries = [&](uint64_t extent_offset, uint64_t extent_length) {
stats.insert_count++;
};
- allocator->dump(count_entries);
+ allocator->foreach(count_entries);
ret = compare_allocators(allocator.get(), alloc, stats.insert_count, memory_target);
if (ret == 0) {
dout(5) << "Allocator drive - file integrity check OK" << dendl;
txn = db->get_transaction();
}
};
- allocator->dump(iterated_insert);
+ allocator->foreach(iterated_insert);
if (idx % max_txn != 0) {
db->submit_transaction_sync(txn);
}
auto count_entries = [&](uint64_t extent_offset, uint64_t extent_length) {
insert_count++;
};
- temp_allocator->dump(count_entries);
+ temp_allocator->foreach(count_entries);
uint64_t memory_target = cct->_conf.get_val<Option::size_t>("osd_memory_target");
int ret = compare_allocators(allocator, temp_allocator, insert_count, memory_target);
}
}
-void BtreeAllocator::dump(std::function<void(uint64_t offset, uint64_t length)> notify)
+void BtreeAllocator::foreach(std::function<void(uint64_t offset, uint64_t length)> notify)
{
+ std::lock_guard l(lock);
for (auto& rs : range_tree) {
notify(rs.first, rs.second - rs.first);
}
double get_fragmentation() override;
void dump() override;
- void dump(std::function<void(uint64_t offset, uint64_t length)> notify) override;
+ void foreach(
+ std::function<void(uint64_t offset, uint64_t length)> notify) override;
void init_add_free(uint64_t offset, uint64_t length) override;
void init_rm_free(uint64_t offset, uint64_t length) override;
void shutdown() override;
<< dendl;
}
-void HybridAllocator::dump(std::function<void(uint64_t offset, uint64_t length)> notify)
+// Walks all free extents known to the hybrid allocator: first the ranges in
+// the AVL base, then (if it exists) the spill-over bitmap allocator.  The
+// AVL lock is acquired here, so the base class is iterated via the
+// pre-locked _foreach() variant rather than foreach(), which would try to
+// take the same lock again.
+void HybridAllocator::foreach(
+  std::function<void(uint64_t offset, uint64_t length)> notify)
{
-  AvlAllocator::dump(notify);
+  std::lock_guard l(lock);
+  AvlAllocator::_foreach(notify);
if (bmap_alloc) {
-    bmap_alloc->dump(notify);
+    bmap_alloc->foreach(notify);
}
}
double get_fragmentation() override;
void dump() override;
- void dump(std::function<void(uint64_t offset, uint64_t length)> notify) override;
+ void foreach(
+ std::function<void(uint64_t offset, uint64_t length)> notify) override;
void init_rm_free(uint64_t offset, uint64_t length) override;
void shutdown() override;
}
}
-void StupidAllocator::dump(std::function<void(uint64_t offset, uint64_t length)> notify)
+void StupidAllocator::foreach(std::function<void(uint64_t offset, uint64_t length)> notify)
{
std::lock_guard l(lock);
for (unsigned bin = 0; bin < free.size(); ++bin) {
double get_fragmentation() override;
void dump() override;
- void dump(std::function<void(uint64_t offset, uint64_t length)> notify) override;
+ void foreach(std::function<void(uint64_t offset, uint64_t length)> notify) override;
void init_add_free(uint64_t offset, uint64_t length) override;
void init_rm_free(uint64_t offset, uint64_t length) override;
std::lock_guard l(lock);
}
-void ZonedAllocator::dump(std::function<void(uint64_t offset,
- uint64_t length)> notify)
+void ZonedAllocator::foreach(
+ std::function<void(uint64_t offset, uint64_t length)> notify)
{
std::lock_guard l(lock);
}
uint64_t get_free() override;
void dump() override;
- void dump(std::function<void(uint64_t offset,
- uint64_t length)> notify) override;
+ void foreach(
+ std::function<void(uint64_t offset, uint64_t length)> notify) override;
int64_t pick_zone_to_clean(float min_score, uint64_t min_saved);
void set_cleaning_zone(uint32_t zone) {
{
return count_0s(~slot_val, start_pos);
}
-void AllocatorLevel01Loose::dump(
+void AllocatorLevel01Loose::foreach_internal(
std::function<void(uint64_t offset, uint64_t length)> notify)
{
size_t len = 0;
static inline ssize_t count_0s(slot_t slot_val, size_t start_pos);
static inline ssize_t count_1s(slot_t slot_val, size_t start_pos);
- void dump(std::function<void(uint64_t offset, uint64_t length)> notify);
+ void foreach_internal(std::function<void(uint64_t offset, uint64_t length)> notify);
};
_mark_l2_on_l1(l2_pos, l2_pos_end);
return allocated;
}
+
+  // Iterates all free extents by walking the L1 level and converting its
+  // slot-granularity (offset, length) pairs into byte units: the wrapping
+  // lambda multiplies both values by the minimum allocation size before
+  // forwarding them to the caller's notify callback.  The allocator lock is
+  // held for the duration of the walk.  Note: the lambda captures notify by
+  // value, copying the std::function once up front.
+  void foreach_internal(
+    std::function<void(uint64_t offset, uint64_t length)> notify)
+  {
+    size_t alloc_size = get_min_alloc_size();
+    auto multiply_by_alloc_size = [alloc_size, notify](size_t off, size_t len) {
+      notify(off * alloc_size, len * alloc_size);
+    };
+    std::lock_guard l(lock);
+    l1.foreach_internal(multiply_by_alloc_size);
+  }
+  // Locked accessor for the L1 level's fragmentation metric; replaces the
+  // former private _get_fragmentation() (removed further down in this patch).
+  double get_fragmentation_internal() {
+    std::lock_guard l(lock);
+    return l1.get_fragmentation();
+  }
+
protected:
ceph::mutex lock = ceph::make_mutex("AllocatorLevel02::lock");
L1 l1;
{
last_pos = 0;
}
- double _get_fragmentation() {
- std::lock_guard l(lock);
- return l1.get_fragmentation();
- }
};
#endif
ceph_assert(len > 0);
free_sum += len;
};
- alloc->dump(iterated_allocation);
+ alloc->foreach(iterated_allocation);
EXPECT_GT(1, alloc->get_fragmentation_score());
EXPECT_EQ(capacity, free_sum + allocated_cnt);
}