From 7fe685c922bae3b52755209e35feddd972749cff Mon Sep 17 00:00:00 2001
From: Pere Diaz Bou
Date: Fri, 26 Apr 2024 11:33:25 +0200
Subject: [PATCH] os/bluestore: fix buffercacheshard store arg bluestore_types

Signed-off-by: Pere Diaz Bou
---
 src/test/objectstore/test_bluestore_types.cc | 1266 +++++++++---------
 1 file changed, 622 insertions(+), 644 deletions(-)

diff --git a/src/test/objectstore/test_bluestore_types.cc b/src/test/objectstore/test_bluestore_types.cc
index db0ee38089119..f02da9df5c97c 100644
--- a/src/test/objectstore/test_bluestore_types.cc
+++ b/src/test/objectstore/test_bluestore_types.cc
@@ -1,18 +1,18 @@
 // -*- mode:C++; tab-width:8; c-basic-offset:2; indent-tabs-mode:t -*-
 // vim: ts=8 sw=2 smarttab
-#include "include/types.h"
-#include "os/bluestore/bluestore_types.h"
-#include "gtest/gtest.h"
-#include "include/stringify.h"
+#include "common/ceph_argparse.h"
 #include "common/ceph_time.h"
+#include "global/global_context.h"
+#include "global/global_init.h"
+#include "include/stringify.h"
+#include "include/types.h"
+#include "os/bluestore/AvlAllocator.h"
 #include "os/bluestore/BlueStore.h"
+#include "os/bluestore/bluestore_types.h"
 #include "os/bluestore/simple_bitmap.h"
-#include "os/bluestore/AvlAllocator.h"
-#include "common/ceph_argparse.h"
-#include "global/global_init.h"
-#include "global/global_context.h"
 #include "perfglue/heap_profiler.h"
+#include "gtest/gtest.h"
 
 #include 
@@ -49,14 +49,15 @@ TEST(bluestore, sizeof) {
   P(range_seg_t);
   P(sb_info_t);
   P(SimpleBitmap);
-  cout << "map\t" << sizeof(map) << std::endl;
-  cout << "map\t" << sizeof(map) << std::endl;
+  cout << "map\t" << sizeof(map)
+       << std::endl;
+  cout << "map\t" << sizeof(map) << std::endl;
 }
 
-void dump_mempools()
-{
+void dump_mempools() {
   ostringstream ostr;
-  auto f = Formatter::create_unique("json-pretty", "json-pretty", "json-pretty");
+  auto f =
+      Formatter::create_unique("json-pretty", "json-pretty", "json-pretty");
   ostr << "Mempools: ";
   f->open_object_section("mempools");
   mempool::dump(f.get());
@@ -85,7 +86,7 @@ TEST(sb_info_space_efficient_map_t, basic) {
   sb_info_space_efficient_map_t sb_info;
   const size_t num_shared = 1000;
   for (size_t i = 0; i < num_shared; i += 2) {
-    auto& sbi = sb_info.add_maybe_stray(i);
+    auto &sbi = sb_info.add_maybe_stray(i);
     sbi.pool_id = i;
   }
   ASSERT_TRUE(sb_info.find(0) != sb_info.end());
@@ -120,23 +121,22 @@ TEST(sb_info_space_efficient_map_t, size) {
 
   sb_info_space_efficient_map_t sb_info;
   BlueStore store(g_ceph_context, "", 4096);
-  BlueStore::OnodeCacheShard* oc = BlueStore::OnodeCacheShard::create(
-    g_ceph_context, "lru", NULL);
-  BlueStore::BufferCacheShard* bc = BlueStore::BufferCacheShard::create(
-    g_ceph_context, "lru", NULL);
+  BlueStore::OnodeCacheShard *oc =
+      BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL);
+  BlueStore::BufferCacheShard *bc =
+      BlueStore::BufferCacheShard::create(&store, "lru", NULL);
   auto coll = ceph::make_ref(&store, oc, bc, coll_t());
 
   for (size_t i = 0; i < num_shared; i++) {
-    auto& sbi = sb_info.add_or_adopt(i);
+    auto &sbi = sb_info.add_or_adopt(i);
     // primarily to silent the 'unused' warning
     ceph_assert(sbi.pool_id == sb_info_t::INVALID_POOL_ID);
   }
   dump_mempools();
 }
 
-TEST(bluestore_extent_ref_map_t, add)
-{
+TEST(bluestore_extent_ref_map_t, add) {
   bluestore_extent_ref_map_t m;
   m.get(10, 10);
   ASSERT_EQ(1u, m.ref_map.size());
@@ -160,8 +160,7 @@ TEST(bluestore_extent_ref_map_t, add)
   ASSERT_EQ(1u, m.ref_map.size());
 }
 
-TEST(bluestore_extent_ref_map_t, get)
-{
+TEST(bluestore_extent_ref_map_t, get) {
bluestore_extent_ref_map_t m; m.get(00, 30); cout << m << std::endl; @@ -207,8 +206,7 @@ TEST(bluestore_extent_ref_map_t, get) ASSERT_EQ(1u, m.ref_map[28].refs); } -TEST(bluestore_extent_ref_map_t, put) -{ +TEST(bluestore_extent_ref_map_t, put) { bluestore_extent_ref_map_t m; PExtentVector r; bool maybe_unshared = false; @@ -273,8 +271,7 @@ TEST(bluestore_extent_ref_map_t, put) ASSERT_TRUE(maybe_unshared); } -TEST(bluestore_extent_ref_map_t, contains) -{ +TEST(bluestore_extent_ref_map_t, contains) { bluestore_extent_ref_map_t m; m.get(10, 30); ASSERT_TRUE(m.contains(10, 30)); @@ -302,8 +299,7 @@ TEST(bluestore_extent_ref_map_t, contains) ASSERT_FALSE(m.contains(4000, 30)); } -TEST(bluestore_extent_ref_map_t, intersects) -{ +TEST(bluestore_extent_ref_map_t, intersects) { bluestore_extent_ref_map_t m; m.get(10, 30); ASSERT_TRUE(m.intersects(10, 30)); @@ -329,8 +325,7 @@ TEST(bluestore_extent_ref_map_t, intersects) ASSERT_FALSE(m.intersects(55, 1)); } -TEST(bluestore_blob_t, calc_csum) -{ +TEST(bluestore_blob_t, calc_csum) { bufferlist bl; bl.append("asdfghjkqwertyuizxcvbnm,"); bufferlist bl2; @@ -345,10 +340,9 @@ TEST(bluestore_blob_t, calc_csum) n.append("12345678"); for (unsigned csum_type = Checksummer::CSUM_NONE + 1; - csum_type < Checksummer::CSUM_MAX; - ++csum_type) { + csum_type < Checksummer::CSUM_MAX; ++csum_type) { cout << "csum_type " << Checksummer::get_csum_type_string(csum_type) - << std::endl; + << std::endl; bluestore_blob_t b; int bad_off; @@ -397,46 +391,42 @@ TEST(bluestore_blob_t, calc_csum) } } -TEST(bluestore_blob_t, csum_bench) -{ +TEST(bluestore_blob_t, csum_bench) { bufferlist bl; bufferptr bp(10485760); for (char *a = bp.c_str(); a < bp.c_str() + bp.length(); ++a) *a = (unsigned long)a & 0xff; bl.append(bp); int count = 256; - for (unsigned csum_type = 1; - csum_type < Checksummer::CSUM_MAX; - ++csum_type) { + for (unsigned csum_type = 1; csum_type < Checksummer::CSUM_MAX; ++csum_type) { bluestore_blob_t b; b.init_csum(csum_type, 12, bl.length()); ceph::mono_clock::time_point start = ceph::mono_clock::now(); - for (int i = 0; i(end - start); - double mbsec = (double)count * (double)bl.length() / 1000000.0 / (double)dur.count() * 1000000000.0; - cout << "csum_type " << Checksummer::get_csum_type_string(csum_type) - << ", " << dur << " seconds, " - << mbsec << " MB/sec" << std::endl; + double mbsec = (double)count * (double)bl.length() / 1000000.0 / + (double)dur.count() * 1000000000.0; + cout << "csum_type " << Checksummer::get_csum_type_string(csum_type) << ", " + << dur << " seconds, " << mbsec << " MB/sec" << std::endl; } } -TEST(Blob, put_ref) -{ +TEST(Blob, put_ref) { { BlueStore store(g_ceph_context, "", 4096); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); BlueStore::Blob b(coll.get()); b.dirty_blob().allocated_test(bluestore_pextent_t(0x40715000, 0x2000)); b.dirty_blob().allocated_test( - bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x8000)); + bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x8000)); b.dirty_blob().allocated_test(bluestore_pextent_t(0x4071f000, 0x5000)); b.get_ref(coll.get(), 0, 0x1200); b.get_ref(coll.get(), 
0xae00, 0x4200); @@ -458,78 +448,78 @@ TEST(Blob, put_ref) unsigned mas = 4096; BlueStore store(g_ceph_context, "", 8192); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(0, mas * 2)); - B.get_ref(coll.get(), 0, mas*2); + B.get_ref(coll.get(), 0, mas * 2); ASSERT_EQ(mas * 2, B.get_referenced_bytes()); - ASSERT_TRUE(b.is_allocated(0, mas*2)); - ASSERT_TRUE(B.put_ref(coll.get(), 0, mas*2, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); + ASSERT_TRUE(B.put_ref(coll.get(), 0, mas * 2, &r)); ASSERT_EQ(0u, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(1u, r.size()); ASSERT_EQ(0u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); - ASSERT_FALSE(b.is_allocated(0, mas*2)); + ASSERT_EQ(mas * 2, r[0].length); + ASSERT_FALSE(b.is_allocated(0, mas * 2)); ASSERT_FALSE(b.is_allocated(0, mas)); ASSERT_FALSE(b.is_allocated(mas, 0)); ASSERT_FALSE(b.get_extents()[0].is_valid()); - ASSERT_EQ(mas*2, b.get_extents()[0].length); + ASSERT_EQ(mas * 2, b.get_extents()[0].length); } { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(123, mas * 2)); - B.get_ref(coll.get(), 0, mas*2); + B.get_ref(coll.get(), 0, mas * 2); ASSERT_EQ(mas * 2, B.get_referenced_bytes()); ASSERT_FALSE(B.put_ref(coll.get(), 0, mas, &r)); ASSERT_EQ(mas, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*2)); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); ASSERT_TRUE(B.put_ref(coll.get(), mas, mas, &r)); ASSERT_EQ(0u, B.get_referenced_bytes()); ASSERT_EQ(0u, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(1u, r.size()); ASSERT_EQ(123u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); - ASSERT_FALSE(b.is_allocated(0, mas*2)); + ASSERT_EQ(mas * 2, r[0].length); + ASSERT_FALSE(b.is_allocated(0, mas * 2)); ASSERT_FALSE(b.get_extents()[0].is_valid()); - ASSERT_EQ(mas*2, b.get_extents()[0].length); + ASSERT_EQ(mas * 2, b.get_extents()[0].length); } { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(1, mas)); b.allocated_test(bluestore_pextent_t(2, mas)); b.allocated_test(bluestore_pextent_t(3, mas)); b.allocated_test(bluestore_pextent_t(4, mas)); - B.get_ref(coll.get(), 0, mas*4); + B.get_ref(coll.get(), 0, mas * 4); ASSERT_EQ(mas * 4, B.get_referenced_bytes()); ASSERT_FALSE(B.put_ref(coll.get(), mas, mas, &r)); ASSERT_EQ(mas * 3, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*4)); + ASSERT_TRUE(b.is_allocated(0, mas * 4)); ASSERT_TRUE(b.is_allocated(mas, mas)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*2, mas, &r)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 2, mas, &r)); ASSERT_EQ(mas * 2, B.get_referenced_bytes()); cout << "r " << 
r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(mas*2, mas)); - ASSERT_TRUE(b.is_allocated(0, mas*4)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*3, mas, &r)); + ASSERT_TRUE(b.is_allocated(mas * 2, mas)); + ASSERT_TRUE(b.is_allocated(0, mas * 4)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 3, mas, &r)); ASSERT_EQ(mas, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(2u, r.size()); @@ -537,8 +527,8 @@ TEST(Blob, put_ref) ASSERT_EQ(mas, r[0].length); ASSERT_EQ(4u, r[1].offset); ASSERT_EQ(mas, r[1].length); - ASSERT_TRUE(b.is_allocated(0, mas*2)); - ASSERT_FALSE(b.is_allocated(mas*2, mas*2)); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); + ASSERT_FALSE(b.is_allocated(mas * 2, mas * 2)); ASSERT_TRUE(b.get_extents()[0].is_valid()); ASSERT_TRUE(b.get_extents()[1].is_valid()); ASSERT_FALSE(b.get_extents()[2].is_valid()); @@ -546,7 +536,7 @@ TEST(Blob, put_ref) } { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(1, mas)); b.allocated_test(bluestore_pextent_t(2, mas)); @@ -554,19 +544,19 @@ TEST(Blob, put_ref) b.allocated_test(bluestore_pextent_t(4, mas)); b.allocated_test(bluestore_pextent_t(5, mas)); b.allocated_test(bluestore_pextent_t(6, mas)); - B.get_ref(coll.get(), 0, mas*6); + B.get_ref(coll.get(), 0, mas * 6); ASSERT_EQ(mas * 6, B.get_referenced_bytes()); ASSERT_FALSE(B.put_ref(coll.get(), mas, mas, &r)); ASSERT_EQ(mas * 5, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*6)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*2, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 6)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 2, mas, &r)); ASSERT_EQ(mas * 4, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*6)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*3, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 6)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 3, mas, &r)); ASSERT_EQ(mas * 3, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(2u, r.size()); @@ -574,9 +564,9 @@ TEST(Blob, put_ref) ASSERT_EQ(mas, r[0].length); ASSERT_EQ(4u, r[1].offset); ASSERT_EQ(mas, r[1].length); - ASSERT_TRUE(b.is_allocated(0, mas*2)); - ASSERT_FALSE(b.is_allocated(mas*2, mas*2)); - ASSERT_TRUE(b.is_allocated(mas*4, mas*2)); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); + ASSERT_FALSE(b.is_allocated(mas * 2, mas * 2)); + ASSERT_TRUE(b.is_allocated(mas * 4, mas * 2)); ASSERT_EQ(5u, b.get_extents().size()); ASSERT_TRUE(b.get_extents()[0].is_valid()); ASSERT_TRUE(b.get_extents()[1].is_valid()); @@ -586,30 +576,30 @@ TEST(Blob, put_ref) } { BlueStore::Blob B(coll); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(1, mas * 6)); - B.get_ref(coll.get(), 0, mas*6); + B.get_ref(coll.get(), 0, mas * 6); ASSERT_EQ(mas * 6, B.get_referenced_bytes()); ASSERT_FALSE(B.put_ref(coll.get(), mas, mas, &r)); ASSERT_EQ(mas * 5, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*6)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*2, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 6)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 2, mas, &r)); ASSERT_EQ(mas * 4, B.get_referenced_bytes()); cout << "r " << r << " " << b << 
std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*6)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*3, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 6)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 3, mas, &r)); ASSERT_EQ(mas * 3, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(1u, r.size()); ASSERT_EQ(0x2001u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); - ASSERT_TRUE(b.is_allocated(0, mas*2)); - ASSERT_FALSE(b.is_allocated(mas*2, mas*2)); - ASSERT_TRUE(b.is_allocated(mas*4, mas*2)); + ASSERT_EQ(mas * 2, r[0].length); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); + ASSERT_FALSE(b.is_allocated(mas * 2, mas * 2)); + ASSERT_TRUE(b.is_allocated(mas * 4, mas * 2)); ASSERT_EQ(3u, b.get_extents().size()); ASSERT_TRUE(b.get_extents()[0].is_valid()); ASSERT_FALSE(b.get_extents()[1].is_valid()); @@ -617,36 +607,36 @@ TEST(Blob, put_ref) } { BlueStore::Blob B(coll); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(1, mas * 4)); b.allocated_test(bluestore_pextent_t(2, mas * 4)); b.allocated_test(bluestore_pextent_t(3, mas * 4)); - B.get_ref(coll.get(), 0, mas*12); + B.get_ref(coll.get(), 0, mas * 12); ASSERT_EQ(mas * 12, B.get_referenced_bytes()); ASSERT_FALSE(B.put_ref(coll.get(), mas, mas, &r)); ASSERT_EQ(mas * 11, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*12)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*9, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 12)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 9, mas, &r)); ASSERT_EQ(mas * 10, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*12)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*2, mas*7, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 12)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 2, mas * 7, &r)); ASSERT_EQ(mas * 3, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(3u, r.size()); ASSERT_EQ(0x2001u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); + ASSERT_EQ(mas * 2, r[0].length); ASSERT_EQ(0x2u, r[1].offset); - ASSERT_EQ(mas*4, r[1].length); + ASSERT_EQ(mas * 4, r[1].length); ASSERT_EQ(0x3u, r[2].offset); - ASSERT_EQ(mas*2, r[2].length); - ASSERT_TRUE(b.is_allocated(0, mas*2)); - ASSERT_FALSE(b.is_allocated(mas*2, mas*8)); - ASSERT_TRUE(b.is_allocated(mas*10, mas*2)); + ASSERT_EQ(mas * 2, r[2].length); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); + ASSERT_FALSE(b.is_allocated(mas * 2, mas * 8)); + ASSERT_TRUE(b.is_allocated(mas * 10, mas * 2)); ASSERT_EQ(3u, b.get_extents().size()); ASSERT_TRUE(b.get_extents()[0].is_valid()); ASSERT_FALSE(b.get_extents()[1].is_valid()); @@ -654,36 +644,36 @@ TEST(Blob, put_ref) } { BlueStore::Blob B(coll); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(1, mas * 4)); b.allocated_test(bluestore_pextent_t(2, mas * 4)); b.allocated_test(bluestore_pextent_t(3, mas * 4)); - B.get_ref(coll.get(), 0, mas*12); + B.get_ref(coll.get(), 0, mas * 12); ASSERT_EQ(mas * 12, B.get_referenced_bytes()); ASSERT_FALSE(B.put_ref(coll.get(), mas, mas, &r)); ASSERT_EQ(mas * 11, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*12)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*9, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 12)); + 
ASSERT_FALSE(B.put_ref(coll.get(), mas * 9, mas, &r)); ASSERT_EQ(mas * 10, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*12)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*2, mas*7, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 12)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 2, mas * 7, &r)); ASSERT_EQ(mas * 3, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(3u, r.size()); ASSERT_EQ(0x2001u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); + ASSERT_EQ(mas * 2, r[0].length); ASSERT_EQ(0x2u, r[1].offset); - ASSERT_EQ(mas*4, r[1].length); + ASSERT_EQ(mas * 4, r[1].length); ASSERT_EQ(0x3u, r[2].offset); - ASSERT_EQ(mas*2, r[2].length); - ASSERT_TRUE(b.is_allocated(0, mas*2)); - ASSERT_FALSE(b.is_allocated(mas*2, mas*8)); - ASSERT_TRUE(b.is_allocated(mas*10, mas*2)); + ASSERT_EQ(mas * 2, r[2].length); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); + ASSERT_FALSE(b.is_allocated(mas * 2, mas * 8)); + ASSERT_TRUE(b.is_allocated(mas * 10, mas * 2)); ASSERT_EQ(3u, b.get_extents().size()); ASSERT_TRUE(b.get_extents()[0].is_valid()); ASSERT_FALSE(b.get_extents()[1].is_valid()); @@ -693,61 +683,61 @@ TEST(Blob, put_ref) cout << "r " << r << " " << b << std::endl; ASSERT_EQ(1u, r.size()); ASSERT_EQ(0x1u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); + ASSERT_EQ(mas * 2, r[0].length); ASSERT_EQ(2u, b.get_extents().size()); ASSERT_FALSE(b.get_extents()[0].is_valid()); ASSERT_TRUE(b.get_extents()[1].is_valid()); - ASSERT_TRUE(B.put_ref(coll.get(), mas*10, mas*2, &r)); + ASSERT_TRUE(B.put_ref(coll.get(), mas * 10, mas * 2, &r)); ASSERT_EQ(mas * 0, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(1u, r.size()); ASSERT_EQ(0x2003u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); + ASSERT_EQ(mas * 2, r[0].length); ASSERT_EQ(1u, b.get_extents().size()); ASSERT_FALSE(b.get_extents()[0].is_valid()); } { BlueStore::Blob B(coll); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(1, mas * 4)); b.allocated_test(bluestore_pextent_t(2, mas * 4)); b.allocated_test(bluestore_pextent_t(3, mas * 4)); - B.get_ref(coll.get(), 0, mas*12); + B.get_ref(coll.get(), 0, mas * 12); ASSERT_EQ(mas * 12, B.get_referenced_bytes()); ASSERT_FALSE(B.put_ref(coll.get(), mas, mas, &r)); ASSERT_EQ(mas * 11, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*12)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*9, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 12)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 9, mas, &r)); ASSERT_EQ(mas * 10, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*12)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*2, mas*7, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 12)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 2, mas * 7, &r)); ASSERT_EQ(mas * 3, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(3u, r.size()); ASSERT_EQ(0x2001u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); + ASSERT_EQ(mas * 2, r[0].length); ASSERT_EQ(0x2u, r[1].offset); - ASSERT_EQ(mas*4, r[1].length); + ASSERT_EQ(mas * 4, r[1].length); ASSERT_EQ(0x3u, r[2].offset); - ASSERT_EQ(mas*2, r[2].length); - ASSERT_TRUE(b.is_allocated(0, mas*2)); - ASSERT_FALSE(b.is_allocated(mas*2, mas*8)); - ASSERT_TRUE(b.is_allocated(mas*10, mas*2)); + 
ASSERT_EQ(mas * 2, r[2].length); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); + ASSERT_FALSE(b.is_allocated(mas * 2, mas * 8)); + ASSERT_TRUE(b.is_allocated(mas * 10, mas * 2)); ASSERT_EQ(3u, b.get_extents().size()); ASSERT_TRUE(b.get_extents()[0].is_valid()); ASSERT_FALSE(b.get_extents()[1].is_valid()); ASSERT_TRUE(b.get_extents()[2].is_valid()); - ASSERT_FALSE(B.put_ref(coll.get(), mas*10, mas*2, &r)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 10, mas * 2, &r)); ASSERT_EQ(mas * 1, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(1u, r.size()); ASSERT_EQ(0x2003u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); + ASSERT_EQ(mas * 2, r[0].length); ASSERT_EQ(2u, b.get_extents().size()); ASSERT_TRUE(b.get_extents()[0].is_valid()); ASSERT_FALSE(b.get_extents()[1].is_valid()); @@ -756,39 +746,39 @@ TEST(Blob, put_ref) cout << "r " << r << " " << b << std::endl; ASSERT_EQ(1u, r.size()); ASSERT_EQ(0x1u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); + ASSERT_EQ(mas * 2, r[0].length); ASSERT_EQ(1u, b.get_extents().size()); ASSERT_FALSE(b.get_extents()[0].is_valid()); } { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; b.allocated_test(bluestore_pextent_t(1, mas * 8)); - B.get_ref(coll.get(), 0, mas*8); + B.get_ref(coll.get(), 0, mas * 8); ASSERT_EQ(mas * 8, B.get_referenced_bytes()); ASSERT_FALSE(B.put_ref(coll.get(), 0, mas, &r)); ASSERT_EQ(mas * 7, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*8)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*7, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 8)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 7, mas, &r)); ASSERT_EQ(mas * 6, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*8)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*2, mas, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 8)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 2, mas, &r)); ASSERT_EQ(mas * 5, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); ASSERT_TRUE(b.is_allocated(0, 8)); - ASSERT_FALSE(B.put_ref(coll.get(), mas*3, mas*4, &r)); + ASSERT_FALSE(B.put_ref(coll.get(), mas * 3, mas * 4, &r)); ASSERT_EQ(mas * 1, B.get_referenced_bytes()); ASSERT_EQ(1u, r.size()); ASSERT_EQ(0x2001u, r[0].offset); - ASSERT_EQ(mas*6, r[0].length); - ASSERT_TRUE(b.is_allocated(0, mas*2)); - ASSERT_FALSE(b.is_allocated(mas*2, mas*6)); + ASSERT_EQ(mas * 6, r[0].length); + ASSERT_TRUE(b.is_allocated(0, mas * 2)); + ASSERT_FALSE(b.is_allocated(mas * 2, mas * 6)); ASSERT_EQ(2u, b.get_extents().size()); ASSERT_TRUE(b.get_extents()[0].is_valid()); ASSERT_FALSE(b.get_extents()[1].is_valid()); @@ -797,34 +787,34 @@ TEST(Blob, put_ref) cout << "r " << r << " " << b << std::endl; ASSERT_EQ(1u, r.size()); ASSERT_EQ(0x1u, r[0].offset); - ASSERT_EQ(mas*2, r[0].length); + ASSERT_EQ(mas * 2, r[0].length); ASSERT_EQ(1u, b.get_extents().size()); ASSERT_FALSE(b.get_extents()[0].is_valid()); } // verify csum chunk size if factored in properly { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); PExtentVector r; - b.allocated_test(bluestore_pextent_t(0, mas*4)); + b.allocated_test(bluestore_pextent_t(0, mas * 4)); b.init_csum(Checksummer::CSUM_CRC32C, 14, mas * 4); - B.get_ref(coll.get(), 0, mas*4); + B.get_ref(coll.get(), 0, mas * 4); ASSERT_EQ(mas * 4, 
B.get_referenced_bytes()); - ASSERT_TRUE(b.is_allocated(0, mas*4)); - ASSERT_FALSE(B.put_ref(coll.get(), 0, mas*3, &r)); + ASSERT_TRUE(b.is_allocated(0, mas * 4)); + ASSERT_FALSE(B.put_ref(coll.get(), 0, mas * 3, &r)); ASSERT_EQ(mas * 1, B.get_referenced_bytes()); cout << "r " << r << " " << b << std::endl; ASSERT_EQ(0u, r.size()); - ASSERT_TRUE(b.is_allocated(0, mas*4)); + ASSERT_TRUE(b.is_allocated(0, mas * 4)); ASSERT_TRUE(b.get_extents()[0].is_valid()); - ASSERT_EQ(mas*4, b.get_extents()[0].length); + ASSERT_EQ(mas * 4, b.get_extents()[0].length); } { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); b.allocated_test(bluestore_pextent_t(0x40101000, 0x4000)); - b.allocated_test(bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, - 0x13000)); + b.allocated_test( + bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x13000)); b.allocated_test(bluestore_pextent_t(0x40118000, 0x7000)); B.get_ref(coll.get(), 0x0, 0x3800); @@ -842,7 +832,7 @@ TEST(Blob, put_ref) } { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); b.allocated_test(bluestore_pextent_t(1, 0x5000)); b.allocated_test(bluestore_pextent_t(2, 0x5000)); B.get_ref(coll.get(), 0x0, 0xa000); @@ -859,7 +849,7 @@ TEST(Blob, put_ref) } { BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); b.allocated_test(bluestore_pextent_t(1, 0x7000)); b.allocated_test(bluestore_pextent_t(2, 0x7000)); B.get_ref(coll.get(), 0x0, 0xe000); @@ -875,19 +865,20 @@ TEST(Blob, put_ref) ASSERT_EQ(1u, r[0].offset); ASSERT_EQ(0x7000u, r[0].length); ASSERT_EQ(2u, r[1].offset); - ASSERT_EQ(0x3000u, r[1].length); // we have 0x1000 bytes less due to - // alignment caused by min_alloc_size = 0x2000 + ASSERT_EQ(0x3000u, + r[1].length); // we have 0x1000 bytes less due to + // alignment caused by min_alloc_size = 0x2000 } { BlueStore store(g_ceph_context, "", 0x4000); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); BlueStore::Blob B(coll.get()); - bluestore_blob_t& b = B.dirty_blob(); + bluestore_blob_t &b = B.dirty_blob(); b.allocated_test(bluestore_pextent_t(1, 0x5000)); b.allocated_test(bluestore_pextent_t(2, 0x7000)); B.get_ref(coll.get(), 0x0, 0xc000); @@ -908,8 +899,7 @@ TEST(Blob, put_ref) } } -TEST(bluestore_blob_t, can_split) -{ +TEST(bluestore_blob_t, can_split) { bluestore_blob_t a; ASSERT_TRUE(a.can_split()); a.flags = bluestore_blob_t::FLAG_SHARED; @@ -920,8 +910,7 @@ TEST(bluestore_blob_t, can_split) ASSERT_FALSE(a.can_split()); } -TEST(bluestore_blob_t, can_split_at) -{ +TEST(bluestore_blob_t, can_split_at) { bluestore_blob_t a; a.allocated_test(bluestore_pextent_t(0x10000, 0x2000)); a.allocated_test(bluestore_pextent_t(0x20000, 0x2000)); @@ -934,14 +923,13 @@ TEST(bluestore_blob_t, can_split_at) ASSERT_FALSE(a.can_split_at(0x2800)); } -TEST(bluestore_blob_t, prune_tail) -{ +TEST(bluestore_blob_t, prune_tail) { bluestore_blob_t a; a.allocated_test(bluestore_pextent_t(0x10000, 0x2000)); a.allocated_test(bluestore_pextent_t(0x20000, 0x2000)); 
ASSERT_FALSE(a.can_prune_tail()); a.allocated_test( - bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x2000)); + bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x2000)); ASSERT_TRUE(a.can_prune_tail()); a.prune_tail(); ASSERT_FALSE(a.can_prune_tail()); @@ -949,7 +937,7 @@ TEST(bluestore_blob_t, prune_tail) ASSERT_EQ(0x4000u, a.get_logical_length()); a.allocated_test( - bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x2000)); + bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x2000)); a.init_csum(Checksummer::CSUM_CRC32C_8, 12, 0x6000); ASSERT_EQ(6u, a.csum_data.length()); ASSERT_TRUE(a.can_prune_tail()); @@ -961,17 +949,16 @@ TEST(bluestore_blob_t, prune_tail) bluestore_blob_t b; b.allocated_test( - bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x2000)); + bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, 0x2000)); ASSERT_FALSE(a.can_prune_tail()); } -TEST(Blob, split) -{ +TEST(Blob, split) { BlueStore store(g_ceph_context, "", 4096); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); { BlueStore::Blob L(coll.get()); @@ -1017,13 +1004,12 @@ TEST(Blob, split) } } -TEST(Blob, legacy_decode) -{ +TEST(Blob, legacy_decode) { BlueStore store(g_ceph_context, "", 4096); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); bufferlist bl, bl2; { @@ -1040,34 +1026,26 @@ TEST(Blob, legacy_decode) size_t bound = 0, bound2 = 0; - B.bound_encode( - bound, - 1, /*struct_v*/ - 0, /*sbid*/ - false); + B.bound_encode(bound, 1, /*struct_v*/ + 0, /*sbid*/ + false); fake_ref_map.bound_encode(bound); - B.bound_encode( - bound2, - 2, /*struct_v*/ - 0, /*sbid*/ - true); + B.bound_encode(bound2, 2, /*struct_v*/ + 0, /*sbid*/ + true); { auto app = bl.get_contiguous_appender(bound); auto app2 = bl2.get_contiguous_appender(bound2); - B.encode( - app, - 1, /*struct_v*/ - 0, /*sbid*/ - false); + B.encode(app, 1, /*struct_v*/ + 0, /*sbid*/ + false); fake_ref_map.encode(app); - B.encode( - app2, - 2, /*struct_v*/ - 0, /*sbid*/ - true); + B.encode(app2, 2, /*struct_v*/ + 0, /*sbid*/ + true); } auto p = bl.front().begin_deep(); @@ -1076,37 +1054,30 @@ TEST(Blob, legacy_decode) BlueStore::Blob Bres2(coll.get()); uint64_t sbid, sbid2; - Bres.decode( - p, - 1, /*struct_v*/ - &sbid, - true, - coll.get()); - Bres2.decode( - p2, - 2, /*struct_v*/ - &sbid2, - true, - coll.get()); + Bres.decode(p, 1, /*struct_v*/ + &sbid, true, coll.get()); + Bres2.decode(p2, 2, /*struct_v*/ + &sbid2, true, coll.get()); ASSERT_EQ(0xff0u + 1u, Bres.get_blob_use_tracker().get_referenced_bytes()); ASSERT_EQ(0xff0u + 1u, Bres2.get_blob_use_tracker().get_referenced_bytes()); - ASSERT_TRUE(Bres.get_blob_use_tracker().equal(Bres2.get_blob_use_tracker())); + ASSERT_TRUE( + 
Bres.get_blob_use_tracker().equal(Bres2.get_blob_use_tracker())); } } -TEST(ExtentMap, seek_lextent) -{ +TEST(ExtentMap, seek_lextent) { BlueStore store(g_ceph_context, "", 4096); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); BlueStore::Onode onode(coll.get(), ghobject_t(), ""); - BlueStore::ExtentMap em(&onode, - g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap em( + &onode, + g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); BlueStore::BlobRef br(coll->new_blob()); ASSERT_EQ(em.extent_map.end(), em.seek_lextent(0)); @@ -1148,17 +1119,17 @@ TEST(ExtentMap, seek_lextent) ASSERT_EQ(em.extent_map.end(), em.seek_lextent(500)); } -TEST(ExtentMap, has_any_lextents) -{ +TEST(ExtentMap, has_any_lextents) { BlueStore store(g_ceph_context, "", 4096); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); BlueStore::Onode onode(coll.get(), ghobject_t(), ""); - BlueStore::ExtentMap em(&onode, - g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap em( + &onode, + g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); BlueStore::BlobRef b(coll->new_blob()); ASSERT_FALSE(em.has_any_lextents(0, 0)); @@ -1198,26 +1169,25 @@ TEST(ExtentMap, has_any_lextents) ASSERT_FALSE(em.has_any_lextents(500, 1000)); } -void erase_and_delete(BlueStore::ExtentMap& em, size_t v) -{ +void erase_and_delete(BlueStore::ExtentMap &em, size_t v) { auto d = em.find(v); ASSERT_NE(d, em.extent_map.end()); em.extent_map.erase(d); delete &*d; } -TEST(ExtentMap, compress_extent_map) -{ +TEST(ExtentMap, compress_extent_map) { BlueStore store(g_ceph_context, "", 4096); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); - + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); + auto coll = ceph::make_ref(&store, oc, bc, coll_t()); BlueStore::Onode onode(coll.get(), ghobject_t(), ""); - BlueStore::ExtentMap em(&onode, - g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap em( + &onode, + g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); BlueStore::BlobRef b1(coll->new_blob()); BlueStore::BlobRef b2(coll->new_blob()); BlueStore::BlobRef b3(coll->new_blob()); @@ -1273,7 +1243,7 @@ public: static constexpr uint32_t au_size = 4096; uint32_t blob_size = 65536; - size_t csum_order = 12; //1^12 = 4096 bytes + size_t csum_order = 12; // 1^12 = 4096 bytes 
struct au { uint32_t chksum; @@ -1284,35 +1254,30 @@ public: // test onode that glues some simplifications in representation // with actual BlueStore's onode struct t_onode { - BlueStore::OnodeRef onode; //actual BS onode - std::vector data; //map to AUs + BlueStore::OnodeRef onode; // actual BS onode + std::vector data; // map to AUs static constexpr uint32_t empty = std::numeric_limits::max(); }; - void print(std::ostream& out, t_onode& onode) - { + void print(std::ostream &out, t_onode &onode) { for (size_t i = 0; i < onode.data.size(); ++i) { - if (i != 0) out << " "; + if (i != 0) + out << " "; if (onode.data[i] == t_onode::empty) { - out << "-"; + out << "-"; } else { - out << std::hex << onode.data[i] - << "/" << disk[onode.data[i]].chksum - << ":" << std::dec << disk[onode.data[i]].refs; + out << std::hex << onode.data[i] << "/" << disk[onode.data[i]].chksum + << ":" << std::dec << disk[onode.data[i]].refs; } } } - explicit ExtentMapFixture() - : store(g_ceph_context, "", au_size) - { + explicit ExtentMapFixture() : store(g_ceph_context, "", au_size) { oc = BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); - bc = BlueStore::BufferCacheShard::create(g_ceph_context, "lru", NULL); + bc = BlueStore::BufferCacheShard::create(&store, "lru", NULL); coll = ceph::make_ref(&store, oc, bc, coll_t()); } - void SetUp() override { - } - void TearDown() override { - } + void SetUp() override {} + void TearDown() override {} // takes new space from disk, initializes csums // returns index of first au @@ -1326,15 +1291,13 @@ public: } return pos; } - void release(uint32_t& au_idx) { + void release(uint32_t &au_idx) { if (au_idx != t_onode::empty) { disk_unref(au_idx); } au_idx = t_onode::empty; } - void disk_ref(uint32_t au_idx) { - ++disk[au_idx].refs; - } + void disk_ref(uint32_t au_idx) { ++disk[au_idx].refs; } void disk_unref(uint32_t au_idx) { ceph_assert(disk[au_idx].refs > 0); --disk[au_idx].refs; @@ -1346,16 +1309,16 @@ public: return res; } - void fillup(t_onode& onode, uint32_t end) { + void fillup(t_onode &onode, uint32_t end) { if (end > onode.data.size()) { size_t e = onode.data.size(); onode.data.resize(end); for (; e < end; ++e) { - onode.data[e] = t_onode::empty; + onode.data[e] = t_onode::empty; } } } - void punch_hole(t_onode& onode, uint32_t off, uint32_t len) { + void punch_hole(t_onode &onode, uint32_t off, uint32_t len) { ceph_assert((off % au_size) == 0); ceph_assert((len % au_size) == 0); uint32_t i = off / au_size; @@ -1363,14 +1326,14 @@ public: fillup(onode, end); while (i < end && i < onode.data.size()) { if (onode.data[i] != t_onode::empty) - release(onode.data[i]); + release(onode.data[i]); onode.data[i] = t_onode::empty; i++; } store.debug_punch_hole(coll, onode.onode, off, len); } - void write(t_onode& onode, uint32_t off, uint32_t len) { + void write(t_onode &onode, uint32_t off, uint32_t len) { ceph_assert((off % au_size) == 0); ceph_assert((len % au_size) == 0); punch_hole(onode, off, len); @@ -1389,30 +1352,30 @@ public: } // below simulation of write performed by BlueStore::do_write() - auto helper_blob_write = [&]( - uint32_t log_off, // logical offset of blob to put to onode - uint32_t empty_aus, // amount of unreferenced aus in the beginning - uint32_t first_au, // first au that will be referenced - uint32_t num_aus // number of aus, first, first+1.. 
first+num_au-1 - ) { - uint32_t blob_length = (empty_aus + num_aus) * au_size; - BlueStore::BlobRef b(coll->new_blob()); - bluestore_blob_t& bb = b->dirty_blob(); - bb.init_csum(Checksummer::CSUM_CRC32C, csum_order, blob_length); - for(size_t i = 0; i < num_aus; ++i) { - bb.set_csum_item(empty_aus + i, disk[first_au + i].chksum); - } - - PExtentVector pextents; - pextents.emplace_back(first_au * au_size, num_aus * au_size); - bb.allocated(empty_aus * au_size, num_aus * au_size, pextents); - - auto *ext = new BlueStore::Extent(log_off, empty_aus * au_size, - num_aus * au_size, b); - onode.onode->extent_map.extent_map.insert(*ext); - b->get_ref(coll.get(), empty_aus * au_size, num_aus * au_size); - bb.mark_used(empty_aus * au_size, num_aus * au_size); - }; + auto helper_blob_write = + [&](uint32_t log_off, // logical offset of blob to put to onode + uint32_t empty_aus, // amount of unreferenced aus in the beginning + uint32_t first_au, // first au that will be referenced + uint32_t num_aus // number of aus, first, first+1.. first+num_au-1 + ) { + uint32_t blob_length = (empty_aus + num_aus) * au_size; + BlueStore::BlobRef b(coll->new_blob()); + bluestore_blob_t &bb = b->dirty_blob(); + bb.init_csum(Checksummer::CSUM_CRC32C, csum_order, blob_length); + for (size_t i = 0; i < num_aus; ++i) { + bb.set_csum_item(empty_aus + i, disk[first_au + i].chksum); + } + + PExtentVector pextents; + pextents.emplace_back(first_au * au_size, num_aus * au_size); + bb.allocated(empty_aus * au_size, num_aus * au_size, pextents); + + auto *ext = new BlueStore::Extent(log_off, empty_aus * au_size, + num_aus * au_size, b); + onode.onode->extent_map.extent_map.insert(*ext); + b->get_ref(coll.get(), empty_aus * au_size, num_aus * au_size); + bb.mark_used(empty_aus * au_size, num_aus * au_size); + }; size_t off_blob_aligned = p2align(off, blob_size); size_t off_blob_roundup = p2align(off + blob_size, blob_size); @@ -1431,7 +1394,7 @@ public: }; } - void dup(t_onode& ofrom, t_onode& oto, uint64_t off, uint64_t len) { + void dup(t_onode &ofrom, t_onode &oto, uint64_t off, uint64_t len) { ceph_assert((off % au_size) == 0); ceph_assert((len % au_size) == 0); punch_hole(oto, off, len); @@ -1443,84 +1406,87 @@ public: while (i < end) { oto.data[i] = ofrom.data[i]; if (oto.data[i] != t_onode::empty) { - disk_ref(oto.data[i]); + disk_ref(oto.data[i]); } ++i; } BlueStore::TransContext txc(store.cct, coll.get(), nullptr, nullptr); - ofrom.onode->extent_map.dup_esb(&store, &txc, coll, ofrom.onode, oto.onode, off, len, off); + ofrom.onode->extent_map.dup_esb(&store, &txc, coll, ofrom.onode, oto.onode, + off, len, off); } - int32_t compare(t_onode& onode) { + int32_t compare(t_onode &onode) { BlueStore::ExtentMap::debug_au_vector_t debug = - onode.onode->extent_map.debug_list_disk_layout(); + onode.onode->extent_map.debug_list_disk_layout(); size_t pos = 0; for (size_t i = 0; i < debug.size(); ++i) { if (debug[i].disk_offset == -1ULL) { - size_t len = debug[i].disk_length; - size_t l = len / au_size; - if (pos + l > onode.data.size()) { - return pos + l; - } - while (l > 0) { - if (onode.data[pos] != t_onode::empty) { - return pos; - } - --l; - ++pos; - }; + size_t len = debug[i].disk_length; + size_t l = len / au_size; + if (pos + l > onode.data.size()) { + return pos + l; + } + while (l > 0) { + if (onode.data[pos] != t_onode::empty) { + return pos; + } + --l; + ++pos; + }; } else { - ceph_assert(pos < onode.data.size()); - uint32_t au = onode.data[pos]; - if (debug[i].disk_offset != au * au_size || - debug[i].disk_length != 
au_size || - debug[i].chksum != disk[au].chksum) { - return pos; - } - if ((int32_t)debug[i].ref_cnts == -1) { - if (disk[au].refs != 1) { - return pos; - } - } else { - if (disk[au].refs != debug[i].ref_cnts) { - return pos; - } - } - ++pos; + ceph_assert(pos < onode.data.size()); + uint32_t au = onode.data[pos]; + if (debug[i].disk_offset != au * au_size || + debug[i].disk_length != au_size || + debug[i].chksum != disk[au].chksum) { + return pos; + } + if ((int32_t)debug[i].ref_cnts == -1) { + if (disk[au].refs != 1) { + return pos; + } + } else { + if (disk[au].refs != debug[i].ref_cnts) { + return pos; + } + } + ++pos; } } // remaining aus must be empty while (pos < onode.data.size()) { if (onode.data[pos] != t_onode::empty) { - return pos; + return pos; } ++pos; } return -1; } - bool check(t_onode& onode) { + bool check(t_onode &onode) { int32_t res = compare(onode); if (res != -1) { - cout << "Discrepancy at 0x" << std::hex << res * au_size << std::dec << std::endl; + cout << "Discrepancy at 0x" << std::hex << res * au_size << std::dec + << std::endl; cout << "Simulated: "; print(cout, onode); cout << std::endl; - cout << "Onode: " << onode.onode->extent_map.debug_list_disk_layout() << std::endl; + cout << "Onode: " << onode.onode->extent_map.debug_list_disk_layout() + << std::endl; return false; } return true; } - void print(t_onode& onode) { + void print(t_onode &onode) { cout << "Simulated: "; print(cout, onode); cout << std::endl; - cout << "Onode: " << onode.onode->extent_map.debug_list_disk_layout() << std::endl; + cout << "Onode: " << onode.onode->extent_map.debug_list_disk_layout() + << std::endl; } }; -TEST_F(ExtentMapFixture, walk) -{ +TEST_F(ExtentMapFixture, walk) { std::vector X; for (size_t i = 0; i < 100; i++) { X.push_back(create()); @@ -1528,15 +1494,14 @@ TEST_F(ExtentMapFixture, walk) for (size_t i = 0; i < 100 - 1; i++) { write(X[i], (i + 2) * au_size, 4 * au_size); - dup(X[i], X[i+1], (i + 1) * au_size, 8 * au_size); + dup(X[i], X[i + 1], (i + 1) * au_size, 8 * au_size); } for (size_t i = 0; i < 100; i++) { ASSERT_EQ(check(X[i]), true); } } -TEST_F(ExtentMapFixture, pyramid) -{ +TEST_F(ExtentMapFixture, pyramid) { constexpr size_t H = 100; std::vector X; for (size_t i = 0; i < H; i++) { @@ -1552,8 +1517,7 @@ TEST_F(ExtentMapFixture, pyramid) } } -TEST_F(ExtentMapFixture, rain) -{ +TEST_F(ExtentMapFixture, rain) { constexpr size_t H = 100; constexpr size_t W = 100; std::vector X; @@ -1569,8 +1533,7 @@ TEST_F(ExtentMapFixture, rain) } } -TEST_F(ExtentMapFixture, pollock) -{ +TEST_F(ExtentMapFixture, pollock) { constexpr size_t H = 100; constexpr size_t W = 100; std::vector X; @@ -1590,8 +1553,7 @@ TEST_F(ExtentMapFixture, pollock) } } -TEST_F(ExtentMapFixture, carousel) -{ +TEST_F(ExtentMapFixture, carousel) { constexpr size_t R = 10; constexpr size_t CNT = 300; constexpr size_t W = 100; @@ -1612,8 +1574,7 @@ TEST_F(ExtentMapFixture, carousel) } } -TEST_F(ExtentMapFixture, petri) -{ +TEST_F(ExtentMapFixture, petri) { constexpr size_t R = 10; constexpr size_t CNT = 300; constexpr size_t W = 100; @@ -1637,25 +1598,25 @@ TEST_F(ExtentMapFixture, petri) } } -TEST(ExtentMap, dup_extent_map) -{ +TEST(ExtentMap, dup_extent_map) { BlueStore store(g_ceph_context, "", 4096); - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", 
NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); - size_t csum_order = 12; //1^12 = 4096 bytes + size_t csum_order = 12; // 1^12 = 4096 bytes auto coll = ceph::make_ref(&store, oc, bc, coll_t()); std::unique_ptr formatter(Formatter::create("json")); /////////////////////////// - //constructing onode1 - BlueStore::OnodeRef onode1(new BlueStore::Onode(coll.get(), ghobject_t(), "")); - - //BlueStore::ExtentMap em1(&onode1, - // g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); - BlueStore::ExtentMap& em1 = onode1->extent_map; + // constructing onode1 + BlueStore::OnodeRef onode1( + new BlueStore::Onode(coll.get(), ghobject_t(), "")); + + // BlueStore::ExtentMap em1(&onode1, + // g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap &em1 = onode1->extent_map; /////////////////////////// // constructing extent/Blob: 0x0~2000 at <0x100000~2000> size_t ext1_offs = 0x0; @@ -1664,7 +1625,7 @@ TEST(ExtentMap, dup_extent_map) BlueStore::BlobRef b1 = coll->new_blob(); auto &_b1 = b1->dirty_blob(); _b1.init_csum(Checksummer::CSUM_CRC32C, csum_order, ext1_len); - for(size_t i = 0; i < _b1.get_csum_count(); i++) { + for (size_t i = 0; i < _b1.get_csum_count(); i++) { *(_b1.get_csum_item_ptr(i)) = i + 1; } PExtentVector pextents; @@ -1677,16 +1638,18 @@ TEST(ExtentMap, dup_extent_map) _b1.mark_used(ext1->blob_offset, ext1->length); /////////////////////////// - //constructing onode2 which is a full clone from onode1 - BlueStore::OnodeRef onode2(new BlueStore::Onode(coll.get(), ghobject_t(), "")); - //BlueStore::ExtentMap em2(&onode2, - // g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); - BlueStore::ExtentMap& em2 = onode2->extent_map; + // constructing onode2 which is a full clone from onode1 + BlueStore::OnodeRef onode2( + new BlueStore::Onode(coll.get(), ghobject_t(), "")); + // BlueStore::ExtentMap em2(&onode2, + // g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap &em2 = onode2->extent_map; { BlueStore::TransContext txc(store.cct, coll.get(), nullptr, nullptr); - //em1.dup(&store, &txc, coll, em2, ext1_offs, ext1_len, ext1_offs); - onode1->extent_map.dup_esb(&store, &txc, coll, onode1, onode2, ext1_offs, ext1_len, ext1_offs); + // em1.dup(&store, &txc, coll, em2, ext1_offs, ext1_len, ext1_offs); + onode1->extent_map.dup_esb(&store, &txc, coll, onode1, onode2, ext1_offs, + ext1_len, ext1_offs); em1.dump(formatter.get()); // see the log if any formatter->flush(std::cout); @@ -1704,17 +1667,18 @@ TEST(ExtentMap, dup_extent_map) ASSERT_EQ(b1->get_shared_blob(), b2->get_shared_blob()); auto &_b2 = b2->get_blob(); ASSERT_EQ(_b1.get_csum_count(), _b2.get_csum_count()); - for(size_t i = 0; i < _b2.get_csum_count(); i++) { + for (size_t i = 0; i < _b2.get_csum_count(); i++) { ASSERT_EQ(*(_b1.get_csum_item_ptr(i)), *(_b2.get_csum_item_ptr(i))); } } /////////////////////////// - //constructing onode3 which is partial clone (tail part) from onode2 - BlueStore::OnodeRef onode3(new BlueStore::Onode(coll.get(), ghobject_t(), "")); - //BlueStore::ExtentMap em3(&onode3, - // g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); - BlueStore::ExtentMap& em3 = onode3->extent_map; + // constructing onode3 which is partial clone (tail part) from onode2 + BlueStore::OnodeRef onode3( + new BlueStore::Onode(coll.get(), ghobject_t(), "")); + // BlueStore::ExtentMap em3(&onode3, + // 
g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap &em3 = onode3->extent_map; { size_t clone_shift = 0x1000; ceph_assert(ext1_len > clone_shift); @@ -1722,7 +1686,8 @@ TEST(ExtentMap, dup_extent_map) size_t clone_len = ext1_len - clone_shift; BlueStore::TransContext txc(store.cct, coll.get(), nullptr, nullptr); - onode1->extent_map.dup_esb(&store, &txc, coll, onode1, onode3, clone_offs, clone_len, clone_offs); + onode1->extent_map.dup_esb(&store, &txc, coll, onode1, onode3, clone_offs, + clone_len, clone_offs); em1.dump(formatter.get()); // see the log if any formatter->flush(std::cout); std::cout << std::endl; @@ -1746,17 +1711,18 @@ TEST(ExtentMap, dup_extent_map) ASSERT_EQ(ll, ext1_len); auto &_b3 = b3->get_blob(); ASSERT_EQ(_b1.get_csum_count(), _b3.get_csum_count()); - for(size_t i = 0; i < _b3.get_csum_count(); i++) { + for (size_t i = 0; i < _b3.get_csum_count(); i++) { ASSERT_EQ(*(_b1.get_csum_item_ptr(i)), *(_b3.get_csum_item_ptr(i))); } } /////////////////////////// - //constructing onode4 which is partial clone (head part) from onode2 - BlueStore::OnodeRef onode4(new BlueStore::Onode(coll.get(), ghobject_t(), "")); - //BlueStore::ExtentMap em4(&onode4, - // g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); - BlueStore::ExtentMap& em4 = onode4->extent_map; + // constructing onode4 which is partial clone (head part) from onode2 + BlueStore::OnodeRef onode4( + new BlueStore::Onode(coll.get(), ghobject_t(), "")); + // BlueStore::ExtentMap em4(&onode4, + // g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap &em4 = onode4->extent_map; { size_t clone_shift = 0; @@ -1765,7 +1731,8 @@ TEST(ExtentMap, dup_extent_map) size_t clone_offs = ext1_offs + clone_shift; BlueStore::TransContext txc(store.cct, coll.get(), nullptr, nullptr); - onode2->extent_map.dup_esb(&store, &txc, coll, onode2, onode4, clone_offs, clone_len, clone_offs); + onode2->extent_map.dup_esb(&store, &txc, coll, onode2, onode4, clone_offs, + clone_len, clone_offs); em2.dump(formatter.get()); // see the log if any formatter->flush(std::cout); std::cout << std::endl; @@ -1795,15 +1762,13 @@ TEST(ExtentMap, dup_extent_map) ASSERT_EQ(csum_entries, _b4.get_csum_count()); ASSERT_GT(_b2.get_csum_count(), csum_entries); - for(size_t i = 0; i < csum_entries; i++) { + for (size_t i = 0; i < csum_entries; i++) { ASSERT_EQ(*(_b2.get_csum_item_ptr(i)), *(_b4.get_csum_item_ptr(i))); } } } - -void clear_and_dispose(BlueStore::old_extent_map_t& old_em) -{ +void clear_and_dispose(BlueStore::old_extent_map_t &old_em) { auto oep = old_em.begin(); while (oep != old_em.end()) { auto &lo = *oep; @@ -1812,43 +1777,42 @@ void clear_and_dispose(BlueStore::old_extent_map_t& old_em) } } -TEST(GarbageCollector, BasicTest) -{ - BlueStore::OnodeCacheShard *oc = BlueStore::OnodeCacheShard::create( - g_ceph_context, "lru", NULL); - BlueStore::BufferCacheShard *bc = BlueStore::BufferCacheShard::create( - g_ceph_context, "lru", NULL); - +TEST(GarbageCollector, BasicTest) { BlueStore store(g_ceph_context, "", 4096); + BlueStore::OnodeCacheShard *oc = + BlueStore::OnodeCacheShard::create(g_ceph_context, "lru", NULL); + BlueStore::BufferCacheShard *bc = + BlueStore::BufferCacheShard::create(&store, "lru", NULL); + auto coll = ceph::make_ref(&store, oc, bc, coll_t()); BlueStore::Onode onode(coll.get(), ghobject_t(), ""); - BlueStore::ExtentMap em(&onode, - g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap 
em( + &onode, + g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); BlueStore::old_extent_map_t old_extents; - - /* - min_alloc_size = 4096 - original disposition - extent1 - -> blob1 - extent2 - -> blob2 - extent3 - -> blob1 - extent4 - -> blob3 - on write(300~100) resulted in - extent1 - -> blob1 - extent2 - -> blob2 - extent3 - -> blob4 - extent4 - -> blob3 - */ + /* + min_alloc_size = 4096 + original disposition + extent1 + -> blob1 + extent2 + -> blob2 + extent3 + -> blob1 + extent4 + -> blob3 + on write(300~100) resulted in + extent1 + -> blob1 + extent2 + -> blob2 + extent3 + -> blob4 + extent4 + -> blob3 + */ { BlueStore::GarbageCollector gc(g_ceph_context); int64_t saving; @@ -1870,11 +1834,11 @@ TEST(GarbageCollector, BasicTest) em.extent_map.insert(*new BlueStore::Extent(4096, 0, 10, b3)); b3->get_ref(coll.get(), 0, 10); - old_extents.push_back(*new BlueStore::OldExtent(300, 300, 10, b1)); + old_extents.push_back(*new BlueStore::OldExtent(300, 300, 10, b1)); saving = gc.estimate(300, 100, em, old_extents, 4096); ASSERT_EQ(saving, 1); - auto& to_collect = gc.get_extents_to_collect(); + auto &to_collect = gc.get_extents_to_collect(); ASSERT_EQ(to_collect.num_intervals(), 1u); { auto it = to_collect.begin(); @@ -1885,31 +1849,32 @@ TEST(GarbageCollector, BasicTest) em.clear(); clear_and_dispose(old_extents); } - /* - original disposition - min_alloc_size = 0x10000 - extent1 - -> blob1 - Write 0x8000~37000 resulted in the following extent map prior to GC - for the last write_small(0x30000~0xf000): - - extent1 - -> blob1 - extent2 - -> blob2 - extent3 - -> blob3 - extent4 - -> blob4 - extent5 - -> blob1 - */ + /* + original disposition + min_alloc_size = 0x10000 + extent1 + -> blob1 + Write 0x8000~37000 resulted in the following extent map prior to GC + for the last write_small(0x30000~0xf000): + + extent1 + -> blob1 + extent2 + -> blob2 + extent3 + -> blob3 + extent4 + -> blob4 + extent5 + -> blob1 + */ { BlueStore store(g_ceph_context, "", 0x10000); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); BlueStore::Onode onode(coll.get(), ghobject_t(), ""); - BlueStore::ExtentMap em(&onode, - g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap em( + &onode, + g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); BlueStore::old_extent_map_t old_extents; BlueStore::GarbageCollector gc(g_ceph_context); @@ -1927,32 +1892,34 @@ TEST(GarbageCollector, BasicTest) em.extent_map.insert(*new BlueStore::Extent(0, 0, 0x8000, b1)); b1->get_ref(coll.get(), 0, 0x8000); em.extent_map.insert( - *new BlueStore::Extent(0x8000, 0x8000, 0x8000, b2)); // new extent + *new BlueStore::Extent(0x8000, 0x8000, 0x8000, b2)); // new extent b2->get_ref(coll.get(), 0x8000, 0x8000); em.extent_map.insert( - *new BlueStore::Extent(0x10000, 0, 0x20000, b3)); // new extent + *new BlueStore::Extent(0x10000, 0, 0x20000, b3)); // new extent b3->get_ref(coll.get(), 0, 0x20000); em.extent_map.insert( - *new BlueStore::Extent(0x30000, 0, 0xf000, b4)); // new extent + *new BlueStore::Extent(0x30000, 0, 0xf000, b4)); // new extent b4->get_ref(coll.get(), 0, 0xf000); em.extent_map.insert(*new BlueStore::Extent(0x3f000, 0x3f000, 0x1000, b1)); b1->get_ref(coll.get(), 0x3f000, 0x1000); - old_extents.push_back(*new BlueStore::OldExtent(0x8000, 0x8000, 0x8000, b1)); old_extents.push_back( - *new BlueStore::OldExtent(0x10000, 0x10000, 0x20000, b1)); - old_extents.push_back(*new BlueStore::OldExtent(0x30000, 0x30000, 0xf000, b1)); + *new 
BlueStore::OldExtent(0x8000, 0x8000, 0x8000, b1)); + old_extents.push_back( + *new BlueStore::OldExtent(0x10000, 0x10000, 0x20000, b1)); + old_extents.push_back( + *new BlueStore::OldExtent(0x30000, 0x30000, 0xf000, b1)); saving = gc.estimate(0x30000, 0xf000, em, old_extents, 0x10000); ASSERT_EQ(saving, 2); - auto& to_collect = gc.get_extents_to_collect(); + auto &to_collect = gc.get_extents_to_collect(); ASSERT_EQ(to_collect.num_intervals(), 2u); { auto it1 = to_collect.begin(); auto it2 = ++to_collect.begin(); using p = decltype(*it1); { - auto v1 = p{0x0ul ,0x8000ul}; + auto v1 = p{0x0ul, 0x8000ul}; auto v2 = p{0x0ul, 0x8000ul}; ASSERT_TRUE(*it1 == v1 || *it2 == v2); } @@ -1966,20 +1933,20 @@ TEST(GarbageCollector, BasicTest) em.clear(); clear_and_dispose(old_extents); } - /* - original disposition - min_alloc_size = 0x1000 - extent1 - -> blob1 - write 0x3000~4000 resulted in the following extent map - (future feature - suppose we can compress incoming write prior to - GC invocation) - - extent1 - -> blob1 - extent2 - -> blob2 - */ + /* + original disposition + min_alloc_size = 0x1000 + extent1 + -> blob1 + write 0x3000~4000 resulted in the following extent map + (future feature - suppose we can compress incoming write prior to + GC invocation) + + extent1 + -> blob1 + extent2 + -> blob2 + */ { BlueStore::GarbageCollector gc(g_ceph_context); int64_t saving; @@ -1993,45 +1960,47 @@ TEST(GarbageCollector, BasicTest) em.extent_map.insert(*new BlueStore::Extent(0, 0, 0x3000, b1)); b1->get_ref(coll.get(), 0, 0x3000); em.extent_map.insert( - *new BlueStore::Extent(0x3000, 0, 0x4000, b2)); // new extent + *new BlueStore::Extent(0x3000, 0, 0x4000, b2)); // new extent b2->get_ref(coll.get(), 0, 0x4000); - old_extents.push_back(*new BlueStore::OldExtent(0x3000, 0x3000, 0x1000, b1)); + old_extents.push_back( + *new BlueStore::OldExtent(0x3000, 0x3000, 0x1000, b1)); saving = gc.estimate(0x3000, 0x4000, em, old_extents, 0x1000); ASSERT_EQ(saving, 0); - auto& to_collect = gc.get_extents_to_collect(); + auto &to_collect = gc.get_extents_to_collect(); ASSERT_EQ(to_collect.num_intervals(), 0u); em.clear(); clear_and_dispose(old_extents); } - /* - original disposition - min_alloc_size = 0x10000 - extent0 - -> blob0 - extent1 - -> blob1 - write 0x8000~37000 resulted in the following extent map prior - to GC for the last write_small(0x30000~0xf000) - - extent0 - -> blob0 - extent2 - -> blob2 - extent3 - -> blob3 - extent4 - -> blob4 - extent5 - -> blob1 - */ + /* + original disposition + min_alloc_size = 0x10000 + extent0 + -> blob0 + extent1 + -> blob1 + write 0x8000~37000 resulted in the following extent map prior + to GC for the last write_small(0x30000~0xf000) + + extent0 + -> blob0 + extent2 + -> blob2 + extent3 + -> blob3 + extent4 + -> blob4 + extent5 + -> blob1 + */ { BlueStore store(g_ceph_context, "", 0x10000); auto coll = ceph::make_ref(&store, oc, bc, coll_t()); BlueStore::Onode onode(coll.get(), ghobject_t(), ""); - BlueStore::ExtentMap em(&onode, - g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); + BlueStore::ExtentMap em( + &onode, + g_ceph_context->_conf->bluestore_extent_map_inline_shard_prealloc_size); BlueStore::old_extent_map_t old_extents; BlueStore::GarbageCollector gc(g_ceph_context); @@ -2052,26 +2021,27 @@ TEST(GarbageCollector, BasicTest) em.extent_map.insert(*new BlueStore::Extent(0, 0, 0x8000, b0)); b0->get_ref(coll.get(), 0, 0x8000); em.extent_map.insert( - *new BlueStore::Extent(0x8000, 0x8000, 0x8000, b2)); // new extent + *new 
BlueStore::Extent(0x8000, 0x8000, 0x8000, b2)); // new extent b2->get_ref(coll.get(), 0x8000, 0x8000); em.extent_map.insert( - *new BlueStore::Extent(0x10000, 0, 0x20000, b3)); // new extent + *new BlueStore::Extent(0x10000, 0, 0x20000, b3)); // new extent b3->get_ref(coll.get(), 0, 0x20000); em.extent_map.insert( - *new BlueStore::Extent(0x30000, 0, 0xf000, b4)); // new extent + *new BlueStore::Extent(0x30000, 0, 0xf000, b4)); // new extent b4->get_ref(coll.get(), 0, 0xf000); em.extent_map.insert(*new BlueStore::Extent(0x3f000, 0x1f000, 0x1000, b1)); b1->get_ref(coll.get(), 0x1f000, 0x1000); - old_extents.push_back(*new BlueStore::OldExtent(0x8000, 0x8000, 0x8000, b0)); old_extents.push_back( - *new BlueStore::OldExtent(0x10000, 0x10000, 0x10000, b0)); + *new BlueStore::OldExtent(0x8000, 0x8000, 0x8000, b0)); + old_extents.push_back( + *new BlueStore::OldExtent(0x10000, 0x10000, 0x10000, b0)); old_extents.push_back( - *new BlueStore::OldExtent(0x20000, 0x00000, 0x1f000, b1)); + *new BlueStore::OldExtent(0x20000, 0x00000, 0x1f000, b1)); saving = gc.estimate(0x30000, 0xf000, em, old_extents, 0x10000); ASSERT_EQ(saving, 2); - auto& to_collect = gc.get_extents_to_collect(); + auto &to_collect = gc.get_extents_to_collect(); ASSERT_EQ(to_collect.num_intervals(), 2u); { auto it1 = to_collect.begin(); @@ -2080,7 +2050,7 @@ TEST(GarbageCollector, BasicTest) { auto v1 = p{0x0ul, 0x8000ul}; auto v2 = p{0x0ul, 0x8000ul}; - ASSERT_TRUE(*it1 == v1 || *it2 == v2); + ASSERT_TRUE(*it1 == v1 || *it2 == v2); } { auto v1 = p{0x3f000ul, 0x1000ul}; @@ -2094,8 +2064,7 @@ TEST(GarbageCollector, BasicTest) } } -TEST(BlueStoreRepairer, StoreSpaceTracker) -{ +TEST(BlueStoreRepairer, StoreSpaceTracker) { BlueStoreRepairer::StoreSpaceTracker bmap0; bmap0.init((uint64_t)4096 * 1024 * 1024 * 1024, 0x1000); ASSERT_EQ(bmap0.granularity, 2 * 1024 * 1024U); @@ -2171,16 +2140,16 @@ TEST(BlueStoreRepairer, StoreSpaceTracker) ASSERT_TRUE(bmap.is_used(hoid, 0xc1000)); interval_set extents; - extents.insert(0,0x500); - extents.insert(0x800,0x100); - extents.insert(0x1000,0x1000); - extents.insert(0xa001,1); - extents.insert(0xa0000,0xff8); + extents.insert(0, 0x500); + extents.insert(0x800, 0x100); + extents.insert(0x1000, 0x1000); + extents.insert(0xa001, 1); + extents.insert(0xa0000, 0xff8); ASSERT_EQ(3u, bmap.filter_out(extents)); ASSERT_TRUE(bmap.is_used(cid)); ASSERT_TRUE(bmap.is_used(hoid)); - + BlueStoreRepairer::StoreSpaceTracker bmap2; bmap2.init((uint64_t)0x3223b1d1000, 0x10000); ASSERT_EQ(0x1a0000u, bmap2.granularity); @@ -2193,8 +2162,7 @@ TEST(BlueStoreRepairer, StoreSpaceTracker) ASSERT_TRUE(bmap2.is_used(hoid, 0x3223b19ffff)); } -TEST(bluestore_blob_t, unused) -{ +TEST(bluestore_blob_t, unused) { { bluestore_blob_t b; uint64_t min_alloc_size = 64 << 10; // 64 kB @@ -2205,7 +2173,8 @@ TEST(bluestore_blob_t, unused) uint64_t suggested_boff = 0; PExtentVector extents; extents.emplace_back(0x1a560000, min_alloc_size); - b.allocated(p2align(suggested_boff, min_alloc_size), 0 /*no matter*/, extents); + b.allocated(p2align(suggested_boff, min_alloc_size), 0 /*no matter*/, + extents); b.mark_used(offset, length); ASSERT_FALSE(b.is_unused(offset, length)); @@ -2235,7 +2204,8 @@ TEST(bluestore_blob_t, unused) uint64_t suggested_boff = 0x11000; PExtentVector extents; extents.emplace_back(0x1a560000, min_alloc_size); - b.allocated(p2align(suggested_boff, min_alloc_size), 0 /*no matter*/, extents); + b.allocated(p2align(suggested_boff, min_alloc_size), 0 /*no matter*/, + extents); b.add_unused(0, offset); b.add_unused(offset 
+ length, min_alloc_size * 2 - offset - length); b.mark_used(offset, length); @@ -2278,7 +2248,8 @@ TEST(bluestore_blob_t, unused) ASSERT_FALSE(b.is_unused(offset, length)); ASSERT_FALSE(b.is_unused(offset, unused_granularity)); - ASSERT_TRUE(b.is_unused(0, offset / unused_granularity * unused_granularity)); + ASSERT_TRUE( + b.is_unused(0, offset / unused_granularity * unused_granularity)); ASSERT_TRUE(b.is_unused(offset + length, offset0 - offset - length)); auto end0_aligned = round_up_to(offset0 + length, unused_granularity); ASSERT_TRUE(b.is_unused(end0_aligned, min_alloc_size * 3 - end0_aligned)); @@ -2289,18 +2260,15 @@ TEST(bluestore_blob_t, unused) // https://tracker.ceph.com/issues/51682 // Basic map_any functionality is tested as well though. // -TEST(bluestore_blob_t, wrong_map_bl_in_51682) -{ +TEST(bluestore_blob_t, wrong_map_bl_in_51682) { { bluestore_blob_t b; uint64_t min_alloc_size = 4 << 10; // 64 kB b.allocated_test(bluestore_pextent_t(0x17ba000, 4 * min_alloc_size)); b.allocated_test(bluestore_pextent_t(0x17bf000, 4 * min_alloc_size)); - b.allocated_test( - bluestore_pextent_t( - bluestore_pextent_t::INVALID_OFFSET, - 1 * min_alloc_size)); + b.allocated_test(bluestore_pextent_t(bluestore_pextent_t::INVALID_OFFSET, + 1 * min_alloc_size)); b.allocated_test(bluestore_pextent_t(0x153c44d000, 7 * min_alloc_size)); b.mark_used(0, 0x8000); @@ -2310,34 +2278,31 @@ TEST(bluestore_blob_t, wrong_map_bl_in_51682) bufferlist bl; bl.append(s); const size_t num_expected_entries = 5; - uint64_t expected[num_expected_entries][2] = { - {0x17ba000, 0x4000}, - {0x17bf000, 0x3000}, - {0x17c0000, 0x3000}, - {0xffffffffffffffff, 0x1000}, - {0x153c44d000, 0x3000}}; + uint64_t expected[num_expected_entries][2] = {{0x17ba000, 0x4000}, + {0x17bf000, 0x3000}, + {0x17c0000, 0x3000}, + {0xffffffffffffffff, 0x1000}, + {0x153c44d000, 0x3000}}; size_t expected_pos = 0; - b.map_bl(0, bl, - [&](uint64_t o, bufferlist& bl) { - ASSERT_EQ(o, expected[expected_pos][0]); - ASSERT_EQ(bl.length(), expected[expected_pos][1]); - ++expected_pos; - }); + b.map_bl(0, bl, [&](uint64_t o, bufferlist &bl) { + ASSERT_EQ(o, expected[expected_pos][0]); + ASSERT_EQ(bl.length(), expected[expected_pos][1]); + ++expected_pos; + }); // 0x5000 is an improper offset presumably provided when doing a repair - b.map_bl(0x5000, bl, - [&](uint64_t o, bufferlist& bl) { - ASSERT_EQ(o, expected[expected_pos][0]); - ASSERT_EQ(bl.length(), expected[expected_pos][1]); - ++expected_pos; - }); + b.map_bl(0x5000, bl, [&](uint64_t o, bufferlist &bl) { + ASSERT_EQ(o, expected[expected_pos][0]); + ASSERT_EQ(bl.length(), expected[expected_pos][1]); + ++expected_pos; + }); ASSERT_EQ(expected_pos, num_expected_entries); } } //--------------------------------------------------------------------------------- -static int verify_extent(const extent_t & ext, const extent_t *ext_arr, uint64_t ext_arr_size, uint64_t idx) -{ - const extent_t & ext_ref = ext_arr[idx]; +static int verify_extent(const extent_t &ext, const extent_t *ext_arr, + uint64_t ext_arr_size, uint64_t idx) { + const extent_t &ext_ref = ext_arr[idx]; if (ext.offset == ext_ref.offset && ext.length == ext_ref.length) { return 0; } else { @@ -2346,25 +2311,27 @@ static int verify_extent(const extent_t & ext, const extent_t *ext_arr, uint64_t std::cerr << "Null extent was returned at idx = " << idx << std::endl; } unsigned start = std::max(((int32_t)(idx)-3), 0); - unsigned end = std::min(idx+3, ext_arr_size); + unsigned end = std::min(idx + 3, ext_arr_size); for (unsigned j = start; j < end; 
j++) { - const extent_t & ext_ref = ext_arr[j]; - std::cerr << j << ") ref_ext = [" << ext_ref.offset << ", " << ext_ref.length << "]" << std::endl; + const extent_t &ext_ref = ext_arr[j]; + std::cerr << j << ") ref_ext = [" << ext_ref.offset << ", " + << ext_ref.length << "]" << std::endl; } - std::cerr << idx << ") ext = [" << ext.offset << ", " << ext.length << "]" << std::endl; + std::cerr << idx << ") ext = [" << ext.offset << ", " << ext.length + << "]" << std::endl; return -1; } } //--------------------------------------------------------------------------------- -static int test_extents(uint64_t index, extent_t *ext_arr, uint64_t ext_arr_size, SimpleBitmap& sbmap, bool set) -{ - const uint64_t MAX_JUMP_BIG = 1523; - const uint64_t MAX_JUMP_SMALL = 19; - const uint64_t MAX_LEN_BIG = 523; - const uint64_t MAX_LEN_SMALL = 23; - - uint64_t n = sbmap.get_size(); +static int test_extents(uint64_t index, extent_t *ext_arr, + uint64_t ext_arr_size, SimpleBitmap &sbmap, bool set) { + const uint64_t MAX_JUMP_BIG = 1523; + const uint64_t MAX_JUMP_SMALL = 19; + const uint64_t MAX_LEN_BIG = 523; + const uint64_t MAX_LEN_SMALL = 23; + + uint64_t n = sbmap.get_size(); uint64_t offset = 0; unsigned length, jump, i; for (i = 0; i < ext_arr_size; i++) { @@ -2392,28 +2359,30 @@ static int test_extents(uint64_t index, extent_t *ext_arr, uint64_t ext_arr_size success = sbmap.clr(offset, length); } if (!success) { - std::cerr << "Failed sbmap." << (set ? "set(" : "clr(") << offset << ", " << length << ")"<< std::endl; + std::cerr << "Failed sbmap." << (set ? "set(" : "clr(") << offset << ", " + << length << ")" << std::endl; return -1; } // if this is not the first entry and no jump -> merge extents - if ( (i==0) || (jump > 0) ) { + if ((i == 0) || (jump > 0)) { ext_arr[i] = {offset, length}; } else { // merge 2 extents - i --; + i--; ext_arr[i].length += length; } offset += length; } unsigned arr_size = std::min((uint64_t)i, ext_arr_size); std::cout << std::hex << std::right; - std::cout << "[" << index << "] " << (set ? "Set::" : "Clr::") << " extents count = 0x" << arr_size; + std::cout << "[" << index << "] " << (set ? "Set::" : "Clr::") + << " extents count = 0x" << arr_size; std::cout << std::dec << std::endl; offset = 0; extent_t ext; - for(unsigned i = 0; i < arr_size; i++) { + for (unsigned i = 0; i < arr_size; i++) { if (set) { ext = sbmap.get_next_set_extent(offset); } else { @@ -2434,43 +2403,46 @@ static int test_extents(uint64_t index, extent_t *ext_arr, uint64_t ext_arr_size if (ext.length == 0) { return 0; } else { - std::cerr << "sbmap.get_next_" << (set ? "set" : "clr") << "_extent(" << offset << ") return length = " << ext.length << std::endl; + std::cerr << "sbmap.get_next_" << (set ? 
"set" : "clr") << "_extent(" + << offset << ") return length = " << ext.length << std::endl; return -1; } } //--------------------------------------------------------------------------------- -TEST(SimpleBitmap, basic) -{ +TEST(SimpleBitmap, basic) { const uint64_t MAX_EXTENTS_COUNT = 7131177; - std::unique_ptr ext_arr = std::make_unique(MAX_EXTENTS_COUNT); + std::unique_ptr ext_arr = + std::make_unique(MAX_EXTENTS_COUNT); ASSERT_TRUE(ext_arr != nullptr); const uint64_t BIT_COUNT = 4ULL << 30; // 4Gb = 512MB SimpleBitmap sbmap(g_ceph_context, BIT_COUNT); // use current time as seed for random generator std::srand(std::time(nullptr)); - for (unsigned i = 0; i < 3; i++ ) { - memset(ext_arr.get(), 0, sizeof(extent_t)*MAX_EXTENTS_COUNT); + for (unsigned i = 0; i < 3; i++) { + memset(ext_arr.get(), 0, sizeof(extent_t) * MAX_EXTENTS_COUNT); sbmap.clear_all(); - ASSERT_TRUE(test_extents(i, ext_arr.get(), MAX_EXTENTS_COUNT, sbmap, true) == 0); + ASSERT_TRUE( + test_extents(i, ext_arr.get(), MAX_EXTENTS_COUNT, sbmap, true) == 0); - memset(ext_arr.get(), 0, sizeof(extent_t)*MAX_EXTENTS_COUNT); + memset(ext_arr.get(), 0, sizeof(extent_t) * MAX_EXTENTS_COUNT); sbmap.set_all(); - ASSERT_TRUE(test_extents(i, ext_arr.get(), MAX_EXTENTS_COUNT, sbmap, false) == 0); + ASSERT_TRUE( + test_extents(i, ext_arr.get(), MAX_EXTENTS_COUNT, sbmap, false) == 0); } } //--------------------------------------------------------------------------------- -static int test_intersections(unsigned test_idx, SimpleBitmap &sbmap, uint8_t map[], uint64_t map_size) -{ - const uint64_t MAX_LEN_BIG = 523; - const uint64_t MAX_LEN_SMALL = 23; +static int test_intersections(unsigned test_idx, SimpleBitmap &sbmap, + uint8_t map[], uint64_t map_size) { + const uint64_t MAX_LEN_BIG = 523; + const uint64_t MAX_LEN_SMALL = 23; - bool success; + bool success; uint64_t set_op_count = 0, clr_op_count = 0; unsigned length, i; - for (i = 0; i < map_size / (MAX_LEN_BIG*2); i++) { + for (i = 0; i < map_size / (MAX_LEN_BIG * 2); i++) { uint64_t offset = (std::rand() % (map_size - 1)); if (i & 1) { length = std::rand() % MAX_LEN_BIG; @@ -2486,22 +2458,23 @@ static int test_intersections(unsigned test_idx, SimpleBitmap &sbmap, uint8_t ma bool set = (std::rand() % 3); if (set) { success = sbmap.set(offset, length); - memset(map+offset, 0xFF, length); + memset(map + offset, 0xFF, length); set_op_count++; } else { success = sbmap.clr(offset, length); - memset(map+offset, 0x0, length); + memset(map + offset, 0x0, length); clr_op_count++; } if (!success) { - std::cerr << "Failed sbmap." << (set ? "set(" : "clr(") << offset << ", " << length << ")"<< std::endl; + std::cerr << "Failed sbmap." << (set ? "set(" : "clr(") << offset << ", " + << length << ")" << std::endl; return -1; } } uint64_t set_bit_count = 0; uint64_t clr_bit_count = 0; - for(uint64_t idx = 0; idx < map_size; idx++) { + for (uint64_t idx = 0; idx < map_size; idx++) { if (map[idx]) { set_bit_count++; success = sbmap.bit_is_set(idx); @@ -2510,37 +2483,42 @@ static int test_intersections(unsigned test_idx, SimpleBitmap &sbmap, uint8_t ma success = sbmap.bit_is_clr(idx); } if (!success) { - std::cerr << "expected: sbmap.bit_is_" << (map[idx] ? "set(" : "clr(") << idx << ")"<< std::endl; + std::cerr << "expected: sbmap.bit_is_" << (map[idx] ? 
"set(" : "clr(") + << idx << ")" << std::endl; return -1; } - } - std::cout << std::hex << std::right << __func__ ; - std::cout << " [" << test_idx << "] set_bit_count = 0x" << std::setfill('0') << std::setw(8) << set_bit_count - << ", clr_bit_count = 0x" << std::setfill('0') << std::setw(8) << clr_bit_count - << ", sum = 0x" << set_bit_count + clr_bit_count << std::endl; + std::cout << std::hex << std::right << __func__; + std::cout << " [" << test_idx << "] set_bit_count = 0x" << std::setfill('0') + << std::setw(8) << set_bit_count << ", clr_bit_count = 0x" + << std::setfill('0') << std::setw(8) << clr_bit_count + << ", sum = 0x" << set_bit_count + clr_bit_count << std::endl; std::cout << std::dec; uint64_t offset = 0; - for(uint64_t i = 0; i < (set_op_count + clr_op_count); i++) { + for (uint64_t i = 0; i < (set_op_count + clr_op_count); i++) { extent_t ext = sbmap.get_next_set_extent(offset); - //std::cout << "set_ext:: " << i << ") [" << ext.offset << ", " << ext.length << "]" << std::endl; + // std::cout << "set_ext:: " << i << ") [" << ext.offset << ", " << + // ext.length << "]" << std::endl; for (uint64_t idx = ext.offset; idx < ext.offset + ext.length; idx++) { if (map[idx] != 0xFF) { - std::cerr << "map[" << idx << "] is clear, but extent [" << ext.offset << ", " << ext.length << "] is set" << std::endl; - return -1; + std::cerr << "map[" << idx << "] is clear, but extent [" << ext.offset + << ", " << ext.length << "] is set" << std::endl; + return -1; } } offset = ext.offset + ext.length; } offset = 0; - for(uint64_t i = 0; i < (set_op_count + clr_op_count); i++) { + for (uint64_t i = 0; i < (set_op_count + clr_op_count); i++) { extent_t ext = sbmap.get_next_clr_extent(offset); - //std::cout << "clr_ext:: " << i << ") [" << ext.offset << ", " << ext.length << "]" << std::endl; + // std::cout << "clr_ext:: " << i << ") [" << ext.offset << ", " << + // ext.length << "]" << std::endl; for (uint64_t idx = ext.offset; idx < ext.offset + ext.length; idx++) { - if (map[idx] ) { - std::cerr << "map[" << idx << "] is set, but extent [" << ext.offset << ", " << ext.length << "] is free" << std::endl; - return -1; + if (map[idx]) { + std::cerr << "map[" << idx << "] is set, but extent [" << ext.offset + << ", " << ext.length << "] is free" << std::endl; + return -1; } } offset = ext.offset + ext.length; @@ -2550,18 +2528,17 @@ static int test_intersections(unsigned test_idx, SimpleBitmap &sbmap, uint8_t ma } //--------------------------------------------------------------------------------- -TEST(SimpleBitmap, intersection) -{ - const uint64_t MAP_SIZE = 1ULL << 30; // 1G +TEST(SimpleBitmap, intersection) { + const uint64_t MAP_SIZE = 1ULL << 30; // 1G SimpleBitmap sbmap(g_ceph_context, MAP_SIZE); // use current time as seed for random generator std::srand(std::time(nullptr)); - std::unique_ptr map = std::make_unique (MAP_SIZE); + std::unique_ptr map = std::make_unique(MAP_SIZE); ASSERT_TRUE(map != nullptr); - for (unsigned i = 0; i < 1; i++ ) { + for (unsigned i = 0; i < 1; i++) { sbmap.clear_all(); memset(map.get(), 0, MAP_SIZE); ASSERT_TRUE(test_intersections(i, sbmap, map.get(), MAP_SIZE) == 0); @@ -2572,41 +2549,42 @@ TEST(SimpleBitmap, intersection) } } - //--------------------------------------------------------------------------------- -static int test_extents_boundaries(uint64_t index, extent_t *ext_arr, uint64_t ext_arr_size, SimpleBitmap& sbmap, bool set) -{ - uint64_t n = sbmap.get_size(); +static int test_extents_boundaries(uint64_t index, extent_t *ext_arr, + uint64_t 
ext_arr_size, SimpleBitmap &sbmap, + bool set) { + uint64_t n = sbmap.get_size(); uint64_t offset = 0, k = 0; - for(unsigned i = 0; i < 64; i++) { + for (unsigned i = 0; i < 64; i++) { offset += i; if (offset >= n) { break; } - for(unsigned length = 1; length <= 128; length++) { + for (unsigned length = 1; length <= 128; length++) { if (offset + length >= n) { - break; + break; } if (k >= ext_arr_size) { - break; + break; } bool success; if (set) { - success = sbmap.set(offset, length); + success = sbmap.set(offset, length); } else { - success = sbmap.clr(offset, length); + success = sbmap.clr(offset, length); } if (!success) { - std::cerr << "Failed sbmap." << (set ? "set(" : "clr(") << offset << ", " << length << ")"<< std::endl; - return -1; + std::cerr << "Failed sbmap." << (set ? "set(" : "clr(") << offset + << ", " << length << ")" << std::endl; + return -1; } ext_arr[k++] = {offset, length}; if (length < 64) { - offset += 64; + offset += 64; } else { - offset += 128; + offset += 128; } } if (k >= ext_arr_size) { @@ -2615,13 +2593,14 @@ static int test_extents_boundaries(uint64_t index, extent_t *ext_arr, uint64_t e } unsigned arr_size = std::min((uint64_t)k, ext_arr_size); - std::cout << std::hex << std::right << __func__ ; - std::cout << " [" << index << "] " << (set ? "Set::" : "Clr::") << " extents count = 0x" << arr_size; + std::cout << std::hex << std::right << __func__; + std::cout << " [" << index << "] " << (set ? "Set::" : "Clr::") + << " extents count = 0x" << arr_size; std::cout << std::dec << std::endl; offset = 0; extent_t ext; - for(unsigned i = 0; i < arr_size; i++) { + for (unsigned i = 0; i < arr_size; i++) { if (set) { ext = sbmap.get_next_set_extent(offset); } else { @@ -2642,17 +2621,17 @@ static int test_extents_boundaries(uint64_t index, extent_t *ext_arr, uint64_t e if (ext.length == 0) { return 0; } else { - std::cerr << "sbmap.get_next_" << (set ? "set" : "clr") << "_extent(" << offset << ") return length = " << ext.length << std::endl; + std::cerr << "sbmap.get_next_" << (set ? 
"set" : "clr") << "_extent(" + << offset << ") return length = " << ext.length << std::endl; return -1; } - } //--------------------------------------------------------------------------------- -TEST(SimpleBitmap, boundaries) -{ +TEST(SimpleBitmap, boundaries) { const uint64_t MAX_EXTENTS_COUNT = 64 << 10; - std::unique_ptr ext_arr = std::make_unique(MAX_EXTENTS_COUNT); + std::unique_ptr ext_arr = + std::make_unique(MAX_EXTENTS_COUNT); ASSERT_TRUE(ext_arr != nullptr); // use current time as seed for random generator @@ -2661,26 +2640,27 @@ TEST(SimpleBitmap, boundaries) uint64_t bit_count = 32 << 20; // 32Mb = 4MB unsigned count = 0; for (unsigned i = 0; i < 64; i++) { - SimpleBitmap sbmap(g_ceph_context, bit_count+i); - memset(ext_arr.get(), 0, sizeof(extent_t)*MAX_EXTENTS_COUNT); + SimpleBitmap sbmap(g_ceph_context, bit_count + i); + memset(ext_arr.get(), 0, sizeof(extent_t) * MAX_EXTENTS_COUNT); sbmap.clear_all(); - ASSERT_TRUE(test_extents_boundaries(count, ext_arr.get(), MAX_EXTENTS_COUNT, sbmap, true) == 0); + ASSERT_TRUE(test_extents_boundaries(count, ext_arr.get(), MAX_EXTENTS_COUNT, + sbmap, true) == 0); - memset(ext_arr.get(), 0, sizeof(extent_t)*MAX_EXTENTS_COUNT); + memset(ext_arr.get(), 0, sizeof(extent_t) * MAX_EXTENTS_COUNT); sbmap.set_all(); - ASSERT_TRUE(test_extents_boundaries(count++, ext_arr.get(), MAX_EXTENTS_COUNT, sbmap, false) == 0); + ASSERT_TRUE(test_extents_boundaries(count++, ext_arr.get(), + MAX_EXTENTS_COUNT, sbmap, false) == 0); } } //--------------------------------------------------------------------------------- -TEST(SimpleBitmap, boundaries2) -{ +TEST(SimpleBitmap, boundaries2) { const uint64_t bit_count_base = 64 << 10; // 64Kb = 8MB - const extent_t null_extent = {0, 0}; + const extent_t null_extent = {0, 0}; for (unsigned i = 0; i < 64; i++) { - uint64_t bit_count = bit_count_base + i; - extent_t full_extent = {0, bit_count}; + uint64_t bit_count = bit_count_base + i; + extent_t full_extent = {0, bit_count}; SimpleBitmap sbmap(g_ceph_context, bit_count); sbmap.set(0, bit_count); @@ -2705,8 +2685,7 @@ TEST(SimpleBitmap, boundaries2) } } -TEST(shared_blob_2hash_tracker_t, basic_test) -{ +TEST(shared_blob_2hash_tracker_t, basic_test) { shared_blob_2hash_tracker_t t1(1024 * 1024, 4096); ASSERT_TRUE(t1.count_non_zero() == 0); @@ -2756,7 +2735,7 @@ TEST(shared_blob_2hash_tracker_t, basic_test) ASSERT_TRUE(t1.count_non_zero() != 0); - ASSERT_TRUE(!t1.test_all_zero(5,0x1000)); + ASSERT_TRUE(!t1.test_all_zero(5, 0x1000)); ASSERT_TRUE(!t1.test_all_zero(5, 0x2000)); ASSERT_TRUE(!t1.test_all_zero(5, 0x3000)); ASSERT_TRUE(t1.test_all_zero(5, 0x4000)); @@ -2771,14 +2750,13 @@ TEST(shared_blob_2hash_tracker_t, basic_test) ASSERT_TRUE(!t1.test_all_zero_range(5, 0, 0x9000)); } -TEST(bluestore_blob_use_tracker_t, mempool_stats_test) -{ - using mempool::bluestore_cache_other::allocated_items; +TEST(bluestore_blob_use_tracker_t, mempool_stats_test) { using mempool::bluestore_cache_other::allocated_bytes; + using mempool::bluestore_cache_other::allocated_items; uint64_t other_items0 = allocated_items(); uint64_t other_bytes0 = allocated_bytes(); { - bluestore_blob_use_tracker_t* t1 = new bluestore_blob_use_tracker_t; + bluestore_blob_use_tracker_t *t1 = new bluestore_blob_use_tracker_t; t1->init(1024 * 1024, 4096); ASSERT_EQ(256, allocated_items() - other_items0); // = 1M / 4K @@ -2789,7 +2767,7 @@ TEST(bluestore_blob_use_tracker_t, mempool_stats_test) ASSERT_EQ(allocated_bytes(), other_bytes0); } { - bluestore_blob_use_tracker_t* t1 = new bluestore_blob_use_tracker_t; + 
bluestore_blob_use_tracker_t *t1 = new bluestore_blob_use_tracker_t; t1->init(1024 * 1024, 4096); t1->add_tail(2048 * 1024, 4096); @@ -2802,7 +2780,7 @@ TEST(bluestore_blob_use_tracker_t, mempool_stats_test) ASSERT_EQ(allocated_bytes(), other_bytes0); } { - bluestore_blob_use_tracker_t* t1 = new bluestore_blob_use_tracker_t; + bluestore_blob_use_tracker_t *t1 = new bluestore_blob_use_tracker_t; t1->init(1024 * 1024, 4096); t1->prune_tail(512 * 1024); @@ -2815,8 +2793,8 @@ TEST(bluestore_blob_use_tracker_t, mempool_stats_test) ASSERT_EQ(allocated_bytes(), other_bytes0); } { - bluestore_blob_use_tracker_t* t1 = new bluestore_blob_use_tracker_t; - bluestore_blob_use_tracker_t* t2 = new bluestore_blob_use_tracker_t; + bluestore_blob_use_tracker_t *t1 = new bluestore_blob_use_tracker_t; + bluestore_blob_use_tracker_t *t2 = new bluestore_blob_use_tracker_t; t1->init(1024 * 1024, 4096); @@ -2839,9 +2817,9 @@ TEST(bluestore_blob_use_tracker_t, mempool_stats_test) int main(int argc, char **argv) { auto args = argv_to_vec(argc, argv); - auto cct = global_init(NULL, args, CEPH_ENTITY_TYPE_CLIENT, - CODE_ENVIRONMENT_UTILITY, - CINIT_FLAG_NO_DEFAULT_CONFIG_FILE); + auto cct = + global_init(NULL, args, CEPH_ENTITY_TYPE_CLIENT, CODE_ENVIRONMENT_UTILITY, + CINIT_FLAG_NO_DEFAULT_CONFIG_FILE); common_init_finish(g_ceph_context); ::testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); -- 2.39.5