*
*/
+#include "Writer.h"
+#include "include/intarith.h"
#include "os/bluestore/bluestore_types.h"
-#include "BlueStore.h"
-#include "Allocator.h"
-/// Signals that a range [offset~length] is no longer used.
-/// Collects allocation units that became unused into *released_disk.
-/// Returns:
-/// disk space size to release
-uint32_t BlueStore::Blob::put_ref_accumulate(
- Collection *coll,
- uint32_t offset,
- uint32_t length,
- PExtentVector *released_disk)
-{
- ceph_assert(length > 0);
- uint32_t res = 0;
- auto [in_blob_offset, in_blob_length] = used_in_blob.put_simple(offset, length);
- if (in_blob_length != 0) {
- bluestore_blob_t& b = dirty_blob();
- res = b.release_extents(in_blob_offset, in_blob_length, released_disk);
- return res;
- }
- return res;
-}
/// Empties range [offset~length] of object o that is in collection c.
/// Collects unused elements:
}
return p;
}
+
+
+/// Signals that a range [offset~length] is no longer used.
+/// Collects allocation units that became unused into *released_disk.
+/// Returns:
+///   disk space size (bytes) to release
+/// Note: coll is currently unused by this implementation.
+uint32_t BlueStore::Blob::put_ref_accumulate(
+  Collection *coll,
+  uint32_t offset,
+  uint32_t length,
+  PExtentVector *released_disk)
+{
+  ceph_assert(length > 0);
+  uint32_t res = 0;
+  auto [in_blob_offset, in_blob_length] = used_in_blob.put_simple(offset, length);
+  if (in_blob_length != 0) {
+    // Only dirty the blob when some allocation units actually became free;
+    // dirty_blob() has the side effect of marking the blob dirty.
+    res = dirty_blob().release_extents(in_blob_offset, in_blob_length, released_disk);
+  }
+  return res;
+}
+
+/// Grows the blob to new_blob_size, keeping the on-disk blob metadata
+/// (dirty_blob()) and the in-memory use tracker (used_in_blob) in sync.
+/// Precondition: new_blob_size is a multiple of min_release_size
+/// (enforced by the p2phase assert below).
+inline void BlueStore::Blob::add_tail(
+  uint32_t new_blob_size,
+  uint32_t min_release_size)
+{
+  ceph_assert(p2phase(new_blob_size, min_release_size) == 0);
+  dirty_blob().add_tail(new_blob_size);
+  used_in_blob.add_tail(new_blob_size, min_release_size);
+}
+
+/// Initializes the tracker for a blob of full_length bytes split into
+/// tracked_chunk-sized allocation units, marking every unit fully used.
+/// Precondition: full_length is a multiple of tracked_chunk.
+inline void bluestore_blob_use_tracker_t::init_and_ref(
+  uint32_t full_length,
+  uint32_t tracked_chunk)
+{
+  ceph_assert(p2phase(full_length, tracked_chunk) == 0);
+  uint32_t _num_au = full_length / tracked_chunk;
+  au_size = tracked_chunk;
+  if ( _num_au > 1) {
+    // Multi-AU mode: per-AU byte counters, each AU starts fully referenced.
+    // NOTE(review): loop bound is the member num_au, not the local _num_au —
+    // assumes allocate(_num_au) sets num_au = _num_au; confirm. Also confirm
+    // whether total_bytes needs updating in this branch.
+    allocate(_num_au);
+    for (uint32_t i = 0; i < num_au; i++) {
+      bytes_per_au[i] = tracked_chunk;
+    }
+  } else {
+    // Single-AU mode: no per-AU array, just the aggregate byte count.
+    total_bytes = full_length;
+  }
+}
+
+/// Takes ownership of a ready-made pextent set that covers the whole blob.
+/// @param length  logical length of the blob the extents back
+/// @param allocs  extents to adopt; left empty (swapped out) on return
+/// Precondition: the blob has no extents yet.
+inline void bluestore_blob_t::allocated_full(
+  uint32_t length,
+  PExtentVector&& allocs)
+{
+  ceph_assert(extents.empty());
+  // swap is O(1) and avoids copying the pextent vector.
+  extents.swap(allocs);
+  logical_length = length;
+}
+
uint32_t full_length,
uint32_t _au_size);
+ inline void init_and_ref(
+ uint32_t full_length,
+ uint32_t tracked_chunk);
+
void get(
uint32_t offset,
uint32_t len);
}
}
}
-
+  /// Returns a bitmask of chunk_size-sized chunks in [offset, offset+length)
+  /// that are unused.
+  /// TODO: not yet implemented — always reports no unused chunks, which is
+  /// the conservative (safe) answer. The original stub branched on
+  /// has_unused() but returned 0 on both paths; collapsed here.
+  unused_t get_unused_mask(uint32_t offset, uint32_t length, uint32_t chunk_size) {
+    return 0;
+  }
// map_f_invoke templates intended to mask parameters which are not expected
// by the provided callback
template<class F, typename std::enable_if<std::is_invocable_r_v<
void split(uint32_t blob_offset, bluestore_blob_t& rb);
void allocated(uint32_t b_off, uint32_t length, const PExtentVector& allocs);
+ void allocated_full(uint32_t length, PExtentVector&& allocs);
void allocated_test(const bluestore_pextent_t& alloc); // intended for UT only
+ static constexpr uint64_t NO_ALLOCATION = std::numeric_limits<uint64_t>::max();
+  /// Maps an in-blob offset to the physical disk address backing it.
+  /// Returns NO_ALLOCATION when the covering pextent is not valid
+  /// (i.e. not allocated); asserts if the offset lies past the blob's
+  /// extents.
+  uint64_t get_allocation_at(uint32_t in_blob_offset) {
+    uint32_t loc = in_blob_offset;
+    for (const auto& e : extents) {  // const ref: don't copy pextents
+      if (loc < e.length) {
+        return e.is_valid() ? e.offset + loc : NO_ALLOCATION;
+      }
+      loc -= e.length;
+    }
+    ceph_assert(false);   // offset beyond the end of the blob
+    return NO_ALLOCATION; // unreachable; avoids UB / -Wreturn-type if
+                          // asserts are compiled out
+  }
/// updates blob's pextents container and return unused pextents eligible
/// for release.