char *data;
unsigned len;
atomic_t nref;
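+ // index of the pool this buffer's memory is charged to; new raws start
+ // in the anonymous pool until someone reassigns them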
+ int mempool = mempool::mempool_buffer_anon;
mutable std::atomic_flag crc_spinlock = ATOMIC_FLAG_INIT;
map<pair<size_t, size_t>, pair<uint32_t, uint32_t> > crc_map;
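+ // accounting invariant: every constructor charges (1 buffer, len bytes)
+ // to this raw's pool, and the destructor credits the same amount back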
explicit raw(unsigned l)
- : data(NULL), len(l), nref(0)
- { }
+ : data(NULL), len(l), nref(0) {
+ mempool::get_pool(mempool::pool_index_t(mempool)).adjust_count(1, len);
+ }
raw(char *c, unsigned l)
- : data(c), len(l), nref(0)
- { }
- virtual ~raw() {}
+ : data(c), len(l), nref(0) {
+ mempool::get_pool(mempool::pool_index_t(mempool)).adjust_count(1, len);
+ }
+ virtual ~raw() {
+ mempool::get_pool(mempool::pool_index_t(mempool)).adjust_count(
+ -1, -(int)len);
+ }
+
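+ // change len while keeping the pool's byte count accurate, e.g. when a
+ // read returns fewer bytes than were allocated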
+ void _set_len(unsigned l) {
+ mempool::get_pool(mempool::pool_index_t(mempool)).adjust_count(
+ -1, -(int)len);
+ len = l;
+ mempool::get_pool(mempool::pool_index_t(mempool)).adjust_count(1, len);
+ }
+
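+ // move this buffer's accounting from its current pool to another one.
+ // A hypothetical call, assuming an osd pool is defined in this tree:
+ //   r->reassign_to_mempool(mempool::mempool_osd);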
+ void reassign_to_mempool(int pool) {
+ if (pool == mempool) {
+ return;
+ }
+ mempool::get_pool(mempool::pool_index_t(mempool)).adjust_count(
+ -1, -(int)len);
+ mempool = pool;
+ mempool::get_pool(mempool::pool_index_t(pool)).adjust_count(1, len);
+ }
+
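+ // reassign only if no pool has claimed this buffer yet; buffers already
+ // moved out of the anonymous pool are left alone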
+ void try_assign_to_mempool(int pool) {
+ if (mempool == mempool::mempool_buffer_anon) {
+ reassign_to_mempool(pool);
+ }
+ }
// no copying.
// cppcheck-suppress noExplicitConstructor
return r;
}
// update length with actual amount read
- len = r;
+ _set_len(r);
return 0;
}
{
std::swap(_len, other._len);
std::swap(_memcopy_count, other._memcopy_count);
+ std::swap(_mempool, other._mempool);
_buffers.swap(other._buffers);
append_buffer.swap(other.append_buffer);
//last_p.swap(other.last_p);
return is_aligned(CEPH_PAGE_SIZE);
}
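+ // walk every buffer in the list and move its accounting to the new pool;
+ // remember the pool so buffers created by later appends land there too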
+ void buffer::list::reassign_to_mempool(int pool)
+ {
+ _mempool = pool;
+ if (append_buffer.get_raw()) {
+ append_buffer.get_raw()->reassign_to_mempool(pool);
+ }
+ for (auto& p : _buffers) {
+ p.get_raw()->reassign_to_mempool(pool);
+ }
+ }
+
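+ // same walk as reassign_to_mempool(), but buffers that some other pool
+ // has already claimed stay where they are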
+ void buffer::list::try_assign_to_mempool(int pool)
+ {
+ _mempool = pool;
+ if (append_buffer.get_raw()) {
+ append_buffer.get_raw()->try_assign_to_mempool(pool);
+ }
+ for (auto& p : _buffers) {
+ p.get_raw()->try_assign_to_mempool(pool);
+ }
+ }
+
void buffer::list::rebuild()
{
if (_len == 0) {
return rebuild_aligned(CEPH_PAGE_SIZE);
}
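+ // make sure append_buffer has at least prealloc bytes of unused space;
+ // a freshly created buffer inherits this list's pool assignment, if any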
+ void buffer::list::reserve(size_t prealloc)
+ {
+ if (append_buffer.unused_tail_length() < prealloc) {
+ append_buffer = buffer::create(prealloc);
+ if (_mempool >= 0) {
+ append_buffer.get_raw()->reassign_to_mempool(_mempool);
+ }
+ append_buffer.set_length(0); // unused, so far.
+ }
+ }
+
// sort-of-like-assignment-op
void buffer::list::claim(list& bl, unsigned int flags)
{
unsigned _len;
unsigned _memcopy_count; // total bytes memcopied by rebuild()
ptr append_buffer; // where i put small appends.
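+ // pool that new buffers should be charged to; -1 means none chosen yet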
+ int _mempool = -1;
public:
class iterator;
_memcopy_count = other._memcopy_count;
last_p = begin();
append_buffer.swap(other.append_buffer);
+ _mempool = other._mempool;
other.clear();
return *this;
}
const ptr& front() const { return _buffers.front(); }
const ptr& back() const { return _buffers.back(); }
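+ // Move the bytes of every buffer in this list, and of future appends,
+ // into the given pool; try_assign_to_mempool() moves only buffers that
+ // are still anonymous. A minimal usage sketch (mempool_osd is assumed
+ // to exist in this tree; it is not part of this change):
+ //   bufferlist bl;
+ //   bl.append("payload", 7);
+ //   bl.reassign_to_mempool(mempool::mempool_osd);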
+ void reassign_to_mempool(int pool);
+ void try_assign_to_mempool(int pool);
+
size_t get_append_buffer_unused_tail_length() const {
return append_buffer.unused_tail_length();
}
unsigned align_memory);
bool rebuild_page_aligned();
- void reserve(size_t prealloc) {
- if (append_buffer.unused_tail_length() < prealloc) {
- append_buffer = buffer::create(prealloc);
- append_buffer.set_length(0); // unused, so far.
- }
- }
+ void reserve(size_t prealloc);
// assignment-op with move semantics
const static unsigned int CLAIM_DEFAULT = 0;