@@ -51,6 +51,7 @@ AllocationClass::AllocationClass(ClassId classId,
       allocationSize_(allocSize),
       slabAlloc_(s),
       freedAllocations_{slabAlloc_.createSingleTierPtrCompressor<FreeAlloc>()} {
+  curAllocatedSlabs_ = allocatedSlabs_.size();
   checkState();
 }
 
@@ -87,6 +88,12 @@ void AllocationClass::checkState() const {
         "Current allocation slab {} is not in allocated slabs list",
         currSlab_));
   }
+
+  if (curAllocatedSlabs_ != allocatedSlabs_.size()) {
+    throw std::invalid_argument(folly::sformat(
+        "Mismatch in number of allocated slabs"
+    ));
+  }
 }
 
 // TODO(stuclar): Add poolId to the metadata to be serialized when cache shuts
@@ -116,10 +123,12 @@ AllocationClass::AllocationClass(
     freeSlabs_.push_back(slabAlloc_.getSlabForIdx(freeSlabIdx));
   }
 
+  curAllocatedSlabs_ = allocatedSlabs_.size();
   checkState();
 }
 
 void AllocationClass::addSlabLocked(Slab* slab) {
+  curAllocatedSlabs_.fetch_add(1, std::memory_order_relaxed);
   canAllocate_ = true;
   auto header = slabAlloc_.getSlabHeader(slab);
   header->classId = classId_;
@@ -168,6 +177,7 @@ void* AllocationClass::allocateLocked() {
   }
 
   XDCHECK(canAllocate_);
+  curAllocatedSize_.fetch_add(getAllocSize(), std::memory_order_relaxed);
 
   // grab from the free list if possible.
   if (!freedAllocations_.empty()) {
@@ -270,6 +280,7 @@ SlabReleaseContext AllocationClass::startSlabRelease(
         slab, getId()));
   }
   *allocIt = allocatedSlabs_.back();
+  curAllocatedSlabs_.fetch_sub(1, std::memory_order_relaxed);
   allocatedSlabs_.pop_back();
 
   // if slab is being carved currently, then update slabReleaseAllocMap
@@ -510,6 +521,7 @@ void AllocationClass::abortSlabRelease(const SlabReleaseContext& context) {
   }
   slabReleaseAllocMap_.erase(slabPtrVal);
   allocatedSlabs_.push_back(const_cast<Slab*>(slab));
+  curAllocatedSlabs_.fetch_add(1, std::memory_order_relaxed);
   // restore the classId and allocSize
   header->classId = classId_;
   header->allocSize = allocationSize_;
@@ -660,6 +672,8 @@ void AllocationClass::free(void* memory) {
     freedAllocations_.insert(*reinterpret_cast<FreeAlloc*>(memory));
     canAllocate_ = true;
   });
+
+  curAllocatedSize_.fetch_sub(getAllocSize(), std::memory_order_relaxed);
 }
 
 serialization::AllocationClassObject AllocationClass::saveState() const {
@@ -722,3 +736,11 @@ std::vector<bool>& AllocationClass::getSlabReleaseAllocMapLocked(
   const auto slabPtrVal = getSlabPtrValue(slab);
   return slabReleaseAllocMap_.at(slabPtrVal);
 }
+
+double AllocationClass::approxFreePercentage() const {
+  if (getNumSlabs() == 0)
+    return 100.0;
+
+  return 100.0 - 100.0 * static_cast<double>(curAllocatedSize_.load(std::memory_order_relaxed)) /
+                     static_cast<double>(getNumSlabs() * Slab::kSize);
+}
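
For reference, the arithmetic behind approxFreePercentage() can be checked with a minimal standalone sketch. Everything below is illustrative and not part of the patch: the local names (kSlabSize, numSlabs, allocatedBytes) are hypothetical stand-ins for Slab::kSize, getNumSlabs(), and curAllocatedSize_, and the 4 MB slab size is an assumption. The relaxed atomic counters maintained above presumably exist so this estimate can be read without taking the allocation class lock, which is also why the result is only approximate.

#include <atomic>
#include <cstdint>
#include <iostream>

int main() {
  // Hypothetical stand-ins for Slab::kSize, getNumSlabs(), and curAllocatedSize_.
  constexpr uint64_t kSlabSize = 4ULL * 1024 * 1024;      // assumed 4 MB slab size
  const uint64_t numSlabs = 8;                            // slabs owned by the class
  std::atomic<int64_t> allocatedBytes{12 * 1024 * 1024};  // bytes currently handed out

  // Same formula as approxFreePercentage(): free % = 100 - 100 * used / capacity.
  const double freePct =
      100.0 -
      100.0 * static_cast<double>(allocatedBytes.load(std::memory_order_relaxed)) /
          static_cast<double>(numSlabs * kSlabSize);

  // 8 slabs * 4 MB = 32 MB capacity, 12 MB used -> prints 62.5
  std::cout << freePct << "% free\n";
  return 0;
}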