diff --git a/containers_cpp/include/embb/containers/internal/lock_free_tree_value_pool-inl.h b/containers_cpp/include/embb/containers/internal/lock_free_tree_value_pool-inl.h
index 395745a..c1881db 100644
--- a/containers_cpp/include/embb/containers/internal/lock_free_tree_value_pool-inl.h
+++ b/containers_cpp/include/embb/containers/internal/lock_free_tree_value_pool-inl.h
@@ -42,7 +42,7 @@
 template<typename Type, Type Undefined, class PoolAllocator, class TreeAllocator>
 bool LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 IsLeaf(int node) {
-  if (node >= size - 1 && node <= 2 * size - 1) {
+  if (node >= size_ - 1 && node <= 2 * size_ - 1) {
     return true;
   }
   return false;
@@ -52,7 +52,7 @@
 template<typename Type, Type Undefined, class PoolAllocator, class TreeAllocator>
 bool LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 IsValid(int node) {
-  return (node >= 0 && node <= 2 * size - 1);
+  return (node >= 0 && node <= 2 * size_ - 1);
 }
 
 template<typename Type, Type Undefined, class PoolAllocator, class TreeAllocator>
 int LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 NodeIndexToPoolIndex(int node) {
   assert(IsLeaf(node));
-  return(node - (size - 1));
+  return(node - (size_ - 1));
 }
 
 template<typename Type, Type Undefined, class PoolAllocator, class TreeAllocator>
 int LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 PoolIndexToNodeIndex(int index) {
-  int node = index + (size - 1);
+  int node = index + (size_ - 1);
   assert(IsLeaf(node));
   return node;
 }
@@ -100,7 +100,7 @@
 template<typename Type, Type Undefined, class PoolAllocator, class TreeAllocator>
 int LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 GetParentNode(int node) {
   int parent = (node - 1) / 2;
-  assert(parent >= 0 && parent < size - 1);
+  assert(parent >= 0 && parent < size_ - 1);
   return parent;
 }
@@ -112,11 +112,11 @@ allocate_rec(int node, Type& element) {
   if (IsLeaf(node)) {
     int pool_index = NodeIndexToPoolIndex(node);
-    Type expected = pool[pool_index];
+    Type expected = pool_[pool_index];
 
     if (expected == Undefined)
       return -1;
 
-    if (pool[pool_index].CompareAndSwap(expected, Undefined)) {
+    if (pool_[pool_index].CompareAndSwap(expected, Undefined)) {
       element = expected;
       return pool_index;
     }
@@ -131,11 +131,11 @@ allocate_rec(int node, Type& element) {
   // atomically decrement the value in the node if the result is greater than
   // or equal to zero. This cannot be done atomically.
   do {
-    current = tree[node];
+    current = tree_[node];
     desired = current - 1;
     if (desired < 0)
       return -1;
-  } while (!tree[node].CompareAndSwap(current, desired));
+  } while (!tree_[node].CompareAndSwap(current, desired));
 
   int leftResult = allocate_rec(GetLeftChildIndex(node), element);
   if (leftResult != -1) {
@@ -156,7 +156,7 @@ Fill(int node, int elementsToStore, int power2Value) {
   if (IsLeaf(node))
     return;
 
-  tree[node] = elementsToStore;
+  tree_[node] = elementsToStore;
 
   int postPower2Value = power2Value >> 1;
@@ -188,14 +188,14 @@ Free(Type element, int index) {
   assert(element != Undefined);
 
   // Put the element back
-  pool[index].Store(element);
+  pool_[index].Store(element);
 
-  assert(index >= 0 && index < size);
+  assert(index >= 0 && index < size_);
 
   int node = PoolIndexToNodeIndex(index);
 
   while (!IsRoot(node)) {
     node = GetParentNode(node);
-    tree[node].FetchAndAdd(1);
+    tree_[node].FetchAndAdd(1);
   }
 }
@@ -205,67 +205,67 @@ template< typename ForwardIterator >
 LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 LockFreeTreeValuePool(ForwardIterator first, ForwardIterator last) {
   // Number of elements to store
-  real_size = static_cast<int>(::std::distance(first, last));
+  real_size_ = static_cast<int>(::std::distance(first, last));
 
   // Let k be smallest number so that real_size <= 2^k, size = 2^k
-  size = GetSmallestPowerByTwoValue(real_size);
+  size_ = GetSmallestPowerByTwoValue(real_size_);
 
   // Size of binary tree without the leaves
-  tree_size = size - 1;
+  tree_size_ = size_ - 1;
 
   // make sure, signed values are not negative
-  assert(tree_size >= 0);
-  assert(real_size >= 0);
+  assert(tree_size_ >= 0);
+  assert(real_size_ >= 0);
 
-  size_t tree_size_unsigned = static_cast<size_t>(tree_size);
-  size_t real_size_unsigned = static_cast<size_t>(real_size);
+  size_t tree_size_unsigned = static_cast<size_t>(tree_size_);
+  size_t real_size_unsigned = static_cast<size_t>(real_size_);
 
   // Pool stores elements of type T
-  pool = poolAllocator.allocate(real_size_unsigned);
+  pool_ = pool_allocator_.allocate(real_size_unsigned);
 
   // invoke inplace new for each pool element
   for (size_t i = 0; i != real_size_unsigned; ++i) {
-    new (&pool[i]) embb::base::Atomic<Type>();
+    new (&pool_[i]) embb::base::Atomic<Type>();
   }
 
   // Tree holds the counter of not allocated elements
-  tree = treeAllocator.allocate(tree_size_unsigned);
+  tree_ = tree_allocator_.allocate(tree_size_unsigned);
 
   // invoke inplace new for each tree element
   for (size_t i = 0; i != tree_size_unsigned; ++i) {
-    new (&tree[i]) embb::base::Atomic<int>();
+    new (&tree_[i]) embb::base::Atomic<int>();
   }
 
   int i = 0;
 
   // Store the elements from the range
   for (ForwardIterator curIter(first); curIter != last; ++curIter) {
-    pool[i++] = *curIter;
+    pool_[i++] = *curIter;
   }
 
   // Initialize the binary tree without leaves (counters)
-  Fill(0, static_cast<int>(::std::distance(first, last)), size);
+  Fill(0, static_cast<int>(::std::distance(first, last)), size_);
 }
 
 template<typename Type, Type Undefined, class PoolAllocator, class TreeAllocator>
 LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 ~LockFreeTreeValuePool() {
-  size_t tree_size_unsigned = static_cast<size_t>(tree_size);
-  size_t real_size_unsigned = static_cast<size_t>(real_size);
+  size_t tree_size_unsigned = static_cast<size_t>(tree_size_);
+  size_t real_size_unsigned = static_cast<size_t>(real_size_);
 
-  poolAllocator.deallocate(pool, real_size_unsigned);
+  pool_allocator_.deallocate(pool_, real_size_unsigned);
 
   // invoke destructor for each pool element
   for (size_t i = 0; i != real_size_unsigned; ++i) {
-    pool[i].~Atomic();
+    pool_[i].~Atomic();
   }
 
-  treeAllocator.deallocate(tree, tree_size_unsigned);
+  tree_allocator_.deallocate(tree_, tree_size_unsigned);
 
   // invoke destructor for each tree element
   for (size_t i = 0; i != tree_size_unsigned; ++i) {
-    tree[i].~Atomic();
+    tree_[i].~Atomic();
   }
 }
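Note on the allocate_rec hunks above: as the in-code comment says, "decrement the value in the node if the result is greater than or equal to zero" is not available as a single atomic operation, so the pool retries with CompareAndSwap until the decrement either succeeds or the counter would drop below zero. The following standalone sketch shows the same pattern; it is illustrative only, uses std::atomic<int> in place of embb::base::Atomic<int>, and the helper name is made up for the example:

    #include <atomic>

    // Decrements 'counter' only if the result stays non-negative.
    // Returns false when the counter is already zero, i.e. when no
    // element is left in the subtree guarded by this tree node.
    bool TryDecrementIfPositive(std::atomic<int>& counter) {
      int current = counter.load();
      int desired;
      do {
        desired = current - 1;
        if (desired < 0) {
          return false;
        }
        // compare_exchange_weak reloads 'current' on failure, so the
        // loop retries against the freshly observed value.
      } while (!counter.compare_exchange_weak(current, desired));
      return true;
    }

Free walks from the leaf back to the root and increments the same counters with FetchAndAdd(1), which is why a successful decrement on the way down guarantees that a free leaf exists somewhere in that subtree.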
diff --git a/containers_cpp/include/embb/containers/internal/object_pool-inl.h b/containers_cpp/include/embb/containers/internal/object_pool-inl.h
index e53f3a9..c0f4bde 100644
--- a/containers_cpp/include/embb/containers/internal/object_pool-inl.h
+++ b/containers_cpp/include/embb/containers/internal/object_pool-inl.h
@@ -83,7 +83,8 @@ ReturningTrueIterator::operator!=(const self_type& rhs) {
 template<class Type, typename ValuePool, class ObjectAllocator>
 bool ObjectPool<Type, ValuePool, ObjectAllocator>::
 IsContained(const Type &obj) const {
-  if ((&obj < &objects[0]) || (&obj > &objects[capacity - 1])) {
+  if ((&obj < &objects_array_[0]) ||
+    (&obj > &objects_array_[value_pool_size_ - 1])) {
     return false;
   } else {
     return true;
@@ -94,17 +95,17 @@
 template<class Type, typename ValuePool, class ObjectAllocator>
 int ObjectPool<Type, ValuePool, ObjectAllocator>::
 GetIndexOfObject(const Type &obj) const {
   assert(IsContained(obj));
-  return(static_cast<int>(&obj - &objects[0]));
+  return(static_cast<int>(&obj - &objects_array_[0]));
 }
 
 template<class Type, typename ValuePool, class ObjectAllocator>
 Type* ObjectPool<Type, ValuePool, ObjectAllocator>::AllocateRaw() {
   bool val;
-  int allocated_index = p.Allocate(val);
+  int allocated_index = value_pool_.Allocate(val);
   if (allocated_index == -1) {
     return NULL;
   } else {
-    Type* ret_pointer = &(objects[allocated_index]);
+    Type* ret_pointer = &(objects_array_[allocated_index]);
     return ret_pointer;
   }
@@ -112,16 +113,17 @@ Type* ObjectPool<Type, ValuePool, ObjectAllocator>::AllocateRaw() {
 template<class Type, typename ValuePool, class ObjectAllocator>
 size_t ObjectPool<Type, ValuePool, ObjectAllocator>::GetCapacity() {
-  return capacity;
+  return capacity_;
 }
 
 template<class Type, typename ValuePool, class ObjectAllocator>
 ObjectPool<Type, ValuePool, ObjectAllocator>::ObjectPool(size_t capacity) :
-  capacity(capacity),
-  p(ReturningTrueIterator(0), ReturningTrueIterator(
-    ValuePool::GetMinimumElementCountForGuaranteedCapacity(capacity))) {
-  // Allocate the objects (without construction, just get the memory)
-  objects = objectAllocator.allocate(capacity);
+  capacity_(capacity),
+  value_pool_size_(
+    ValuePool::GetMinimumElementCountForGuaranteedCapacity(capacity)),
+  value_pool_(ReturningTrueIterator(0), ReturningTrueIterator(
+    value_pool_size_)),
+  objects_array_(object_allocator_.allocate(value_pool_size_)) {
 }
 
 template<class Type, typename ValuePool, class ObjectAllocator>
@@ -129,7 +131,7 @@ void ObjectPool<Type, ValuePool, ObjectAllocator>::Free(Type* obj) {
   int index = GetIndexOfObject(*obj);
   obj->~Type();
 
-  p.Free(true, index);
+  value_pool_.Free(true, index);
 }
 
 template<class Type, typename ValuePool, class ObjectAllocator>
@@ -190,7 +192,7 @@ Type* ObjectPool<Type, ValuePool, ObjectAllocator>::Allocate(
 template<class Type, typename ValuePool, class ObjectAllocator>
 ObjectPool<Type, ValuePool, ObjectAllocator>::~ObjectPool() {
   // Deallocate the objects
-  objectAllocator.deallocate(objects, capacity);
+  object_allocator_.deallocate(objects_array_, value_pool_size_);
 }
 } // namespace containers
 } // namespace embb
diff --git a/containers_cpp/include/embb/containers/internal/wait_free_array_value_pool-inl.h b/containers_cpp/include/embb/containers/internal/wait_free_array_value_pool-inl.h
index 4ea799a..18ef996 100644
--- a/containers_cpp/include/embb/containers/internal/wait_free_array_value_pool-inl.h
+++ b/containers_cpp/include/embb/containers/internal/wait_free_array_value_pool-inl.h
@@ -35,21 +35,21 @@ Free(Type element, int index) {
   assert(element != Undefined);
 
   // Just put back the element
-  pool[index].Store(element);
+  pool_array_[index].Store(element);
 }
 
 template<typename Type, Type Undefined, class Allocator>
 int WaitFreeArrayValuePool<Type, Undefined, Allocator>::
 Allocate(Type & element) {
-  for (int i = 0; i != size; ++i) {
+  for (int i = 0; i != size_; ++i) {
     Type expected;
 
     // If the memory cell is not available, go ahead
-    if (Undefined == (expected = pool[i].Load()))
+    if (Undefined == (expected = pool_array_[i].Load()))
       continue;
 
     // Try to get the memory cell
-    if (pool[i].CompareAndSwap(expected, Undefined)) {
+    if (pool_array_[i].CompareAndSwap(expected, Undefined)) {
       // When the CAS was successful, this element is ours
       element = expected;
       return i;
@@ -64,36 +64,36 @@ WaitFreeArrayValuePool<Type, Undefined, Allocator>::
 WaitFreeArrayValuePool(ForwardIterator first, ForwardIterator last) {
   size_t dist = static_cast<size_t>(std::distance(first, last));
 
-  size = static_cast<int>(dist);
+  size_ = static_cast<int>(dist);
 
   // conversion may result in negative number. check!
-  assert(size >= 0);
+  assert(size_ >= 0);
 
   // Use the allocator to allocate an array of size dist
-  pool = allocator.allocate(dist);
+  pool_array_ = allocator_.allocate(dist);
 
   // invoke inplace new for each pool element
   for ( size_t i = 0; i != dist; ++i ) {
-    new (&pool[i]) embb::base::Atomic<Type>();
+    new (&pool_array_[i]) embb::base::Atomic<Type>();
   }
 
   int i = 0;
 
   // Store the elements of the range
   for (ForwardIterator curIter(first); curIter != last; ++curIter) {
-    pool[i++] = *curIter;
+    pool_array_[i++] = *curIter;
   }
 }
 
 template<typename Type, Type Undefined, class Allocator>
 WaitFreeArrayValuePool<Type, Undefined, Allocator>::~WaitFreeArrayValuePool() {
   // invoke destructor for each pool element
-  for (int i = 0; i != size; ++i) {
-    pool[i].~Atomic();
+  for (int i = 0; i != size_; ++i) {
+    pool_array_[i].~Atomic();
   }
 
   // free memory
-  allocator.deallocate(pool, static_cast<size_t>(size));
+  allocator_.deallocate(pool_array_, static_cast<size_t>(size_));
 }
 
 template<typename Type, Type Undefined, class Allocator>
diff --git a/containers_cpp/include/embb/containers/lock_free_tree_value_pool.h b/containers_cpp/include/embb/containers/lock_free_tree_value_pool.h
index 66f43c7..b6f99e0 100644
--- a/containers_cpp/include/embb/containers/lock_free_tree_value_pool.h
+++ b/containers_cpp/include/embb/containers/lock_free_tree_value_pool.h
@@ -123,22 +123,25 @@ class LockFreeTreeValuePool {
   LockFreeTreeValuePool& operator=(const LockFreeTreeValuePool&);
 
   // See algorithm description above
-  int size;
+  int size_;
 
   // See algorithm description above
-  int tree_size;
+  int tree_size_;
 
   // See algorithm description above
-  int real_size;
+  int real_size_;
 
   // The tree above the pool
-  embb::base::Atomic<int>* tree;
+  embb::base::Atomic<int>* tree_;
 
   // The actual pool
-  embb::base::Atomic<Type>* pool;
+  embb::base::Atomic<Type>* pool_;
 
-  PoolAllocator poolAllocator;
-  TreeAllocator treeAllocator;
+  // respective allocator
+  PoolAllocator pool_allocator_;
+
+  // respective allocator
+  TreeAllocator tree_allocator_;
 
   /**
    * Computes smallest power of two fitting the specified value
diff --git a/containers_cpp/include/embb/containers/object_pool.h b/containers_cpp/include/embb/containers/object_pool.h
index 0a94708..5583e5f 100644
--- a/containers_cpp/include/embb/containers/object_pool.h
+++ b/containers_cpp/include/embb/containers/object_pool.h
@@ -35,7 +35,6 @@
 namespace embb {
 namespace containers {
-
 /**
  * \defgroup CPP_CONTAINERS_POOLS Pools
  * Concurrent pools
@@ -62,22 +61,29 @@ class ObjectPool {
   /**
    * Allocator used to allocate elements of the object pool
   */
-  ObjectAllocator objectAllocator;
+  ObjectAllocator object_allocator_;
 
   /**
-   * Array holding the allocated object
+   * Capacity of the object pool
    */
-  Type* objects;
+  size_t capacity_;
 
   /**
-   * Capacity of the object pool
+   * The size of the underlying value pool. This is also the size of the object
+   * array in this class. It is assumed, that the valuepool manages indices in
+   * range [0;value_pool_size_-1].
    */
-  size_t capacity;
+  size_t value_pool_size_;
 
   /**
    * Underlying value pool
    */
-  ValuePool p;
+  ValuePool value_pool_;
+
+  /**
+   * Array holding the allocated object
+   */
+  Type* objects_array_;
 
   /**
    * Helper providing a virtual iterator that just returns true in each
@@ -108,7 +114,6 @@ class ObjectPool {
   bool IsContained(const Type &obj) const;
   int GetIndexOfObject(const Type &obj) const;
   Type* AllocateRaw();
-
  public:
   /**
    * Constructs an object pool with capacity \c capacity.
diff --git a/containers_cpp/include/embb/containers/wait_free_array_value_pool.h b/containers_cpp/include/embb/containers/wait_free_array_value_pool.h
index 68b8ce1..d174e8c 100644
--- a/containers_cpp/include/embb/containers/wait_free_array_value_pool.h
+++ b/containers_cpp/include/embb/containers/wait_free_array_value_pool.h
@@ -116,10 +116,10 @@ template<typename Type, Type Undefined,
   class Allocator = embb::base::Allocator< embb::base::Atomic<Type> > >
 class WaitFreeArrayValuePool {
  private:
-  int size;
-  embb::base::Atomic<Type>* pool;
+  int size_;
+  embb::base::Atomic<Type>* pool_array_;
   WaitFreeArrayValuePool();
-  Allocator allocator;
+  Allocator allocator_;
 
   // Prevent copy-construction
   WaitFreeArrayValuePool(const WaitFreeArrayValuePool&);
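Apart from the member renames (trailing underscores; poolAllocator/treeAllocator becoming pool_allocator_/tree_allocator_, and so on), the one functional change in this patch is in ObjectPool: the object array is now sized to value_pool_size_, i.e. ValuePool::GetMinimumElementCountForGuaranteedCapacity(capacity), instead of to capacity_, so every index the value pool can hand out maps to a valid slot of objects_array_. The public interfaces are unchanged. For reference, a minimal usage sketch of the two pools as declared in these headers; it assumes the parameterless Allocate() overload of ObjectPool, and the concrete values, pool sizes and the choice of -1 as the Undefined marker are illustrative only, not part of the patch:

    #include <vector>

    #include <embb/containers/object_pool.h>
    #include <embb/containers/wait_free_array_value_pool.h>

    int main() {
      // Object pool with room for 64 default-constructed ints.
      embb::containers::ObjectPool<int> object_pool(64);
      int* obj = object_pool.Allocate();
      if (obj != NULL) {
        *obj = 42;
        object_pool.Free(obj);  // runs the destructor and releases the slot
      }

      // Value pool over four zero-initialized values; -1 acts as the
      // Undefined marker and therefore must not occur among the values.
      std::vector<int> values(4, 0);
      embb::containers::WaitFreeArrayValuePool<int, -1>
        value_pool(values.begin(), values.end());

      int element;
      int index = value_pool.Allocate(element);  // -1 means the pool is empty
      if (index != -1) {
        value_pool.Free(element, index);  // hand the value back at its index
      }
      return 0;
    }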