Commit 5192f502 by Christian Kern

Merge branch 'development' into embb421_parallel_building_bug

parents b32fd724 e71d171f
......@@ -42,7 +42,7 @@ template<typename Type, Type Undefined, class PoolAllocator,
class TreeAllocator >
bool LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
IsLeaf(int node) {
if (node >= size - 1 && node <= 2 * size - 1) {
if (node >= size_ - 1 && node <= 2 * size_ - 1) {
return true;
}
return false;
......@@ -52,7 +52,7 @@ template<typename Type, Type Undefined, class PoolAllocator,
class TreeAllocator >
bool LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
IsValid(int node) {
return (node >= 0 && node <= 2 * size - 1);
return (node >= 0 && node <= 2 * size_ - 1);
}
template<typename Type, Type Undefined, class PoolAllocator,
......@@ -77,14 +77,14 @@ template<typename T, T Undefined, class PoolAllocator, class TreeAllocator >
int LockFreeTreeValuePool<T, Undefined, PoolAllocator, TreeAllocator>::
NodeIndexToPoolIndex(int node) {
assert(IsLeaf(node));
return(node - (size - 1));
return(node - (size_ - 1));
}
template<typename Type, Type Undefined, class PoolAllocator,
class TreeAllocator >
int LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
PoolIndexToNodeIndex(int index) {
int node = index + (size - 1);
int node = index + (size_ - 1);
assert(IsLeaf(node));
return node;
}
......@@ -100,7 +100,7 @@ template<typename T, T Undefined, class PoolAllocator, class TreeAllocator >
int LockFreeTreeValuePool<T, Undefined, PoolAllocator, TreeAllocator>::
GetParentNode(int node) {
int parent = (node - 1) / 2;
assert(parent >= 0 && parent < size - 1);
assert(parent >= 0 && parent < size_ - 1);
return parent;
}
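
To make the index arithmetic of these helpers concrete, here is a minimal standalone sketch (not part of the diff) that mirrors IsLeaf, NodeIndexToPoolIndex, PoolIndexToNodeIndex and GetParentNode for a pool rounded up to size_ = 4; the member size_ is passed as an explicit parameter.

```cpp
#include <cassert>

// Complete binary tree stored in an array: nodes 0 .. size-2 are inner
// nodes, nodes size-1 .. 2*size-2 are leaves, and leaf node n corresponds
// to pool slot n - (size - 1).
static bool IsLeaf(int node, int size) {
  return node >= size - 1 && node <= 2 * size - 1;
}
static int NodeIndexToPoolIndex(int node, int size) { return node - (size - 1); }
static int PoolIndexToNodeIndex(int index, int size) { return index + (size - 1); }
static int GetParentNode(int node) { return (node - 1) / 2; }

int main() {
  const int size = 4;                        // pool size rounded to a power of two
  assert(!IsLeaf(2, size));                  // node 2 is the last inner node
  assert(IsLeaf(3, size) && IsLeaf(6, size));
  assert(NodeIndexToPoolIndex(5, size) == 2);
  assert(PoolIndexToNodeIndex(0, size) == 3);
  // Walking up from leaf 6: 6 -> 2 -> 0 (the root).
  assert(GetParentNode(6) == 2);
  assert(GetParentNode(2) == 0);
  return 0;
}
```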
......@@ -112,11 +112,11 @@ allocate_rec(int node, Type& element) {
if (IsLeaf(node)) {
int pool_index = NodeIndexToPoolIndex(node);
Type expected = pool[pool_index];
Type expected = pool_[pool_index];
if (expected == Undefined)
return -1;
if (pool[pool_index].CompareAndSwap(expected, Undefined)) {
if (pool_[pool_index].CompareAndSwap(expected, Undefined)) {
element = expected;
return pool_index;
}
......@@ -131,11 +131,11 @@ allocate_rec(int node, Type& element) {
// atomically decrement the value in the node only if the result is greater
// than or equal to zero. Such a conditional decrement cannot be done with a
// single atomic operation, hence the CAS loop below.
do {
current = tree[node];
current = tree_[node];
desired = current - 1;
if (desired < 0)
return -1;
} while (!tree[node].CompareAndSwap(current, desired));
} while (!tree_[node].CompareAndSwap(current, desired));
int leftResult = allocate_rec(GetLeftChildIndex(node), element);
if (leftResult != -1) {
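
The do/while above performs a decrement that only succeeds if the result stays non-negative. A minimal sketch of the same pattern, assuming std::atomic in place of embb::base::Atomic:

```cpp
#include <atomic>
#include <cassert>

// Decrement a counter, but only if the result stays >= 0. A plain
// fetch_sub cannot express the condition, so a CAS loop is used: the
// current value is reloaded on every failed CAS and the step is retried.
static bool DecrementIfPositive(std::atomic<int>& counter) {
  int current = counter.load();
  int desired;
  do {
    desired = current - 1;
    if (desired < 0) {
      return false;               // subtree has no free element left
    }
  } while (!counter.compare_exchange_weak(current, desired));
  return true;
}

int main() {
  std::atomic<int> free_count(2);
  assert(DecrementIfPositive(free_count));   // 2 -> 1
  assert(DecrementIfPositive(free_count));   // 1 -> 0
  assert(!DecrementIfPositive(free_count));  // would become -1, refused
  assert(free_count.load() == 0);
  return 0;
}
```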
......@@ -156,7 +156,7 @@ Fill(int node, int elementsToStore, int power2Value) {
if (IsLeaf(node))
return;
tree[node] = elementsToStore;
tree_[node] = elementsToStore;
int postPower2Value = power2Value >> 1;
......@@ -188,14 +188,14 @@ Free(Type element, int index) {
assert(element != Undefined);
// Put the element back
pool[index].Store(element);
pool_[index].Store(element);
assert(index >= 0 && index < size);
assert(index >= 0 && index < size_);
int node = PoolIndexToNodeIndex(index);
while (!IsRoot(node)) {
node = GetParentNode(node);
tree[node].FetchAndAdd(1);
tree_[node].FetchAndAdd(1);
}
}
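
Free walks from the freed leaf up to the root and re-increments every counter on the way. A sequential sketch of that walk, with a plain int array standing in for the atomic counters and a hypothetical helper name:

```cpp
#include <cassert>

// After returning pool slot `index`, every counter on the path from its
// leaf to the root grows by one (the leaves themselves carry no counter).
static void FreeCounters(int* tree, int size, int index) {
  int node = index + (size - 1);      // PoolIndexToNodeIndex
  while (node != 0) {                 // loop until the root has been updated
    node = (node - 1) / 2;            // GetParentNode
    ++tree[node];                     // tree_[node].FetchAndAdd(1)
  }
}

int main() {
  int tree[3] = {0, 0, 0};            // size = 4: inner nodes 0..2
  FreeCounters(tree, 4, 2);           // free pool slot 2 (leaf node 5)
  assert(tree[2] == 1);               // parent of leaf 5
  assert(tree[0] == 1);               // root
  assert(tree[1] == 0);               // other subtree untouched
  return 0;
}
```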
......@@ -205,37 +205,76 @@ template< typename ForwardIterator >
LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
LockFreeTreeValuePool(ForwardIterator first, ForwardIterator last) {
// Number of elements to store
real_size = static_cast<int>(::std::distance(first, last));
real_size_ = static_cast<int>(::std::distance(first, last));
// Let k be smallest number so that real_size <= 2^k, size = 2^k
size = GetSmallestPowerByTwoValue(real_size);
size_ = GetSmallestPowerByTwoValue(real_size_);
// Size of binary tree without the leaves
tree_size = size - 1;
tree_size_ = size_ - 1;
// make sure the signed sizes are not negative
assert(tree_size_ >= 0);
assert(real_size_ >= 0);
size_t tree_size_unsigned = static_cast<size_t>(tree_size_);
size_t real_size_unsigned = static_cast<size_t>(real_size_);
// Pool stores elements of type T
pool = poolAllocator.allocate(static_cast<size_t>(real_size));
pool_ = pool_allocator_.allocate(real_size_unsigned);
// invoke inplace new for each pool element
for (size_t i = 0; i != real_size_unsigned; ++i) {
new (&pool_[i]) embb::base::Atomic<Type>();
}
// Tree holds the counter of not allocated elements
tree = treeAllocator.allocate(static_cast<size_t>(tree_size));
tree_ = tree_allocator_.allocate(tree_size_unsigned);
// invoke inplace new for each tree element
for (size_t i = 0; i != tree_size_unsigned; ++i) {
new (&tree_[i]) embb::base::Atomic<int>();
}
int i = 0;
// Store the elements from the range
for (ForwardIterator curIter(first); curIter != last; ++curIter) {
pool[i++] = *curIter;
pool_[i++] = *curIter;
}
// Initialize the binary tree without leaves (counters)
Fill(0, static_cast<int>(::std::distance(first, last)), size);
Fill(0, static_cast<int>(::std::distance(first, last)), size_);
}
template<typename Type, Type Undefined, class PoolAllocator,
class TreeAllocator >
LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
~LockFreeTreeValuePool() {
poolAllocator.deallocate(pool, static_cast<size_t>(real_size));
treeAllocator.deallocate(tree, static_cast<size_t>(tree_size));
size_t tree_size_unsigned = static_cast<size_t>(tree_size_);
size_t real_size_unsigned = static_cast<size_t>(real_size_);
// invoke destructor for each pool element, then release the storage
for (size_t i = 0; i != real_size_unsigned; ++i) {
pool_[i].~Atomic();
}
pool_allocator_.deallocate(pool_, real_size_unsigned);
// invoke destructor for each tree element, then release the storage
for (size_t i = 0; i != tree_size_unsigned; ++i) {
tree_[i].~Atomic();
}
tree_allocator_.deallocate(tree_, tree_size_unsigned);
}
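
Constructor and destructor follow the usual allocator discipline: obtain raw storage, construct each element with placement new, and later destroy the elements before returning the storage. A sketch of the pattern, assuming std::allocator and std::atomic instead of the EMBB types:

```cpp
#include <atomic>
#include <cstddef>
#include <memory>
#include <new>

typedef std::atomic<int> AtomicInt;

int main() {
  std::allocator<AtomicInt> alloc;
  const std::size_t n = 8;

  // allocate raw storage, then construct each element in place
  AtomicInt* slots = alloc.allocate(n);
  for (std::size_t i = 0; i != n; ++i) {
    new (&slots[i]) AtomicInt(0);
  }

  slots[3].store(42);

  // destroy the elements first, then hand the storage back to the allocator
  for (std::size_t i = 0; i != n; ++i) {
    slots[i].~AtomicInt();
  }
  alloc.deallocate(slots, n);
  return 0;
}
```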
template<typename Type, Type Undefined, class PoolAllocator,
class TreeAllocator >
size_t LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
GetMinimumElementCountForGuaranteedCapacity(size_t capacity) {
// for this value pool, this is just capacity...
return capacity;
}
} // namespace containers
......
......@@ -83,7 +83,8 @@ ReturningTrueIterator::operator!=(const self_type& rhs) {
template<class Type, typename ValuePool, class ObjectAllocator>
bool ObjectPool<Type, ValuePool, ObjectAllocator>::
IsContained(const Type &obj) const {
if ((&obj < &objects[0]) || (&obj > &objects[capacity - 1])) {
if ((&obj < &objects_array_[0]) ||
(&obj > &objects_array_[value_pool_size_ - 1])) {
return false;
} else {
return true;
......@@ -94,17 +95,17 @@ template<class Type, typename ValuePool, class ObjectAllocator>
int ObjectPool<Type, ValuePool, ObjectAllocator>::
GetIndexOfObject(const Type &obj) const {
assert(IsContained(obj));
return(static_cast<int>(&obj - &objects[0]));
return(static_cast<int>(&obj - &objects_array_[0]));
}
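
IsContained and GetIndexOfObject rely on the pooled object living inside objects_array_, so its address maps directly back to a slot index. A tiny standalone illustration (not the ObjectPool class itself):

```cpp
#include <cassert>

int main() {
  double objects_array[4];
  const double& obj = objects_array[2];   // an object handed out by the pool

  // containment check: the address must lie inside the array bounds
  bool contained =
      !(&obj < &objects_array[0] || &obj > &objects_array[4 - 1]);
  assert(contained);

  // pointer difference recovers the slot index
  assert(static_cast<int>(&obj - &objects_array[0]) == 2);
  return 0;
}
```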
template<class Type, typename ValuePool, class ObjectAllocator>
Type* ObjectPool<Type, ValuePool, ObjectAllocator>::AllocateRaw() {
bool val;
int allocated_index = p.Allocate(val);
int allocated_index = value_pool_.Allocate(val);
if (allocated_index == -1) {
return NULL;
} else {
Type* ret_pointer = &(objects[allocated_index]);
Type* ret_pointer = &(objects_array_[allocated_index]);
return ret_pointer;
}
......@@ -112,15 +113,17 @@ Type* ObjectPool<Type, ValuePool, ObjectAllocator>::AllocateRaw() {
template<class Type, typename ValuePool, class ObjectAllocator>
size_t ObjectPool<Type, ValuePool, ObjectAllocator>::GetCapacity() {
return capacity;
return capacity_;
}
template<class Type, typename ValuePool, class ObjectAllocator>
ObjectPool<Type, ValuePool, ObjectAllocator>::ObjectPool(size_t capacity) :
capacity(capacity),
p(ReturningTrueIterator(0), ReturningTrueIterator(capacity)) {
// Allocate the objects (without construction, just get the memory)
objects = objectAllocator.allocate(capacity);
capacity_(capacity),
value_pool_size_(
ValuePool::GetMinimumElementCountForGuaranteedCapacity(capacity)),
value_pool_(ReturningTrueIterator(0), ReturningTrueIterator(
value_pool_size_)),
objects_array_(object_allocator_.allocate(value_pool_size_)) {
}
template<class Type, typename ValuePool, class ObjectAllocator>
......@@ -128,7 +131,7 @@ void ObjectPool<Type, ValuePool, ObjectAllocator>::Free(Type* obj) {
int index = GetIndexOfObject(*obj);
obj->~Type();
p.Free(true, index);
value_pool_.Free(true, index);
}
template<class Type, typename ValuePool, class ObjectAllocator>
......@@ -189,7 +192,7 @@ Type* ObjectPool<Type, ValuePool, ObjectAllocator>::Allocate(
template<class Type, typename ValuePool, class ObjectAllocator>
ObjectPool<Type, ValuePool, ObjectAllocator>::~ObjectPool() {
// Deallocate the objects
objectAllocator.deallocate(objects, capacity);
object_allocator_.deallocate(objects_array_, value_pool_size_);
}
} // namespace containers
} // namespace embb
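
For orientation, a hedged usage sketch of the pool modified here; the header path and the no-argument Allocate overload are assumptions based on the surrounding EMBB code and are not shown in this diff:

```cpp
#include <embb/containers/object_pool.h>  // assumed header location
#include <cassert>
#include <cstddef>

int main() {
  // Request a pool that can hand out at least 4 objects; internally the
  // constructor above sizes the object array via
  // ValuePool::GetMinimumElementCountForGuaranteedCapacity(4).
  embb::containers::ObjectPool<int> pool(4);

  int* first = pool.Allocate();            // assumed no-argument overload
  int* second = pool.Allocate();
  assert(first != NULL && second != NULL);
  assert(pool.GetCapacity() == 4u);

  pool.Free(first);                        // destroys the object, frees its slot
  pool.Free(second);
  return 0;
}
```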
......
......@@ -35,21 +35,21 @@ Free(Type element, int index) {
assert(element != Undefined);
// Just put back the element
pool[index].Store(element);
pool_array_[index].Store(element);
}
template<typename Type, Type Undefined, class Allocator >
int WaitFreeArrayValuePool<Type, Undefined, Allocator>::
Allocate(Type & element) {
for (int i = 0; i != size; ++i) {
for (int i = 0; i != size_; ++i) {
Type expected;
// If the memory cell is not available, go ahead
if (Undefined == (expected = pool[i].Load()))
if (Undefined == (expected = pool_array_[i].Load()))
continue;
// Try to get the memory cell
if (pool[i].CompareAndSwap(expected, Undefined)) {
if (pool_array_[i].CompareAndSwap(expected, Undefined)) {
// When the CAS was successful, this element is ours
element = expected;
return i;
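
Allocate scans the array and claims the first cell that is not Undefined via CAS. The same step in isolation, assuming std::atomic in place of embb::base::Atomic and -1 playing the role of Undefined:

```cpp
#include <atomic>
#include <cassert>

static const int kUndefined = -1;

// Scan the cells and try to take the first one that still holds a value;
// a successful CAS from `expected` to kUndefined claims the cell.
static int Allocate(std::atomic<int>* cells, int size, int& element) {
  for (int i = 0; i != size; ++i) {
    int expected = cells[i].load();
    if (expected == kUndefined) {
      continue;                              // cell already taken
    }
    if (cells[i].compare_exchange_strong(expected, kUndefined)) {
      element = expected;                    // the cell is ours
      return i;
    }
  }
  return -1;                                 // no free cell found
}

int main() {
  std::atomic<int> cells[2];
  cells[0] = 10;
  cells[1] = 20;
  int element = 0;
  assert(Allocate(cells, 2, element) == 0 && element == 10);
  assert(Allocate(cells, 2, element) == 1 && element == 20);
  assert(Allocate(cells, 2, element) == -1);
  return 0;
}
```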
......@@ -64,23 +64,45 @@ WaitFreeArrayValuePool<Type, Undefined, Allocator>::
WaitFreeArrayValuePool(ForwardIterator first, ForwardIterator last) {
size_t dist = static_cast<size_t>(std::distance(first, last));
size = static_cast<int>(dist);
size_ = static_cast<int>(dist);
// conversion may result in negative number. check!
assert(size_ >= 0);
// Use the allocator to allocate an array of size dist
pool = allocator.allocate(dist);
pool_array_ = allocator_.allocate(dist);
// invoke inplace new for each pool element
for ( size_t i = 0; i != dist; ++i ) {
new (&pool_array_[i]) embb::base::Atomic<Type>();
}
int i = 0;
// Store the elements of the range
for (ForwardIterator curIter(first); curIter != last; ++curIter) {
pool[i++] = *curIter;
pool_array_[i++] = *curIter;
}
}
template<typename Type, Type Undefined, class Allocator >
WaitFreeArrayValuePool<Type, Undefined, Allocator>::~WaitFreeArrayValuePool() {
allocator.deallocate(pool, (size_t)size);
// invoke destructor for each pool element
for (int i = 0; i != size_; ++i) {
pool_array_[i].~Atomic();
}
// free memory
allocator_.deallocate(pool_array_, static_cast<size_t>(size_));
}
template<typename Type, Type Undefined, class Allocator >
size_t WaitFreeArrayValuePool<Type, Undefined, Allocator>::
GetMinimumElementCountForGuaranteedCapacity(size_t capacity) {
// for this value pool, this is just capacity...
return capacity;
}
} // namespace containers
} // namespace embb
......
......@@ -123,22 +123,25 @@ class LockFreeTreeValuePool {
LockFreeTreeValuePool& operator=(const LockFreeTreeValuePool&);
// See algorithm description above
int size;
int size_;
// See algorithm description above
int tree_size;
int tree_size_;
// See algorithm description above
int real_size;
int real_size_;
// The tree above the pool
embb::base::Atomic<int>* tree;
embb::base::Atomic<int>* tree_;
// The actual pool
embb::base::Atomic<Type>* pool;
embb::base::Atomic<Type>* pool_;
PoolAllocator poolAllocator;
TreeAllocator treeAllocator;
// Allocator for the pool array
PoolAllocator pool_allocator_;
// Allocator for the tree array
TreeAllocator tree_allocator_;
/**
* Computes smallest power of two fitting the specified value
......@@ -278,6 +281,18 @@ class LockFreeTreeValuePool {
);
/**
* Due to concurrency effects, a pool might provide fewer elements than it
* manages. However, one usually wants to guarantee a minimum capacity. The
* count of elements that must be given to the pool in order to guarantee
* \c capacity elements is computed by this function.
*
* \return count of indices the pool has to be initialized with
*/
static size_t GetMinimumElementCountForGuaranteedCapacity(
size_t capacity
/**< [IN] count of indices that shall be guaranteed */);
/**
* Destructs the pool.
*
* \notthreadsafe
......
......@@ -35,7 +35,6 @@
namespace embb {
namespace containers {
/**
* \defgroup CPP_CONTAINERS_POOLS Pools
* Concurrent pools
......@@ -62,22 +61,29 @@ class ObjectPool {
/**
* Allocator used to allocate elements of the object pool
*/
ObjectAllocator objectAllocator;
ObjectAllocator object_allocator_;
/**
* Array holding the allocated object
* Capacity of the object pool
*/
Type* objects;
size_t capacity_;
/**
* Capacity of the object pool
* The size of the underlying value pool. This is also the size of the object
* array in this class. It is assumed that the value pool manages indices in
* the range [0; value_pool_size_ - 1].
*/
size_t capacity;
size_t value_pool_size_;
/**
* Underlying value pool
*/
ValuePool p;
ValuePool value_pool_;
/**
* Array holding the allocated objects
*/
Type* objects_array_;
/**
* Helper providing a virtual iterator that just returns true in each
......
......@@ -39,12 +39,30 @@ namespace containers {
* \ingroup CPP_CONCEPT
* \{
* \par Description
* A value pool is a fixed-size multiset of elements, where each element has a
* unique index. The elements cannot be modified and are given at construction
* time (by providing first/last iterators). A value pool provides two
* operations: \c Allocate and \c Free. \c Allocate removes an element from the
* pool, and \c Free returns an element to the pool. It is only allowed to
* free elements that have previously been allocated.
* A value pool is a multi-set of elements, where each element has a unique,
* contiguous index starting at 0. The elements cannot be modified and are
* given at construction time by providing first/last iterators.
*
* \par
* A value pool provides two primary operations: \c Allocate and \c Free.
* \c Allocate removes an element/index "pair" (index via return value,
* element via reference parameter) from the pool, and \c Free returns an
* element/index pair to the pool. To guarantee linearizability, \c element is
* not allowed to be modified between \c Allocate and \c Free. It is only
* allowed to free elements that have previously been allocated. \c Allocate
* does not guarantee any order in which indices are allocated. The count of
* elements that can be allocated with \c Allocate might be smaller than the
* count of elements the pool was initialized with. This is due to
* implementation details and the resulting concurrency effects: for example,
* if indices are managed within a queue, the queue nodes have to be protected
* against concurrent reuse and access. As long as a thread potentially
* accesses a node (and with it an index), that index cannot be given out to
* the user, even if it is logically no longer part of the pool.
* However, one usually wants to guarantee a minimum number of indices to the
* user. The static \c GetMinimumElementCountForGuaranteedCapacity method
* serves this purpose: the user passes the count of indices that shall be
* guaranteed, and the method returns the count of indices the pool has to be
* initialized with in order to give that guarantee.
*
* \par Requirements
* - Let \c Pool be the pool class
......@@ -54,6 +72,7 @@ namespace containers {
* - Let \c i, j be forward iterators supporting \c std::distance.
* - Let \c c be an object of type \c Type&
* - Let \c e be a value of type \c int
* - Let \c f be a value of type \c int
*
* \par Valid Expressions
*
......@@ -72,7 +91,7 @@ namespace containers {
* the bottom element. The bottom element cannot be stored in the pool, it
* is exclusively used to mark empty cells. The pool initially contains
* \c std::distance(i, j) elements which are copied during construction from
* the range \c [i, j). A concrete class satisfying the value pool concept
* the range \c [i, j]. A concrete class satisfying the value pool concept
* might provide additional template parameters for specifying allocators.
* </td>
* </tr>
......@@ -80,9 +99,10 @@ namespace containers {
* <td>\code{.cpp} Allocate(c) \endcode</td>
* <td>\c int</td>
* <td>
* Gets an element from the pool. Returns -1, if no element is available,
* i.e., the pool is empty. Otherwise, returns the index of the element in
* the pool. The value of the pool element is written into reference \c c.
* Allocates an element/index "pair" from the pool. Returns -1 if no
* element is available, i.e., the pool is empty. Otherwise, returns the
* index of the element in the pool. The value of the pool element is
* written into parameter reference \c c.
* </td>
* </tr>
* <tr>
......@@ -93,6 +113,15 @@ namespace containers {
* \c Allocate. For each allocated element, \c Free must be called exactly
* once.</td>
* </tr>
* <tr>
* <td>\code{.cpp} GetMinimumElementCountForGuaranteedCapacity(f)
* \endcode</td>
* <td>\c size_t</td>
* <td>Static method; returns the count of indices the user has to
* initialize the pool with in order to guarantee a count of \c f elements
* (irrespective of concurrency effects).
* </td>
* </tr>
* </table>
*
* \}
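
A hedged usage sketch of this concept, instantiated with the WaitFreeArrayValuePool declared below; the header path is an assumption, and -1 serves as the bottom element, so it must not occur in the initializer range:

```cpp
#include <embb/containers/wait_free_array_value_pool.h>  // assumed header path
#include <cassert>
#include <cstddef>
#include <vector>

int main() {
  // How many elements must the pool be initialized with so that 4
  // allocations are guaranteed to succeed? (For the array pool: exactly 4.)
  std::size_t count = embb::containers::WaitFreeArrayValuePool<int, -1>::
      GetMinimumElementCountForGuaranteedCapacity(4);

  std::vector<int> initial_values(count);
  for (std::size_t i = 0; i != count; ++i) {
    initial_values[i] = static_cast<int>(i);  // values handed out by Allocate
  }

  embb::containers::WaitFreeArrayValuePool<int, -1>
      pool(initial_values.begin(), initial_values.end());

  int element;
  int index = pool.Allocate(element);   // element/index pair, -1 on failure
  assert(index != -1);
  pool.Free(element, index);            // element must be returned unmodified
  return 0;
}
```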
......@@ -116,10 +145,10 @@ template<typename Type,
class Allocator = embb::base::Allocator< embb::base::Atomic<Type> > >
class WaitFreeArrayValuePool {
private:
int size;
embb::base::Atomic<Type>* pool;
int size_;
embb::base::Atomic<Type>* pool_array_;
WaitFreeArrayValuePool();
Allocator allocator;
Allocator allocator_;
// Prevent copy-construction
WaitFreeArrayValuePool(const WaitFreeArrayValuePool&);
......@@ -150,6 +179,18 @@ class WaitFreeArrayValuePool {
);
/**
* Due to concurrency effects, a pool might provide fewer elements than it
* manages. However, one usually wants to guarantee a minimum capacity. The
* count of elements that must be given to the pool in order to guarantee
* \c capacity elements is computed by this function.
*
* \return count of indices the pool has to be initialized with
*/
static size_t GetMinimumElementCountForGuaranteedCapacity(
size_t capacity
/**< [IN] count of indices that shall be guaranteed */);
/**
* Destructs the pool.
*
* \notthreadsafe
......