FORMUS3IC_LAS3 / embb
Commit 2b554f74, authored Oct 23, 2015 by Christian Kern
Continue implementation for ticket #520, fix some code style issues
Parent: db969583
Showing 6 changed files with 84 additions and 74 deletions
containers_cpp/include/embb/containers/internal/lock_free_tree_value_pool-inl.h   +32 -32
containers_cpp/include/embb/containers/internal/object_pool-inl.h                 +14 -12
containers_cpp/include/embb/containers/internal/wait_free_array_value_pool-inl.h  +12 -12
containers_cpp/include/embb/containers/lock_free_tree_value_pool.h                +10 -7
containers_cpp/include/embb/containers/object_pool.h                              +13 -8
containers_cpp/include/embb/containers/wait_free_array_value_pool.h               +3 -3
containers_cpp/include/embb/containers/internal/lock_free_tree_value_pool-inl.h

@@ -42,7 +42,7 @@ template<typename Type, Type Undefined, class PoolAllocator,
   class TreeAllocator >
 bool LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 IsLeaf(int node) {
-  if (node >= size - 1 && node <= 2 * size - 1) {
+  if (node >= size_ - 1 && node <= 2 * size_ - 1) {
     return true;
   }
   return false;
@@ -52,7 +52,7 @@ template<typename Type, Type Undefined, class PoolAllocator,
   class TreeAllocator >
 bool LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 IsValid(int node) {
-  return (node >= 0 && node <= 2 * size - 1);
+  return (node >= 0 && node <= 2 * size_ - 1);
 }

 template<typename Type, Type Undefined, class PoolAllocator,
@@ -77,14 +77,14 @@ template<typename T, T Undefined, class PoolAllocator, class TreeAllocator >
 int LockFreeTreeValuePool<T, Undefined, PoolAllocator, TreeAllocator>::
 NodeIndexToPoolIndex(int node) {
   assert(IsLeaf(node));
-  return(node - (size - 1));
+  return(node - (size_ - 1));
 }

 template<typename Type, Type Undefined, class PoolAllocator,
   class TreeAllocator >
 int LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 PoolIndexToNodeIndex(int index) {
-  int node = index + (size - 1);
+  int node = index + (size_ - 1);
   assert(IsLeaf(node));
   return node;
 }
@@ -100,7 +100,7 @@ template<typename T, T Undefined, class PoolAllocator, class TreeAllocator >
 int LockFreeTreeValuePool<T, Undefined, PoolAllocator, TreeAllocator>::
 GetParentNode(int node) {
   int parent = (node - 1) / 2;
-  assert(parent >= 0 && parent < size - 1);
+  assert(parent >= 0 && parent < size_ - 1);
   return parent;
 }
@@ -112,11 +112,11 @@ allocate_rec(int node, Type& element) {
   if (IsLeaf(node)) {
     int pool_index = NodeIndexToPoolIndex(node);
-    Type expected = pool[pool_index];
+    Type expected = pool_[pool_index];
     if (expected == Undefined)
       return -1;
-    if (pool[pool_index].CompareAndSwap(expected, Undefined)) {
+    if (pool_[pool_index].CompareAndSwap(expected, Undefined)) {
       element = expected;
       return pool_index;
     }
@@ -131,11 +131,11 @@ allocate_rec(int node, Type& element) {
   // atomically decrement the value in the node if the result is greater than
   // or equal to zero. This cannot be done atomically.
   do {
-    current = tree[node];
+    current = tree_[node];
     desired = current - 1;
     if (desired < 0)
       return -1;
-  } while (!tree[node].CompareAndSwap(current, desired));
+  } while (!tree_[node].CompareAndSwap(current, desired));

   int leftResult = allocate_rec(GetLeftChildIndex(node), element);
   if (leftResult != -1) {
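Note on the hunk above: the comment explains that the counter must be decremented only if the result stays non-negative, and that no single atomic primitive does this, hence the CAS retry loop. A minimal standalone sketch of the same pattern with std::atomic (illustrative only, not EMB² code; EMB² uses its own embb::base::Atomic type):

    #include <atomic>

    // Decrement 'counter' only if the result stays >= 0.
    // Returns false if the counter is already 0 (nothing left in this subtree).
    bool TryDecrement(std::atomic<int>& counter) {
      int current = counter.load();
      int desired;
      do {
        desired = current - 1;
        if (desired < 0) {
          return false;
        }
        // On failure, compare_exchange_weak reloads 'current', so the loop
        // retries against the freshly observed value.
      } while (!counter.compare_exchange_weak(current, desired));
      return true;
    }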
@@ -156,7 +156,7 @@ Fill(int node, int elementsToStore, int power2Value) {
   if (IsLeaf(node))
     return;

-  tree[node] = elementsToStore;
+  tree_[node] = elementsToStore;

   int postPower2Value = power2Value >> 1;
@@ -188,14 +188,14 @@ Free(Type element, int index) {
   assert(element != Undefined);

   // Put the element back
-  pool[index].Store(element);
+  pool_[index].Store(element);

-  assert(index >= 0 && index < size);
+  assert(index >= 0 && index < size_);

   int node = PoolIndexToNodeIndex(index);

   while (!IsRoot(node)) {
     node = GetParentNode(node);
-    tree[node].FetchAndAdd(1);
+    tree_[node].FetchAndAdd(1);
   }
 }
@@ -205,67 +205,67 @@ template< typename ForwardIterator >
 LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 LockFreeTreeValuePool(ForwardIterator first, ForwardIterator last) {
   // Number of elements to store
-  real_size = static_cast<int>(::std::distance(first, last));
+  real_size_ = static_cast<int>(::std::distance(first, last));

   // Let k be smallest number so that real_size <= 2^k, size = 2^k
-  size = GetSmallestPowerByTwoValue(real_size);
+  size_ = GetSmallestPowerByTwoValue(real_size_);

   // Size of binary tree without the leaves
-  tree_size = size - 1;
+  tree_size_ = size_ - 1;

   // make sure, signed values are not negative
-  assert(tree_size >= 0);
-  assert(real_size >= 0);
+  assert(tree_size_ >= 0);
+  assert(real_size_ >= 0);

-  size_t tree_size_unsigned = static_cast<size_t>(tree_size);
-  size_t real_size_unsigned = static_cast<size_t>(real_size);
+  size_t tree_size_unsigned = static_cast<size_t>(tree_size_);
+  size_t real_size_unsigned = static_cast<size_t>(real_size_);

   // Pool stores elements of type T
-  pool = poolAllocator.allocate(real_size_unsigned);
+  pool_ = pool_allocator_.allocate(real_size_unsigned);

   // invoke inplace new for each pool element
   for (size_t i = 0; i != real_size_unsigned; ++i) {
-    new (&pool[i]) embb::base::Atomic<Type>();
+    new (&pool_[i]) embb::base::Atomic<Type>();
   }

   // Tree holds the counter of not allocated elements
-  tree = treeAllocator.allocate(tree_size_unsigned);
+  tree_ = tree_allocator_.allocate(tree_size_unsigned);

   // invoke inplace new for each tree element
   for (size_t i = 0; i != tree_size_unsigned; ++i) {
-    new (&tree[i]) embb::base::Atomic<int>();
+    new (&tree_[i]) embb::base::Atomic<int>();
   }

   int i = 0;

   // Store the elements from the range
   for (ForwardIterator curIter(first); curIter != last; ++curIter) {
-    pool[i++] = *curIter;
+    pool_[i++] = *curIter;
   }

   // Initialize the binary tree without leaves (counters)
-  Fill(0, static_cast<int>(::std::distance(first, last)), size);
+  Fill(0, static_cast<int>(::std::distance(first, last)), size_);
 }

 template<typename Type, Type Undefined, class PoolAllocator,
   class TreeAllocator >
 LockFreeTreeValuePool<Type, Undefined, PoolAllocator, TreeAllocator>::
 ~LockFreeTreeValuePool() {
-  size_t tree_size_unsigned = static_cast<size_t>(tree_size);
-  size_t real_size_unsigned = static_cast<size_t>(real_size);
+  size_t tree_size_unsigned = static_cast<size_t>(tree_size_);
+  size_t real_size_unsigned = static_cast<size_t>(real_size_);

-  poolAllocator.deallocate(pool, real_size_unsigned);
+  pool_allocator_.deallocate(pool_, real_size_unsigned);

   // invoke destructor for each pool element
   for (size_t i = 0; i != real_size_unsigned; ++i) {
-    pool[i].~Atomic();
+    pool_[i].~Atomic();
   }

-  treeAllocator.deallocate(tree, tree_size_unsigned);
+  tree_allocator_.deallocate(tree_, tree_size_unsigned);

   // invoke destructor for each tree element
   for (size_t i = 0; i != tree_size_unsigned; ++i) {
-    tree[i].~Atomic();
+    tree_[i].~Atomic();
   }
 }
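The index arithmetic touched in this file stores the counter tree and the leaves in one implicit binary-heap layout: inner nodes come first, and a leaf node maps to a pool slot via node - (size_ - 1). A small worked check of the formulas from the hunks above, assuming size_ = 4 (a hypothetical value, chosen only to make the numbers concrete):

    #include <cassert>

    int main() {
      const int size = 4;             // stands in for size_, a power of two
      // NodeIndexToPoolIndex / PoolIndexToNodeIndex:
      assert(0 + (size - 1) == 3);    // pool index 0 lives at node 3
      assert(3 - (size - 1) == 0);    // ... and maps back to pool index 0
      // GetParentNode: parent = (node - 1) / 2, used when walking up in Free()
      assert((3 - 1) / 2 == 1);       // parent of node 3 is node 1
      assert((1 - 1) / 2 == 0);       // parent of node 1 is the root, node 0
      return 0;
    }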
containers_cpp/include/embb/containers/internal/object_pool-inl.h

@@ -83,7 +83,8 @@ ReturningTrueIterator::operator!=(const self_type& rhs) {
 template<class Type, typename ValuePool, class ObjectAllocator>
 bool ObjectPool<Type, ValuePool, ObjectAllocator>::
 IsContained(const Type &obj) const {
-  if ((&obj < &objects[0]) || (&obj > &objects[capacity - 1])) {
+  if ((&obj < &objects_array_[0]) ||
+    (&obj > &objects_array_[value_pool_size_ - 1])) {
     return false;
   } else {
     return true;
@@ -94,17 +95,17 @@ template<class Type, typename ValuePool, class ObjectAllocator>
 int ObjectPool<Type, ValuePool, ObjectAllocator>::
 GetIndexOfObject(const Type &obj) const {
   assert(IsContained(obj));
-  return(static_cast<int>(&obj - &objects[0]));
+  return(static_cast<int>(&obj - &objects_array_[0]));
 }

 template<class Type, typename ValuePool, class ObjectAllocator>
 Type* ObjectPool<Type, ValuePool, ObjectAllocator>::AllocateRaw() {
   bool val;
-  int allocated_index = p.Allocate(val);
+  int allocated_index = value_pool_.Allocate(val);
   if (allocated_index == -1) {
     return NULL;
   } else {
-    Type* ret_pointer = &(objects[allocated_index]);
+    Type* ret_pointer = &(objects_array_[allocated_index]);
     return ret_pointer;
   }
@@ -112,16 +113,17 @@ Type* ObjectPool<Type, ValuePool, ObjectAllocator>::AllocateRaw() {
 template<class Type, typename ValuePool, class ObjectAllocator>
 size_t ObjectPool<Type, ValuePool, ObjectAllocator>::GetCapacity() {
-  return capacity;
+  return capacity_;
 }

 template<class Type, typename ValuePool, class ObjectAllocator>
 ObjectPool<Type, ValuePool, ObjectAllocator>::ObjectPool(size_t capacity) :
-  capacity(capacity),
-  p(ReturningTrueIterator(0), ReturningTrueIterator(
-    ValuePool::GetMinimumElementCountForGuaranteedCapacity(capacity))) {
-  // Allocate the objects (without construction, just get the memory)
-  objects = objectAllocator.allocate(capacity);
+  capacity_(capacity),
+  value_pool_size_(
+    ValuePool::GetMinimumElementCountForGuaranteedCapacity(capacity)),
+  value_pool_(ReturningTrueIterator(0), ReturningTrueIterator(value_pool_size_)),
+  objects_array_(object_allocator_.allocate(value_pool_size_)) {
 }

 template<class Type, typename ValuePool, class ObjectAllocator>
@@ -129,7 +131,7 @@ void ObjectPool<Type, ValuePool, ObjectAllocator>::Free(Type* obj) {
   int index = GetIndexOfObject(*obj);
   obj->~Type();

-  p.Free(true, index);
+  value_pool_.Free(true, index);
 }

 template<class Type, typename ValuePool, class ObjectAllocator>
@@ -190,7 +192,7 @@ Type* ObjectPool<Type, ValuePool, ObjectAllocator>::Allocate(
 template<class Type, typename ValuePool, class ObjectAllocator>
 ObjectPool<Type, ValuePool, ObjectAllocator>::~ObjectPool() {
   // Deallocate the objects
-  objectAllocator.deallocate(objects, capacity);
+  object_allocator_.deallocate(objects_array_, value_pool_size_);
 }
 } // namespace containers
 } // namespace embb
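The public interface of ObjectPool (Allocate, Free, GetCapacity) is unchanged by this commit; only the private member names and the constructor's initializer list differ. A hedged usage sketch, assuming the default template arguments declared in object_pool.h and a zero-argument Allocate() overload:

    #include <embb/containers/object_pool.h>

    void Example() {
      // Pool with room for 64 ints; Allocate() returns NULL when exhausted.
      embb::containers::ObjectPool<int> pool(64);
      int* value = pool.Allocate();
      if (value != NULL) {
        *value = 42;
        pool.Free(value);   // destroys the object and releases its slot
      }
    }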
containers_cpp/include/embb/containers/internal/wait_free_array_value_pool-inl.h

@@ -35,21 +35,21 @@ Free(Type element, int index) {
   assert(element != Undefined);

   // Just put back the element
-  pool[index].Store(element);
+  pool_array_[index].Store(element);
 }

 template<typename Type, Type Undefined, class Allocator>
 int WaitFreeArrayValuePool<Type, Undefined, Allocator>::
 Allocate(Type & element) {
-  for (int i = 0; i != size; ++i) {
+  for (int i = 0; i != size_; ++i) {
     Type expected;

     // If the memory cell is not available, go ahead
-    if (Undefined == (expected = pool[i].Load()))
+    if (Undefined == (expected = pool_array_[i].Load()))
       continue;

     // Try to get the memory cell
-    if (pool[i].CompareAndSwap(expected, Undefined)) {
+    if (pool_array_[i].CompareAndSwap(expected, Undefined)) {
       // When the CAS was successful, this element is ours
       element = expected;
       return i;
@@ -64,36 +64,36 @@ WaitFreeArrayValuePool<Type, Undefined, Allocator>::
 WaitFreeArrayValuePool(ForwardIterator first, ForwardIterator last) {
   size_t dist = static_cast<size_t>(std::distance(first, last));

-  size = static_cast<int>(dist);
+  size_ = static_cast<int>(dist);

   // conversion may result in negative number. check!
-  assert(size >= 0);
+  assert(size_ >= 0);

   // Use the allocator to allocate an array of size dist
-  pool = allocator.allocate(dist);
+  pool_array_ = allocator_.allocate(dist);

   // invoke inplace new for each pool element
   for (size_t i = 0; i != dist; ++i) {
-    new (&pool[i]) embb::base::Atomic<Type>();
+    new (&pool_array_[i]) embb::base::Atomic<Type>();
   }

   int i = 0;

   // Store the elements of the range
   for (ForwardIterator curIter(first); curIter != last; ++curIter) {
-    pool[i++] = *curIter;
+    pool_array_[i++] = *curIter;
   }
 }

 template<typename Type, Type Undefined, class Allocator>
 WaitFreeArrayValuePool<Type, Undefined, Allocator>::~WaitFreeArrayValuePool() {
   // invoke destructor for each pool element
-  for (int i = 0; i != size; ++i) {
-    pool[i].~Atomic();
+  for (int i = 0; i != size_; ++i) {
+    pool_array_[i].~Atomic();
   }

   // free memory
-  allocator.deallocate(pool, static_cast<size_t>(size));
+  allocator_.deallocate(pool_array_, static_cast<size_t>(size_));
 }

 template<typename Type, Type Undefined, class Allocator>
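As in the object pool, only member names change here: Allocate(Type&) still scans the array, claims the first free cell via CompareAndSwap, and returns its index, while Free(element, index) stores the value back. A hedged usage sketch based on the signatures visible above (the -1 return value for an exhausted pool is assumed, by analogy with the tree-based pool in this commit):

    #include <embb/containers/wait_free_array_value_pool.h>

    void Example() {
      // Pool over the values {1, 2, 3}; -1 marks an empty cell ("Undefined").
      int values[3] = { 1, 2, 3 };
      embb::containers::WaitFreeArrayValuePool<int, -1>
        pool(values, values + 3);

      int element;
      int index = pool.Allocate(element);   // claims a cell, assumed -1 if none
      if (index != -1) {
        // ... use 'element' ...
        pool.Free(element, index);          // put the value back into its cell
      }
    }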
containers_cpp/include/embb/containers/lock_free_tree_value_pool.h

@@ -123,22 +123,25 @@ class LockFreeTreeValuePool {
   LockFreeTreeValuePool& operator=(const LockFreeTreeValuePool&);

   // See algorithm description above
-  int size;
+  int size_;

   // See algorithm description above
-  int tree_size;
+  int tree_size_;

   // See algorithm description above
-  int real_size;
+  int real_size_;

   // The tree above the pool
-  embb::base::Atomic<int>* tree;
+  embb::base::Atomic<int>* tree_;

   // The actual pool
-  embb::base::Atomic<Type>* pool;
+  embb::base::Atomic<Type>* pool_;

-  PoolAllocator poolAllocator;
-  TreeAllocator treeAllocator;
+  // respective allocator
+  PoolAllocator pool_allocator_;
+
+  // respective allocator
+  TreeAllocator tree_allocator_;

   /**
    * Computes smallest power of two fitting the specified value
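The three size members above are related by the constructor in lock_free_tree_value_pool-inl.h: size_ is real_size_ rounded up to the next power of two, and tree_size_ = size_ - 1. With hypothetical numbers: for real_size_ = 5 the constructor computes size_ = 8 and tree_size_ = 7, so pool_ holds 5 atomics and tree_ holds 7 counters.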
containers_cpp/include/embb/containers/object_pool.h

@@ -35,7 +35,6 @@
 namespace embb {
 namespace containers {
-
 /**
  * \defgroup CPP_CONTAINERS_POOLS Pools
  * Concurrent pools
@@ -62,22 +61,29 @@ class ObjectPool {
   /**
    * Allocator used to allocate elements of the object pool
    */
-  ObjectAllocator objectAllocator;
+  ObjectAllocator object_allocator_;

   /**
-   * Array holding the allocated object
+   * Capacity of the object pool
    */
-  Type* objects;
+  size_t capacity_;

   /**
-   * Capacity of the object pool
+   * The size of the underlying value pool. This is also the size of the object
+   * array in this class. It is assumed, that the valuepool manages indices in
+   * range [0;value_pool_size_-1].
    */
-  size_t capacity;
+  size_t value_pool_size_;

   /**
    * Underlying value pool
    */
-  ValuePool p;
+  ValuePool value_pool_;

+  /**
+   * Array holding the allocated object
+   */
+  Type* objects_array_;
+
   /**
    * Helper providing a virtual iterator that just returns true in each
@@ -108,7 +114,6 @@ class ObjectPool {
   bool IsContained(const Type &obj) const;
   int GetIndexOfObject(const Type &obj) const;
   Type* AllocateRaw();
-
  public:
   /**
    * Constructs an object pool with capacity \c capacity.
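The declaration order above matters for the rewritten ObjectPool constructor (object_pool-inl.h in this commit): C++ initializes non-static members in declaration order, not in initializer-list order, and value_pool_ and objects_array_ are initialized from value_pool_size_, which is declared before them. A minimal sketch of that rule, with illustrative names that are not part of EMB²:

    #include <cstddef>

    class Example {
     public:
      explicit Example(std::size_t capacity)
        : count_(capacity * 2),          // initialized first: declared first
          buffer_(new int[count_]) {}    // may safely read count_ here
      ~Example() { delete[] buffer_; }

     private:
      std::size_t count_;   // must be declared before buffer_
      int* buffer_;         // initialized after count_, in declaration order
    };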
containers_cpp/include/embb/containers/wait_free_array_value_pool.h

@@ -116,10 +116,10 @@ template<typename Type,
   class Allocator = embb::base::Allocator< embb::base::Atomic<Type> > >
 class WaitFreeArrayValuePool {
  private:
-  int size;
-  embb::base::Atomic<Type>* pool;
+  int size_;
+  embb::base::Atomic<Type>* pool_array_;
   WaitFreeArrayValuePool();
-  Allocator allocator;
+  Allocator allocator_;

   // Prevent copy-construction
   WaitFreeArrayValuePool(const WaitFreeArrayValuePool&);