las3_pub / jester
Commit 31cab85d authored 4 years ago by Michael Schmid
finishing work on tests
parent dbeac016
Branches: master … scheduler
Showing 17 changed files with 344 additions and 185 deletions (+344 -185)

src/main/java/mvd/jester/App.java  +31 -7
src/main/java/mvd/jester/ResultLogger.java  +18 -11
src/main/java/mvd/jester/TestEnvironment.java  +124 -9
src/main/java/mvd/jester/model/SystemManager.java  +51 -56
src/main/java/mvd/jester/simulator/AbstractSimulator.java  +9 -6
src/main/java/mvd/jester/simulator/DynamicForkJoin.java  +13 -15
src/main/java/mvd/jester/simulator/ParallelSynchronous.java  +13 -15
src/main/java/mvd/jester/tests/AbstractTest.java  +4 -3
src/main/java/mvd/jester/tests/ChwaLee.java  +5 -4
src/main/java/mvd/jester/tests/FonsecaNelis.java  +10 -7
src/main/java/mvd/jester/tests/MaiaBertogna.java  +4 -3
src/main/java/mvd/jester/tests/MelaniButtazzo.java  +13 -12
src/main/java/mvd/jester/tests/SchmidMottok.java  +8 -9
src/test/java/mvd/jester/model/TestDagUtils.java  +1 -1
src/test/java/mvd/jester/model/TestSystemSetup.java  +30 -21
src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java  +5 -3
src/test/java/mvd/jester/priority/TestRateMonotonic.java  +5 -3
src/main/java/mvd/jester/App.java

@@ -3,7 +3,8 @@ package mvd.jester;
 import java.util.Arrays;
 import java.util.List;
 import mvd.jester.model.DagTask;
-import mvd.jester.model.SystemSetup;
+import mvd.jester.model.SystemManager;
+import mvd.jester.model.SystemManager.DagTaskBuilder;
 import mvd.jester.tests.AbstractTest;
 import mvd.jester.tests.FonsecaNelis;
 import mvd.jester.tests.MelaniButtazzo;

@@ -16,15 +17,38 @@ import mvd.jester.tests.SchmidMottok;
  */
 public class App {
     public static void main(String[] args) {
-        for (int p = 8; p <= 8; p *= 2) {
-            SystemSetup.DagTaskBuilder builder =
-                    new SystemSetup.DagTaskBuilder().setNumberOfProcessors(p);
+        {
+            SystemManager manager = new SystemManager(8);
+            DagTaskBuilder builder = new DagTaskBuilder();
             TestEnvironment te = new TestEnvironment();
-            List<AbstractTest<DagTask>> tests = te.registerTests(Arrays.asList(new SchmidMottok(p),
-                    /* new MelaniButtazzo(p), */ new FonsecaNelis(p)));
-            te.runExperiments(builder, tests, p, 100);
+            List<AbstractTest<DagTask>> tests =
+                    te.registerTests(Arrays.asList(new SchmidMottok(manager),
+                            new MelaniButtazzo(manager), new FonsecaNelis(manager)));
+            // TODO: Change back to 500
+            te.varyUtilization(builder, tests, 8, 500);
+        }
+        {
+            SystemManager manager = new SystemManager(8);
+            DagTaskBuilder builder = new DagTaskBuilder();
+            TestEnvironment te = new TestEnvironment();
+            List<AbstractTest<DagTask>> tests = te.registerTests(Arrays.asList(
+                    new SchmidMottok(manager), new MelaniButtazzo(manager),
+                    new FonsecaNelis(manager)));
+            te.varyNumberOfProcessors(builder, tests, manager, 500);
+        }
+        {
+            SystemManager manager = new SystemManager(8);
+            DagTaskBuilder builder = new DagTaskBuilder();
+            TestEnvironment te = new TestEnvironment();
+            List<AbstractTest<DagTask>> tests = te.registerTests(Arrays.asList(
+                    new SchmidMottok(manager), new MelaniButtazzo(manager),
+                    new FonsecaNelis(manager)));
+            te.varyNumberOfTasks(builder, tests, 8, 500);
         }
     }
 }
src/main/java/mvd/jester/ResultLogger.java

@@ -13,24 +13,31 @@ import mvd.jester.utils.Logger;
 public class ResultLogger {
 
     private final Logger logger;
-    private boolean headerLogged = false;
 
-    public ResultLogger(final long numberOfProcessors) {
-        this.logger = new Logger("./results/feasibility_" + numberOfProcessors + ".txt");
+    public ResultLogger(final String fileName) {
+        this.logger = new Logger("./results/" + fileName + ".txt");
     }
 
+    public <T extends Task> void logHeader(final Map<AbstractTest<T>, Long> results,
+            String xAxisName) {
+        final Appendable out = new StringBuilder();
+        try {
+            out.append(xAxisName);
+            for (final Entry<AbstractTest<T>, Long> rc : results.entrySet()) {
+                out.append("\t" + rc.getKey().getName());
+            }
+            out.append("\n");
+        } catch (final Exception e) {
+            throw new RuntimeException("Failed to log header!");
+        }
+        logger.log(out);
+    }
+
     public <T extends Task> void logLine(final double utilization,
             final Map<AbstractTest<T>, Long> results) {
         final Appendable out = new StringBuilder();
         try {
-            if (!headerLogged) {
-                headerLogged = true;
-                out.append("Utilization");
-                for (final Entry<AbstractTest<T>, Long> rc : results.entrySet()) {
-                    out.append("\t" + rc.getKey().getName());
-                }
-                out.append("\n");
-            }
             out.append("" + utilization);
             for (final Entry<AbstractTest<T>, Long> rc : results.entrySet()) {
                 final long numberOfFeasibleTasks = rc.getValue();
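With the constructor now taking a file name, a results file is assembled as one header row (the x-axis label plus one tab-separated column per test) followed by one row of counts per x value, which is how logHeader and logLine build their StringBuilder. A small standalone sketch of that row layout, assuming nothing from the project — the test names and counts below are arbitrary placeholders, not output of jester:

// Illustrates the tab-separated layout ResultLogger writes to ./results/<fileName>.txt.
// "SchmidMottok" / "FonsecaNelis" stand in for whatever AbstractTest.getName() returns.
public class ResultRowSketch {
    public static void main(String[] args) {
        final StringBuilder out = new StringBuilder();
        out.append("Utilization");                          // header: x-axis label ...
        for (String testName : new String[] {"SchmidMottok", "FonsecaNelis"}) {
            out.append("\t" + testName);                    // ... plus one column per test
        }
        out.append("\n");
        out.append("" + 1.25);                              // data row: x value ...
        out.append("\t" + 42).append("\t" + 37);            // ... then one placeholder count per test
        out.append("\n");
        System.out.print(out);
    }
}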
src/main/java/mvd/jester/TestEnvironment.java

@@ -5,14 +5,19 @@ import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.Map.Entry;
+import java.util.concurrent.ThreadLocalRandom;
+import java.util.concurrent.TimeUnit;
+import com.google.common.base.Stopwatch;
 import mvd.jester.info.SchedulingInfo;
 import mvd.jester.info.SchedulingInfo.Feasiblity;
 import mvd.jester.model.DagTask;
 import mvd.jester.model.SortedTaskSet;
 import mvd.jester.model.SynchronousTask;
+import mvd.jester.model.SystemManager;
 import mvd.jester.model.Task;
-import mvd.jester.model.SystemSetup.DagTaskBuilder;
-import mvd.jester.model.SystemSetup.SynchronousTaskBuilder;
+import mvd.jester.model.SystemManager.DagTaskBuilder;
+import mvd.jester.model.SystemManager.SynchronousTaskBuilder;
 import mvd.jester.priority.PriorityManager;
 import mvd.jester.tests.AbstractTest;

@@ -26,10 +31,6 @@ public class TestEnvironment {
     }
 
     public <T extends Task> List<AbstractTest<T>> registerTests(final List<AbstractTest<T>> tests) {
-        // Set<ResultCollector<AbstractTest<T>>> testSet = new HashSet<>();
-        // for (AbstractTest<T> t : tests) {
-        //     testSet.add(new ResultCollector<AbstractTest<T>>(t.getPriorityManager(), t));
-        // }
         return new ArrayList<>(tests);
     }

@@ -73,22 +74,136 @@ public class TestEnvironment {
             }
         }
         System.out.println("");
-        final ResultLogger resultLogger = new ResultLogger(numberOfProcessors);
+        // final ResultLogger resultLogger = new ResultLogger(numberOfProcessors);
         // resultLogger.logTests(abstractTestInstances);
     }
 
+    public void measureExecutionTimes(final DagTaskBuilder builder,
+            final List<AbstractTest<DagTask>> abstractTestInstances,
+            final long numberOfMeasurements) {
+        Map<AbstractTest<DagTask>, List<Long>> results = new LinkedHashMap<>();
+        abstractTestInstances.forEach(t -> results.put(t, new ArrayList<>()));
+
+        for (int i = 0; i < numberOfMeasurements; ++i) {
+            double utilization = ThreadLocalRandom.current().nextDouble(1, 7);
+            Set<DagTask> taskSet = builder.generateTaskSet(utilization);
+            for (AbstractTest<DagTask> testInstance : abstractTestInstances) {
+                final PriorityManager priorityManager = testInstance.getPriorityManager();
+                final SortedTaskSet<DagTask> sortedTaskSet = new SortedTaskSet<>(priorityManager);
+                sortedTaskSet.addAll(taskSet);
+                Stopwatch w = Stopwatch.createStarted();
+                testInstance.runSchedulabilityCheck(sortedTaskSet);
+                w.stop();
+                long micros = w.elapsed(TimeUnit.MICROSECONDS);
+                results.get(testInstance).add(micros);
+            }
+        }
+
+        for (Entry<AbstractTest<DagTask>, List<Long>> entry : results.entrySet()) {
+            long size = entry.getValue().size();
+            long total = entry.getValue().stream().reduce((long) 0, Long::sum);
+            System.out.println(entry.getKey().getName() + ": " + (double) total / size + " µs");
+        }
+    }
+
+    public void varyNumberOfProcessors(final DagTaskBuilder builder,
+            final List<AbstractTest<DagTask>> abstractTestInstances, SystemManager manager,
+            final long numberOfTaskSetsPerStep) {
+        long checkedTasksets = 0;
+        final long numberOfTaskSets = 8 * numberOfTaskSetsPerStep;
+        final ResultLogger resultLogger = new ResultLogger("numberOfProcessors");
+
+        final Map<AbstractTest<DagTask>, Long> resultMap = new LinkedHashMap<>();
+        abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0));
+        resultLogger.logHeader(resultMap, "NoOfProcessors");
+
+        for (long numberOfProcessors = 2; numberOfProcessors <= 16; numberOfProcessors += 2) {
+            manager.setNumberOfProcessors(numberOfProcessors);
+            resultMap.replaceAll((k, v) -> (long) 0);
+            for (int i = 0; i < numberOfTaskSetsPerStep; ++i) {
+                final Set<DagTask> taskSet = builder.generateUUnifastTaskSet(
+                        (long) (1.5 * numberOfProcessors), (double) numberOfProcessors * 0.5);
+                System.out.print(Math.round((double) checkedTasksets / numberOfTaskSets * 100)
+                        + "% of " + numberOfTaskSets + " tasksets tested!\r");
+                for (final AbstractTest<DagTask> testInstance : abstractTestInstances) {
+                    final PriorityManager priorityManager = testInstance.getPriorityManager();
+                    final SortedTaskSet<DagTask> sortedTaskSet =
+                            new SortedTaskSet<>(priorityManager);
+                    sortedTaskSet.addAll(taskSet);
+                    final SchedulingInfo schedulingInfo =
+                            testInstance.runSchedulabilityCheck(sortedTaskSet);
+                    if (schedulingInfo.getFeasibility() == Feasiblity.SUCCEEDED) {
+                        resultMap.computeIfPresent(testInstance, (k, v) -> v + 1);
+                    }
+                }
+                checkedTasksets++;
+            }
+            resultLogger.logLine(numberOfProcessors, resultMap);
+            resultLogger.newLine();
+        }
+        System.out.println("");
+        resultLogger.finalize();
+    }
+
+    public void varyNumberOfTasks(final DagTaskBuilder builder,
+            final List<AbstractTest<DagTask>> abstractTestInstances,
+            final long numberOfProcessors, final long numberOfTaskSetsPerStep) {
+        long checkedTasksets = 0;
+        final long numberOfTaskSets = 9 * numberOfTaskSetsPerStep;
+        final ResultLogger resultLogger = new ResultLogger("numberOfTasks_" + numberOfProcessors);
+
+        final Map<AbstractTest<DagTask>, Long> resultMap = new LinkedHashMap<>();
+        abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0));
+        resultLogger.logHeader(resultMap, "NoOfTasks");
+
+        for (long numberOfTasks = 4; numberOfTasks <= 20; numberOfTasks += 2) {
+            resultMap.replaceAll((k, v) -> (long) 0);
+            for (int i = 0; i < numberOfTaskSetsPerStep; ++i) {
+                final Set<DagTask> taskSet = builder.generateUUnifastTaskSet(numberOfTasks,
+                        (double) numberOfProcessors * 0.5);
+                System.out.print(Math.round((double) checkedTasksets / numberOfTaskSets * 100)
+                        + "% of " + numberOfTaskSets + " tasksets tested!\r");
+                for (final AbstractTest<DagTask> testInstance : abstractTestInstances) {
+                    final PriorityManager priorityManager = testInstance.getPriorityManager();
+                    final SortedTaskSet<DagTask> sortedTaskSet =
+                            new SortedTaskSet<>(priorityManager);
+                    sortedTaskSet.addAll(taskSet);
+                    final SchedulingInfo schedulingInfo =
+                            testInstance.runSchedulabilityCheck(sortedTaskSet);
+                    if (schedulingInfo.getFeasibility() == Feasiblity.SUCCEEDED) {
+                        resultMap.computeIfPresent(testInstance, (k, v) -> v + 1);
+                    }
+                }
+                checkedTasksets++;
+            }
+            resultLogger.logLine(numberOfTasks, resultMap);
+            resultLogger.newLine();
+        }
+        System.out.println("");
+        resultLogger.finalize();
+    }
 
-    public void runExperiments(final DagTaskBuilder builder,
+    public void varyUtilization(final DagTaskBuilder builder,
             final List<AbstractTest<DagTask>> abstractTestInstances, final long numberOfProcessors,
             final long numberOfTaskSetsPerUtil) {
         long checkedTasksets = 0;
         final long numberOfTaskSets = ((numberOfProcessors * 4) - 3) * numberOfTaskSetsPerUtil;
-        final ResultLogger resultLogger = new ResultLogger(numberOfProcessors);
+        final ResultLogger resultLogger = new ResultLogger("utilization_" + numberOfProcessors);
 
         final Map<AbstractTest<DagTask>, Long> resultMap = new LinkedHashMap<>();
         abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0));
+        resultLogger.logHeader(resultMap, "Utilization");
 
         for (double util = 1; util <= numberOfProcessors; util += 0.25) {
             resultMap.replaceAll((k, v) -> (long) 0);
             for (int i = 0; i < numberOfTaskSetsPerUtil; ++i) {
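The three vary* methods added above repeat the same inner step for every generated task set: wrap it in a SortedTaskSet ordered by the test's PriorityManager, run runSchedulabilityCheck, and count a success when the SchedulingInfo reports Feasiblity.SUCCEEDED. A sketch of that shared step factored into a helper, written against the types this diff already uses — the class and method names (FeasibilityStep, isFeasible) are hypothetical and not part of the commit:

// Sketch only: assumes the jester classes referenced in this diff.
import java.util.Set;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.SchedulingInfo.Feasiblity;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.tests.AbstractTest;

final class FeasibilityStep {
    /** Runs one schedulability check and reports whether the task set was accepted. */
    static boolean isFeasible(final AbstractTest<DagTask> test, final Set<DagTask> taskSet) {
        final SortedTaskSet<DagTask> sorted = new SortedTaskSet<>(test.getPriorityManager());
        sorted.addAll(taskSet);
        final SchedulingInfo info = test.runSchedulabilityCheck(sorted);
        return info.getFeasibility() == Feasiblity.SUCCEEDED;
    }
}

Each vary* loop would then differ only in the quantity it sweeps (processors, task count, utilization) and in the ResultLogger file name.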
src/main/java/mvd/jester/model/SystemSetup.java → src/main/java/mvd/jester/model/SystemManager.java

 package mvd.jester.model;
 
-import java.io.PrintWriter;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.Map;

@@ -9,8 +8,6 @@ import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.Multimap;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
 import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
 import org.jgrapht.graph.DefaultEdge;
 import mvd.jester.utils.DagUtils;

@@ -18,70 +15,27 @@ import mvd.jester.utils.DagUtils;
 /**
  * TaskSet
  */
-public class SystemSetup<T extends Task> {
+public class SystemManager {
 
-    private Set<T> tasks;
-    private final long numberOfProcessors;
+    private long numberOfProcessors;
 
-    public SystemSetup(final Set<T> tasks, final long numberOfProcessors) {
-        this.tasks = tasks;
+    public SystemManager(final long numberOfProcessors) {
         this.numberOfProcessors = numberOfProcessors;
     }
 
     /**
-     * @return the tasks
-     */
-    public Set<T> getTasks() {
-        return tasks;
-    }
-
-    public void setTasks(final Set<T> tasks) {
-        this.tasks = tasks;
-    }
-
-    /**
      * @return the numberOfProcessors
      */
     public long getNumberOfProcessors() {
         return numberOfProcessors;
     }
 
-    @Override
-    public String toString() {
-        final Gson gson = new GsonBuilder().setPrettyPrinting().create();
-        return gson.toJson(tasks);
-    }
-
-    public void writeToFile(final String path) {
-        try (PrintWriter pw = new PrintWriter(path)) {
-            pw.write(toString());
-        } catch (final Exception e) {
-            System.err.println("Something went wrong when writing to file!");
-        }
-    }
-
-    // public static SystemSetup<SynchronousTask> readFromFile(final String path,
-    //         final long numberOfProcessors) {
-    //     String jsonString;
-    //     try {
-    //         final byte[] encoded = Files.readAllBytes(Paths.get(path));
-    //         jsonString = new String(encoded, Charset.defaultCharset());
-    //     } catch (final IOException e) {
-    //         System.out.println(e.getMessage());
-    //         jsonString = new String("");
-    //     }
-    //     return SystemSetup.fromString(jsonString, numberOfProcessors);
-    // }
-
-    // public static SystemSetup<SynchronousTask> fromString(final String json,
-    //         final long numberOfProcessors) {
-    //     final Gson gson = new GsonBuilder().registerTypeAdapter(SortedTaskSet.class,
-    //             new SortedTaskSet.Deserializer<SynchronousTask>()).create();
-    //     final SortedTaskSet<SynchronousTask> tasks = gson.fromJson(json, SortedTaskSet.class);
-    //     return new SystemSetup<>(tasks, numberOfProcessors);
-    // }
+    /**
+     * @param numberOfProcessors the numberOfProcessors to set
+     */
+    public void setNumberOfProcessors(long numberOfProcessors) {
+        this.numberOfProcessors = numberOfProcessors;
+    }
 
     public static class SynchronousTaskBuilder {
 
         private long numberOfProcessors = 4;
         private long minPeriod = 100;

@@ -203,13 +157,13 @@ public class SystemSetup<T extends Task> {
     }
 
     public static class DagTaskBuilder {
-        private long numberOfProcessors = 4;
+        private long numberOfProcessors = 8;
         private long minimumWcet = 1;
         private long maximumWcet = 100;
         private long maxNumberOfBranches = 5;
         private long depth = 2;
         private long p_par = 80;
-        private long p_add = 5;
+        // TODO: Change back to 20
+        private long p_add = 20;
 
         public DagTaskBuilder() {
         }

@@ -222,6 +176,26 @@ public class SystemSetup<T extends Task> {
             return ThreadLocalRandom.current().nextLong(0, 100);
         }
 
+        public Set<DagTask> generateUUnifastTaskSet(final long numberOfTasks,
+                final double totalUtilization) {
+            final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();
+            if (numberOfTasks > 0) {
+                double sumU = totalUtilization;
+                for (int i = 1; i <= numberOfTasks - 1; i++) {
+                    Double nextSumU = sumU * Math.pow(ThreadLocalRandom.current().nextDouble(),
+                            (1.0 / (double) (numberOfTasks - i)));
+                    DagTask task = generateTask(sumU - nextSumU);
+                    taskSet.add(task);
+                    sumU = nextSumU;
+                }
+                DagTask task = generateTask(sumU);
+                taskSet.add(task);
+            }
+            return taskSet;
+        }
+
         public Set<DagTask> generateTaskSet(final double totalUtilization) {
             final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();
             double currentUtilization = 0;

@@ -247,6 +221,20 @@ public class SystemSetup<T extends Task> {
             return taskSet;
         }
 
+        public DagTask generateTask(double utilization) {
+            final DirectedAcyclicGraph<Job, DefaultEdge> jobDag =
+                    new DirectedAcyclicGraph<>(DefaultEdge.class);
+
+            final Job j = fork(jobDag, Optional.empty(), this.depth);
+            fork(jobDag, Optional.of(j), this.depth);
+
+            randomEdges(jobDag);
+
+            final long workload = DagUtils.calculateWorkload(jobDag);
+            final long period = Math.round(workload / utilization);
+
+            return new DagTask(jobDag, period, numberOfProcessors);
+        }
+
         public DagTask generateTask() {
             final DirectedAcyclicGraph<Job, DefaultEdge> jobDag =

@@ -343,6 +331,13 @@ public class SystemSetup<T extends Task> {
             return this;
         }
 
+        /**
+         * @return the numberOfProcessors
+         */
+        public long getNumberOfProcessors() {
+            return numberOfProcessors;
+        }
+
         public DagTaskBuilder setWcets(final long minimumWcet, final long maximumWcet) {
             this.minimumWcet = minimumWcet;
             this.maximumWcet = maximumWcet;
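The new DagTaskBuilder.generateUUnifastTaskSet() above follows the standard UUnifast recurrence: starting from the total utilization, the remaining utilization after task i is sumU * r^(1/(n-i)) for a uniform random r in [0,1), and task i receives the difference. A minimal standalone sketch of just that split, independent of the jester model classes (class and method names here are purely illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ThreadLocalRandom;

public class UUnifastSketch {
    /** Splits totalUtilization into n shares whose sum is totalUtilization (UUnifast). */
    static List<Double> uunifast(int n, double totalUtilization) {
        final List<Double> shares = new ArrayList<>();
        double sumU = totalUtilization;
        for (int i = 1; i <= n - 1; i++) {
            // Remaining utilization after assigning a share to task i.
            double nextSumU =
                    sumU * Math.pow(ThreadLocalRandom.current().nextDouble(), 1.0 / (n - i));
            shares.add(sumU - nextSumU);
            sumU = nextSumU;
        }
        shares.add(sumU); // the last task takes whatever utilization is left
        return shares;
    }

    public static void main(String[] args) {
        List<Double> shares = uunifast(5, 4.0);
        double sum = shares.stream().mapToDouble(Double::doubleValue).sum();
        System.out.println(shares + "  sum = " + sum); // sum is 4.0 up to rounding error
    }
}

This is the property the new testUUnifast() in TestSystemSetup.java checks: one share per task, and the shares sum to the requested utilization.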
src/main/java/mvd/jester/simulator/AbstractSimulator.java

@@ -5,8 +5,7 @@ import java.util.HashSet;
 import java.util.Set;
 import java.util.TreeSet;
 import com.google.common.collect.TreeMultiset;
-import mvd.jester.model.SystemSetup;
-import mvd.jester.model.SynchronousTask;
+import mvd.jester.model.SystemManager;
 import mvd.jester.priority.PriorityManager;
 import mvd.jester.priority.RateMonotonic;
 import mvd.jester.TypeInterface;

@@ -20,11 +19,11 @@ import mvd.jester.simulator.internals.TaskContextInterface;
  */
 public abstract class AbstractSimulator implements SimulatorInterface, TypeInterface {
 
-    protected final SystemSetup<SynchronousTask> systemSetup;
+    protected final SystemManager systemSetup;
     protected final Set<ProcessorContext> processors;
     protected TreeMultiset<TaskContextInterface> readyTasks;
 
-    AbstractSimulator(SystemSetup<SynchronousTask> systemSetup) {
+    AbstractSimulator(SystemManager systemSetup) {
         this.systemSetup = systemSetup;
         this.readyTasks = TreeMultiset.create((t1, t2) -> new RateMonotonic().compare(t1, t2));
         processors = new HashSet<>();

@@ -98,10 +97,14 @@ public abstract class AbstractSimulator implements SimulatorInterface, TypeInter
     }
 
     private long getHyperPeriod() {
-        return systemSetup.getTasks().stream().max(Comparator.comparing(SynchronousTask::getPeriod))
-                .get().getPeriod() * 10;
+        // return
+        // systemSetup.getTasks().stream().max(Comparator.comparing(SynchronousTask::getPeriod))
+        // .get().getPeriod() * 10;
+        return 10;
     }
 
     private class ProcessorComparator implements Comparator<ProcessorContext> {
 
         @Override
src/main/java/mvd/jester/simulator/DynamicForkJoin.java

 package mvd.jester.simulator;
 
-import mvd.jester.model.SystemSetup;
-import mvd.jester.model.SynchronousTask;
-import mvd.jester.simulator.internals.dynamicforkjoin.TaskContext;
+import mvd.jester.model.SystemManager;
 
 /**
  * SchmidMottok
  */
 public class DynamicForkJoin extends AbstractSimulator {
 
-    public DynamicForkJoin(SystemSetup<SynchronousTask> systemSetup) {
+    public DynamicForkJoin(SystemManager systemSetup) {
         super(systemSetup);
     }
 
     @Override
     protected boolean releaseTasks(long timeStep) {
-        for (SynchronousTask t : systemSetup.getTasks()) {
-            if (timeStep % t.getPeriod() == 0) {
-                TaskContext tc = new TaskContext(t, systemSetup.getNumberOfProcessors(), timeStep);
-                if (!readyTasks.add(tc)) {
-                    EventPrinter
-                            .print("Time " + timeStep + ": Task " + tc + " could not be released!");
-                    return false;
-                }
-                EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!");
-            }
-        }
+        // for (SynchronousTask t : systemSetup.getTasks()) {
+        //     if (timeStep % t.getPeriod() == 0) {
+        //         TaskContext tc = new TaskContext(t, systemSetup.getNumberOfProcessors(), timeStep);
+        //         if (!readyTasks.add(tc)) {
+        //             EventPrinter
+        //                     .print("Time " + timeStep + ": Task " + tc + " could not be released!");
+        //             return false;
+        //         }
+        //         EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!");
+        //     }
+        // }
         return true;
     }
src/main/java/mvd/jester/simulator/ParallelSynchronous.java

 package mvd.jester.simulator;
 
-import mvd.jester.model.SystemSetup;
-import mvd.jester.model.SynchronousTask;
-import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;
+import mvd.jester.model.SystemManager;
 
 /**
  * MaiaBertogna
 */
 public class ParallelSynchronous extends AbstractSimulator {
 
-    public ParallelSynchronous(SystemSetup<SynchronousTask> systemSetup) {
+    public ParallelSynchronous(SystemManager systemSetup) {
         super(systemSetup);
     }
 
     @Override
     protected boolean releaseTasks(long timeStep) {
-        for (SynchronousTask t : systemSetup.getTasks()) {
-            if (timeStep % t.getPeriod() == 0) {
-                TaskContext tc = new TaskContext(t, timeStep);
-                if (!readyTasks.add(tc)) {
-                    EventPrinter
-                            .print("Time " + timeStep + ": Task " + tc + " could not be released!");
-                    return false;
-                }
-                EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!");
-            }
-        }
+        // for (SynchronousTask t : systemSetup.getTasks()) {
+        //     if (timeStep % t.getPeriod() == 0) {
+        //         TaskContext tc = new TaskContext(t, timeStep);
+        //         if (!readyTasks.add(tc)) {
+        //             EventPrinter
+        //                     .print("Time " + timeStep + ": Task " + tc + " could not be released!");
+        //             return false;
+        //         }
+        //         EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!");
+        //     }
+        // }
         return true;
     }
src/main/java/mvd/jester/tests/AbstractTest.java

 package mvd.jester.tests;
 
 import mvd.jester.TypeInterface;
+import mvd.jester.model.SystemManager;
 import mvd.jester.model.Task;

@@ -8,10 +9,10 @@ import mvd.jester.model.Task;
  */
 public abstract class AbstractTest<T extends Task> implements TestInterface<T>, TypeInterface {
 
-    protected final long numberOfProcessors;
+    protected final SystemManager manager;
 
-    public AbstractTest(long numberOfProcessors) {
-        this.numberOfProcessors = numberOfProcessors;
+    public AbstractTest(final SystemManager manager) {
+        this.manager = manager;
     }
 }
src/main/java/mvd/jester/tests/ChwaLee.java

@@ -13,6 +13,7 @@ import mvd.jester.info.TerminationInfo;
 import mvd.jester.model.Segment;
 import mvd.jester.model.SortedTaskSet;
 import mvd.jester.model.SynchronousTask;
+import mvd.jester.model.SystemManager;
 import mvd.jester.priority.EarliestDeadlineFirst;
 import mvd.jester.priority.PriorityManager;

@@ -24,8 +25,8 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
     private final Map<SynchronousTask, TerminationInfo> responseTimes;
     private final PriorityManager priorityManager;
 
-    public ChwaLee(final long numberOfProcessors) {
-        super(numberOfProcessors);
+    public ChwaLee(final SystemManager manager) {
+        super(manager);
         this.responseTimes = new HashMap<>();
         this.priorityManager = new EarliestDeadlineFirst();
     }

@@ -72,8 +73,8 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
                     Math.min(getSelfInterference(task, deadline, p + 1), deadline - minimumWcet);
         }
 
-        final boolean feasible = taskInterference + selfInterference <= numberOfProcessors
-                * (deadline - minimumWcet);
+        final boolean feasible = taskInterference
+                + selfInterference <= manager.getNumberOfProcessors() * (deadline - minimumWcet);
 
         return feasible ? deadline - 1 : deadline + 1;
     }
src/main/java/mvd/jester/tests/FonsecaNelis.java

@@ -21,6 +21,7 @@ import mvd.jester.model.DagTask;
 import mvd.jester.model.Job;
 import mvd.jester.model.Segment;
 import mvd.jester.model.SortedTaskSet;
+import mvd.jester.model.SystemManager;
 import mvd.jester.model.Task;
 import mvd.jester.model.TreeJob;
 import mvd.jester.utils.DagUtils;

@@ -36,8 +37,8 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
     private final PriorityManager priorityManager;
     private final Map<Task, Set<Segment>> carryOutSegments;
 
-    public FonsecaNelis(final long numberOfProcessors) {
-        super(numberOfProcessors);
+    public FonsecaNelis(final SystemManager manager) {
+        super(manager);
         this.responseTimes = new HashMap<>();
         this.priorityManager = new RateMonotonic();
         this.carryOutSegments = new HashMap<>();

@@ -76,6 +77,7 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
     private Set<Segment> constructCarryOutDistribution(
             final DirectedAcyclicGraph<Job, DefaultEdge> nfjDag,
             final BinaryDecompositionTree<Job> tree) {
+        // List statt Set
         final Set<Segment> carryOutWorkload = new LinkedHashSet<>();
         final BinaryDecompositionTree<TreeJob> modifiedTree = transformTree(tree);

@@ -156,7 +158,7 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
             }
         }
-        taskInterference /= numberOfProcessors;
+        taskInterference /= manager.getNumberOfProcessors();
 
         final double selfInterference = getSelfInterference(task);

@@ -242,7 +244,8 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
         final long improvedWorkloadFromTask =
                 taskWorkload - Math.max(0, criticalPath - carryOutPeriod);
-        final long improvedWorkloadFromProcessors = carryOutPeriod * numberOfProcessors;
+        final long improvedWorkloadFromProcessors =
+                carryOutPeriod * manager.getNumberOfProcessors();
 
         return Math.min(Math.min(improvedWorkloadFromTask, improvedWorkloadFromProcessors),
                 workload);

@@ -266,8 +269,8 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
             workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs();
         }
 
         final long improvedWorkload =
                 Math.max(carryInPeriod - (period - responseTime), 0)
-                        * numberOfProcessors;
+                        * manager.getNumberOfProcessors();
 
         return Math.min(improvedWorkload, workload);

@@ -276,7 +279,7 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
         final long criticalPath = task.getCriticalPath();
         final long workload = task.getWorkload();
 
-        return (double) (workload - criticalPath) / numberOfProcessors;
+        return (double) (workload - criticalPath) / manager.getNumberOfProcessors();
     }
 
     @Override
src/main/java/mvd/jester/tests/MaiaBertogna.java

@@ -10,6 +10,7 @@ import mvd.jester.info.TerminationInfo;
 import mvd.jester.model.Segment;
 import mvd.jester.model.SortedTaskSet;
 import mvd.jester.model.SynchronousTask;
+import mvd.jester.model.SystemManager;
 import mvd.jester.priority.PriorityManager;
 import mvd.jester.priority.RateMonotonic;

@@ -21,8 +22,8 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
     private final Map<SynchronousTask, TerminationInfo> responseTimes;
     private final PriorityManager priorityManager;
 
-    public MaiaBertogna(final long numberOfProcessors) {
-        super(numberOfProcessors);
+    public MaiaBertogna(final SystemManager manager) {
+        super(manager);
         this.responseTimes = new HashMap<>();
         this.priorityManager = new RateMonotonic();
     }

@@ -77,7 +78,7 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
                 }
 
                 final long totalInterference = LongMath.divide(taskInterference + selfInterference,
-                        numberOfProcessors, RoundingMode.FLOOR);
+                        manager.getNumberOfProcessors(), RoundingMode.FLOOR);
 
                 responseTime = minimumWcet + totalInterference;
             } while (previousResponseTime != responseTime);
src/main/java/mvd/jester/tests/MelaniButtazzo.java

@@ -9,6 +9,7 @@ import mvd.jester.info.SchedulingInfo;
 import mvd.jester.info.TerminationInfo;
 import mvd.jester.model.DagTask;
 import mvd.jester.model.SortedTaskSet;
+import mvd.jester.model.SystemManager;
 import mvd.jester.model.Task;
 import mvd.jester.priority.PriorityManager;
 import mvd.jester.priority.RateMonotonic;

@@ -18,8 +19,8 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
     private final Map<Task, TerminationInfo> responseTimes;
     private final PriorityManager priorityManager;
 
-    public MelaniButtazzo(final long numberOfProcessors) {
-        super(numberOfProcessors);
+    public MelaniButtazzo(final SystemManager manager) {
+        super(manager);
         this.responseTimes = new HashMap<>();
         this.priorityManager = new RateMonotonic();
     }

@@ -46,8 +47,8 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
     private long calculateResponseTime(final Set<DagTask> tasks, final DagTask task) {
-        final long minimumWcet = task.getCriticalPath();
-        long responseTime = minimumWcet;
+        final long criticalPath = task.getCriticalPath();
+        long responseTime = criticalPath;
         long previousResponseTime = 0;
 
         do {

@@ -60,12 +61,12 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
                 }
             }
 
-            taskInterference /= numberOfProcessors;
+            taskInterference /= manager.getNumberOfProcessors();
             final double selfInterference = getSelfInterference(task);
 
+            // TODO: Einzeln abrunden oder self interference als long abrunden
             final long totalInterference = (long) Math.floor(taskInterference + selfInterference);
 
-            responseTime = minimumWcet + totalInterference;
+            responseTime = criticalPath + totalInterference;
         } while (previousResponseTime != responseTime);
 
         return responseTime;

@@ -75,7 +76,7 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
         final long criticalPath = task.getCriticalPath();
         final long workload = task.getWorkload();
 
-        return (double) (workload - criticalPath) / numberOfProcessors;
+        return (double) (workload - criticalPath) / manager.getNumberOfProcessors();
     }

@@ -85,13 +86,13 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
         final long singleWorkload = task.getWorkload();
         final long period = task.getPeriod();
 
         final double nominator =
                 (interval + responseTime
-                        - (double) singleWorkload / numberOfProcessors);
+                        - (double) singleWorkload / manager.getNumberOfProcessors());
         final long amountOfJobs =
                 DoubleMath.roundToLong(nominator / period, RoundingMode.FLOOR);
 
         final double carryOutPortion =
                 Math.min(singleWorkload,
-                        numberOfProcessors * (nominator % period));
+                        manager.getNumberOfProcessors() * (nominator % period));
 
         final double interference = amountOfJobs * singleWorkload + carryOutPortion;
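The renamed criticalPath variable keeps the usual fixed-point structure of this test: the response time starts at the critical path and is recomputed as the critical path plus the floored sum of inter-task interference (divided by the processor count) and self interference, until two consecutive iterations agree. (The German TODO above translates to "round down individually, or round the self interference down as a long".) A stripped-down standalone sketch of that iteration pattern — the interference function below is a toy stand-in, not the one computed by MelaniButtazzo:

import java.util.function.LongUnaryOperator;

public class ResponseTimeIterationSketch {
    /**
     * Generic response-time fixed point: R(0) = criticalPath,
     * R(k+1) = criticalPath + interference(R(k)), stop once R stops changing.
     * Assumes the interference function eventually stops growing.
     */
    static long iterate(long criticalPath, LongUnaryOperator interference) {
        long responseTime = criticalPath;
        long previous;
        do {
            previous = responseTime;
            responseTime = criticalPath + interference.applyAsLong(previous);
        } while (previous != responseTime);
        return responseTime;
    }

    public static void main(String[] args) {
        // Toy interference that grows with the analysis window and then saturates.
        long r = iterate(10, window -> Math.min(window / 2, 20));
        System.out.println("response time = " + r); // converges to 19 for this toy input
    }
}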
src/main/java/mvd/jester/tests/SchmidMottok.java

@@ -10,6 +10,7 @@ import mvd.jester.info.TerminationInfo;
 import mvd.jester.model.DagTask;
 import mvd.jester.model.Segment;
 import mvd.jester.model.SortedTaskSet;
+import mvd.jester.model.SystemManager;
 import mvd.jester.model.Task;
 import mvd.jester.priority.PriorityManager;
 import mvd.jester.priority.RateMonotonic;

@@ -22,8 +23,8 @@ public class SchmidMottok extends AbstractTest<DagTask> {
     private final Map<Task, TerminationInfo> responseTimes;
     private final PriorityManager priorityManager;
 
-    public SchmidMottok(final long numberOfProcessors) {
-        super(numberOfProcessors);
+    public SchmidMottok(final SystemManager manager) {
+        super(manager);
         this.responseTimes = new HashMap<>();
         this.priorityManager = new RateMonotonic();
     }

@@ -68,7 +69,7 @@ public class SchmidMottok extends AbstractTest<DagTask> {
                 }
             }
 
-            taskInterference /= numberOfProcessors;
+            taskInterference /= manager.getNumberOfProcessors();
             final double selfInterference = getSelfInterference(task);
 
             final long totalInterference = (long) Math.floor(taskInterference + selfInterference);

@@ -81,13 +82,11 @@ public class SchmidMottok extends AbstractTest<DagTask> {
     private double getSelfInterference(final DagTask task) {
-        double interference = 0;
-        // final long numberOfThreads = task.getNumberOfThreads();
-        final long numberOfThreads = task.getNumberOfThreads();
-        for (final Segment s : task.getWorkloadDistribution()) {
-            interference += (double) (s.getNumberOfJobs() - 1) * s.getJobWcet();
-        }
+        // TODO: Change back to number of threads
+        final long numberOfThreads = manager.getNumberOfProcessors();
+        double interference = task.getWorkload() - task.getCriticalPath();
 
         interference /= numberOfThreads;
 
         return interference;
src/test/java/mvd/jester/model/TestDagUtils.java

@@ -8,7 +8,7 @@ import org.jgrapht.graph.DefaultEdge;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 import mvd.jester.utils.DagUtils;
-import mvd.jester.model.SystemSetup.DagTaskBuilder;
+import mvd.jester.model.SystemManager.DagTaskBuilder;
 import mvd.jester.utils.BinaryDecompositionTree;
 
 public class TestDagUtils {
src/test/java/mvd/jester/model/TestSystemSetup.java

 package mvd.jester.model;
 
 import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.mockito.Mockito.mock;
 import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
 import com.google.common.math.DoubleMath;

@@ -9,7 +8,7 @@ import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
 import org.jgrapht.graph.DefaultEdge;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
-import mvd.jester.model.SystemSetup.DagTaskBuilder;
+import mvd.jester.model.SystemManager.DagTaskBuilder;
 
 public class TestSystemSetup {

@@ -21,8 +20,8 @@ public class TestSystemSetup {
     public void testRandomTaskSetGeneration() {
         for (int i = 0; i < NUMBER_OF_RUNS; ++i) {
             long numberOfProcessors = ThreadLocalRandom.current().nextLong(2, 8);
-            SystemSetup.SynchronousTaskBuilder systemSetupBuilder =
-                    new SystemSetup.SynchronousTaskBuilder() //
+            SystemManager.SynchronousTaskBuilder systemSetupBuilder =
+                    new SystemManager.SynchronousTaskBuilder() //
                             .setNumberOfSegments(1, 7) //
                             .setNumberOfJobs(2, 10) //
                             .setPeriods(100, 1000, 1000) //

@@ -58,16 +57,16 @@ public class TestSystemSetup {
     @Test
     @DisplayName("Check Getters and Setters.")
     void testGettersAndSetters() {
-        @SuppressWarnings("unchecked")
-        Set<SynchronousTask> t1 = mock(Set.class);
-        @SuppressWarnings("unchecked")
-        Set<SynchronousTask> t2 = mock(Set.class);
+        // @SuppressWarnings("unchecked")
+        // Set<SynchronousTask> t1 = mock(Set.class);
+        // @SuppressWarnings("unchecked")
+        // Set<SynchronousTask> t2 = mock(Set.class);
 
-        SystemSetup<SynchronousTask> systemSetup = new SystemSetup<>(t1, 2);
-        systemSetup.setTasks(t2);
-        assertTrue(systemSetup.getTasks() == t2);
+        // SystemManager<SynchronousTask> systemSetup = new SystemManager<>(t1, 2);
+        // systemSetup.setTasks(t2);
+        // assertTrue(systemSetup.getTasks() == t2);
     }
 
     @Test

@@ -101,7 +100,7 @@ public class TestSystemSetup {
     @Test
     @DisplayName("Check if utilization works correctly.")
     void testUtil() {
-        for (int i = 0; i < 5; ++i) {
+        for (int i = 0; i < 10; ++i) {
             for (double d = 0.25; d < 4; d += 0.25) {
                 DagTaskBuilder builder = new DagTaskBuilder();
                 Set<DagTask> taskSet = builder.generateTaskSet(d);

@@ -110,18 +109,28 @@ public class TestSystemSetup {
                     taskSetUtil += t.getUtilization();
                 }
-                assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, d, 0.002));
+                assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, d, 0.1));
             }
         }
     }
 
-    // @Test
-    // @DisplayName("Check if parser works correclty.")
-    // void testParser() throws IOException {
-    //     SystemSetup systemSetup = new SystemSetup.Builder().setNumberOfSegments(1, 7)
-    //             .setNumberOfJobs(2, 10).setPeriods(100, 1000, 1000).build();
-    //     systemSetup.writeToFile(null);
-    //     systemSetup.writeToFile(tf.getRoot().getAbsolutePath());
-    // }
+    @Test
+    @DisplayName("Check if UUnifast Algorithm works correctly.")
+    void testUUnifast() {
+        for (int i = 1; i <= 100; ++i) {
+            long numberOfTasks = ThreadLocalRandom.current().nextLong(1, 20);
+            double utilization = ThreadLocalRandom.current().nextDouble(0.25, 8);
+            DagTaskBuilder builder = new DagTaskBuilder();
+            Set<DagTask> taskSet = builder.generateUUnifastTaskSet(numberOfTasks, utilization);
+
+            double taskSetUtil = 0;
+            for (DagTask t : taskSet) {
+                taskSetUtil += t.getUtilization();
+            }
+            assertTrue(taskSet.size() == numberOfTasks);
+            assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, utilization, 0.1));
+        }
+    }
 }
src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java

@@ -7,6 +7,7 @@ import static org.mockito.Mockito.when;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 import mvd.jester.model.SynchronousTask;
+import mvd.jester.model.SystemManager;
 import mvd.jester.simulator.DynamicForkJoin;
 import mvd.jester.simulator.ParallelSynchronous;
 import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;

@@ -47,15 +48,16 @@ public class TestEarliestDeadlineFirst {
     @DisplayName("Check Getters, Tests and Simulators.")
     void testGettersTestsAndSimulators() {
         EarliestDeadlineFirst edf = new EarliestDeadlineFirst();
+        SystemManager manager = new SystemManager(4);
         assertTrue(edf.hasTest(ChwaLee.class));
         assertFalse(edf.hasTest(MaiaBertogna.class));
         assertFalse(edf.hasTest(SchmidMottok.class));
         assertTrue(edf.hasSimulator(ParallelSynchronous.class));
         assertTrue(edf.hasSimulator(DynamicForkJoin.class));
-        assertTrue(edf.hasTest(new ChwaLee(4)));
-        assertFalse(edf.hasTest(new SchmidMottok(4)));
-        assertFalse(edf.hasTest(new MaiaBertogna(4)));
+        assertTrue(edf.hasTest(new ChwaLee(manager)));
+        assertFalse(edf.hasTest(new SchmidMottok(manager)));
+        assertFalse(edf.hasTest(new MaiaBertogna(manager)));
         // assertTrue(edf.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
         // assertTrue(edf.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));
src/test/java/mvd/jester/priority/TestRateMonotonic.java

@@ -7,6 +7,7 @@ import static org.mockito.Mockito.when;
 import org.junit.jupiter.api.DisplayName;
 import org.junit.jupiter.api.Test;
 import mvd.jester.model.SynchronousTask;
+import mvd.jester.model.SystemManager;
 import mvd.jester.simulator.DynamicForkJoin;
 import mvd.jester.simulator.ParallelSynchronous;
 import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;

@@ -46,15 +47,16 @@ public class TestRateMonotonic {
     @DisplayName("Check Tests and Simulators.")
     void testTestsAndSimulators() {
         RateMonotonic rm = new RateMonotonic();
+        SystemManager manager = new SystemManager(8);
         assertFalse(rm.hasTest(ChwaLee.class));
         assertTrue(rm.hasTest(MaiaBertogna.class));
         assertTrue(rm.hasTest(SchmidMottok.class));
         assertTrue(rm.hasSimulator(ParallelSynchronous.class));
         assertTrue(rm.hasSimulator(DynamicForkJoin.class));
-        assertFalse(rm.hasTest(new ChwaLee(8)));
-        assertTrue(rm.hasTest(new SchmidMottok(8)));
-        assertTrue(rm.hasTest(new MaiaBertogna(8)));
+        assertFalse(rm.hasTest(new ChwaLee(manager)));
+        assertTrue(rm.hasTest(new SchmidMottok(manager)));
+        assertTrue(rm.hasTest(new MaiaBertogna(manager)));
         // assertTrue(rm.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
         // assertTrue(rm.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));