Skip to content
Toggle navigation
P
Projects
G
Groups
S
Snippets
Help
las3_pub
/
jester
This project
Loading...
Sign in
Toggle navigation
Go to a project
Project
Repository
Issues
0
Merge Requests
0
Pipelines
Wiki
Members
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Commit
bc29a6dc
authored
Jun 02, 2020
by
Michael Schmid
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
creating nfj dags now works
parent
f0a948aa
Hide whitespace changes
Inline
Side-by-side
Showing
10 changed files
with
503 additions
and
213 deletions
+503
-213
src/main/java/mvd/jester/App.java
+8
-5
src/main/java/mvd/jester/ResultCollector.java
+6
-0
src/main/java/mvd/jester/ResultLogger.java
+102
-73
src/main/java/mvd/jester/TestEnvironment.java
+52
-40
src/main/java/mvd/jester/model/Job.java
+9
-0
src/main/java/mvd/jester/model/SystemSetup.java
+15
-4
src/main/java/mvd/jester/tests/FonsecaNelis.java
+70
-0
src/main/java/mvd/jester/utils/DagUtils.java
+173
-84
src/test/java/mvd/jester/model/TestDagUtils.java
+66
-5
src/test/java/mvd/jester/model/TestSystemSetup.java
+2
-2
No files found.
src/main/java/mvd/jester/App.java
View file @
bc29a6dc
package
mvd
.
jester
;
import
java.util.Arrays
;
import
java.util.
Se
t
;
import
java.util.
Lis
t
;
import
mvd.jester.model.DagTask
;
import
mvd.jester.model.SystemSetup
;
import
mvd.jester.tests.AbstractTest
;
import
mvd.jester.tests.FonsecaNelis
;
import
mvd.jester.tests.MelaniButtazzo
;
import
mvd.jester.tests.SchmidMottok
;
...
...
@@ -17,13 +18,15 @@ public class App {
public
static
void
main
(
String
[]
args
)
{
for
(
int
p
=
8
;
p
<=
8
;
p
*=
2
)
{
SystemSetup
.
DagTaskBuilder
builder
=
new
SystemSetup
.
DagTaskBuilder
().
setNumberOfProcessors
(
p
);
new
SystemSetup
.
DagTaskBuilder
(
p
).
setNumberOfProcessors
(
p
);
TestEnvironment
te
=
new
TestEnvironment
();
Set
<
ResultCollector
<
AbstractTest
<
DagTask
>>>
tests
=
te
.
registerTests
(
Arrays
.
asList
(
new
SchmidMottok
(
p
),
/* new MelaniButtazzo(p), */
new
FonsecaNelis
(
p
)));
List
<
AbstractTest
<
DagTask
>>
tests
=
te
.
registerTests
(
Arrays
.
asList
(
/*
* new SchmidMottok(p), new MelaniButtazzo(p) ,
*/
new
FonsecaNelis
(
p
)));
te
.
runExperiments
(
builder
,
tests
,
p
,
5
00
);
te
.
runExperiments
(
builder
,
tests
,
p
,
1
00
);
}
}
}
src/main/java/mvd/jester/ResultCollector.java
View file @
bc29a6dc
...
...
@@ -20,6 +20,12 @@ public class ResultCollector<T extends TypeInterface>
this
.
schedulingInfos
=
new
HashSet
<>();
}
public
ResultCollector
(
ResultCollector
<
T
>
resultCollector
)
{
this
.
abstractValue
=
resultCollector
.
abstractValue
;
this
.
priorityManager
=
resultCollector
.
priorityManager
;
this
.
schedulingInfos
=
new
HashSet
<>();
}
/**
* @return the priorityManager
*/
...
...
src/main/java/mvd/jester/ResultLogger.java
View file @
bc29a6dc
package
mvd
.
jester
;
import
java.io.IOException
;
import
java.time.LocalTime
;
import
java.util.Map
;
import
java.util.Set
;
import
java.util.TreeSet
;
import
java.util.stream.Collectors
;
import
com.google.common.collect.ImmutableList
;
import
com.google.common.collect.ObjectArrays
;
import
com.google.common.collect.Table
;
import
com.google.common.collect.TreeBasedTable
;
import
com.google.common.math.DoubleMath
;
import
org.apache.commons.csv.CSVFormat
;
import
org.apache.commons.csv.CSVPrinter
;
import
java.util.Map.Entry
;
import
mvd.jester.info.SchedulingInfo.Feasiblity
;
import
mvd.jester.model.Task
;
import
mvd.jester.simulator.AbstractSimulator
;
import
mvd.jester.tests.AbstractTest
;
import
mvd.jester.utils.Logger
;
...
...
@@ -22,34 +13,68 @@ import mvd.jester.utils.Logger;
*/
public
class
ResultLogger
{
private
final
long
numberOfProcessors
;
private
final
Logger
logger
;
private
boolean
headerLogged
=
false
;
public
ResultLogger
(
long
numberOfProcessors
)
{
this
.
numberOfProcessors
=
numberOfProcessors
;
public
ResultLogger
(
final
long
numberOfProcessors
)
{
this
.
logger
=
new
Logger
(
"./results/feasibility_"
+
numberOfProcessors
+
".txt"
)
;
}
public
<
T
extends
Task
>
void
logAll
(
Set
<
ResultCollector
<
AbstractTest
<
T
>>>
testResults
,
Set
<
ResultCollector
<
AbstractSimulator
>>
simResults
)
{
logTests
(
testResults
);
logSimulations
(
simResults
);
}
public
<
T
extends
Task
>
void
logTests
(
Set
<
ResultCollector
<
AbstractTest
<
T
>>>
results
)
{
if
(!
results
.
isEmpty
())
{
logFeasibility
(
results
,
"test"
);
public
<
T
extends
Task
>
void
logLine
(
final
double
utilization
,
final
Map
<
AbstractTest
<
T
>,
Long
>
results
)
{
final
Appendable
out
=
new
StringBuilder
();
try
{
if
(!
headerLogged
)
{
headerLogged
=
true
;
out
.
append
(
"Utilization"
);
for
(
final
Entry
<
AbstractTest
<
T
>,
Long
>
rc
:
results
.
entrySet
())
{
out
.
append
(
"\t"
+
rc
.
getKey
().
getName
());
}
out
.
append
(
"\n"
);
}
out
.
append
(
""
+
utilization
);
for
(
final
Entry
<
AbstractTest
<
T
>,
Long
>
rc
:
results
.
entrySet
())
{
final
long
numberOfFeasibleTasks
=
rc
.
getValue
();
out
.
append
(
"\t"
+
numberOfFeasibleTasks
);
}
}
catch
(
final
Exception
e
)
{
throw
new
RuntimeException
(
"Failed to log line!"
);
}
logger
.
log
(
out
);
}
public
void
logSimulations
(
Set
<
ResultCollector
<
AbstractSimulator
>>
results
)
{
if
(!
results
.
isEmpty
())
{
logFeasibility
(
results
,
"sim"
);
logTaskRatio
(
results
,
"sim"
);
public
void
newLine
()
{
final
Appendable
out
=
new
StringBuilder
();
try
{
out
.
append
(
"\n"
);
}
catch
(
final
Exception
e
)
{
throw
new
RuntimeException
(
"Failed to log line!"
);
}
logger
.
log
(
out
);
}
// public <T extends Task> void logAll(Set<ResultCollector<AbstractTest<T>>> testResults,
// Set<ResultCollector<AbstractSimulator>> simResults) {
// logTests(testResults);
// logSimulations(simResults);
// }
// public <T extends Task> void logTests(Set<ResultCollector<AbstractTest<T>>> results) {
// if (!results.isEmpty()) {
// logFeasibility(results, "test");
// }
// }
// public void logSimulations(Set<ResultCollector<AbstractSimulator>> results) {
// if (!results.isEmpty()) {
// logFeasibility(results, "sim");
// logTaskRatio(results, "sim");
// }
// }
public
<
T
extends
TypeInterface
>
void
logFeasibilityLevel
(
Set
<
ResultCollector
<
T
>>
results
,
String
type
)
{
public
<
T
extends
TypeInterface
>
void
logFeasibilityLevel
(
final
Set
<
ResultCollector
<
T
>>
results
,
final
String
type
)
{
// LocalTime date = LocalTime.now();
// Logger log = new Logger("./results/feasibility_level_" + type + "_" + numberOfProcessors
// + "_" + date.getHour() + ":" + date.getMinute() + ".txt");
...
...
@@ -82,31 +107,31 @@ public class ResultLogger {
public
<
T
extends
TypeInterface
>
void
logFeasibility
(
Set
<
ResultCollector
<
T
>>
results
,
String
type
)
{
LocalTime
date
=
LocalTime
.
now
();
Logger
log
=
new
Logger
(
"./results/feasibility_"
+
type
+
"_"
+
numberOfProcessors
+
"_"
+
date
.
getHour
()
+
":"
+
date
.
getMinute
()
+
".txt"
);
//
public <T extends TypeInterface> void logFeasibility(Set<ResultCollector<T>> results,
//
String type) {
//
LocalTime date = LocalTime.now();
//
Logger log = new Logger("./results/feasibility_" + type + "_" + numberOfProcessors + "_"
//
+ date.getHour() + ":" + date.getMinute() + ".txt");
Table
<
Double
,
ResultCollector
<
T
>,
Long
>
resultTable
=
TreeBasedTable
.
create
();
Set
<
ResultCollector
<
T
>>
resultCollectors
=
new
TreeSet
<>();
//
Table<Double, ResultCollector<T>, Long> resultTable = TreeBasedTable.create();
//
Set<ResultCollector<T>> resultCollectors = new TreeSet<>();
for
(
double
util
=
0.25
;
util
<=
numberOfProcessors
;
util
+=
0.25
)
{
for
(
ResultCollector
<
T
>
rc
:
results
)
{
resultCollectors
.
add
(
rc
);
final
double
local_util
=
util
;
long
feasibleTasksets
=
rc
.
getResults
().
stream
()
.
filter
(
r
->
DoubleMath
.
fuzzyEquals
(
r
.
getUtilization
(),
local_util
,
0.125
))
.
filter
(
r
->
r
.
checkTasksetFeasible
()).
count
();
resultTable
.
put
(
util
,
rc
,
feasibleTasksets
);
}
}
//
for (double util = 0.25; util <= numberOfProcessors; util += 0.25) {
//
for (ResultCollector<T> rc : results) {
//
resultCollectors.add(rc);
//
final double local_util = util;
//
long feasibleTasksets = rc.getResults().stream()
//
.filter(r -> DoubleMath.fuzzyEquals(r.getUtilization(), local_util, 0.125))
//
.filter(r -> r.checkTasksetFeasible()).count();
//
resultTable.put(util, rc, feasibleTasksets);
//
}
//
}
logData
(
log
,
resultTable
,
resultCollectors
,
"Utilization"
);
}
//
logData(log, resultTable, resultCollectors, "Utilization");
//
}
public
<
T
extends
TypeInterface
>
void
logFeasibilityRatio
(
Set
<
ResultCollector
<
T
>>
results
,
String
type
)
{
public
<
T
extends
TypeInterface
>
void
logFeasibilityRatio
(
final
Set
<
ResultCollector
<
T
>>
results
,
final
String
type
)
{
// LocalTime date = LocalTime.now();
// Logger log = new Logger("./results/feasibility_ratio_" + type + "_" + numberOfProcessors
// + "_" + date.getHour() + ":" + date.getMinute() + ".txt");
...
...
@@ -135,8 +160,8 @@ public class ResultLogger {
// logData(log, resultTable, resultCollectors, "Utilization");
}
public
<
T
extends
TypeInterface
>
void
logTaskRatio
(
Set
<
ResultCollector
<
T
>>
results
,
String
type
)
{
public
<
T
extends
TypeInterface
>
void
logTaskRatio
(
final
Set
<
ResultCollector
<
T
>>
results
,
final
String
type
)
{
// LocalTime date = LocalTime.now();
// Logger log = new Logger("./results/task_ratio_" + type + "_" + numberOfProcessors + "_"
// + date.getHour() + ":" + date.getMinute() + ".txt");
...
...
@@ -158,25 +183,29 @@ public class ResultLogger {
// logData(log, resultTable, resultCollectors, "TaskRatio");
}
private
<
T
extends
TypeInterface
>
void
logData
(
Logger
log
,
Table
<
Double
,
ResultCollector
<
T
>,
?
extends
Number
>
resultTable
,
Set
<
ResultCollector
<
T
>>
resultCollectors
,
String
xDataName
)
{
final
Appendable
out
=
new
StringBuilder
();
try
{
String
[]
resultCollectorNames
=
resultCollectors
.
stream
()
.
map
(
ResultCollector
<
T
>::
getName
).
toArray
(
String
[]::
new
);
String
[]
header
=
ObjectArrays
.
concat
(
xDataName
,
resultCollectorNames
);
final
CSVPrinter
printer
=
CSVFormat
.
DEFAULT
.
withHeader
(
header
).
print
(
out
);
printer
.
printRecords
(
resultTable
.
rowMap
().
entrySet
().
stream
()
.
map
(
entry
->
ImmutableList
.
builder
().
add
((
double
)
entry
.
getKey
())
.
addAll
(
entry
.
getValue
().
values
()).
build
())
.
collect
(
Collectors
.
toList
()));
}
catch
(
final
IOException
e
)
{
e
.
printStackTrace
();
}
log
.
log
(
out
);
log
.
finalize
();
public
void
finalize
()
{
logger
.
finalize
();
}
// private <T extends TypeInterface> void logData(Logger log,
// Table<Double, ResultCollector<T>, ? extends Number> resultTable,
// Set<ResultCollector<T>> resultCollectors, String xDataName) {
// final Appendable out = new StringBuilder();
// try {
// String[] resultCollectorNames = resultCollectors.stream()
// .map(ResultCollector<T>::getName).toArray(String[]::new);
// String[] header = ObjectArrays.concat(xDataName, resultCollectorNames);
// final CSVPrinter printer = CSVFormat.DEFAULT.withHeader(header).print(out);
// printer.printRecords(resultTable.rowMap().entrySet().stream()
// .map(entry -> ImmutableList.builder().add((double) entry.getKey())
// .addAll(entry.getValue().values()).build())
// .collect(Collectors.toList()));
// } catch (final IOException e) {
// e.printStackTrace();
// }
// log.log(out);
// log.finalize();
// }
}
src/main/java/mvd/jester/TestEnvironment.java
View file @
bc29a6dc
package
mvd
.
jester
;
import
java.util.ArrayList
;
import
java.util.HashMap
;
import
java.util.HashSet
;
import
java.util.LinkedHashMap
;
import
java.util.List
;
import
java.util.Map
;
import
java.util.Set
;
import
mvd.jester.info.SchedulingInfo
;
import
mvd.jester.info.SchedulingInfo.Feasiblity
;
import
mvd.jester.model.DagTask
;
import
mvd.jester.model.SortedTaskSet
;
import
mvd.jester.model.SynchronousTask
;
...
...
@@ -22,26 +27,24 @@ public class TestEnvironment {
public
TestEnvironment
()
{
}
public
<
T
extends
Task
>
Set
<
ResultCollector
<
AbstractTest
<
T
>>>
registerTests
(
List
<
AbstractTest
<
T
>>
tests
)
{
Set
<
ResultCollector
<
AbstractTest
<
T
>>>
testSet
=
new
HashSet
<>();
for
(
AbstractTest
<
T
>
t
:
tests
)
{
testSet
.
add
(
new
ResultCollector
<
AbstractTest
<
T
>>(
t
.
getPriorityManager
(),
t
));
}
return
testSet
;
public
<
T
extends
Task
>
List
<
AbstractTest
<
T
>>
registerTests
(
final
List
<
AbstractTest
<
T
>>
tests
)
{
// Set<ResultCollector<AbstractTest<T>>> testSet = new HashSet<>();
// for (AbstractTest<T> t : tests) {
// testSet.add(new ResultCollector<AbstractTest<T>>(t.getPriorityManager(), t));
// }
return
new
ArrayList
<>(
tests
);
}
public
void
runExperiments
(
SynchronousTaskBuilder
builder
,
Set
<
ResultCollector
<
AbstractTest
<
SynchronousTask
>>>
abstractTestInstances
,
long
numberOfProcessors
,
long
numberOfTaskSets
)
{
public
void
runExperiments
(
final
SynchronousTaskBuilder
builder
,
final
Set
<
ResultCollector
<
AbstractTest
<
SynchronousTask
>>>
abstractTestInstances
,
final
long
numberOfProcessors
,
final
long
numberOfTaskSets
)
{
long
checkedTasksets
=
0
;
System
.
out
.
print
(
Math
.
round
((
double
)
checkedTasksets
/
numberOfTaskSets
*
100
)
+
"% of "
+
numberOfTaskSets
+
" tasksets tested!\r"
);
while
(
checkedTasksets
<
numberOfTaskSets
)
{
Set
<
SynchronousTask
>
tasks
=
builder
.
generateTaskSet
();
final
Set
<
SynchronousTask
>
tasks
=
builder
.
generateTaskSet
();
double
utilization
=
tasks
.
stream
().
mapToDouble
(
SynchronousTask:
:
getUtilization
).
sum
();
...
...
@@ -53,14 +56,15 @@ public class TestEnvironment {
+
"% of "
+
numberOfTaskSets
+
" tasksets tested!\r"
);
}
for
(
ResultCollector
<
AbstractTest
<
SynchronousTask
>>
testInstance
:
abstractTestInstances
)
{
PriorityManager
priorityManager
=
testInstance
.
getPriorityManager
();
AbstractTest
<
SynchronousTask
>
abstractTest
=
testInstance
.
getAbstractValue
();
for
(
final
ResultCollector
<
AbstractTest
<
SynchronousTask
>>
testInstance
:
abstractTestInstances
)
{
final
PriorityManager
priorityManager
=
testInstance
.
getPriorityManager
();
final
AbstractTest
<
SynchronousTask
>
abstractTest
=
testInstance
.
getAbstractValue
();
SortedTaskSet
<
SynchronousTask
>
sortedTaskSet
=
final
SortedTaskSet
<
SynchronousTask
>
sortedTaskSet
=
new
SortedTaskSet
<>(
priorityManager
);
sortedTaskSet
.
addAll
(
tasks
);
SchedulingInfo
schedulingInfo
=
final
SchedulingInfo
schedulingInfo
=
abstractTest
.
runSchedulabilityCheck
(
sortedTaskSet
);
testInstance
.
addResult
(
schedulingInfo
);
}
...
...
@@ -71,47 +75,55 @@ public class TestEnvironment {
}
}
System
.
out
.
println
(
""
);
ResultLogger
resultLogger
=
new
ResultLogger
(
numberOfProcessors
);
final
ResultLogger
resultLogger
=
new
ResultLogger
(
numberOfProcessors
);
resultLogger
.
logTests
(
abstractTestInstances
);
//
resultLogger.logTests(abstractTestInstances);
}
public
void
runExperiments
(
DagTaskBuilder
builder
,
Set
<
ResultCollector
<
AbstractTest
<
DagTask
>>>
abstractTestInstance
s
,
long
numberOfProcessors
,
long
numberOfTaskSetsPerUtil
)
{
public
void
runExperiments
(
final
DagTaskBuilder
builder
,
final
List
<
AbstractTest
<
DagTask
>>
abstractTestInstances
,
final
long
numberOfProcessor
s
,
final
long
numberOfTaskSetsPerUtil
)
{
long
checkedTasksets
=
0
;
long
numberOfTaskSets
=
numberOfProcessors
*
4
*
numberOfTaskSetsPerUtil
;
final
long
numberOfTaskSets
=
((
numberOfProcessors
*
4
)
-
3
)
*
numberOfTaskSetsPerUtil
;
// create LoggerFile here
final
ResultLogger
resultLogger
=
new
ResultLogger
(
numberOfProcessors
);
final
Map
<
AbstractTest
<
DagTask
>,
Long
>
resultMap
=
new
LinkedHashMap
<>();
abstractTestInstances
.
forEach
(
t
->
resultMap
.
put
(
t
,
(
long
)
0
));
for
(
double
util
=
1
;
util
<=
numberOfProcessors
;
util
+=
0.25
)
{
// new Result set here
for
(
int
i
=
0
;
i
<
numberOfTaskSetsPerUtil
;
++
i
)
{
Set
<
DagTask
>
taskSet
=
builder
.
generateTaskSet
(
util
);
resultMap
.
replaceAll
((
k
,
v
)
->
(
long
)
0
);
nextTry:
for
(
int
i
=
0
;
i
<
numberOfTaskSetsPerUtil
;
++
i
)
{
final
Set
<
DagTask
>
taskSet
=
builder
.
generateTaskSet
(
util
);
System
.
out
.
print
(
Math
.
round
((
double
)
checkedTasksets
/
numberOfTaskSets
*
100
)
+
"% of "
+
numberOfTaskSets
+
" tasksets tested!\r"
);
for
(
ResultCollector
<
AbstractTest
<
DagTask
>>
testResultCollector
:
abstractTestInstances
)
{
AbstractTest
<
DagTask
>
testInstance
=
testResultCollector
.
getAbstractValue
();
PriorityManager
priorityManager
=
testResultCollector
.
getPriorityManager
();
for
(
final
AbstractTest
<
DagTask
>
testInstance
:
abstractTestInstances
)
{
final
PriorityManager
priorityManager
=
testInstance
.
getPriorityManager
();
SortedTaskSet
<
DagTask
>
sortedTaskSet
=
new
SortedTaskSet
<>(
priorityManager
);
final
SortedTaskSet
<
DagTask
>
sortedTaskSet
=
new
SortedTaskSet
<>(
priorityManager
);
sortedTaskSet
.
addAll
(
taskSet
);
SchedulingInfo
schedulingInfo
=
testInstance
.
runSchedulabilityCheck
(
sortedTaskSet
);
testResultCollector
.
addResult
(
schedulingInfo
);
final
SchedulingInfo
schedulingInfo
;
try
{
schedulingInfo
=
testInstance
.
runSchedulabilityCheck
(
sortedTaskSet
);
}
catch
(
Exception
e
)
{
i
--;
continue
nextTry
;
// TODO: remove this exception
}
if
(
schedulingInfo
.
getFeasibility
()
==
Feasiblity
.
SUCCEEDED
)
{
resultMap
.
computeIfPresent
(
testInstance
,
(
k
,
v
)
->
v
+
1
);
}
}
checkedTasksets
++;
}
// write Result line to LoggerFile
resultLogger
.
logLine
(
util
,
resultMap
);
resultLogger
.
newLine
();
}
System
.
out
.
println
(
""
);
ResultLogger
resultLogger
=
new
ResultLogger
(
numberOfProcessors
);
resultLogger
.
logTests
(
abstractTestInstances
);
resultLogger
.
finalize
();
}
}
...
...
src/main/java/mvd/jester/model/Job.java
View file @
bc29a6dc
...
...
@@ -4,6 +4,7 @@ public class Job {
private
final
long
wcet
;
private
long
relativeCompletionTime
;
private
boolean
visited
=
false
;
public
Job
(
long
wcet
)
{
this
.
wcet
=
wcet
;
...
...
@@ -14,6 +15,14 @@ public class Job {
return
wcet
;
}
public
boolean
getVisited
()
{
return
visited
;
}
public
void
setVisited
(
boolean
visited
)
{
this
.
visited
=
visited
;
}
/**
* @return the relativeCompletionTime
*/
...
...
src/main/java/mvd/jester/model/SystemSetup.java
View file @
bc29a6dc
...
...
@@ -5,6 +5,7 @@ import java.util.HashSet;
import
java.util.LinkedHashSet
;
import
java.util.Map
;
import
java.util.Optional
;
import
java.util.Random
;
import
java.util.Set
;
import
java.util.concurrent.ThreadLocalRandom
;
import
com.google.common.collect.ArrayListMultimap
;
...
...
@@ -206,17 +207,23 @@ public class SystemSetup<T extends Task> {
private
long
numberOfProcessors
=
4
;
private
long
minimumWcet
=
1
;
private
long
maximumWcet
=
100
;
private
long
maxNumberOfBranches
=
5
;
private
long
depth
=
2
;
private
long
maxNumberOfBranches
=
3
;
// TODO: change back to 5
private
long
depth
=
1
;
// change back to 2
private
long
p_par
=
80
;
private
long
p_add
=
20
;
private
final
Random
random
;
public
DagTaskBuilder
(
long
seed
)
{
random
=
new
Random
(
seed
);
}
public
double
getBeta
()
{
return
0.035
*
numberOfProcessors
;
}
private
long
randomProbability
()
{
return
ThreadLocalRandom
.
current
().
nextLong
(
1
,
100
);
// return random.nextInt(99) + 1;
return
ThreadLocalRandom
.
current
().
nextLong
(
0
,
100
);
}
public
Set
<
DagTask
>
generateTaskSet
(
final
double
totalUtilization
)
{
...
...
@@ -251,7 +258,7 @@ public class SystemSetup<T extends Task> {
final
Job
j
=
fork
(
jobDag
,
Optional
.
empty
(),
this
.
depth
);
fork
(
jobDag
,
Optional
.
of
(
j
),
this
.
depth
);
randomEdges
(
jobDag
);
// randomEdges(jobDag); // TODO: uncomment
final
long
workload
=
DagUtils
.
calculateWorkload
(
jobDag
);
final
long
criticalPath
=
DagUtils
.
calculateCriticalPath
(
jobDag
);
...
...
@@ -320,15 +327,19 @@ public class SystemSetup<T extends Task> {
}
private
long
randomTaskPeriod
(
final
long
criticalPathLength
,
final
long
workload
)
{
// long upper = (long) (workload / getBeta());
// return random.nextInt((int) (upper - criticalPathLength)) + criticalPathLength;
return
ThreadLocalRandom
.
current
().
nextLong
(
criticalPathLength
,
(
long
)
(
workload
/
getBeta
()));
}
private
long
randomNumberOfBranches
()
{
// return random.nextInt((int) maxNumberOfBranches - 2) + 2;
return
ThreadLocalRandom
.
current
().
nextLong
(
2
,
maxNumberOfBranches
);
}
private
long
randomWcet
()
{
// return random.nextInt((int) (maximumWcet - minimumWcet)) + minimumWcet;
return
ThreadLocalRandom
.
current
().
nextLong
(
minimumWcet
,
maximumWcet
);
}
...
...
src/main/java/mvd/jester/tests/FonsecaNelis.java
View file @
bc29a6dc
...
...
@@ -13,6 +13,7 @@ import java.util.Set;
import
com.google.common.collect.Lists
;
import
com.google.common.collect.Sets
;
import
com.google.common.math.LongMath
;
import
org.jgrapht.Graphs
;
import
org.jgrapht.experimental.dag.DirectedAcyclicGraph
;
import
org.jgrapht.graph.DefaultEdge
;
import
mvd.jester.info.SchedulingInfo
;
...
...
@@ -68,11 +69,80 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
nfjJobDag
=
DagUtils
.
createNFJGraph
(
jobDag
);
final
BinaryDecompositionTree
<
Job
>
tree
=
DagUtils
.
createDecompositionTree
(
nfjJobDag
);
// TODO: remove this
if
(!
DagUtils
.
checkNFJProperty
(
nfjJobDag
)
||
!
checkTree
(
nfjJobDag
,
tree
))
{
throw
new
RuntimeException
(
"Nicht NFJ in Test!"
);
}
sortedSegments
.
put
(
t
,
constructCarryOutDistribution
(
nfjJobDag
,
tree
));
}
}
// TODO: Remove this function
private
boolean
checkTree
(
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
nfjJobDag
,
BinaryDecompositionTree
<
Job
>
tree
)
{
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDagFromTree
=
DagUtils
.
createNFJfromDecompositionTree
(
tree
);
if
(
nfjJobDag
.
vertexSet
().
size
()
!=
jobDagFromTree
.
vertexSet
().
size
())
{
return
false
;
}
if
(
jobDagFromTree
.
edgeSet
().
size
()
!=
nfjJobDag
.
edgeSet
().
size
())
{
return
false
;
}
for
(
DefaultEdge
e
:
nfjJobDag
.
edgeSet
())
{
Job
target
=
nfjJobDag
.
getEdgeTarget
(
e
);
Job
source
=
nfjJobDag
.
getEdgeSource
(
e
);
if
(!
jobDagFromTree
.
containsEdge
(
source
,
target
))
{
return
false
;
}
}
for
(
Job
j
:
nfjJobDag
)
{
for
(
Job
n
:
nfjJobDag
)
{
if
(
n
==
j
)
{
continue
;
}
if
(
nfjJobDag
.
containsEdge
(
n
,
j
)
&&
!
jobDagFromTree
.
containsEdge
(
n
,
j
))
{
return
false
;
}
}
if
(
nfjJobDag
.
inDegreeOf
(
j
)
!=
jobDagFromTree
.
inDegreeOf
(
j
))
return
false
;
if
(
nfjJobDag
.
outDegreeOf
(
j
)
!=
jobDagFromTree
.
outDegreeOf
(
j
))
return
false
;
for
(
Job
p
:
Graphs
.
predecessorListOf
(
nfjJobDag
,
j
))
{
if
(!
Graphs
.
predecessorListOf
(
jobDagFromTree
,
j
).
contains
(
p
))
{
return
false
;
}
}
for
(
Job
s
:
Graphs
.
successorListOf
(
nfjJobDag
,
j
))
{
if
(!
Graphs
.
successorListOf
(
jobDagFromTree
,
j
).
contains
(
s
))
{
return
false
;
}
}
for
(
Job
a
:
nfjJobDag
.
getAncestors
(
nfjJobDag
,
j
))
{
if
(!
jobDagFromTree
.
getAncestors
(
jobDagFromTree
,
j
).
contains
(
a
))
{
return
false
;
}
}
for
(
Job
d
:
nfjJobDag
.
getDescendants
(
nfjJobDag
,
j
))
{
if
(!
jobDagFromTree
.
getDescendants
(
jobDagFromTree
,
j
).
contains
(
d
))
{
return
false
;
}
}
}
return
true
;
}
private
Set
<
Segment
>
constructCarryOutDistribution
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
nfjDag
,
final
BinaryDecompositionTree
<
Job
>
tree
)
{
...
...
src/main/java/mvd/jester/utils/DagUtils.java
View file @
bc29a6dc
...
...
@@ -2,6 +2,7 @@ package mvd.jester.utils;
import
java.util.Arrays
;
import
java.util.HashSet
;
import
java.util.Iterator
;
import
java.util.LinkedHashSet
;
import
java.util.LinkedList
;
import
java.util.List
;
...
...
@@ -12,6 +13,7 @@ import com.google.common.collect.Sets;
import
org.jgrapht.Graphs
;
import
org.jgrapht.experimental.dag.DirectedAcyclicGraph
;
import
org.jgrapht.graph.DefaultEdge
;
import
org.jgrapht.traverse.ClosestFirstIterator
;
import
mvd.jester.model.Job
;
import
mvd.jester.model.Segment
;
import
mvd.jester.utils.BinaryDecompositionTree.Node
;
...
...
@@ -78,18 +80,22 @@ public class DagUtils {
return
segments
;
}
public
static
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
createNFJGraph
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
)
{
final
LinkedList
<
Job
>
joinNodes
=
new
LinkedList
<>();
final
LinkedList
<
Job
>
forkNodes
=
new
LinkedList
<>();
//
public static DirectedAcyclicGraph<Job, DefaultEdge> createNFJGraph(
//
final DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
//
final LinkedList<Job> joinNodes = new LinkedList<>();
//
final LinkedList<Job> forkNodes = new LinkedList<>();
collectForksAndJoins
(
jobDag
,
forkNodes
,
joinNodes
);
return
createNFJGraph
(
jobDag
,
forkNodes
,
joinNodes
);
}
//
collectForksAndJoins(jobDag, forkNodes, joinNodes);
//
return createNFJGraph(jobDag, forkNodes, joinNodes);
//
}
public
static
void
collectForksAndJoins
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
,
final
LinkedList
<
Job
>
forkNodes
,
final
LinkedList
<
Job
>
joinNodes
)
{
for
(
final
Job
j
:
jobDag
)
{
// final ClosestFirstIterator<Job, DefaultEdge> it = new ClosestFirstIterator<>(jobDag);
Iterator
<
Job
>
it
=
jobDag
.
iterator
();
for
(;
it
.
hasNext
();)
{
final
Job
j
=
it
.
next
();
// for (final Job j : jobDag) {
if
(
jobDag
.
inDegreeOf
(
j
)
>
1
)
{
joinNodes
.
add
(
j
);
}
...
...
@@ -99,42 +105,143 @@ public class DagUtils {
}
}
private
static
LinkedList
<
Job
>
getUnvisitedJoinNodes
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
)
{
final
LinkedList
<
Job
>
joinNodes
=
new
LinkedList
<>();
final
ClosestFirstIterator
<
Job
,
DefaultEdge
>
it
=
new
ClosestFirstIterator
<>(
jobDag
);
for
(;
it
.
hasNext
();)
{
final
Job
j
=
it
.
next
();
if
(
jobDag
.
inDegreeOf
(
j
)
>
1
&&
!
j
.
getVisited
())
{
joinNodes
.
add
(
j
);
}
}
return
joinNodes
;
}
public
static
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
createNFJGraph
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
,
final
LinkedList
<
Job
>
forkNodes
,
final
LinkedList
<
Job
>
joinNodes
)
{
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
)
{
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
modifiedJobDag
=
new
DirectedAcyclicGraph
<>(
DefaultEdge
.
class
);
Graphs
.
addGraph
(
modifiedJobDag
,
jobDag
);
Job
sink
=
null
;
if
(!(
joinNodes
.
size
()
>
1
)
)
{
return
modifiedJobDag
;
for
(
final
Job
j
:
modifiedJobDag
)
{
sink
=
j
;
}
final
Job
sink
=
joinNodes
.
getLast
();
while
(
true
)
{
final
LinkedList
<
Job
>
joins
=
getUnvisitedJoinNodes
(
modifiedJobDag
);
if
(
joins
.
isEmpty
())
{
break
;
}
final
Job
joinNode
=
joins
.
getFirst
();
joinNode
.
setVisited
(
true
);
final
Set
<
Job
>
ancestorsOfJ
=
jobDag
.
getAncestors
(
modifiedJobDag
,
joinNode
);
ancestorsOfJ
.
add
(
joinNode
);
nextEdge:
while
(
modifiedJobDag
.
inDegreeOf
(
joinNode
)
>
1
)
{
final
List
<
Job
>
predecessors
=
Graphs
.
predecessorListOf
(
modifiedJobDag
,
joinNode
);
for
(
final
Job
p
:
predecessors
)
{
if
(
modifiedJobDag
.
outDegreeOf
(
p
)
>
1
||
!
checkPredecessors
(
modifiedJobDag
,
joinNode
,
ancestorsOfJ
,
p
))
{
// || !checkPredecessors(modifiedJobDag, p, joinNode, ancestorsOfJ)) {
// DefaultEdge e = modifiedJobDag.getEdge(p, joinNode);
// if ((modifiedJobDag.outDegreeOf(p) > 1)
// || checkForFork(modifiedJobDag, joinNode, ancestorsOfJ, e)) {
final
DefaultEdge
conflictingEdge
=
modifiedJobDag
.
getEdge
(
p
,
joinNode
);
modifiedJobDag
.
removeEdge
(
conflictingEdge
);
if
(
modifiedJobDag
.
outDegreeOf
(
p
)
==
0
)
{
try
{
modifiedJobDag
.
addDagEdge
(
p
,
sink
);
}
catch
(
final
Exception
exception
)
{
// TODO: handle exception
}
}
continue
nextEdge
;
}
}
break
;
}
}
for
(
final
Job
j
:
joinNodes
)
{
final
Set
<
DefaultEdge
>
edgeSet
=
new
HashSet
<>(
modifiedJobDag
.
incomingEdgesOf
(
j
));
for
(
final
DefaultEdge
e
:
edgeSet
)
{
final
Job
predecessor
=
modifiedJobDag
.
getEdgeSource
(
e
);
final
boolean
satisfiesProposition
=
DagUtils
.
checkForFork
(
modifiedJobDag
,
j
,
forkNodes
,
predecessor
);
if
(!
satisfiesProposition
)
{
modifiedJobDag
.
removeEdge
(
e
);
if
(
modifiedJobDag
.
outgoingEdgesOf
(
predecessor
).
isEmpty
())
{
try
{
modifiedJobDag
.
addDagEdge
(
predecessor
,
sink
);
}
catch
(
final
Exception
ex
)
{
return
modifiedJobDag
;
}
public
static
boolean
checkForFork
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
,
final
Job
joinNode
,
Set
<
Job
>
ancestorsOfJ2
,
final
DefaultEdge
edge
)
{
Set
<
Job
>
ancestorsOfJ
=
jobDag
.
getAncestors
(
jobDag
,
joinNode
);
nextFork:
for
(
final
Job
f
:
jobDag
)
{
if
(
jobDag
.
outDegreeOf
(
f
)
>
1
)
{
final
Set
<
Job
>
descendantsOfF
=
jobDag
.
getDescendants
(
jobDag
,
f
);
final
Set
<
Job
>
inbetweenJobs
=
new
HashSet
<>(
ancestorsOfJ
);
inbetweenJobs
.
retainAll
(
descendantsOfF
);
if
(
inbetweenJobs
.
isEmpty
())
{
continue
;
}
for
(
final
Job
a
:
inbetweenJobs
)
{
final
List
<
Job
>
neighboursOfA
=
Graphs
.
neighborListOf
(
jobDag
,
a
);
for
(
final
Job
b
:
neighboursOfA
)
{
if
(!((
jobDag
.
getAncestors
(
jobDag
,
joinNode
).
contains
(
b
)
||
b
==
joinNode
)
&&
(
jobDag
.
getDescendants
(
jobDag
,
f
).
contains
(
b
)
||
b
==
f
)))
{
continue
nextFork
;
}
}
}
if
(
modifiedJobDag
.
inDegreeOf
(
j
)
==
1
)
{
break
;
Job
source
=
jobDag
.
getEdgeSource
(
edge
);
if
(
inbetweenJobs
.
contains
(
source
))
{
return
true
;
}
}
}
return
modifiedJobDag
;
return
false
;
}
private
static
boolean
checkPredecessors
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
,
final
Job
current
,
final
Job
joinNode
,
final
Set
<
Job
>
ancestorsOfJ2
)
{
final
List
<
Job
>
successorsOfCurrent
=
Graphs
.
successorListOf
(
jobDag
,
current
);
final
Set
<
Job
>
ancestorsOfJ
=
jobDag
.
getAncestors
(
jobDag
,
joinNode
);
if
(
ancestorsOfJ
.
containsAll
(
successorsOfCurrent
))
{
final
Set
<
Job
>
descendantsOfCurrent
=
jobDag
.
getDescendants
(
jobDag
,
current
);
if
(
descendantsOfCurrent
.
containsAll
(
Graphs
.
predecessorListOf
(
jobDag
,
joinNode
)))
{
return
true
;
}
else
{
for
(
final
Job
p
:
Graphs
.
predecessorListOf
(
jobDag
,
current
))
{
if
(!
checkPredecessors
(
jobDag
,
p
,
joinNode
,
ancestorsOfJ
))
{
return
false
;
}
}
return
true
;
}
}
else
{
return
false
;
}
}
private
static
boolean
checkPredecessors
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
,
final
Job
joinNode
,
final
Set
<
Job
>
ancestorsOfJ2
,
Job
current
)
{
Set
<
Job
>
ancestorsOfJ
=
jobDag
.
getAncestors
(
jobDag
,
joinNode
);
if
(
jobDag
.
outDegreeOf
(
current
)
>
1
)
{
if
(!
ancestorsOfJ
.
containsAll
(
Graphs
.
successorListOf
(
jobDag
,
current
)))
{
return
false
;
}
final
Set
<
Job
>
descendantsOfCurrent
=
jobDag
.
getDescendants
(
jobDag
,
current
);
if
(
descendantsOfCurrent
.
containsAll
(
Graphs
.
predecessorListOf
(
jobDag
,
joinNode
)))
{
return
true
;
}
}
final
List
<
Job
>
predecessorList
=
Graphs
.
predecessorListOf
(
jobDag
,
current
);
for
(
Job
p
:
predecessorList
)
{
if
(!
checkPredecessors
(
jobDag
,
joinNode
,
ancestorsOfJ
,
p
))
{
return
false
;
}
}
return
true
;
}
public
static
BinaryDecompositionTree
<
Job
>
createDecompositionTree
(
...
...
@@ -152,19 +259,15 @@ public class DagUtils {
final
BinaryDecompositionTree
<
Job
>
tree
=
new
BinaryDecompositionTree
<>();
Optional
<
Job
>
source
=
Optional
.
empty
();
Optional
<
Job
>
sink
=
Optional
.
empty
();
if
(
forkNodes
.
size
()
>
0
)
{
source
=
Optional
.
of
(
forkNodes
.
getFirst
());
sink
=
Optional
.
of
(
joinNodes
.
getLast
());
}
else
{
boolean
firstRun
=
true
;
for
(
final
Job
j
:
jobDag
)
{
if
(
firstRun
)
{
firstRun
=
false
;
source
=
Optional
.
of
(
j
);
}
sink
=
Optional
.
of
(
j
);
boolean
firstRun
=
true
;
for
(
final
Job
j
:
jobDag
)
{
if
(
firstRun
)
{
firstRun
=
false
;
source
=
Optional
.
of
(
j
);
}
sink
=
Optional
.
of
(
j
);
}
if
(
source
.
isPresent
()
&&
sink
.
isPresent
())
{
final
Job
current
=
source
.
get
();
...
...
@@ -247,41 +350,7 @@ public class DagUtils {
}
}
/**
 * Checks whether the given job DAG satisfies the nested fork-join (NFJ) property.
 * For every join node there must exist a fork node such that each job lying
 * strictly between them has all of its neighbours inside the fork-join region
 * as well; a join for which no such fork exists violates the property.
 *
 * @param jobDag the job DAG to test
 * @return {@code true} if the DAG is nested fork-join, {@code false} otherwise
 */
public static boolean checkNFJProperty(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
    final LinkedList<Job> joinNodes = new LinkedList<>();
    final LinkedList<Job> forkNodes = new LinkedList<>();
    collectForksAndJoins(jobDag, forkNodes, joinNodes);

    // With at most one join the DAG is trivially nested.
    if (joinNodes.size() < 2) {
        return true;
    }

    nextJoin: for (final Job join : joinNodes) {
        final Set<Job> ancestorsOfJoin = jobDag.getAncestors(jobDag, join);
        for (final Job fork : forkNodes) {
            // Jobs strictly between fork and join: ancestors of the join minus the
            // fork itself and everything before it.
            final Set<Job> ancestorsOfFork = jobDag.getAncestors(jobDag, fork);
            ancestorsOfFork.add(fork);
            final Set<Job> between = new HashSet<>(ancestorsOfJoin);
            between.removeAll(ancestorsOfFork);
            if (between.isEmpty()) {
                continue; // this fork encloses nothing w.r.t. the join; try the next one
            }
            for (final Job a : between) {
                for (final Job b : Graphs.neighborListOf(jobDag, a)) {
                    // b must sit inside the fork-join region (identity comparison on
                    // the boundary nodes, as in the surrounding code).
                    final boolean belowJoin = ancestorsOfJoin.contains(b) || b == join;
                    final boolean aboveFork =
                            jobDag.getDescendants(jobDag, fork).contains(b) || b == fork;
                    if (belowJoin && aboveFork) {
                        continue nextJoin;
                    }
                }
            }
        }
        // No fork yielded an enclosed neighbour for this join.
        return false;
    }
    return true;
}
private
static
Job
findJoin
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
,
final
Job
forkNode
,
final
Job
sink
,
final
List
<
Job
>
joinNodes
)
{
...
...
@@ -305,26 +374,46 @@ public class DagUtils {
return
sink
;
}
private
static
boolean
checkForFork
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
,
final
Job
joinNode
,
final
List
<
Job
>
forkNodes
,
final
Job
job
)
{
final
List
<
Job
>
pred
=
Graphs
.
predecessorListOf
(
jobDag
,
job
);
for
(
final
Job
p
:
pred
)
{
if
(
forkNodes
.
contains
(
p
))
{
for
(
final
Job
successor
:
Graphs
.
successorListOf
(
jobDag
,
p
))
{
if
(
jobDag
.
getAncestors
(
jobDag
,
joinNode
).
contains
(
successor
))
{
return
false
;
public
static
boolean
checkNFJProperty
(
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
)
{
final
LinkedList
<
Job
>
joinNodes
=
new
LinkedList
<>();
final
LinkedList
<
Job
>
forkNodes
=
new
LinkedList
<>();
collectForksAndJoins
(
jobDag
,
forkNodes
,
joinNodes
);
if
(
joinNodes
.
isEmpty
())
{
return
true
;
}
nextJoin:
for
(
final
Job
j
:
joinNodes
)
{
nextFork:
for
(
final
Job
f
:
forkNodes
)
{
final
Set
<
Job
>
ancestorsOfJ
=
jobDag
.
getAncestors
(
jobDag
,
j
);
final
Set
<
Job
>
descendantsOfF
=
jobDag
.
getDescendants
(
jobDag
,
f
);
final
Set
<
Job
>
inbetweenJobs
=
new
HashSet
<>(
ancestorsOfJ
);
inbetweenJobs
.
retainAll
(
descendantsOfF
);
if
(
inbetweenJobs
.
isEmpty
())
{
continue
;
}
for
(
final
Job
a
:
inbetweenJobs
)
{
final
List
<
Job
>
neighboursOfA
=
Graphs
.
neighborListOf
(
jobDag
,
a
);
for
(
final
Job
b
:
neighboursOfA
)
{
if
((
jobDag
.
getAncestors
(
jobDag
,
j
).
contains
(
b
)
||
b
==
j
)
&&
(
jobDag
.
getDescendants
(
jobDag
,
f
).
contains
(
b
)
||
b
==
f
))
{
// this b is ok
}
else
{
continue
nextFork
;
}
}
}
}
else
{
return
checkForFork
(
jobDag
,
joinNode
,
forkNodes
,
p
);
continue
nextJoin
;
}
return
false
;
}
return
true
;
}
public
static
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
createNFJfromDecompositionTree
(
final
BinaryDecompositionTree
<
Job
>
tree
)
{
final
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
=
...
...
src/test/java/mvd/jester/model/TestDagUtils.java
View file @
bc29a6dc
package
mvd
.
jester
.
model
;
import
static
org
.
junit
.
jupiter
.
api
.
Assertions
.
assertTrue
;
import
java.util.List
;
import
java.util.Stack
;
import
java.util.concurrent.ThreadLocalRandom
;
import
org.jgrapht.Graphs
;
import
org.jgrapht.experimental.dag.DirectedAcyclicGraph
;
...
...
@@ -8,6 +10,8 @@ import org.jgrapht.graph.DefaultEdge;
import
org.junit.jupiter.api.DisplayName
;
import
org.junit.jupiter.api.Test
;
import
mvd.jester.utils.DagUtils
;
import
mvd.jester.utils.BinaryDecompositionTree.Node
;
import
mvd.jester.utils.BinaryDecompositionTree.NodeType
;
import
mvd.jester.model.SystemSetup.DagTaskBuilder
;
import
mvd.jester.utils.BinaryDecompositionTree
;
...
...
@@ -35,22 +39,70 @@ public class TestDagUtils {
@Test
@DisplayName
(
"Check if NFJ DAGs are created correctly."
)
void
checkNfjDagCreation
()
{
for
(
int
i
=
0
;
i
<
100
;
++
i
)
{
DagTaskBuilder
b
=
new
DagTaskBuilder
();
for
(
int
i
=
0
;
i
<
10000
;
++
i
)
{
// {
// int i = 1992;
System
.
out
.
println
(
i
);
DagTaskBuilder
b
=
new
DagTaskBuilder
(
i
);
DagTask
t
=
b
.
generateTask
();
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
nfjDag
=
DagUtils
.
createNFJGraph
(
t
.
getJobDag
());
assertTrue
(
DagUtils
.
checkNFJProperty
(
nfjDag
));
}
}
/**
 * Counts the LEAF nodes of a binary decomposition tree.
 *
 * @param tree the decomposition tree to inspect
 * @return the number of leaf nodes; 0 for a tree without a root
 */
private long countLeafs(BinaryDecompositionTree<Job> tree) {
    Node<Job> root = tree.getRootNode();
    // Base case: empty tree.
    if (root == null) {
        return 0;
    }
    // Iterative pre-order traversal (avoids recursion depth issues on deep trees).
    Stack<Node<Job>> nodeStack = new Stack<>();
    nodeStack.push(root);
    long count = 0;
    while (!nodeStack.empty()) {
        // Pop directly instead of the original peek()-then-pop() pair.
        Node<Job> node = nodeStack.pop();
        if (node.getNodeType() == NodeType.LEAF) {
            count++;
        }
        // Push children of the popped node so they are visited in turn.
        if (node.getLeftNode() != null) {
            nodeStack.push(node.getLeftNode());
        }
        if (node.getRightNode() != null) {
            nodeStack.push(node.getRightNode());
        }
    }
    return count;
}
/**
 * Debugging helper: dumps each job of the DAG together with its predecessor and
 * successor lists to stdout. The original body only collected the lists into
 * unused locals next to a dead {@code int test = 4;} statement — presumably a
 * breakpoint anchor; the dead code is replaced with an actual printout.
 *
 * @param nfjJobDag the DAG whose adjacency should be dumped
 */
private void drawGraph(DirectedAcyclicGraph<Job, DefaultEdge> nfjJobDag) {
    for (Job j : nfjJobDag) {
        List<Job> predecessors = Graphs.predecessorListOf(nfjJobDag, j);
        List<Job> successors = Graphs.successorListOf(nfjJobDag, j);
        System.out.println(j + " <- " + predecessors + " | -> " + successors);
    }
}
@Test
@DisplayName
(
"Check if Decomposition Tree is created correctly for NFJ Dag."
)
void
checkDecompositionTreeCreation
()
{
for
(
int
i
=
0
;
i
<
100
;
++
i
)
{
for
(
int
i
=
0
;
i
<
10000
;
++
i
)
{
// {
// int i = 847;
// System.out.println(i);
long
numberOfProcessors
=
ThreadLocalRandom
.
current
().
nextLong
(
4
,
16
);
DagTaskBuilder
b
=
new
DagTaskBuilder
().
setNumberOfProcessors
(
numberOfProcessors
);
// long numberOfProcessors = 4;
DagTaskBuilder
b
=
new
DagTaskBuilder
(
i
).
setNumberOfProcessors
(
numberOfProcessors
);
DagTask
t
=
b
.
generateTask
();
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
=
t
.
getJobDag
();
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
nfjJobDag
=
DagUtils
.
createNFJGraph
(
jobDag
);
...
...
@@ -59,6 +111,11 @@ public class TestDagUtils {
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDagFromTree
=
DagUtils
.
createNFJfromDecompositionTree
(
tree
);
// drawGraph(nfjJobDag);
// drawGraph(nfjJobDag);
boolean
ja
=
DagUtils
.
checkNFJProperty
(
nfjJobDag
);
System
.
out
.
println
(
"Bei i = "
+
i
+
": nfj = "
+
ja
);
assertTrue
(
jobDag
.
vertexSet
().
size
()
==
nfjJobDag
.
vertexSet
().
size
());
assertTrue
(
jobDag
.
vertexSet
().
size
()
==
jobDagFromTree
.
vertexSet
().
size
());
assertTrue
(
jobDagFromTree
.
edgeSet
().
size
()
==
nfjJobDag
.
edgeSet
().
size
());
...
...
@@ -67,6 +124,9 @@ public class TestDagUtils {
for
(
DefaultEdge
e
:
nfjJobDag
.
edgeSet
())
{
Job
target
=
nfjJobDag
.
getEdgeTarget
(
e
);
Job
source
=
nfjJobDag
.
getEdgeSource
(
e
);
// if (jobDagFromTree.containsEdge(source, target)) {
// drawGraph(jobDag);
// }
assertTrue
(
jobDagFromTree
.
containsEdge
(
source
,
target
));
}
...
...
@@ -87,7 +147,7 @@ public class TestDagUtils {
assertTrue
(
Graphs
.
predecessorListOf
(
jobDagFromTree
,
j
).
contains
(
p
));
}
for
(
Job
s
:
Graphs
.
successorListOf
(
jobDagFromTree
,
j
))
{
for
(
Job
s
:
Graphs
.
successorListOf
(
nfjJobDag
,
j
))
{
assertTrue
(
Graphs
.
successorListOf
(
jobDagFromTree
,
j
).
contains
(
s
));
}
...
...
@@ -101,6 +161,7 @@ public class TestDagUtils {
}
}
}
...
...
src/test/java/mvd/jester/model/TestSystemSetup.java
View file @
bc29a6dc
...
...
@@ -74,7 +74,7 @@ public class TestSystemSetup {
@DisplayName
(
"Check if DagTaskBuilder works correctly"
)
void
testDagTaskBuilder
()
{
for
(
int
i
=
0
;
i
<
100
;
++
i
)
{
DagTaskBuilder
builder
=
new
DagTaskBuilder
();
DagTaskBuilder
builder
=
new
DagTaskBuilder
(
i
);
DagTask
task
=
builder
.
generateTask
();
DirectedAcyclicGraph
<
Job
,
DefaultEdge
>
jobDag
=
task
.
getJobDag
();
...
...
@@ -103,7 +103,7 @@ public class TestSystemSetup {
void
testUtil
()
{
for
(
int
i
=
0
;
i
<
5
;
++
i
)
{
for
(
double
d
=
0.25
;
d
<
4
;
d
+=
0.25
)
{
DagTaskBuilder
builder
=
new
DagTaskBuilder
();
DagTaskBuilder
builder
=
new
DagTaskBuilder
(
i
);
Set
<
DagTask
>
taskSet
=
builder
.
generateTaskSet
(
d
);
double
taskSetUtil
=
0
;
for
(
DagTask
t
:
taskSet
)
{
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment