Commit eb53dcfb in las3_pub/jester, authored Sep 02, 2020 by Michael Schmid
Commit message: changes before working on system manager
Parent commit: 9809bc74
Showing 13 changed files with 576 additions and 252 deletions (+576 / -252):

src/main/java/mvd/jester/App.java  (+70 / -36)
src/main/java/mvd/jester/TestEnvironment.java  (+48 / -7)
src/main/java/mvd/jester/info/SchedulingInfo.java  (+5 / -68)
src/main/java/mvd/jester/model/ContainerTask.java  (+35 / -0)
src/main/java/mvd/jester/model/DagTask.java  (+10 / -47)
src/main/java/mvd/jester/model/SynchronousTask.java  (+3 / -11)
src/main/java/mvd/jester/model/SystemManager.java  (+131 / -3)
src/main/java/mvd/jester/model/Task.java  (+41 / -9)
src/main/java/mvd/jester/simulator/internals/dynamicforkjoin/TaskContext.java  (+0 / -1)
src/main/java/mvd/jester/tests/JiangYi.java  (+102 / -0)
src/main/java/mvd/jester/tests/SchmidMottok.java  (+19 / -47)
src/main/java/mvd/jester/tests/TypeFunction.java  (+111 / -22)
src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java  (+1 / -1)
src/main/java/mvd/jester/App.java

@@ -2,14 +2,18 @@ package mvd.jester;

 import java.util.Arrays;
 import java.util.List;
+import java.util.Set;
 import mvd.jester.model.DagTask;
+import mvd.jester.model.SortedTaskSet;
 import mvd.jester.model.SystemManager;
 import mvd.jester.model.SystemManager.DagTaskBuilder;
 import mvd.jester.tests.AbstractTest;
 import mvd.jester.tests.FonsecaNelis;
+import mvd.jester.tests.JiangYi;
 import mvd.jester.tests.MelaniButtazzo;
 import mvd.jester.tests.SchmidMottok;
-import mvd.jester.tests.TypeFunction.KownStructure;
+import mvd.jester.tests.TypeFunction.KnownStructureWithMaxThreads;
+import mvd.jester.tests.TypeFunction.KnownStructure;
 import mvd.jester.tests.TypeFunction.UnkownStructure;

@@ -20,51 +24,81 @@ import mvd.jester.tests.TypeFunction.UnkownStructure;

 public class App {
     public static void main(String[] args) {
         // {
         //     SystemManager manager = new SystemManager(8);
         //     DagTaskBuilder builder = new DagTaskBuilder();
         //     TestEnvironment te = new TestEnvironment();
-        //     List<AbstractTest<DagTask>> tests =
-        //             te.registerTests(Arrays.asList(new SchmidMottok(new UnkownStructure(), manager),
-        //                     new SchmidMottok(new KownStructure(), manager),
-        //                     new MelaniButtazzo(manager), new FonsecaNelis(manager)));
-        //     te.varyUtilization(builder, tests, 8, 500);
+        //     // SchmidMottok sm = new SchmidMottok(new KnownStructure(), manager);
+        //     // JiangYi jy = new JiangYi(manager);
+        //     // Set<DagTask> set = builder.generateRenyiTaskSet(7.0);
+        //     // final SortedTaskSet<DagTask> rmTasks = new SortedTaskSet<>(sm.getPriorityManager());
+        //     // rmTasks.addAll(set);
+        //     // final SortedTaskSet<DagTask> edfTasks = new SortedTaskSet<>(jy.getPriorityManager());
+        //     // edfTasks.addAll(set);
+        //     // jy.runSchedulabilityCheck(edfTasks);
+        //     // sm.runSchedulabilityCheck(rmTasks);
+        //     List<AbstractTest<DagTask>> tests = te.registerTests(Arrays.asList(
+        //             new SchmidMottok(new KnownStructureWithMaxThreads(manager), manager),
+        //             new SchmidMottok(new KnownStructure(), manager),
+        //             // new MelaniButtazzo(manager),
+        //             // new FonsecaNelis(manager),
+        //             new JiangYi(manager)));
+        //     te.varyRenyiUtilization(builder, tests, 8, 500);
         // }
+        // {
+        //     SystemManager manager = new SystemManager(8);
+        //     DagTaskBuilder builder = new DagTaskBuilder();
+        //     TestEnvironment te = new TestEnvironment();
+        //     List<AbstractTest<DagTask>> tests = te.registerTests(Arrays.asList(
+        //             new SchmidMottok(new KnownStructureWithMaxThreads(manager), manager),
+        //             new SchmidMottok(new KnownStructure(), manager)
+        //             // new MelaniButtazzo(manager)
+        //             // , new FonsecaNelis(manager)
+        //     ));
+        //     te.varyUtilization(builder, tests, 8, 500);
+        // }
         // {
         //     SystemManager manager = new SystemManager(8);
         //     DagTaskBuilder builder = new DagTaskBuilder();
         //     TestEnvironment te = new TestEnvironment();
         //     List<AbstractTest<DagTask>> tests =
         //             te.registerTests(Arrays.asList(new SchmidMottok(new UnkownStructure(), manager),
         //                     new SchmidMottok(new KownStructure(), manager),
         //                     new MelaniButtazzo(manager), new FonsecaNelis(manager)));
         //     te.varyNumberOfProcessors(builder, tests, manager, 500);
         // }
         // {
         //     SystemManager manager = new SystemManager(8);
         //     DagTaskBuilder builder = new DagTaskBuilder();
         //     TestEnvironment te = new TestEnvironment();
         //     List<AbstractTest<DagTask>> tests =
         //             te.registerTests(Arrays.asList(new SchmidMottok(new UnkownStructure(), manager),
         //                     new SchmidMottok(new KownStructure(), manager),
         //                     new MelaniButtazzo(manager), new FonsecaNelis(manager)));
-        //     te.varyNumberOfProcessors(builder, tests, manager, 500);
+        //     te.varyNumberOfTasks(builder, tests, 8, 500);
         // }
         {
             SystemManager manager = new SystemManager(8);
             DagTaskBuilder builder = new DagTaskBuilder();
             TestEnvironment te = new TestEnvironment();
             List<AbstractTest<DagTask>> tests =
                     te.registerTests(Arrays.asList(new SchmidMottok(new UnkownStructure(), manager),
                             new SchmidMottok(new KownStructure(), manager),
                             new MelaniButtazzo(manager), new FonsecaNelis(manager)));
             te.varyNumberOfTasks(builder, tests, 8, 500);
         }
         // {
         //     SystemManager manager = new SystemManager(8);
         //     DagTaskBuilder builder = new DagTaskBuilder();
         //     TestEnvironment te = new TestEnvironment();
         //     List<AbstractTest<DagTask>> tests =
         //             te.registerTests(Arrays.asList(new SchmidMottok(manager),
         //                     new MelaniButtazzo(manager), new FonsecaNelis(manager)));
         //     te.measureExecutionTimes(builder, tests, manager, 500);
         // }
     }
 }
src/main/java/mvd/jester/TestEnvironment.java

@@ -79,12 +79,13 @@ public class TestEnvironment {
     }

     public void measureExecutionTimes(final DagTaskBuilder builder,
             final List<AbstractTest<DagTask>> abstractTestInstances, SystemManager manager,
             final long numberOfMeasurements) {
         Map<AbstractTest<DagTask>, List<Long>> results = new LinkedHashMap<>();
         abstractTestInstances.forEach(t -> results.put(t, new ArrayList<>()));

         for (int i = 0; i < numberOfMeasurements; ++i) {
             double utilization =
                     ThreadLocalRandom.current().nextDouble(1, manager.getNumberOfProcessors());
             Set<DagTask> taskSet = builder.generateTaskSet(utilization);
             for (AbstractTest<DagTask> testInstance : abstractTestInstances) {
                 final PriorityManager priorityManager = testInstance.getPriorityManager();

@@ -94,7 +95,7 @@ public class TestEnvironment {
                 long timeBefore = ManagementFactory.getThreadMXBean().getCurrentThreadCpuTime();
                 testInstance.runSchedulabilityCheck(sortedTaskSet);
                 long timeAfter = ManagementFactory.getThreadMXBean().getCurrentThreadCpuTime();
-                long micros = (timeAfter - timeBefore)/1000;
+                long micros = (timeAfter - timeBefore) / 1000;
                 results.get(testInstance).add(micros);
             }
         }

@@ -125,7 +126,7 @@ public class TestEnvironment {
             final Set<DagTask> taskSet = builder.generateUUnifastTaskSet(
                     (long) (1.5 * numberOfProcessors), (double) numberOfProcessors * 0.5);
             System.out.print(checkedTasksets + "/" + numberOfTaskSets + " tasksets tested!\r");
             for (final AbstractTest<DagTask> testInstance : abstractTestInstances) {
                 final PriorityManager priorityManager = testInstance.getPriorityManager();

@@ -166,7 +167,7 @@ public class TestEnvironment {
             final Set<DagTask> taskSet = builder.generateUUnifastTaskSet(numberOfTasks,
                     (double) numberOfProcessors * 0.5);
             System.out.print(checkedTasksets + "/" + numberOfTaskSets + " tasksets tested!\r");
             for (final AbstractTest<DagTask> testInstance : abstractTestInstances) {
                 final PriorityManager priorityManager = testInstance.getPriorityManager();

@@ -206,7 +207,47 @@ public class TestEnvironment {
             for (int i = 0; i < numberOfTaskSetsPerUtil; ++i) {
                 final Set<DagTask> taskSet = builder.generateTaskSet(util);
                 System.out.print(checkedTasksets + "/" + numberOfTaskSets + " tasksets tested!\r");
                 for (final AbstractTest<DagTask> testInstance : abstractTestInstances) {
                     final PriorityManager priorityManager = testInstance.getPriorityManager();
                     final SortedTaskSet<DagTask> sortedTaskSet = new SortedTaskSet<>(priorityManager);
                     sortedTaskSet.addAll(taskSet);
                     final SchedulingInfo schedulingInfo =
                             testInstance.runSchedulabilityCheck(sortedTaskSet);
                     if (schedulingInfo.getFeasibility() == Feasiblity.SUCCEEDED) {
                         resultMap.computeIfPresent(testInstance, (k, v) -> v + 1);
                     }
                 }
                 checkedTasksets++;
             }
             resultLogger.logLine(util, resultMap);
             resultLogger.newLine();
         }
         System.out.println("");
         resultLogger.finalize();
     }

+    public void varyRenyiUtilization(final DagTaskBuilder builder,
+            final List<AbstractTest<DagTask>> abstractTestInstances, final long numberOfProcessors,
+            final long numberOfTaskSetsPerUtil) {
+        long checkedTasksets = 0;
+        final long numberOfTaskSets = ((numberOfProcessors * 4) - 3) * numberOfTaskSetsPerUtil;
+        final ResultLogger resultLogger = new ResultLogger("utilization_" + numberOfProcessors);
+
+        final Map<AbstractTest<DagTask>, Long> resultMap = new LinkedHashMap<>();
+        abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0));
+        resultLogger.logHeader(resultMap, "Utilization");
+
+        for (double util = 5; util <= numberOfProcessors; util += 0.25) {
+            resultMap.replaceAll((k, v) -> (long) 0);
+            for (int i = 0; i < numberOfTaskSetsPerUtil; ++i) {
+                final Set<DagTask> taskSet = builder.generateRenyiTaskSet(util);
+                System.out.print(checkedTasksets + "/" + numberOfTaskSets + " tasksets tested!\r");
+                for (final AbstractTest<DagTask> testInstance : abstractTestInstances) {
+                    final PriorityManager priorityManager = testInstance.getPriorityManager();
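Note on the timing above: measureExecutionTimes() samples per-thread CPU time through the JDK's ThreadMXBean, and getCurrentThreadCpuTime() reports nanoseconds, so dividing by 1000 yields microseconds. A minimal standalone sketch of the same measurement pattern; the class and busyWork() stand-in are hypothetical and not part of the repository, and it assumes thread CPU time measurement is enabled (the HotSpot default):

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;

public class CpuTimeSketch {
    public static void main(String[] args) {
        ThreadMXBean bean = ManagementFactory.getThreadMXBean();
        long before = bean.getCurrentThreadCpuTime(); // CPU nanoseconds of this thread
        busyWork();                                   // stand-in for runSchedulabilityCheck(...)
        long after = bean.getCurrentThreadCpuTime();
        long micros = (after - before) / 1000;        // ns -> us, as in measureExecutionTimes()
        System.out.println("check took " + micros + " us");
    }

    private static void busyWork() {
        double x = 0;
        for (int i = 0; i < 1_000_000; ++i) {
            x += Math.sqrt(i);
        }
    }
}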
src/main/java/mvd/jester/info/SchedulingInfo.java

@@ -15,6 +15,10 @@ public class SchedulingInfo {
         feasiblity = failedTerminationInfo.isPresent() ? Feasiblity.FAILED : Feasiblity.SUCCEEDED;
     }

+    public SchedulingInfo(Feasiblity feasiblity) {
+        this.feasiblity = feasiblity;
+    }
+
     /**
      * @return the feasiblity
      */

@@ -25,72 +29,5 @@ public class SchedulingInfo {
     public enum Feasiblity {
         FAILED, SUCCEEDED,
     }

-    // private final double parallelTaskRatio;
-    // private final double utilization;
-    // private final Set<TerminationInfo> terminationInfos;
-    // private Optional<TerminationInfo> failedTerminationInfo;
-
-    // public SchedulingInfo(double parallelTaskRatio, double utilization) {
-    //     this.parallelTaskRatio = parallelTaskRatio;
-    //     this.utilization = utilization;
-    //     this.terminationInfos = new HashSet<>();
-    //     this.failedTerminationInfo = Optional.empty();
-    // }
-
-    // /**
-    //  * @return the utilization
-    //  */
-    // public double getUtilization() {
-    //     return utilization;
-    // }
-
-    // /**
-    //  * @return the parallelTaskRatio
-    //  */
-    // public double getParallelTaskRatio() {
-    //     return parallelTaskRatio;
-    // }
-
-    // public SchedulingInfo(Set<TerminationInfo> terminationInfos, double parallelTaskRatio,
-    //         double utilization) {
-    //     this.terminationInfos = terminationInfos;
-    //     this.parallelTaskRatio = parallelTaskRatio;
-    //     this.utilization = utilization;
-    //     failedTerminationInfo =
-    //             terminationInfos.stream().filter(t -> t.getLateness() > 0).findFirst();
-    // }
-
-    // public boolean checkLevelFail(Level level) {
-    //     return terminationInfos.stream()
-    //             .anyMatch(t -> t.getLateness() > 0 && t.getTaskLevel() == level);
-    // }
-
-    // public boolean checkTasksetFeasible() {
-    //     // return terminationInfos.isEmpty();
-    //     return !terminationInfos.stream().anyMatch(t -> t.getLateness() > 0);
-    // }
-
-    // public boolean addTerminationInfo(TerminationInfo terminationInfo) {
-    //     return terminationInfos.add(terminationInfo);
-    // }
-
-    // /**
-    //  * @return the terminationInfos
-    //  */
-    // public Set<TerminationInfo> getTerminationInfos() {
-    //     return terminationInfos;
-    // }
-
-    // /**
-    //  * @return the failedTerminationInfo
-    //  */
-    // public Optional<TerminationInfo> getFailedTerminationInfo() {
-    //     return failedTerminationInfo;
-    // }
-
-    // public void setFailedTerminationInfo(TerminationInfo failedTerminationInfo) {
-    //     this.failedTerminationInfo = Optional.of(failedTerminationInfo);
-    // }
 }
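The new single-argument constructor lets a test return a bare verdict, which callers such as TestEnvironment then compare against Feasiblity.SUCCEEDED. A minimal sketch of how a test might use it (the surrounding method is assumed, not part of this file):

// inside a schedulability test, using the enum spelling from this file
return new SchedulingInfo(Feasiblity.FAILED);
// ...or, when the task set passes:
return new SchedulingInfo(Feasiblity.SUCCEEDED);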
src/main/java/mvd/jester/model/ContainerTask.java  (new file, 0 → 100644)

package mvd.jester.model;

import java.util.HashSet;
import java.util.Set;

public class ContainerTask extends Task {

    private final Set<Segment> segments;
    private final double loadBound;

    public ContainerTask(final long period, final long deadline, final double loadBound) {
        super(period, deadline, (long) Math.ceil(loadBound * period),
                (long) Math.ceil(loadBound * period), 1);
        this.loadBound = loadBound;
        segments = new HashSet<>();
        segments.add(new Segment(workload, 1));
    }

    @Override
    public Set<Segment> getWorkloadDistribution() {
        return segments;
    }

    @Override
    public double getDensity() {
        return loadBound;
    }

    @Override
    public long getMaximumParallelism() {
        return 1;
    }
}
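ContainerTask packages the fractional leftover capacity of a heavy task as a purely sequential task whose density equals the given load bound; JiangYi.java below constructs it exactly this way. A small worked sketch with made-up numbers:

// Hypothetical heavy task: workload C = 120, critical path L = 40, deadline = period = 100.
// Processing capacity (C - L) / (D - L) = 80 / 60 = 1.33..., so one dedicated processor is
// reserved and the remaining 0.33... becomes the load bound of a container task.
double loadBound = (120.0 - 40) / (100 - 40) - 1;          // = 0.333...
ContainerTask container = new ContainerTask(100, 100, loadBound);
// workload = ceil(loadBound * period) = 34, density = loadBound, maximum parallelism = 1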
src/main/java/mvd/jester/model/DagTask.java

@@ -5,38 +5,28 @@ import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
 import org.jgrapht.graph.DefaultEdge;
 import mvd.jester.utils.DagUtils;

-public class DagTask implements Task {
+public class DagTask extends Task {

     private DirectedAcyclicGraph<Job, DefaultEdge> jobDag;
     private final Set<Segment> workloadDistribution;
-    private final long workload;
-    private final long criticalPath;
-    private final long period;
-    private final long deadline;
-    private final long numberOfThreads;

     public DagTask(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag, final long period,
             final long numberOfThreads) {
+        super(period, period, DagUtils.calculateWorkload(jobDag),
+                DagUtils.calculateCriticalPath(jobDag), numberOfThreads);
         this.jobDag = jobDag;
-        this.period = period;
-        this.deadline = period;
-        this.numberOfThreads = numberOfThreads;
-        this.workload = DagUtils.calculateWorkload(this.jobDag);
-        this.criticalPath = DagUtils.calculateCriticalPath(this.jobDag);
         this.workloadDistribution =
                 DagUtils.calculateWorkloadDistribution(this.jobDag, this.criticalPath);
     }

-    public double getUtilization() {
-        return (double) workload / period;
-    }
-
-    /**
-     * @return the deadline
-     */
-    public long getDeadline() {
-        return deadline;
-    }
+    public DagTask(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag, final long period,
+            final Set<Segment> workloadDistribution, final long numberOfThreads) {
+        super(period, period, DagUtils.calculateWorkload(jobDag),
+                DagUtils.calculateCriticalPath(jobDag), numberOfThreads);
+        this.jobDag = jobDag;
+        this.workloadDistribution = workloadDistribution;
+    }

     /**
      * @return the jobDag

@@ -53,30 +43,9 @@ public class DagTask implements Task {
     }

     /**
-     * @return the period
-     */
-    public long getPeriod() {
-        return period;
-    }
-
-    /**
-     * @return the workload
-     */
-    public long getWorkload() {
-        return workload;
-    }
-
-    /**
-     * @return the criticalPath
-     */
-    public long getCriticalPath() {
-        return criticalPath;
-    }
-
-    /**
      * @return the workloadDistribution
      */
+    @Override
     public Set<Segment> getWorkloadDistribution() {
         return workloadDistribution;
     }

@@ -92,10 +61,4 @@ public class DagTask implements Task {
         return max;
     }
-
-    @Override
-    public long getNumberOfThreads() {
-        return numberOfThreads;
-    }
 }
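After the refactoring, the first constructor derives workload and critical path from the DAG inside the super(...) call, while the new second constructor accepts a precomputed workload distribution. A minimal usage sketch; the DAG construction is elided and in practice comes from DagTaskBuilder in SystemManager.java:

DirectedAcyclicGraph<Job, DefaultEdge> jobDag = new DirectedAcyclicGraph<>(DefaultEdge.class);
// ... add Job vertices and edges, e.g. via DagTaskBuilder ...
DagTask task = new DagTask(jobDag, 1000, 4);               // distribution computed from the DAG
Set<Segment> distribution = task.getWorkloadDistribution();
DagTask variant = new DagTask(jobDag, 500, distribution, 4); // reuse the precomputed distribution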
src/main/java/mvd/jester/model/SynchronousTask.java

@@ -5,23 +5,15 @@ import java.util.Set;

 /**
  * Task
  */
-public class SynchronousTask implements Task {
+public class SynchronousTask extends Task {

-    private final long deadline;
-    private final long period;
     private final Set<Segment> segments;
-    private final long workload;
-    private final long criticalPath;
-    private final long numberOfThreads;

     public SynchronousTask(final long period, final long deadline, final long numberOfThreads,
             final Set<Segment> segments) {
-        this.deadline = deadline;
-        this.period = period;
-        this.numberOfThreads = numberOfThreads;
+        super(period, deadline, SynchronousUtils.calculateWorkload(segments),
+                SynchronousUtils.calculateCriticalPath(segments), numberOfThreads);
         this.segments = segments;
-        this.workload = SynchronousUtils.calculateWorkload(segments);
-        this.criticalPath = SynchronousUtils.calculateCriticalPath(segments);
     }

     public SynchronousTask(final Set<Segment> segments, final long period,
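As with DagTask, workload and critical path are now computed by SynchronousUtils inside the super(...) call, so the common getters come from the Task base class. A minimal construction sketch; the Segment argument order (job WCET, then number of jobs) is an assumption inferred from the ContainerTask constructor above:

// Hypothetical two-segment synchronous task on 4 threads with period = deadline = 200:
Set<Segment> segments = new HashSet<>();
segments.add(new Segment(10, 1));   // one sequential job with WCET 10
segments.add(new Segment(20, 4));   // four parallel jobs with WCET 20 each
SynchronousTask task = new SynchronousTask(200, 200, 4, segments);
// task.getWorkload() and task.getCriticalPath() are derived by SynchronousUtils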
src/main/java/mvd/jester/model/SystemManager.java

 package mvd.jester.model;

+import java.math.RoundingMode;
 import java.util.HashSet;
 import java.util.LinkedHashSet;
 import java.util.Map;

@@ -8,6 +9,8 @@ import java.util.Set;
 import java.util.concurrent.ThreadLocalRandom;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.Multimap;
+import com.google.common.math.LongMath;
+import org.apache.commons.math3.distribution.GammaDistribution;
 import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
 import org.jgrapht.graph.DefaultEdge;
 import mvd.jester.utils.DagUtils;

@@ -161,9 +164,10 @@ public class SystemManager {
         private long minimumWcet = 1;
         private long maximumWcet = 100;
         private long maxNumberOfBranches = 5;
+        private long maxNumberOfThreads = numberOfProcessors;
         private long depth = 2;
         private long p_par = 80;
-        private long p_add = 20;
+        private long p_add = 10; // TODO: Change back to 0.2

         public DagTaskBuilder() {
         }

@@ -176,6 +180,32 @@ public class SystemManager {
             return ThreadLocalRandom.current().nextLong(0, 100);
         }

+        public Set<DagTask> generateRenyiTaskSet(final double totalUtilization) {
+            final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();
+            double currentUtilization = 0;
+            while (currentUtilization <= totalUtilization) {
+                final DagTask dagTask = generateRenyiTask(totalUtilization);
+                if (currentUtilization + dagTask.getUtilization() < totalUtilization) {
+                    currentUtilization += dagTask.getUtilization();
+                    taskSet.add(dagTask);
+                } else {
+                    final double remainingUtilization = totalUtilization - currentUtilization;
+                    final long period =
+                            (long) Math.ceil(dagTask.getWorkload() / remainingUtilization);
+                    if (period >= dagTask.getCriticalPath()) {
+                        final DagTask modifiedTask =
+                                new DagTask(dagTask.getJobDag(), period, numberOfProcessors);
+                        taskSet.add(modifiedTask);
+                        break;
+                    }
+                }
+            }
+            return taskSet;
+        }
+
         public Set<DagTask> generateUUnifastTaskSet(final long numberOfTasks,
                 final double totalUtilization) {
             final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();

@@ -221,6 +251,65 @@ public class SystemManager {
             return taskSet;
         }

+        public DagTask generateRenyiTask(final double totalUtilization) {
+            final DirectedAcyclicGraph<Job, DefaultEdge> jobDag =
+                    new DirectedAcyclicGraph<>(DefaultEdge.class);
+            final long numberOfVertices = randomNumberInRange(50, 250);
+
+            for (int i = 0; i < numberOfVertices - 2; ++i) {
+                final long wcet = randomNumberInRange(50, 100);
+                Job j = new Job(wcet);
+                jobDag.addVertex(j);
+            }
+            randomEdgesRenyi(jobDag);
+            addSourceAndSink(jobDag);
+
+            final long workload = DagUtils.calculateWorkload(jobDag);
+            final long criticalPath = DagUtils.calculateCriticalPath(jobDag);
+            final long period = randomRenyiPeriod(workload, criticalPath, totalUtilization);
+            // final long numberOfThreads = LongMath.divide(workload - criticalPath,
+            //         period - criticalPath, RoundingMode.CEILING) + 1;
+            final long numberOfThreads = randomNumberOfThreads(numberOfProcessors / 2);
+            return new DagTask(jobDag, period, numberOfThreads);
+        }
+
+        private long randomRenyiPeriod(final long workload, final long criticalPath,
+                final double totalUtilization) {
+            final double firstPart =
+                    (criticalPath + (double) (workload) / (0.4 * totalUtilization));
+            final double gamma = new GammaDistribution(2, 1).sample();
+            final double secondPart = 1 + 0.25 * gamma;
+            return (long) Math.ceil(firstPart * secondPart);
+        }
+
+        private void addSourceAndSink(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
+            final Multimap<Job, Job> edgePairs = ArrayListMultimap.create();
+            Job source = new Job(randomNumberInRange(50, 100));
+            Job sink = new Job(randomNumberInRange(50, 100));
+            jobDag.addVertex(source);
+            jobDag.addVertex(sink);
+            for (Job j : jobDag) {
+                if (jobDag.inDegreeOf(j) == 0) {
+                    edgePairs.put(source, j);
+                }
+                if (jobDag.outDegreeOf(j) == 0) {
+                    edgePairs.put(j, sink);
+                }
+            }
+            for (final Map.Entry<Job, Job> pairs : edgePairs.entries()) {
+                try {
+                    jobDag.addDagEdge(pairs.getKey(), pairs.getValue());
+                } catch (final Exception e) {
+                    // nothing to do here
+                }
+            }
+        }
+
         public DagTask generateTask(double utilization) {
             final DirectedAcyclicGraph<Job, DefaultEdge> jobDag =
                     new DirectedAcyclicGraph<>(DefaultEdge.class);

@@ -231,7 +320,7 @@ public class SystemManager {
             final long workload = DagUtils.calculateWorkload(jobDag);
-            final long period = Math.round(workload / utilization);
+            final long period = (long) Math.ceil(workload / utilization);
             return new DagTask(jobDag, period, numberOfProcessors);
         }

@@ -249,7 +338,10 @@ public class SystemManager {
             final long period = randomTaskPeriod(criticalPath, workload);
-            return new DagTask(jobDag, period, numberOfProcessors);
+            final long minNumberOfThreads =
+                    LongMath.divide(workload, period, RoundingMode.CEILING);
+            final long numberOfThreads = randomNumberOfThreads(minNumberOfThreads);
+            return new DagTask(jobDag, period, numberOfThreads);
         }

         private Job join(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag, final Job current,

@@ -310,6 +402,32 @@ public class SystemManager {
             }
         }

+        private void randomEdgesRenyi(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
+            final Multimap<Job, Job> edgePairs = ArrayListMultimap.create();
+            for (final Job j1 : jobDag) {
+                for (final Job j2 : jobDag) {
+                    if (j2 == j1) {
+                        break;
+                    }
+                    if (randomProbability() < p_add) {
+                        edgePairs.put(j1, j2);
+                    }
+                }
+            }
+            for (final Map.Entry<Job, Job> pairs : edgePairs.entries()) {
+                try {
+                    jobDag.addDagEdge(pairs.getKey(), pairs.getValue());
+                } catch (final Exception e) {
+                    // nothing to do here
+                }
+            }
+        }
+
+        private long randomNumberOfThreads(final long minNumberOfThreads) {
+            return ThreadLocalRandom.current().nextLong(minNumberOfThreads, maxNumberOfThreads);
+        }
+
         private long randomTaskPeriod(final long criticalPathLength, final long workload) {
             return ThreadLocalRandom.current().nextLong(criticalPathLength,
                     (long) (workload / getBeta()));

@@ -319,6 +437,10 @@ public class SystemManager {
             return ThreadLocalRandom.current().nextLong(2, maxNumberOfBranches);
         }

+        private long randomNumberInRange(final long min, final long max) {
+            return ThreadLocalRandom.current().nextLong(min, max);
+        }
+
         private long randomWcet() {
             return ThreadLocalRandom.current().nextLong(minimumWcet, maximumWcet);
         }

@@ -358,6 +480,12 @@ public class SystemManager {
             return this;
         }

+        public DagTaskBuilder setNumberOfThreads(final long maxNumberOfThreads) {
+            this.maxNumberOfThreads = maxNumberOfThreads;
+            return this;
+        }
+
         /**
          * @param depth the depth to set
          */
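The new Rényi-style generator draws 50-250 vertices, adds each candidate edge with probability p_add percent, wraps the graph with a source and a sink, and picks the period as ceil((criticalPath + workload / (0.4 * totalUtilization)) * (1 + 0.25 * g)) with g sampled from a Gamma(2, 1) distribution, so per-task utilization stays below the requested total. A usage sketch of the extended builder, mirroring App.java; the parameter values are illustrative only:

SystemManager manager = new SystemManager(8);
DagTaskBuilder builder = new DagTaskBuilder()
        .setNumberOfThreads(4);                     // caps randomNumberOfThreads(...)
Set<DagTask> renyiSet = builder.generateRenyiTaskSet(6.0);  // target total utilization ~6
double util = renyiSet.stream().mapToDouble(DagTask::getUtilization).sum();
System.out.println("generated " + renyiSet.size() + " tasks, utilization " + util);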
src/main/java/mvd/jester/model/Task.java

@@ -2,21 +2,53 @@ package mvd.jester.model;

 import java.util.Set;

-public interface Task {
-
-    public long getWorkload();
-
-    public long getCriticalPath();
-
-    public double getUtilization();
-
-    public long getMaximumParallelism();
-
-    public long getDeadline();
-
-    public long getPeriod();
-
-    public long getNumberOfThreads();
-
-    public Set<Segment> getWorkloadDistribution();
+public abstract class Task {
+
+    protected final long period;
+    protected final long deadline;
+    protected final long workload;
+    protected final long criticalPath;
+    protected final long numberOfThreads;
+
+    public Task(final long period, final long deadline, final long workload,
+            final long criticalPath, final long numberOfThreads) {
+        this.period = period;
+        this.deadline = deadline;
+        this.workload = workload;
+        this.criticalPath = criticalPath;
+        this.numberOfThreads = numberOfThreads;
+    }
+
+    public long getWorkload() {
+        return workload;
+    }
+
+    public long getCriticalPath() {
+        return criticalPath;
+    }
+
+    public double getUtilization() {
+        return (double) workload / period;
+    }
+
+    public double getDensity() {
+        return (double) workload / deadline;
+    }
+
+    public long getDeadline() {
+        return deadline;
+    }
+
+    public long getPeriod() {
+        return period;
+    }
+
+    public long getNumberOfThreads() {
+        return numberOfThreads;
+    }
+
+    abstract public long getMaximumParallelism();
+
+    abstract public Set<Segment> getWorkloadDistribution();
 }
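Task is now an abstract base class: the five protected fields and their getters live here, and subclasses only pass the values up and implement the two remaining abstract methods. A minimal sketch of a conforming subclass; SequentialTask is hypothetical, not part of this commit, and assumes it sits in the mvd.jester.model package so Segment is visible:

import java.util.Collections;
import java.util.Set;

public class SequentialTask extends Task {

    public SequentialTask(final long period, final long deadline, final long wcet) {
        // workload == critical path == wcet, executed by a single thread
        super(period, deadline, wcet, wcet, 1);
    }

    @Override
    public long getMaximumParallelism() {
        return 1;
    }

    @Override
    public Set<Segment> getWorkloadDistribution() {
        // one sequential segment, mirroring the ContainerTask constructor above
        return Collections.singleton(new Segment(workload, 1));
    }
}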
src/main/java/mvd/jester/simulator/internals/dynamicforkjoin/TaskContext.java

@@ -4,7 +4,6 @@ import java.util.ArrayList;
 import java.util.Optional;
 import mvd.jester.model.Segment;
 import mvd.jester.model.SynchronousTask;
-import mvd.jester.simulator.EventPrinter;
 import mvd.jester.simulator.internals.JobContextInterface;
 import mvd.jester.simulator.internals.TaskContextInterface;
src/main/java/mvd/jester/tests/JiangYi.java  (new file, 0 → 100644)

package mvd.jester.tests;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.SchedulingInfo.Feasiblity;
import mvd.jester.model.ContainerTask;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SystemManager;
import mvd.jester.model.Task;
import mvd.jester.priority.EarliestDeadlineFirst;
import mvd.jester.priority.PriorityManager;

public class JiangYi extends AbstractTest<DagTask> {

    private final EarliestDeadlineFirst priorityManager;

    public JiangYi(SystemManager manager) {
        super(manager);
        this.priorityManager = new EarliestDeadlineFirst();
    }

    @Override
    public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
        Set<DagTask> heavyTasks = new HashSet<>();
        Set<Task> lightTasks = new HashSet<>();
        for (DagTask t : tasks) {
            if (t.getUtilization() < 1) {
                lightTasks.add(t);
            } else {
                heavyTasks.add(t);
            }
        }

        long remainingProcessors = manager.getNumberOfProcessors();
        for (DagTask t : heavyTasks) {
            double processingCapacity = ((double) t.getWorkload() - t.getCriticalPath())
                    / (t.getDeadline() - t.getCriticalPath());
            long minimumProcessingCapacity = (long) Math.floor(processingCapacity);
            if (remainingProcessors < minimumProcessingCapacity) {
                return new SchedulingInfo(Feasiblity.FAILED);
            }
            remainingProcessors -= minimumProcessingCapacity;
            double loadBound = processingCapacity - minimumProcessingCapacity;
            lightTasks.add(new ContainerTask(t.getPeriod(), t.getDeadline(), loadBound));
        }

        return scheduleLightTasks(lightTasks, remainingProcessors);
    }

    private SchedulingInfo scheduleLightTasks(Set<Task> tasks, long numberOfprocessors) {
        if (numberOfprocessors == 0 && tasks.size() > 0) {
            return new SchedulingInfo(Feasiblity.FAILED);
        }

        List<Task> taskList = new LinkedList<>(tasks);
        Collections.sort(taskList, (a, b) -> Double.compare(b.getDensity(), a.getDensity()));

        List<Double> bins = new ArrayList<>();
        for (int i = 0; i < numberOfprocessors; ++i) {
            bins.add((double) 0);
        }

        Iterator<Task> taskIterator = taskList.iterator();
        while (taskIterator.hasNext()) {
            Collections.sort(bins);
            Task t = taskIterator.next();
            taskIterator.remove();
            if (bins.get(0) + t.getDensity() <= 1) {
                bins.set(0, bins.get(0) + t.getDensity());
            } else {
                return new SchedulingInfo(Feasiblity.FAILED);
            }
        }

        return new SchedulingInfo(Feasiblity.SUCCEEDED);
    }

    @Override
    public PriorityManager getPriorityManager() {
        return priorityManager;
    }

    @Override
    public String getName() {
        return "JiangYi";
    }
}
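The check splits the set into heavy tasks (utilization at least 1), each of which reserves the floor of its processing capacity (workload - criticalPath) / (deadline - criticalPath) as dedicated processors, and light tasks, which are packed together with the heavy tasks' fractional remainders (wrapped as ContainerTasks) onto the remaining processors in decreasing-density order, always onto the currently least-loaded processor. A sketch of invoking the test, following the commented-out experiment in App.java:

SystemManager manager = new SystemManager(8);
DagTaskBuilder builder = new DagTaskBuilder();
JiangYi jy = new JiangYi(manager);

Set<DagTask> set = builder.generateRenyiTaskSet(7.0);
SortedTaskSet<DagTask> edfTasks = new SortedTaskSet<>(jy.getPriorityManager());
edfTasks.addAll(set);

SchedulingInfo info = jy.runSchedulabilityCheck(edfTasks);
// info carries Feasiblity.SUCCEEDED or Feasiblity.FAILED for this task set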
src/main/java/mvd/jester/tests/SchmidMottok.java

@@ -46,7 +46,7 @@ public class SchmidMottok extends AbstractTest<DagTask> {
     @Override
     public String getName() {
         return "SchmidMottok" + "_" + structure.getType();
     }

     private long calculateResponseTime(final Set<DagTask> tasks, final DagTask task) {

@@ -58,18 +58,31 @@ public class SchmidMottok extends AbstractTest<DagTask> {
             previousResponseTime = responseTime;
             double taskInterference = 0;
+            long occupiedProcessors = manager.getNumberOfProcessors() + 1;
+            // for (final DagTask t : tasks) {
+            //     if (t.getPeriod() < task.getPeriod()) {
+            //         final long numberOfProcessors = structure.getNumerOfThreads(t);
+            //         occupiedProcessors += numberOfProcessors;
+            //     }
+            // }
             for (final DagTask t : tasks) {
                 if (t.getPeriod() < task.getPeriod()) {
-                    final long numberOfThreads = t.getNumberOfThreads();
-                    for (int p = 0; p < numberOfThreads; ++p) {
-                        taskInterference += Math.min(structure.getTaskInterference(t,
-                                responseTimes, responseTime, p + 1),
-                                responseTime - minimumWcet + 1);
+                    final long numberOfProcessors = structure.getNumerOfThreads(t);
+                    for (int p = 0; p < numberOfProcessors; ++p) {
+                        if (occupiedProcessors > manager.getNumberOfProcessors()) {
+                            taskInterference += Math.min(structure.getTaskInterference(t,
+                                    responseTimes, responseTime, p + 1),
+                                    responseTime - minimumWcet + 1);
+                        }
                     }
                 }
             }

             taskInterference /= manager.getNumberOfProcessors();
-            final double selfInterference = getSelfInterference(task);
+            final double selfInterference = structure.getSelfInterference(task);
             final long totalInterference = (long) Math.floor(taskInterference + selfInterference);

@@ -79,45 +92,4 @@ public class SchmidMottok extends AbstractTest<DagTask> {
         return responseTime;
     }

-    private double getSelfInterference(final DagTask task) {
-        // final long numberOfThreads = task.getNumberOfThreads();
-        // TODO: Change back to number of threads
-        final long numberOfThreads = manager.getNumberOfProcessors();
-        double interference = task.getWorkload() - task.getCriticalPath();
-        interference /= numberOfThreads;
-        return interference;
-    }
-
-    // private double getTaskInterference(final DagTask task, final long interval,
-    //         final long parallelism) {
-    //     if (responseTimes.containsKey(task)) {
-    //         final long responseTime = responseTimes.get(task).getResponseTime();
-    //         final long minWcet = task.getCriticalPath();
-    //         final long period = task.getPeriod();
-    //         final long amountOfJobs =
-    //                 (LongMath.divide(interval + responseTime - minWcet, period, RoundingMode.FLOOR)
-    //                         + 1);
-    //         double workload = 0;
-    //         for (final Segment s : task.getWorkloadDistribution()) {
-    //             final long numberOfThreads =
-    //                     s.getNumberOfJobs() > 1 ? task.getNumberOfThreads() : 1;
-    //             if (numberOfThreads >= parallelism) {
-    //                 workload += (double) (s.getNumberOfJobs() * s.getJobWcet()) / numberOfThreads;
-    //             }
-    //         }
-    //         final double interference = amountOfJobs * workload;
-    //         return interference;
-    //     } else {
-    //         throw new RuntimeException("Task was not found in task set!");
-    //     }
-    // }
 }
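Per-task thread counts and self-interference are now obtained from the injected TypeFunction strategy, so a single test class covers all structure assumptions, and getName() is suffixed with the strategy's getType() tag. A sketch of the variants wired up in App.java:

SystemManager manager = new SystemManager(8);
SchmidMottok known    = new SchmidMottok(new TypeFunction.KnownStructure(), manager);
SchmidMottok knownMax = new SchmidMottok(new TypeFunction.KnownStructureWithMaxThreads(manager), manager);
SchmidMottok unknown  = new SchmidMottok(new TypeFunction.UnkownStructure(), manager);
// getName() yields "SchmidMottok_SK", "SchmidMottok_SKP" and "SchmidMottok_SU" respectively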
src/main/java/mvd/jester/tests/TypeFunction.java

@@ -6,39 +6,48 @@ import com.google.common.math.LongMath;
 import mvd.jester.info.TerminationInfo;
 import mvd.jester.model.DagTask;
 import mvd.jester.model.Segment;
+import mvd.jester.model.SystemManager;
 import mvd.jester.model.Task;

 public interface TypeFunction {

     public double getTaskInterference(final DagTask task,
             final Map<Task, TerminationInfo> responseTimes, final long interval,
             final long parallelism);

+    public double getSelfInterference(final DagTask task);
+
+    public long getNumerOfThreads(final DagTask task);
+
     public String getType();

-    public class UnkownStructure implements TypeFunction {
+    public class KnownStructureWithMaxThreads implements TypeFunction {
+
+        private final SystemManager manager;
+
+        public KnownStructureWithMaxThreads(final SystemManager manager) {
+            this.manager = manager;
+        }

         @Override
         public double getTaskInterference(final DagTask task,
                 final Map<Task, TerminationInfo> responseTimes, final long interval,
                 final long parallelism) {
             if (responseTimes.containsKey(task)) {
                 final long responseTime = responseTimes.get(task).getResponseTime();
                 final long minWcet = task.getCriticalPath();
                 final long period = task.getPeriod();
                 final long amountOfJobs =
                         (LongMath.divide(interval + responseTime - minWcet, period, RoundingMode.FLOOR)
                                 + 1);
                 double workload = 0;
                 for (final Segment s : task.getWorkloadDistribution()) {
                     final long numberOfThreads =
-                            task.getNumberOfThreads();
+                            s.getNumberOfJobs() > 1 ? manager.getNumberOfProcessors() : 1;
                     if (numberOfThreads >= parallelism) {
-                        workload += (double)(s.getNumberOfJobs() * s.getJobWcet()) / numberOfThreads;
+                        workload += (double) (s.getNumberOfJobs() * s.getJobWcet()) / numberOfThreads;
                     }
                 }

@@ -51,25 +60,90 @@ public interface TypeFunction {
         }

         @Override
+        public double getSelfInterference(final DagTask task) {
+            final long numberOfThreads = manager.getNumberOfProcessors();
+            double interference = task.getWorkload() - task.getCriticalPath();
+            interference /= numberOfThreads;
+            return interference;
+        }
+
+        @Override
+        public String getType() {
+            return "SKP";
+        }
+
+        @Override
+        public long getNumerOfThreads(final DagTask task) {
+            return manager.getNumberOfProcessors();
+        }
+    }
+
+    public class UnkownStructure implements TypeFunction {
+
+        @Override
+        public double getTaskInterference(final DagTask task,
+                final Map<Task, TerminationInfo> responseTimes, final long interval,
+                final long parallelism) {
+            if (responseTimes.containsKey(task)) {
+                final long responseTime = responseTimes.get(task).getResponseTime();
+                final long minWcet = task.getCriticalPath();
+                final long period = task.getPeriod();
+                final long amountOfJobs =
+                        (LongMath.divide(interval + responseTime - minWcet, period, RoundingMode.FLOOR)
+                                + 1);
+                double workload = 0;
+                for (final Segment s : task.getWorkloadDistribution()) {
+                    final long numberOfThreads = task.getNumberOfThreads();
+                    if (numberOfThreads >= parallelism) {
+                        workload += (double) (s.getNumberOfJobs() * s.getJobWcet()) / numberOfThreads;
+                    }
+                }
+                final double interference = amountOfJobs * workload;
+                return interference;
+            } else {
+                throw new RuntimeException("Task was not found in task set!");
+            }
+        }
+
+        @Override
+        public double getSelfInterference(final DagTask task) {
+            final long numberOfThreads = task.getNumberOfThreads();
+            double interference = task.getWorkload() - task.getCriticalPath();
+            interference /= numberOfThreads;
+            return interference;
+        }
+
+        @Override
         public String getType() {
             return "SU";
         }

+        @Override
+        public long getNumerOfThreads(final DagTask task) {
+            return task.getNumberOfThreads();
+        }
     }

-    public class KownStructure implements TypeFunction {
+    public class KnownStructure implements TypeFunction {

         @Override
         public double getTaskInterference(final DagTask task,
                 final Map<Task, TerminationInfo> responseTimes, final long interval,
                 final long parallelism) {
             if (responseTimes.containsKey(task)) {
                 final long responseTime = responseTimes.get(task).getResponseTime();
                 final long minWcet = task.getCriticalPath();
                 final long period = task.getPeriod();
                 final long amountOfJobs =
                         (LongMath.divide(interval + responseTime - minWcet, period, RoundingMode.FLOOR)
                                 + 1);
                 double workload = 0;

@@ -77,7 +151,8 @@ public interface TypeFunction {
                 final long numberOfThreads =
                         s.getNumberOfJobs() > 1 ? task.getNumberOfThreads() : 1;
                 if (numberOfThreads >= parallelism) {
-                    workload += (double)(s.getNumberOfJobs() * s.getJobWcet()) / numberOfThreads;
+                    workload += (double) (s.getNumberOfJobs() * s.getJobWcet()) / numberOfThreads;
                 }

@@ -90,9 +165,24 @@ public interface TypeFunction {
         }

         @Override
+        public double getSelfInterference(final DagTask task) {
+            final long numberOfThreads = task.getNumberOfThreads();
+            double interference = task.getWorkload() - task.getCriticalPath();
+            interference /= numberOfThreads;
+            return interference;
+        }
+
+        @Override
         public String getType() {
             return "SK";
         }

+        @Override
+        public long getNumerOfThreads(final DagTask task) {
+            return task.getNumberOfThreads();
+        }
     }
 }
\ No newline at end of file
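The three implementations differ only in how many threads a segment is charged with: KnownStructureWithMaxThreads ("SKP") assumes all processors for parallel segments, UnkownStructure ("SU") always uses the task's declared thread count, and KnownStructure ("SK") uses the declared thread count only for segments with more than one job. A short selection sketch; task and manager are assumed to be in scope:

// Choosing a structure assumption for the SchmidMottok analysis (see SchmidMottok.java above)
TypeFunction structure = new TypeFunction.KnownStructure();                         // tag "SK"
// TypeFunction structure = new TypeFunction.KnownStructureWithMaxThreads(manager); // tag "SKP"
// TypeFunction structure = new TypeFunction.UnkownStructure();                     // tag "SU"
long threads = structure.getNumerOfThreads(task);   // threads assumed for a given DagTask
double self = structure.getSelfInterference(task);  // (workload - criticalPath) / threads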
src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java

@@ -57,7 +57,7 @@ public class TestEarliestDeadlineFirst {
         assertTrue(edf.hasSimulator(DynamicForkJoin.class));
         assertTrue(edf.hasTest(new ChwaLee(manager)));
-        assertFalse(edf.hasTest(new SchmidMottok(new TypeFunction.KownStructure(), manager)));
+        assertFalse(edf.hasTest(new SchmidMottok(new TypeFunction.KnownStructure(), manager)));
         assertFalse(edf.hasTest(new MaiaBertogna(manager)));
         // assertTrue(edf.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
         // assertTrue(edf.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));