las3_pub / jester
Commit f0a948aa authored May 26, 2020 by Michael Schmid
DagUtils now in separate class and small changes
parent a52ab93d
Showing 18 changed files with 212 additions and 266 deletions.

src/main/java/mvd/jester/info/SchedulingInfo.java  +72 -55
src/main/java/mvd/jester/info/TerminationInfo.java  +1 -23
src/main/java/mvd/jester/model/DagTask.java  +0 -0
src/main/java/mvd/jester/model/SortedTaskSet.java  +7 -6
src/main/java/mvd/jester/model/SynchronousTask.java  +9 -8
src/main/java/mvd/jester/model/SystemSetup.java  +2 -2
src/main/java/mvd/jester/model/TreeJob.java  +2 -2
src/main/java/mvd/jester/tests/ChwaLee.java  +26 -28
src/main/java/mvd/jester/tests/FonsecaNelis.java  +0 -0
src/main/java/mvd/jester/tests/MaiaBertogna.java  +25 -27
src/main/java/mvd/jester/tests/MelaniButtazzo.java  +23 -25
src/main/java/mvd/jester/tests/SchmidMottok.java  +25 -26
src/main/java/mvd/jester/utils/BinaryDecompositionTree.java  +7 -7
src/main/java/mvd/jester/utils/DagUtils.java  +0 -0
src/main/java/mvd/jester/utils/Logger.java  +5 -5
src/test/java/mvd/jester/info/TestSchedulingInfo.java  +5 -45
src/test/java/mvd/jester/info/TestTerminationInfo.java  +2 -6
src/test/java/mvd/jester/model/TestDagUtils.java  +1 -1
src/main/java/mvd/jester/info/SchedulingInfo.java
package mvd.jester.info;

import java.util.HashSet;
import java.util.Collection;
import java.util.Optional;
import java.util.Set;
import mvd.jester.info.TerminationInfo.Level;

/**
 * SchedulingInfo
 */
public class SchedulingInfo {

    private final Feasiblity feasiblity;
    private final double parallelTaskRatio;
    private final double utilization;
    private final Set<TerminationInfo> terminationInfos;
    private Optional<TerminationInfo> failedTerminationInfo;

    public SchedulingInfo(double parallelTaskRatio, double utilization) {
        this.parallelTaskRatio = parallelTaskRatio;
        this.utilization = utilization;
        this.terminationInfos = new HashSet<>();
        this.failedTerminationInfo = Optional.empty();

    public SchedulingInfo(Collection<TerminationInfo> terminationInfos) {
        Optional<TerminationInfo> failedTerminationInfo =
                terminationInfos.stream().filter(t -> t.getLateness() > 0).findFirst();
        feasiblity = failedTerminationInfo.isPresent() ? Feasiblity.FAILED : Feasiblity.SUCCEEDED;
    }

    /**
     * @return the utilization
     * @return the feasiblity
     */
    public double getUtilization() {
        return utilization;

    public Feasiblity getFeasibility() {
        return feasiblity;
    }

    /**
     * @return the parallelTaskRatio
     */
    public double getParallelTaskRatio() {
        return parallelTaskRatio;

    public enum Feasiblity {
        FAILED, SUCCEEDED,
    }

    public SchedulingInfo(Set<TerminationInfo> terminationInfos, double parallelTaskRatio,
            double utilization) {
        this.terminationInfos = terminationInfos;
        this.parallelTaskRatio = parallelTaskRatio;
        this.utilization = utilization;
        failedTerminationInfo =
                terminationInfos.stream().filter(t -> t.getLateness() > 0).findFirst();
    }

    public boolean checkLevelFail(Level level) {
        return terminationInfos.stream()
                .anyMatch(t -> t.getLateness() > 0 && t.getTaskLevel() == level);
    }

    // private final double parallelTaskRatio;
    // private final double utilization;
    // private final Set<TerminationInfo> terminationInfos;
    // private Optional<TerminationInfo> failedTerminationInfo;

    public boolean checkTasksetFeasible() {
        // return terminationInfos.isEmpty();
        return !terminationInfos.stream().anyMatch(t -> t.getLateness() > 0);
    }

    // public SchedulingInfo(double parallelTaskRatio, double utilization) {
    // this.parallelTaskRatio = parallelTaskRatio;
    // this.utilization = utilization;
    // this.terminationInfos = new HashSet<>();
    // this.failedTerminationInfo = Optional.empty();
    // }

    public boolean addTerminationInfo(TerminationInfo terminationInfo) {
        return terminationInfos.add(terminationInfo);
    }

    // /**
    // * @return the utilization
    // */
    // public double getUtilization() {
    // return utilization;
    // }

    /**
     * @return the terminationInfos
     */
    public Set<TerminationInfo> getTerminationInfos() {
        return terminationInfos;
    }

    // /**
    // * @return the parallelTaskRatio
    // */
    // public double getParallelTaskRatio() {
    // return parallelTaskRatio;
    // }

    /**
     * @return the failedTerminationInfo
     */
    public Optional<TerminationInfo> getFailedTerminationInfo() {
        return failedTerminationInfo;
    }

    // public SchedulingInfo(Set<TerminationInfo> terminationInfos, double parallelTaskRatio,
    // double utilization) {
    // this.terminationInfos = terminationInfos;
    // this.parallelTaskRatio = parallelTaskRatio;
    // this.utilization = utilization;
    // failedTerminationInfo =
    // terminationInfos.stream().filter(t -> t.getLateness() > 0).findFirst();
    // }

    public void setFailedTerminationInfo(TerminationInfo failedTerminationInfo) {
        this.failedTerminationInfo = Optional.of(failedTerminationInfo);
    }

    // public boolean checkLevelFail(Level level) {
    // return terminationInfos.stream()
    // .anyMatch(t -> t.getLateness() > 0 && t.getTaskLevel() == level);
    // }

    // public boolean checkTasksetFeasible() {
    // // return terminationInfos.isEmpty();
    // return !terminationInfos.stream().anyMatch(t -> t.getLateness() > 0);
    // }

    // public boolean addTerminationInfo(TerminationInfo terminationInfo) {
    // return terminationInfos.add(terminationInfo);
    // }

    // /**
    // * @return the terminationInfos
    // */
    // public Set<TerminationInfo> getTerminationInfos() {
    // return terminationInfos;
    // }

    // /**
    // * @return the failedTerminationInfo
    // */
    // public Optional<TerminationInfo> getFailedTerminationInfo() {
    // return failedTerminationInfo;
    // }

    // public void setFailedTerminationInfo(TerminationInfo failedTerminationInfo) {
    // this.failedTerminationInfo = Optional.of(failedTerminationInfo);
    // }
}
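The reworked SchedulingInfo no longer tracks utilization or the parallel-task ratio itself; it derives a Feasiblity verdict from the supplied TerminationInfos alone. A minimal usage sketch under that reading of the diff (the numeric deadlines and response times are invented for illustration; List.of assumes Java 9+):

import java.util.List;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.SchedulingInfo.Feasiblity;
import mvd.jester.info.TerminationInfo;

public class SchedulingInfoSketch {
    public static void main(String[] args) {
        // TerminationInfo(deadline, responseTime): lateness = responseTime - deadline
        TerminationInfo onTime = new TerminationInfo(100, 80);  // lateness -20
        TerminationInfo late = new TerminationInfo(100, 120);   // lateness +20

        // Any termination info with positive lateness marks the whole run as FAILED
        SchedulingInfo info = new SchedulingInfo(List.of(onTime, late));
        System.out.println(info.getFeasibility() == Feasiblity.FAILED); // prints true
    }
}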
src/main/java/mvd/jester/info/TerminationInfo.java
@@ -9,30 +9,19 @@ public class TerminationInfo {
    private final long deadline;
    private final long responseTime;
    private final long lateness;
    private final Level taskLevel;

    public TerminationInfo(long releaseTime, long deadline, long responseTime) {
        this.releaseTime = releaseTime;
        this.deadline = deadline;
        this.responseTime = responseTime;
        this.lateness = responseTime - deadline;
        this.taskLevel = Level.LOW;
    }

    public TerminationInfo(long releaseTime, long deadline, long responseTime, Level taskLevel) {
        this.releaseTime = releaseTime;
        this.deadline = deadline;
        this.responseTime = responseTime;
        this.lateness = responseTime - deadline;
        this.taskLevel = taskLevel;
    }

    public TerminationInfo(long deadline, long responseTime, Level taskLevel) {
    public TerminationInfo(long deadline, long responseTime) {
        this.releaseTime = 0;
        this.deadline = deadline;
        this.responseTime = responseTime;
        this.lateness = responseTime - deadline;
        this.taskLevel = taskLevel;
    }

    /**
@@ -62,15 +51,4 @@ public class TerminationInfo {
    public long getResponseTime() {
        return responseTime;
    }

    /**
     * @return the taskLevel
     */
    public Level getTaskLevel() {
        return taskLevel;
    }

    public enum Level {
        HIGH, LOW
    }
}
src/main/java/mvd/jester/model/DagTask.java
This diff is collapsed.
src/main/java/mvd/jester/model/SortedTaskSet.java
@@ -17,7 +17,7 @@ public class SortedTaskSet<T extends Task> extends TreeSet<T> {
    private static final long serialVersionUID = 4808544133562675597L;

    public SortedTaskSet(PriorityManager priorityMananger) {
    public SortedTaskSet(final PriorityManager priorityMananger) {
        super((t1, t2) -> priorityMananger.compare(t1, t2));
    }
@@ -28,7 +28,8 @@ public class SortedTaskSet<T extends Task> extends TreeSet<T> {
    public double getParallelTaskRatio() {
        long parallelTasks = super.stream().filter(t -> t.getMaximumParallelism() > 1).count();
        final long parallelTasks =
                super.stream().filter(t -> t.getMaximumParallelism() > 1).count();
        return (double) parallelTasks / super.size();
    }
@@ -36,11 +37,11 @@ public class SortedTaskSet<T extends Task> extends TreeSet<T> {
    public static class Deserializer<T extends Task> implements JsonDeserializer<SortedTaskSet<T>> {

        @Override
        public SortedTaskSet<T> deserialize(JsonElement json, Type typeOfT,
                JsonDeserializationContext context) throws JsonParseException {
            SortedTaskSet<T> taskSet = new SortedTaskSet<>(new RateMonotonic());
        public SortedTaskSet<T> deserialize(final JsonElement json, final Type typeOfT,
                final JsonDeserializationContext context) throws JsonParseException {
            final SortedTaskSet<T> taskSet = new SortedTaskSet<>(new RateMonotonic());
            if (json.isJsonArray()) {
                JsonArray array = json.getAsJsonArray();
                final JsonArray array = json.getAsJsonArray();
                array.forEach(e -> {
                    taskSet.add(context.deserialize(e, SynchronousTask.class));
                });
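The Deserializer above is a standard Gson JsonDeserializer, so it has to be registered for the parameterized SortedTaskSet type before parsing. A hedged sketch of such a registration (the class and method here are illustrative and not taken from the repository; only Gson's public registerTypeAdapter/fromJson API and the Deserializer shown in the diff are assumed):

import java.lang.reflect.Type;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;

public class TaskSetParsingSketch {
    public static SortedTaskSet<SynchronousTask> parse(String json) {
        // The registered type must match the parameterized target type exactly
        Type setType = new TypeToken<SortedTaskSet<SynchronousTask>>() {}.getType();
        Gson gson = new GsonBuilder()
                .registerTypeAdapter(setType, new SortedTaskSet.Deserializer<SynchronousTask>())
                .create();
        return gson.fromJson(json, setType);
    }
}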
src/main/java/mvd/jester/model/SynchronousTask.java
@@ -14,8 +14,8 @@ public class SynchronousTask implements Task {
    private final long criticalPath;
    private final long numberOfThreads;

    public SynchronousTask(long period, long deadline, long numberOfThreads, Set<Segment> segments) {
    public SynchronousTask(final long period, final long deadline, final long numberOfThreads,
            final Set<Segment> segments) {
        this.deadline = deadline;
        this.period = period;
        this.numberOfThreads = numberOfThreads;
@@ -24,7 +24,8 @@ public class SynchronousTask implements Task {
        this.criticalPath = SynchronousUtils.calculateCriticalPath(segments);
    }

    public SynchronousTask(Set<Segment> segments, long period, long numberOfThreads) {
    public SynchronousTask(final Set<Segment> segments, final long period,
            final long numberOfThreads) {
        this(period, period, numberOfThreads, segments);
    }
@@ -70,7 +71,7 @@ public class SynchronousTask implements Task {
    @Override
    public long getMaximumParallelism() {
        long max = 0;
        for (Segment s : segments) {
        for (final Segment s : segments) {
            if (max < s.getNumberOfJobs()) {
                max = s.getNumberOfJobs();
            }
@@ -84,18 +85,18 @@ public class SynchronousTask implements Task {
    }

    public static class SynchronousUtils {
        public static long calculateWorkload(Set<Segment> segments) {
        public static long calculateWorkload(final Set<Segment> segments) {
            long workload = 0;
            for (Segment s : segments) {
            for (final Segment s : segments) {
                workload += s.getJobWcet() * s.getNumberOfJobs();
            }
            return workload;
        }

        public static long calculateCriticalPath(Set<Segment> segments) {
        public static long calculateCriticalPath(final Set<Segment> segments) {
            long criticalPath = 0;
            for (Segment s : segments) {
            for (final Segment s : segments) {
                criticalPath += s.getJobWcet();
            }
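SynchronousUtils collapses a synchronous (fork-join) task into two scalars: the workload sums every job of every segment, while the critical path takes one job WCET per segment. A self-contained sketch of the same arithmetic over plain (jobWcet, numberOfJobs) pairs, kept independent of the Segment class so it needs no repository types:

public class SegmentMathSketch {
    // workload = sum of jobWcet * numberOfJobs over all segments
    static long workload(long[][] segments) {
        long sum = 0;
        for (long[] s : segments) {
            sum += s[0] * s[1]; // s[0] = job WCET, s[1] = number of parallel jobs
        }
        return sum;
    }

    // critical path = sum of one job WCET per segment (the longest sequential chain)
    static long criticalPath(long[][] segments) {
        long sum = 0;
        for (long[] s : segments) {
            sum += s[0];
        }
        return sum;
    }

    public static void main(String[] args) {
        long[][] segments = {{4, 1}, {2, 3}, {5, 1}}; // illustrative values
        System.out.println(workload(segments));     // 4*1 + 2*3 + 5*1 = 15
        System.out.println(criticalPath(segments)); // 4 + 2 + 5 = 11
    }
}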
src/main/java/mvd/jester/model/SystemSetup.java
@@ -13,7 +13,7 @@ import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import mvd.jester.model.DagTask.DagUtils;
import mvd.jester.utils.DagUtils;

/**
 * TaskSet
@@ -219,7 +219,7 @@ public class SystemSetup<T extends Task> {
        return ThreadLocalRandom.current().nextLong(1, 100);
    }

    public Set<DagTask> generateTaskSet(double totalUtilization) {
    public Set<DagTask> generateTaskSet(final double totalUtilization) {
        final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();
        double currentUtilization = 0;
        while (currentUtilization <= totalUtilization) {
src/main/java/mvd/jester/model/TreeJob.java
@@ -4,7 +4,7 @@ public class TreeJob {
    private long wcet;
    private final Job job;

    public TreeJob(Job job) {
    public TreeJob(final Job job) {
        this.wcet = job.getWcet();
        this.job = job;
    }
@@ -26,7 +26,7 @@ public class TreeJob {
    /**
     * @param wcet the wcet to set
     */
    public void setWcet(long wcet) {
    public void setWcet(final long wcet) {
        this.wcet = wcet;
    }
}
src/main/java/mvd/jester/tests/ChwaLee.java
@@ -4,14 +4,12 @@ import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;
@@ -26,7 +24,7 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
    private final Map<SynchronousTask, TerminationInfo> responseTimes;
    private final PriorityManager priorityManager;

    public ChwaLee(long numberOfProcessors) {
    public ChwaLee(final long numberOfProcessors) {
        super(numberOfProcessors);
        this.responseTimes = new HashMap<>();
        this.priorityManager = new EarliestDeadlineFirst();
@@ -38,27 +36,26 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
    }

    @Override
    public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<SynchronousTask> tasks) {
    public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<SynchronousTask> tasks) {
        responseTimes.clear();
        for (SynchronousTask t : tasks) {
            Level taskLevel = tasks.headSet(t).size() <= tasks.size() / 2 ? Level.HIGH : Level.LOW;
            long responseTime = calculateResponseTime(tasks, t);
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, taskLevel));
        for (final SynchronousTask t : tasks) {
            final long responseTime = calculateResponseTime(tasks, t);
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
        }
        return new SchedulingInfo(new HashSet<>(responseTimes.values()),
                tasks.getParallelTaskRatio(), tasks.getUtilization());
        return new SchedulingInfo(responseTimes.values());
    }

    private long calculateResponseTime(Set<SynchronousTask> tasks, SynchronousTask task) {
        long minimumWcet = getMinimumWcet(task);
        long deadline = task.getDeadline();
    private long calculateResponseTime(final Set<SynchronousTask> tasks,
            final SynchronousTask task) {
        final long minimumWcet = getMinimumWcet(task);
        final long deadline = task.getDeadline();
        long taskInterference = 0;
        for (SynchronousTask t : tasks) {
        for (final SynchronousTask t : tasks) {
            if (!t.equals(task)) {
                long maxNumberOfJobs = t.getMaximumParallelism();
                final long maxNumberOfJobs = t.getMaximumParallelism();
                for (long p = 0; p < maxNumberOfJobs; ++p) {
                    taskInterference += Math.min(getTaskInterference(t, deadline, p + 1),
                            deadline - minimumWcet);
@@ -68,23 +65,24 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
        long selfInterference = 0;
        long maxNumberOfJobs = task.getMaximumParallelism();
        final long maxNumberOfJobs = task.getMaximumParallelism();
        for (long p = 0; p < maxNumberOfJobs; ++p) {
            selfInterference += Math.min(getSelfInterference(task, deadline, p + 1),
                    deadline - minimumWcet);
        }

        boolean feasible = taskInterference + selfInterference <= numberOfProcessors
        final boolean feasible = taskInterference + selfInterference <= numberOfProcessors
                * (deadline - minimumWcet);

        return feasible ? deadline - 1 : deadline + 1;
    }

    private long getSelfInterference(SynchronousTask task, long deadline, long p) {
    private long getSelfInterference(final SynchronousTask task, final long deadline, final long p) {
        long selfInterference = 0;
        for (Segment s : task.getWorkloadDistribution()) {
        for (final Segment s : task.getWorkloadDistribution()) {
            if (s.getNumberOfJobs() >= p + 1) {
                selfInterference += s.getJobWcet();
            }
@@ -92,24 +90,24 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
        return selfInterference;
    }

    private long getTaskInterference(SynchronousTask t, long deadline, long p) {
        long numberOfBodyJobs = LongMath.divide(deadline, t.getPeriod(), RoundingMode.FLOOR);
    private long getTaskInterference(final SynchronousTask t, final long deadline, final long p) {
        final long numberOfBodyJobs = LongMath.divide(deadline, t.getPeriod(), RoundingMode.FLOOR);
        long workloadOfBodyJobs = 0;
        for (Segment s : t.getWorkloadDistribution()) {
        for (final Segment s : t.getWorkloadDistribution()) {
            if (s.getNumberOfJobs() >= p) {
                workloadOfBodyJobs += s.getJobWcet();
            }
        }
        long boundedBodyWorkload = numberOfBodyJobs * workloadOfBodyJobs;
        final long boundedBodyWorkload = numberOfBodyJobs * workloadOfBodyJobs;
        long boundedCarryInWorkload = 0;
        long remainingLength = deadline % t.getPeriod();
        final long remainingLength = deadline % t.getPeriod();
        long carryInLength = 0;
        List<Segment> segmentList = new ArrayList<>(t.getWorkloadDistribution());
        final List<Segment> segmentList = new ArrayList<>(t.getWorkloadDistribution());
        Collections.reverse(segmentList);
        for (Segment s : segmentList) {
        for (final Segment s : segmentList) {
            carryInLength += s.getJobWcet();
            if (s.getNumberOfJobs() >= p && remainingLength > carryInLength) {
                boundedCarryInWorkload += s.getJobWcet();
@@ -124,9 +122,9 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
        return boundedBodyWorkload + boundedCarryInWorkload;
    }

    private long getMinimumWcet(SynchronousTask task) {
    private long getMinimumWcet(final SynchronousTask task) {
        long minWcet = 0;
        for (Segment s : task.getWorkloadDistribution()) {
        for (final Segment s : task.getWorkloadDistribution()) {
            minWcet += s.getJobWcet();
        }
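Unlike the response-time iterations below, ChwaLee evaluates a single deadline-based condition and encodes the verdict as a pseudo response time: deadline - 1 when the bounded demand fits, deadline + 1 when it does not, so the resulting TerminationInfo carries non-positive or positive lateness respectively. A reduced sketch of that encoding with the interference terms passed in as plain numbers (this helper is illustrative, not part of the repository):

public class DeadlineVerdictSketch {
    static long pseudoResponseTime(long taskInterference, long selfInterference,
            long numberOfProcessors, long deadline, long minimumWcet) {
        // Condition mirrored from ChwaLee.calculateResponseTime in the diff above
        final boolean feasible = taskInterference + selfInterference
                <= numberOfProcessors * (deadline - minimumWcet);
        // responseTime <= deadline  =>  lateness <= 0  =>  counted as schedulable
        return feasible ? deadline - 1 : deadline + 1;
    }
}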
src/main/java/mvd/jester/tests/FonsecaNelis.java
This diff is collapsed.
src/main/java/mvd/jester/tests/MaiaBertogna.java
@@ -2,13 +2,11 @@ package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;
@@ -23,7 +21,7 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
    private final Map<SynchronousTask, TerminationInfo> responseTimes;
    private final PriorityManager priorityManager;

    public MaiaBertogna(long numberOfProcessors) {
    public MaiaBertogna(final long numberOfProcessors) {
        super(numberOfProcessors);
        this.responseTimes = new HashMap<>();
        this.priorityManager = new RateMonotonic();
@@ -35,16 +33,14 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
    }

    @Override
    public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<SynchronousTask> tasks) {
    public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<SynchronousTask> tasks) {
        responseTimes.clear();
        for (SynchronousTask t : tasks) {
            Level taskLevel = tasks.headSet(t).size() <= tasks.size() / 2 ? Level.HIGH : Level.LOW;
            long responseTime = calculateResponseTime(tasks, t);
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, taskLevel));
        for (final SynchronousTask t : tasks) {
            final long responseTime = calculateResponseTime(tasks, t);
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
        }
        return new SchedulingInfo(new HashSet<>(responseTimes.values()),
                tasks.getParallelTaskRatio(), tasks.getUtilization());
        return new SchedulingInfo(responseTimes.values());
    }

    @Override
@@ -52,8 +48,9 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
        return "MaiaBertogna";
    }

    private long calculateResponseTime(Set<SynchronousTask> tasks, SynchronousTask task) {
        long minimumWcet = getMinimumWcet(task);
    private long calculateResponseTime(final Set<SynchronousTask> tasks,
            final SynchronousTask task) {
        final long minimumWcet = getMinimumWcet(task);
        long responseTime = minimumWcet;
        long previousResponseTime = 0;
@@ -61,9 +58,9 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
            previousResponseTime = responseTime;
            long taskInterference = 0;
            for (SynchronousTask t : tasks) {
            for (final SynchronousTask t : tasks) {
                if (t.getPeriod() < task.getPeriod()) {
                    long maxNumberOfJobsOfT = t.getMaximumParallelism();
                    final long maxNumberOfJobsOfT = t.getMaximumParallelism();
                    for (int p = 0; p < maxNumberOfJobsOfT; ++p) {
                        taskInterference += Math.min(getTaskInterference(t, responseTime, p + 1),
                                responseTime - minimumWcet + 1);
@@ -73,13 +70,13 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
            long selfInterference = 0;
            long maxNumberOfJobs = task.getMaximumParallelism();
            final long maxNumberOfJobs = task.getMaximumParallelism();
            for (int p = 0; p < maxNumberOfJobs; ++p) {
                selfInterference += Math.min(getSelfInterference(task, p + 1),
                        responseTime - minimumWcet + 1);
            }
            long totalInterference = LongMath.divide(taskInterference + selfInterference,
            final long totalInterference = LongMath.divide(taskInterference + selfInterference,
                    numberOfProcessors, RoundingMode.FLOOR);
            responseTime = minimumWcet + totalInterference;
@@ -89,10 +86,10 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
        return responseTime;
    }

    private long getSelfInterference(SynchronousTask task, long parallelism) {
    private long getSelfInterference(final SynchronousTask task, final long parallelism) {
        long interference = 0;
        for (Segment s : task.getWorkloadDistribution()) {
        for (final Segment s : task.getWorkloadDistribution()) {
            if (s.getNumberOfJobs() >= parallelism + 1) {
                interference += s.getJobWcet();
            }
@@ -101,24 +98,25 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
        return interference;
    }

    private long getTaskInterference(SynchronousTask task, long interval, long parallelism) {
    private long getTaskInterference(final SynchronousTask task, final long interval,
            final long parallelism) {
        if (responseTimes.containsKey(task)) {
            long responseTime = responseTimes.get(task).getResponseTime();
            long minWcet = getMinimumWcet(task);
            long period = task.getPeriod();
            long numberOfJobs =
            final long responseTime = responseTimes.get(task).getResponseTime();
            final long minWcet = getMinimumWcet(task);
            final long period = task.getPeriod();
            final long numberOfJobs =
                    (LongMath.divide(interval + responseTime - minWcet, period, RoundingMode.FLOOR)
                            + 1);
            long workload = 0;
            for (Segment s : task.getWorkloadDistribution()) {
            for (final Segment s : task.getWorkloadDistribution()) {
                if (s.getNumberOfJobs() >= parallelism) {
                    workload += s.getJobWcet();
                }
            }
            long interference = numberOfJobs * workload;
            final long interference = numberOfJobs * workload;
            return interference;
        } else {
@@ -126,9 +124,9 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
        }
    }

    private long getMinimumWcet(SynchronousTask task) {
    private long getMinimumWcet(final SynchronousTask task) {
        long minWcet = 0;
        for (Segment s : task.getWorkloadDistribution()) {
        for (final Segment s : task.getWorkloadDistribution()) {
            minWcet += s.getJobWcet();
        }
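MaiaBertogna (and MelaniButtazzo below) wrap their interference bounds in the same fixed-point iteration: start the response time at the task's minimum demand and recompute until the value stops changing. A generic sketch of that loop with the interference bound left as a pluggable function (the LongUnaryOperator parameter is an illustration, not part of the repository API):

import java.util.function.LongUnaryOperator;

public class ResponseTimeIterationSketch {
    // Iterates R = minimumWcet + interference(R) until R converges
    static long fixedPoint(long minimumWcet, LongUnaryOperator interferenceOf) {
        long responseTime = minimumWcet;
        long previousResponseTime;
        do {
            previousResponseTime = responseTime;
            responseTime = minimumWcet + interferenceOf.applyAsLong(responseTime);
        } while (previousResponseTime != responseTime);
        return responseTime;
    }
}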
src/main/java/mvd/jester/tests/MelaniButtazzo.java
@@ -2,13 +2,11 @@ package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.math.DoubleMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
@@ -20,7 +18,7 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
    private final Map<Task, TerminationInfo> responseTimes;
    private final PriorityManager priorityManager;

    public MelaniButtazzo(long numberOfProcessors) {
    public MelaniButtazzo(final long numberOfProcessors) {
        super(numberOfProcessors);
        this.responseTimes = new HashMap<>();
        this.priorityManager = new RateMonotonic();
@@ -37,19 +35,18 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
    }

    @Override
    public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
    public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<DagTask> tasks) {
        responseTimes.clear();
        for (DagTask t : tasks) {
            long responseTime = calculateResponseTime(tasks, t);
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, Level.HIGH));
        for (final DagTask t : tasks) {
            final long responseTime = calculateResponseTime(tasks, t);
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
        }
        return new SchedulingInfo(new HashSet<>(responseTimes.values()),
                tasks.getParallelTaskRatio(), tasks.getUtilization());
        return new SchedulingInfo(responseTimes.values());
    }

    private long calculateResponseTime(Set<DagTask> tasks, DagTask task) {
        long minimumWcet = task.getCriticalPath();
    private long calculateResponseTime(final Set<DagTask> tasks, final DagTask task) {
        final long minimumWcet = task.getCriticalPath();
        long responseTime = minimumWcet;
        long previousResponseTime = 0;
@@ -57,16 +54,16 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
            previousResponseTime = responseTime;
            double taskInterference = 0;
            for (DagTask t : tasks) {
            for (final DagTask t : tasks) {
                if (t.getPeriod() < task.getPeriod()) {
                    taskInterference += getTaskInterference(t, responseTime);
                }
            }
            taskInterference /= numberOfProcessors;
            double selfInterference = getSelfInterference(task);
            final double selfInterference = getSelfInterference(task);

            long totalInterference = (long) Math.floor(taskInterference + selfInterference);
            final long totalInterference = (long) Math.floor(taskInterference + selfInterference);

            responseTime = minimumWcet + totalInterference;
        } while (previousResponseTime != responseTime);
@@ -74,28 +71,29 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
        return responseTime;
    }

    private double getSelfInterference(DagTask task) {
        long criticalPath = task.getCriticalPath();
        long workload = task.getWorkload();
    private double getSelfInterference(final DagTask task) {
        final long criticalPath = task.getCriticalPath();
        final long workload = task.getWorkload();

        return (double) (workload - criticalPath) / numberOfProcessors;
    }

    private double getTaskInterference(DagTask task, long interval) {
    private double getTaskInterference(final DagTask task, final long interval) {
        if (responseTimes.containsKey(task)) {
            long responseTime = responseTimes.get(task).getResponseTime();
            long singleWorkload = task.getWorkload();
            long period = task.getPeriod();
            final long responseTime = responseTimes.get(task).getResponseTime();
            final long singleWorkload = task.getWorkload();
            final long period = task.getPeriod();

            double nominator =
            final double nominator =
                    (interval + responseTime - (double) singleWorkload / numberOfProcessors);
            long amountOfJobs = DoubleMath.roundToLong(nominator / period, RoundingMode.FLOOR);
            final long amountOfJobs =
                    DoubleMath.roundToLong(nominator / period, RoundingMode.FLOOR);

            double carryOutPortion =
            final double carryOutPortion =
                    Math.min(singleWorkload, numberOfProcessors * (nominator % period));
            double interference = amountOfJobs * singleWorkload + carryOutPortion;
            final double interference = amountOfJobs * singleWorkload + carryOutPortion;

            return interference;
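Restating getTaskInterference above as a formula, with m = numberOfProcessors, W = the interfering task's workload, T = its period and R = its stored response time: for a window of length L, the charged interference is floor(x / T) * W + min(W, m * (x mod T)), where x = L + R - W / m. Whole jobs contribute their full workload, and the trailing carry-out job is capped by how much of the leftover window m processors can absorb.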
src/main/java/mvd/jester/tests/SchmidMottok.java
@@ -2,13 +2,11 @@ package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set