Commit f0a948aa by Michael Schmid

DagUtils now in separate class and small changes

parent a52ab93d
package mvd.jester.info;
import java.util.HashSet;
import java.util.Collection;
import java.util.Optional;
import java.util.Set;
import mvd.jester.info.TerminationInfo.Level;
/**
* SchedulingInfo
*/
public class SchedulingInfo {
private final Feasiblity feasiblity;
private final double parallelTaskRatio;
private final double utilization;
private final Set<TerminationInfo> terminationInfos;
private Optional<TerminationInfo> failedTerminationInfo;
public SchedulingInfo(double parallelTaskRatio, double utilization) {
this.parallelTaskRatio = parallelTaskRatio;
this.utilization = utilization;
this.terminationInfos = new HashSet<>();
this.failedTerminationInfo = Optional.empty();
public SchedulingInfo(Collection<TerminationInfo> terminationInfos) {
Optional<TerminationInfo> failedTerminationInfo =
terminationInfos.stream().filter(t -> t.getLateness() > 0).findFirst();
feasiblity = failedTerminationInfo.isPresent() ? Feasiblity.FAILED : Feasiblity.SUCCEEDED;
}
/**
* @return the utilization
* @return the feasiblity
*/
public double getUtilization() {
return utilization;
public Feasiblity getFeasibility() {
return feasiblity;
}
/**
* @return the parallelTaskRatio
*/
public double getParallelTaskRatio() {
return parallelTaskRatio;
/**
 * Result of a schedulability check: FAILED if any task missed its deadline,
 * SUCCEEDED otherwise.
 *
 * NOTE(review): identifier keeps the original misspelling "Feasiblity" (sic);
 * renaming would break external callers such as {@code getFeasibility()} users.
 */
public enum Feasiblity {
FAILED, SUCCEEDED,
}
/**
 * Creates a scheduling info from precomputed termination results.
 *
 * @param terminationInfos per-task termination results (stored by reference,
 *        not copied — TODO confirm callers do not mutate it afterwards)
 * @param parallelTaskRatio ratio of parallel tasks in the task set
 * @param utilization total utilization of the task set
 */
public SchedulingInfo(Set<TerminationInfo> terminationInfos, double parallelTaskRatio,
double utilization) {
this.terminationInfos = terminationInfos;
this.parallelTaskRatio = parallelTaskRatio;
this.utilization = utilization;
// remember the first task that missed its deadline (lateness > 0), if any
failedTerminationInfo =
terminationInfos.stream().filter(t -> t.getLateness() > 0).findFirst();
}
/**
 * Checks whether any task of the given criticality level missed its deadline.
 *
 * @param level the task level to inspect
 * @return true if some termination info at this level has positive lateness
 */
public boolean checkLevelFail(Level level) {
    for (final TerminationInfo info : terminationInfos) {
        if (info.getTaskLevel() == level && info.getLateness() > 0) {
            return true;
        }
    }
    return false;
}
// private final double parallelTaskRatio;
// private final double utilization;
// private final Set<TerminationInfo> terminationInfos;
// private Optional<TerminationInfo> failedTerminationInfo;
/**
 * Checks whether the whole task set is feasible, i.e. no task finished
 * later than its deadline.
 *
 * @return true if no termination info has positive lateness
 */
public boolean checkTasksetFeasible() {
    // noneMatch is the idiomatic, direct form of !anyMatch
    return terminationInfos.stream().noneMatch(t -> t.getLateness() > 0);
}
// public SchedulingInfo(double parallelTaskRatio, double utilization) {
// this.parallelTaskRatio = parallelTaskRatio;
// this.utilization = utilization;
// this.terminationInfos = new HashSet<>();
// this.failedTerminationInfo = Optional.empty();
// }
/**
 * Registers a termination info for this scheduling result.
 *
 * @param terminationInfo the info to record
 * @return true if the set did not already contain this info
 */
public boolean addTerminationInfo(TerminationInfo terminationInfo) {
    final boolean added = terminationInfos.add(terminationInfo);
    return added;
}
// /**
// * @return the utilization
// */
// public double getUtilization() {
// return utilization;
// }
/**
* @return the terminationInfos
*/
public Set<TerminationInfo> getTerminationInfos() {
// NOTE(review): returns the internal mutable set by reference — callers can modify it
return terminationInfos;
}
// /**
// * @return the parallelTaskRatio
// */
// public double getParallelTaskRatio() {
// return parallelTaskRatio;
// }
/**
* @return the failedTerminationInfo
*/
public Optional<TerminationInfo> getFailedTerminationInfo() {
// empty unless a deadline-missing info was found at construction or set explicitly
return failedTerminationInfo;
}
// public SchedulingInfo(Set<TerminationInfo> terminationInfos, double parallelTaskRatio,
// double utilization) {
// this.terminationInfos = terminationInfos;
// this.parallelTaskRatio = parallelTaskRatio;
// this.utilization = utilization;
// failedTerminationInfo =
// terminationInfos.stream().filter(t -> t.getLateness() > 0).findFirst();
// }
public void setFailedTerminationInfo(TerminationInfo failedTerminationInfo) {
// Optional.of throws NullPointerException if the argument is null
this.failedTerminationInfo = Optional.of(failedTerminationInfo);
}
// public boolean checkLevelFail(Level level) {
// return terminationInfos.stream()
// .anyMatch(t -> t.getLateness() > 0 && t.getTaskLevel() == level);
// }
// public boolean checkTasksetFeasible() {
// // return terminationInfos.isEmpty();
// return !terminationInfos.stream().anyMatch(t -> t.getLateness() > 0);
// }
// public boolean addTerminationInfo(TerminationInfo terminationInfo) {
// return terminationInfos.add(terminationInfo);
// }
// /**
// * @return the terminationInfos
// */
// public Set<TerminationInfo> getTerminationInfos() {
// return terminationInfos;
// }
// /**
// * @return the failedTerminationInfo
// */
// public Optional<TerminationInfo> getFailedTerminationInfo() {
// return failedTerminationInfo;
// }
// public void setFailedTerminationInfo(TerminationInfo failedTerminationInfo) {
// this.failedTerminationInfo = Optional.of(failedTerminationInfo);
// }
}
......@@ -9,30 +9,19 @@ public class TerminationInfo {
private final long deadline;
private final long responseTime;
private final long lateness;
private final Level taskLevel;
public TerminationInfo(long releaseTime, long deadline, long responseTime) {
this.releaseTime = releaseTime;
this.deadline = deadline;
this.responseTime = responseTime;
this.lateness = responseTime - deadline;
this.taskLevel = Level.LOW;
}
public TerminationInfo(long releaseTime, long deadline, long responseTime, Level taskLevel) {
this.releaseTime = releaseTime;
this.deadline = deadline;
this.responseTime = responseTime;
this.lateness = responseTime - deadline;
this.taskLevel = taskLevel;
}
public TerminationInfo(long deadline, long responseTime, Level taskLevel) {
public TerminationInfo(long deadline, long responseTime) {
this.releaseTime = 0;
this.deadline = deadline;
this.responseTime = responseTime;
this.lateness = responseTime - deadline;
this.taskLevel = taskLevel;
}
/**
......@@ -62,15 +51,4 @@ public class TerminationInfo {
public long getResponseTime() {
return responseTime;
}
/**
* @return the taskLevel
*/
public Level getTaskLevel() {
return taskLevel;
}
public enum Level {
HIGH, LOW
}
}
......@@ -17,7 +17,7 @@ public class SortedTaskSet<T extends Task> extends TreeSet<T> {
private static final long serialVersionUID = 4808544133562675597L;
public SortedTaskSet(PriorityManager priorityMananger) {
public SortedTaskSet(final PriorityManager priorityMananger) {
super((t1, t2) -> priorityMananger.compare(t1, t2));
}
......@@ -28,7 +28,8 @@ public class SortedTaskSet<T extends Task> extends TreeSet<T> {
public double getParallelTaskRatio() {
long parallelTasks = super.stream().filter(t -> t.getMaximumParallelism() > 1).count();
final long parallelTasks =
super.stream().filter(t -> t.getMaximumParallelism() > 1).count();
return (double) parallelTasks / super.size();
}
......@@ -36,11 +37,11 @@ public class SortedTaskSet<T extends Task> extends TreeSet<T> {
public static class Deserializer<T extends Task> implements JsonDeserializer<SortedTaskSet<T>> {
@Override
public SortedTaskSet<T> deserialize(JsonElement json, Type typeOfT,
JsonDeserializationContext context) throws JsonParseException {
SortedTaskSet<T> taskSet = new SortedTaskSet<>(new RateMonotonic());
public SortedTaskSet<T> deserialize(final JsonElement json, final Type typeOfT,
final JsonDeserializationContext context) throws JsonParseException {
final SortedTaskSet<T> taskSet = new SortedTaskSet<>(new RateMonotonic());
if (json.isJsonArray()) {
JsonArray array = json.getAsJsonArray();
final JsonArray array = json.getAsJsonArray();
array.forEach(e -> {
taskSet.add(context.deserialize(e, SynchronousTask.class));
});
......
......@@ -14,8 +14,8 @@ public class SynchronousTask implements Task {
private final long criticalPath;
private final long numberOfThreads;
public SynchronousTask(long period, long deadline, long numberOfThreads,
Set<Segment> segments) {
public SynchronousTask(final long period, final long deadline, final long numberOfThreads,
final Set<Segment> segments) {
this.deadline = deadline;
this.period = period;
this.numberOfThreads = numberOfThreads;
......@@ -24,7 +24,8 @@ public class SynchronousTask implements Task {
this.criticalPath = SynchronousUtils.calculateCriticalPath(segments);
}
public SynchronousTask(Set<Segment> segments, long period, long numberOfThreads) {
public SynchronousTask(final Set<Segment> segments, final long period,
final long numberOfThreads) {
this(period, period, numberOfThreads, segments);
}
......@@ -70,7 +71,7 @@ public class SynchronousTask implements Task {
@Override
public long getMaximumParallelism() {
long max = 0;
for (Segment s : segments) {
for (final Segment s : segments) {
if (max < s.getNumberOfJobs()) {
max = s.getNumberOfJobs();
}
......@@ -84,18 +85,18 @@ public class SynchronousTask implements Task {
}
public static class SynchronousUtils {
public static long calculateWorkload(Set<Segment> segments) {
public static long calculateWorkload(final Set<Segment> segments) {
long workload = 0;
for (Segment s : segments) {
for (final Segment s : segments) {
workload += s.getJobWcet() * s.getNumberOfJobs();
}
return workload;
}
public static long calculateCriticalPath(Set<Segment> segments) {
public static long calculateCriticalPath(final Set<Segment> segments) {
long criticalPath = 0;
for (Segment s : segments) {
for (final Segment s : segments) {
criticalPath += s.getJobWcet();
}
......
......@@ -13,7 +13,7 @@ import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import mvd.jester.model.DagTask.DagUtils;
import mvd.jester.utils.DagUtils;
/**
* TaskSet
......@@ -219,7 +219,7 @@ public class SystemSetup<T extends Task> {
return ThreadLocalRandom.current().nextLong(1, 100);
}
public Set<DagTask> generateTaskSet(double totalUtilization) {
public Set<DagTask> generateTaskSet(final double totalUtilization) {
final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();
double currentUtilization = 0;
while (currentUtilization <= totalUtilization) {
......
......@@ -4,7 +4,7 @@ public class TreeJob {
private long wcet;
private final Job job;
public TreeJob(Job job) {
public TreeJob(final Job job) {
this.wcet = job.getWcet();
this.job = job;
}
......@@ -26,7 +26,7 @@ public class TreeJob {
/**
* @param wcet the wcet to set
*/
public void setWcet(long wcet) {
public void setWcet(final long wcet) {
this.wcet = wcet;
}
}
......@@ -4,14 +4,12 @@ import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;
......@@ -26,7 +24,7 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
private final Map<SynchronousTask, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public ChwaLee(long numberOfProcessors) {
public ChwaLee(final long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new EarliestDeadlineFirst();
......@@ -38,27 +36,26 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
}
@Override
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<SynchronousTask> tasks) {
public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<SynchronousTask> tasks) {
responseTimes.clear();
for (SynchronousTask t : tasks) {
Level taskLevel = tasks.headSet(t).size() <= tasks.size() / 2 ? Level.HIGH : Level.LOW;
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, taskLevel));
for (final SynchronousTask t : tasks) {
final long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
tasks.getParallelTaskRatio(), tasks.getUtilization());
return new SchedulingInfo(responseTimes.values());
}
private long calculateResponseTime(Set<SynchronousTask> tasks, SynchronousTask task) {
long minimumWcet = getMinimumWcet(task);
long deadline = task.getDeadline();
private long calculateResponseTime(final Set<SynchronousTask> tasks,
final SynchronousTask task) {
final long minimumWcet = getMinimumWcet(task);
final long deadline = task.getDeadline();
long taskInterference = 0;
for (SynchronousTask t : tasks) {
for (final SynchronousTask t : tasks) {
if (!t.equals(task)) {
long maxNumberOfJobs = t.getMaximumParallelism();
final long maxNumberOfJobs = t.getMaximumParallelism();
for (long p = 0; p < maxNumberOfJobs; ++p) {
taskInterference += Math.min(getTaskInterference(t, deadline, p + 1),
deadline - minimumWcet);
......@@ -68,23 +65,24 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
long selfInterference = 0;
long maxNumberOfJobs = task.getMaximumParallelism();
final long maxNumberOfJobs = task.getMaximumParallelism();
for (long p = 0; p < maxNumberOfJobs; ++p) {
selfInterference +=
Math.min(getSelfInterference(task, deadline, p + 1), deadline - minimumWcet);
}
boolean feasible = taskInterference + selfInterference <= numberOfProcessors
final boolean feasible = taskInterference + selfInterference <= numberOfProcessors
* (deadline - minimumWcet);
return feasible ? deadline - 1 : deadline + 1;
}
private long getSelfInterference(SynchronousTask task, long deadline, long p) {
private long getSelfInterference(final SynchronousTask task, final long deadline,
final long p) {
long selfInterference = 0;
for (Segment s : task.getWorkloadDistribution()) {
for (final Segment s : task.getWorkloadDistribution()) {
if (s.getNumberOfJobs() >= p + 1) {
selfInterference += s.getJobWcet();
}
......@@ -92,24 +90,24 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
return selfInterference;
}
private long getTaskInterference(SynchronousTask t, long deadline, long p) {
long numberOfBodyJobs = LongMath.divide(deadline, t.getPeriod(), RoundingMode.FLOOR);
private long getTaskInterference(final SynchronousTask t, final long deadline, final long p) {
final long numberOfBodyJobs = LongMath.divide(deadline, t.getPeriod(), RoundingMode.FLOOR);
long workloadOfBodyJobs = 0;
for (Segment s : t.getWorkloadDistribution()) {
for (final Segment s : t.getWorkloadDistribution()) {
if (s.getNumberOfJobs() >= p) {
workloadOfBodyJobs += s.getJobWcet();
}
}
long boundedBodyWorkload = numberOfBodyJobs * workloadOfBodyJobs;
final long boundedBodyWorkload = numberOfBodyJobs * workloadOfBodyJobs;
long boundedCarryInWorkload = 0;
long remainingLength = deadline % t.getPeriod();
final long remainingLength = deadline % t.getPeriod();
long carryInLength = 0;
List<Segment> segmentList = new ArrayList<>(t.getWorkloadDistribution());
final List<Segment> segmentList = new ArrayList<>(t.getWorkloadDistribution());
Collections.reverse(segmentList);
for (Segment s : segmentList) {
for (final Segment s : segmentList) {
carryInLength += s.getJobWcet();
if (s.getNumberOfJobs() >= p && remainingLength > carryInLength) {
boundedCarryInWorkload += s.getJobWcet();
......@@ -124,9 +122,9 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
return boundedBodyWorkload + boundedCarryInWorkload;
}
private long getMinimumWcet(SynchronousTask task) {
private long getMinimumWcet(final SynchronousTask task) {
long minWcet = 0;
for (Segment s : task.getWorkloadDistribution()) {
for (final Segment s : task.getWorkloadDistribution()) {
minWcet += s.getJobWcet();
}
......
......@@ -2,13 +2,11 @@ package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;
......@@ -23,7 +21,7 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
private final Map<SynchronousTask, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public MaiaBertogna(long numberOfProcessors) {
public MaiaBertogna(final long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
......@@ -35,16 +33,14 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
}
@Override
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<SynchronousTask> tasks) {
public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<SynchronousTask> tasks) {
responseTimes.clear();
for (SynchronousTask t : tasks) {
Level taskLevel = tasks.headSet(t).size() <= tasks.size() / 2 ? Level.HIGH : Level.LOW;
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, taskLevel));
for (final SynchronousTask t : tasks) {
final long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
tasks.getParallelTaskRatio(), tasks.getUtilization());
return new SchedulingInfo(responseTimes.values());
}
@Override
......@@ -52,8 +48,9 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
return "MaiaBertogna";
}
private long calculateResponseTime(Set<SynchronousTask> tasks, SynchronousTask task) {
long minimumWcet = getMinimumWcet(task);
private long calculateResponseTime(final Set<SynchronousTask> tasks,
final SynchronousTask task) {
final long minimumWcet = getMinimumWcet(task);
long responseTime = minimumWcet;
long previousResponseTime = 0;
......@@ -61,9 +58,9 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
previousResponseTime = responseTime;
long taskInterference = 0;
for (SynchronousTask t : tasks) {
for (final SynchronousTask t : tasks) {
if (t.getPeriod() < task.getPeriod()) {
long maxNumberOfJobsOfT = t.getMaximumParallelism();
final long maxNumberOfJobsOfT = t.getMaximumParallelism();
for (int p = 0; p < maxNumberOfJobsOfT; ++p) {
taskInterference += Math.min(getTaskInterference(t, responseTime, p + 1),
responseTime - minimumWcet + 1);
......@@ -73,13 +70,13 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
long selfInterference = 0;
long maxNumberOfJobs = task.getMaximumParallelism();
final long maxNumberOfJobs = task.getMaximumParallelism();
for (int p = 0; p < maxNumberOfJobs; ++p) {
selfInterference +=
Math.min(getSelfInterference(task, p + 1), responseTime - minimumWcet + 1);
}
long totalInterference = LongMath.divide(taskInterference + selfInterference,
final long totalInterference = LongMath.divide(taskInterference + selfInterference,
numberOfProcessors, RoundingMode.FLOOR);
responseTime = minimumWcet + totalInterference;
......@@ -89,10 +86,10 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
return responseTime;
}
private long getSelfInterference(SynchronousTask task, long parallelism) {
private long getSelfInterference(final SynchronousTask task, final long parallelism) {
long interference = 0;
for (Segment s : task.getWorkloadDistribution()) {
for (final Segment s : task.getWorkloadDistribution()) {
if (s.getNumberOfJobs() >= parallelism + 1) {
interference += s.getJobWcet();
}
......@@ -101,24 +98,25 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
return interference;
}
private long getTaskInterference(SynchronousTask task, long interval, long parallelism) {
private long getTaskInterference(final SynchronousTask task, final long interval,
final long parallelism) {
if (responseTimes.containsKey(task)) {
long responseTime = responseTimes.get(task).getResponseTime();
long minWcet = getMinimumWcet(task);
long period = task.getPeriod();
long numberOfJobs =
final long responseTime = responseTimes.get(task).getResponseTime();
final long minWcet = getMinimumWcet(task);
final long period = task.getPeriod();
final long numberOfJobs =
(LongMath.divide(interval + responseTime - minWcet, period, RoundingMode.FLOOR)
+ 1);
long workload = 0;
for (Segment s : task.getWorkloadDistribution()) {
for (final Segment s : task.getWorkloadDistribution()) {
if (s.getNumberOfJobs() >= parallelism) {
workload += s.getJobWcet();
}
}
long interference = numberOfJobs * workload;
final long interference = numberOfJobs * workload;
return interference;
} else {
......@@ -126,9 +124,9 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
}
}
private long getMinimumWcet(SynchronousTask task) {
private long getMinimumWcet(final SynchronousTask task) {
long minWcet = 0;
for (Segment s : task.getWorkloadDistribution()) {
for (final Segment s : task.getWorkloadDistribution()) {
minWcet += s.getJobWcet();
}
......
......@@ -2,13 +2,11 @@ package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.math.DoubleMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
......@@ -20,7 +18,7 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
private final Map<Task, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public MelaniButtazzo(long numberOfProcessors) {
public MelaniButtazzo(final long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
......@@ -37,19 +35,18 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
}
@Override
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<DagTask> tasks) {
responseTimes.clear();
for (DagTask t : tasks) {
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, Level.HIGH));
for (final DagTask t : tasks) {
final long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
tasks.getParallelTaskRatio(), tasks.getUtilization());
return new SchedulingInfo(responseTimes.values());
}
private long calculateResponseTime(Set<DagTask> tasks, DagTask task) {
long minimumWcet = task.getCriticalPath();
private long calculateResponseTime(final Set<DagTask> tasks, final DagTask task) {
final long minimumWcet = task.getCriticalPath();
long responseTime = minimumWcet;
long previousResponseTime = 0;
......@@ -57,16 +54,16 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
previousResponseTime = responseTime;
double taskInterference = 0;
for (DagTask t : tasks) {
for (final DagTask t : tasks) {
if (t.getPeriod() < task.getPeriod()) {
taskInterference += getTaskInterference(t, responseTime);
}
}
taskInterference /= numberOfProcessors;
double selfInterference = getSelfInterference(task);
final double selfInterference = getSelfInterference(task);
long totalInterference = (long) Math.floor(taskInterference + selfInterference);
final long totalInterference = (long) Math.floor(taskInterference + selfInterference);
responseTime = minimumWcet + totalInterference;
} while (previousResponseTime != responseTime);
......@@ -74,28 +71,29 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
return responseTime;
}
private double getSelfInterference(DagTask task) {
long criticalPath = task.getCriticalPath();
long workload = task.getWorkload();
private double getSelfInterference(final DagTask task) {
final long criticalPath = task.getCriticalPath();
final long workload = task.getWorkload();
return (double) (workload - criticalPath) / numberOfProcessors;
}
private double getTaskInterference(DagTask task, long interval) {
private double getTaskInterference(final DagTask task, final long interval) {
if (responseTimes.containsKey(task)) {
long responseTime = responseTimes.get(task).getResponseTime();
long singleWorkload = task.getWorkload();
long period = task.getPeriod();
final long responseTime = responseTimes.get(task).getResponseTime();
final long singleWorkload = task.getWorkload();
final long period = task.getPeriod();
double nominator =
final double nominator =
(interval + responseTime - (double) singleWorkload / numberOfProcessors);
long amountOfJobs = DoubleMath.roundToLong(nominator / period, RoundingMode.FLOOR);
final long amountOfJobs =
DoubleMath.roundToLong(nominator / period, RoundingMode.FLOOR);
double carryOutPortion =
final double carryOutPortion =
Math.min(singleWorkload, numberOfProcessors * (nominator % period));
double interference = amountOfJobs * singleWorkload + carryOutPortion;
final double interference = amountOfJobs * singleWorkload + carryOutPortion;
return interference;
......
......@@ -2,13 +2,11 @@ package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.DagTask;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
......@@ -24,7 +22,7 @@ public class SchmidMottok extends AbstractTest<DagTask> {
private final Map<Task, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public SchmidMottok(long numberOfProcessors) {
public SchmidMottok(final long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
......@@ -36,15 +34,14 @@ public class SchmidMottok extends AbstractTest<DagTask> {
}
@Override
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<DagTask> tasks) {
responseTimes.clear();
for (DagTask t : tasks) {
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, Level.HIGH));
for (final DagTask t : tasks) {
final long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
tasks.getParallelTaskRatio(), tasks.getUtilization());
return new SchedulingInfo(responseTimes.values());
}
@Override
......@@ -52,8 +49,8 @@ public class SchmidMottok extends AbstractTest<DagTask> {
return "SchmidMottok";
}
private long calculateResponseTime(Set<DagTask> tasks, DagTask task) {
long minimumWcet = task.getCriticalPath();
private long calculateResponseTime(final Set<DagTask> tasks, final DagTask task) {
final long minimumWcet = task.getCriticalPath();
long responseTime = minimumWcet;
long previousResponseTime = 0;
......@@ -61,9 +58,9 @@ public class SchmidMottok extends AbstractTest<DagTask> {
previousResponseTime = responseTime;
double taskInterference = 0;
for (DagTask t : tasks) {
for (final DagTask t : tasks) {
if (t.getPeriod() < task.getPeriod()) {
long numberOfThreads = t.getNumberOfThreads();
final long numberOfThreads = t.getNumberOfThreads();
for (int p = 0; p < numberOfThreads; ++p) {
taskInterference += Math.min(getTaskInterference(t, responseTime, p + 1),
responseTime - minimumWcet + 1);
......@@ -72,9 +69,9 @@ public class SchmidMottok extends AbstractTest<DagTask> {
}
taskInterference /= numberOfProcessors;
double selfInterference = getSelfInterference(task);
final double selfInterference = getSelfInterference(task);
long totalInterference = (long) Math.floor(taskInterference + selfInterference);
final long totalInterference = (long) Math.floor(taskInterference + selfInterference);
responseTime = minimumWcet + totalInterference;
} while (previousResponseTime != responseTime);
......@@ -83,11 +80,11 @@ public class SchmidMottok extends AbstractTest<DagTask> {
}
private double getSelfInterference(DagTask task) {
private double getSelfInterference(final DagTask task) {
double interference = 0;
long numberOfThreads = task.getNumberOfThreads();
final long numberOfThreads = task.getNumberOfThreads();
for (Segment s : task.getWorkloadDistribution()) {
for (final Segment s : task.getWorkloadDistribution()) {
interference += (double) (s.getNumberOfJobs() - 1) * s.getJobWcet();
}
......@@ -97,25 +94,27 @@ public class SchmidMottok extends AbstractTest<DagTask> {
}
private double getTaskInterference(DagTask task, long interval, long parallelism) {
private double getTaskInterference(final DagTask task, final long interval,
final long parallelism) {
if (responseTimes.containsKey(task)) {
long responseTime = responseTimes.get(task).getResponseTime();
long minWcet = task.getCriticalPath();
long period = task.getPeriod();
long amountOfJobs =
final long responseTime = responseTimes.get(task).getResponseTime();
final long minWcet = task.getCriticalPath();
final long period = task.getPeriod();
final long amountOfJobs =
(LongMath.divide(interval + responseTime - minWcet, period, RoundingMode.FLOOR)
+ 1);
double workload = 0;
for (Segment s : task.getWorkloadDistribution()) {
long numberOfThreads = s.getNumberOfJobs() > 1 ? task.getNumberOfThreads() : 1;
for (final Segment s : task.getWorkloadDistribution()) {
final long numberOfThreads =
s.getNumberOfJobs() > 1 ? task.getNumberOfThreads() : 1;
if (numberOfThreads >= parallelism) {
workload += s.getNumberOfJobs() * s.getJobWcet() / numberOfThreads;
}
}
double interference = amountOfJobs * workload;
final double interference = amountOfJobs * workload;
return interference;
} else {
......
......@@ -12,7 +12,7 @@ public class BinaryDecompositionTree<N> {
return root;
}
public boolean contains(N object) {
public boolean contains(final N object) {
if (root == null) {
return false;
}
......@@ -23,7 +23,7 @@ public class BinaryDecompositionTree<N> {
return root == null;
}
public void setRoot(Node<N> root) {
public void setRoot(final Node<N> root) {
this.root = root;
}
......@@ -34,13 +34,13 @@ public class BinaryDecompositionTree<N> {
private final NodeType nodeType;
private final T object;
public Node(Node<T> parentNode, NodeType nodeType) {
public Node(final Node<T> parentNode, final NodeType nodeType) {
this.parentNode = parentNode;
this.nodeType = nodeType;
this.object = null;
}
public boolean contains(T object) {
public boolean contains(final T object) {
if (nodeType.equals(NodeType.LEAF)) {
return this.object == object;
} else {
......@@ -56,7 +56,7 @@ public class BinaryDecompositionTree<N> {
}
}
public Node(Node<T> parentNode, T object) {
public Node(final Node<T> parentNode, final T object) {
this.parentNode = parentNode;
this.nodeType = NodeType.LEAF;
this.object = object;
......@@ -80,7 +80,7 @@ public class BinaryDecompositionTree<N> {
/**
* @param leftNode the leftNode to set
*/
public Node<T> setLeftNode(Node<T> leftNode) {
public Node<T> setLeftNode(final Node<T> leftNode) {
this.leftNode = leftNode;
return this.leftNode;
}
......@@ -95,7 +95,7 @@ public class BinaryDecompositionTree<N> {
/**
* @param rightNode the rightNode to set
*/
public Node<T> setRightNode(Node<T> rightNode) {
public Node<T> setRightNode(final Node<T> rightNode) {
this.rightNode = rightNode;
return this.rightNode;
}
......
......@@ -13,18 +13,18 @@ public class Logger {
private BufferedWriter bufferedWriter;
private PrintWriter printWriter;
public Logger(String pathToFile) {
public Logger(final String pathToFile) {
try {
fileWriter = new FileWriter(pathToFile, true);
fileWriter = new FileWriter(pathToFile, false);
bufferedWriter = new BufferedWriter(fileWriter);
printWriter = new PrintWriter(bufferedWriter);
} catch (Exception e) {
} catch (final Exception e) {
System.out.println("Could not create file!");
}
}
public void log(Appendable sb) {
public void log(final Appendable sb) {
printWriter.write(sb.toString());
}
......@@ -39,7 +39,7 @@ public class Logger {
if (fileWriter != null) {
fileWriter.close();
}
} catch (Exception e) {
} catch (final Exception e) {
}
}
......
package mvd.jester.info;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.info.SchedulingInfo.Feasiblity;
/**
* TestTerminationInfo
......@@ -22,58 +20,20 @@ public class TestSchedulingInfo {
Set<TerminationInfo> terminationInfos = new HashSet<>();
int numberOfTerminationInfos = ThreadLocalRandom.current().nextInt(5, 10);
boolean feasibile = true;
boolean levelFailed = false;
for (int i = 0; i < numberOfTerminationInfos; ++i) {
long deadline = ThreadLocalRandom.current().nextLong(50, 100);
long responseTime = ThreadLocalRandom.current().nextLong(50, 100);
Level taskLevel =
ThreadLocalRandom.current().nextLong(0, 100) < 50 ? Level.LOW : Level.HIGH;
terminationInfos.add(new TerminationInfo(deadline, responseTime, taskLevel));
terminationInfos.add(new TerminationInfo(deadline, responseTime));
if (deadline < responseTime) {
feasibile = false;
if (taskLevel == Level.HIGH) {
levelFailed = true;
}
}
}
SchedulingInfo schedulingInfo =
new SchedulingInfo(terminationInfos, 2, numberOfTerminationInfos);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == levelFailed);
assertTrue(schedulingInfo.checkTasksetFeasible() == feasibile);
}
}
@Test
@DisplayName("Check Getters and Setters.")
public void testGettersAndSetters() {
double taskRatio = 0.23;
double utilization = 0.49;
SchedulingInfo si = new SchedulingInfo(taskRatio, utilization);
Set<TerminationInfo> terminationInfos = new HashSet<>();
long numberOfTerminationInfos = ThreadLocalRandom.current().nextLong(5, 10);
for (int i = 0; i < numberOfTerminationInfos; ++i) {
TerminationInfo ti = mock(TerminationInfo.class);
terminationInfos.add(ti);
assertTrue(si.addTerminationInfo(ti));
assertFalse(si.addTerminationInfo(ti));
SchedulingInfo schedulingInfo = new SchedulingInfo(terminationInfos);
assertTrue(schedulingInfo
.getFeasibility() == (feasibile ? Feasiblity.SUCCEEDED : Feasiblity.FAILED));
}
assertTrue(si.getParallelTaskRatio() == taskRatio);
assertTrue(si.getUtilization() == utilization);
assertTrue(si.getTerminationInfos().size() == numberOfTerminationInfos);
assertTrue(si.getTerminationInfos().equals(terminationInfos));
assertTrue(si.addTerminationInfo(null));
assertFalse(si.getFailedTerminationInfo().isPresent());
TerminationInfo ti = mock(TerminationInfo.class);
si.setFailedTerminationInfo(ti);
assertTrue(si.getFailedTerminationInfo().isPresent());
assertTrue(si.getFailedTerminationInfo().get().equals(ti));
}
}
......@@ -3,7 +3,6 @@ package mvd.jester.info;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.info.TerminationInfo.Level;
/**
* TestTerminationInfo
......@@ -16,11 +15,10 @@ public class TestTerminationInfo {
long releaseTime = 20;
long deadline = 40;
long responseTime = 30;
Level taskLevel = Level.LOW;
TerminationInfo ti = new TerminationInfo(releaseTime, deadline, responseTime);
TerminationInfo til = new TerminationInfo(deadline, responseTime, taskLevel);
TerminationInfo tirl = new TerminationInfo(releaseTime, deadline, responseTime, taskLevel);
TerminationInfo til = new TerminationInfo(deadline, responseTime);
TerminationInfo tirl = new TerminationInfo(releaseTime, deadline, responseTime);
assertTrue(ti.getDeadline() == deadline);
......@@ -31,8 +29,6 @@ public class TestTerminationInfo {
assertTrue(tirl.getResponseTime() == responseTime);
assertTrue(ti.getReleaseTime() == releaseTime);
assertTrue(tirl.getReleaseTime() == releaseTime);
assertTrue(til.getTaskLevel() == taskLevel);
assertTrue(tirl.getTaskLevel() == taskLevel);
assertTrue(ti.getLateness() == responseTime - deadline);
assertTrue(til.getLateness() == responseTime - deadline);
assertTrue(tirl.getLateness() == responseTime - deadline);
......
......@@ -7,7 +7,7 @@ import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.DagTask.DagUtils;
import mvd.jester.utils.DagUtils;
import mvd.jester.model.SystemSetup.DagTaskBuilder;
import mvd.jester.utils.BinaryDecompositionTree;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment