Commit 4d9fd939 by Michael Schmid

Lots of changes for DAGs: add the DagTask/Job model and DagTaskBuilder, turn Task into an interface (the old segment-based model becomes SynchronousTask), and rework TestEnvironment and ResultLogger around the new generic types

parent 72d32bca
......@@ -15,7 +15,17 @@
</properties>
<dependencies>
<dependency>
<groupId>org.jgrapht</groupId>
<artifactId>jgrapht-ext</artifactId>
<version>1.0.1</version>
</dependency>
<dependency>
<groupId>org.jgrapht</groupId>
<artifactId>jgrapht-core</artifactId>
<version>1.0.1</version>
</dependency>
<dependency>
<groupId>net.sourceforge.cobertura</groupId>
<artifactId>cobertura</artifactId>
<version>2.1.1</version>
......
package mvd.jester;
import java.util.Arrays;
import java.util.Set;
import mvd.jester.model.DagTask;
import mvd.jester.model.SystemSetup;
import mvd.jester.priority.EarliestDeadlineFirst;
import mvd.jester.priority.RateMonotonic;
import mvd.jester.tests.AbstractTest;
import mvd.jester.tests.FonsecaNelis;
import mvd.jester.tests.MelaniButtazzo;
import mvd.jester.tests.SchmidMottok;
/**
......@@ -11,18 +16,15 @@ import mvd.jester.priority.RateMonotonic;
*/
public class App {
public static void main(String[] args) {
for (int p = 4; p <= 16; p *= 2) {
SystemSetup.Builder builder = new SystemSetup.Builder().setNumberOfProcessors(p);
TestEnvironment te = new TestEnvironment(builder, 40000);
for (int p = 4; p <= 4; p *= 2) {
SystemSetup.DagTaskBuilder builder =
new SystemSetup.DagTaskBuilder().setNumberOfProcessors(p);
TestEnvironment te = new TestEnvironment();
te.registerSchedulingAlgorithm(new RateMonotonic());
te.registerSchedulingAlgorithm(new EarliestDeadlineFirst());
Set<ResultCollector<AbstractTest<DagTask>>> tests = te.registerTests(
Arrays.asList(new SchmidMottok(p), new MelaniButtazzo(p), new FonsecaNelis(p)));
te.registerTest(mvd.jester.tests.SchmidMottok.class);
te.registerTest(mvd.jester.tests.MaiaBertogna.class);
te.registerTest(mvd.jester.tests.ChwaLee.class);
te.runExperiments();
te.runExperiments(builder, tests, p, 500);
}
}
}
......@@ -4,17 +4,15 @@ import java.io.IOException;
import java.time.LocalTime;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ObjectArrays;
import com.google.common.collect.Table;
import com.google.common.collect.TreeBasedTable;
import com.google.common.math.DoubleMath;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.Task;
import mvd.jester.simulator.AbstractSimulator;
import mvd.jester.tests.AbstractTest;
import mvd.jester.utils.Logger;
......@@ -30,18 +28,18 @@ public class ResultLogger {
this.numberOfProcessors = numberOfProcessors;
}
public void logAll(Set<ResultCollector<AbstractTest>> testResults,
public <T extends Task> void logAll(Set<ResultCollector<AbstractTest<T>>> testResults,
Set<ResultCollector<AbstractSimulator>> simResults) {
logTests(testResults);
logSimulations(simResults);
}
public void logTests(Set<ResultCollector<AbstractTest>> results) {
public <T extends Task> void logTests(Set<ResultCollector<AbstractTest<T>>> results) {
if (!results.isEmpty()) {
logFeasibility(results, "test");
logFeasibilityRatio(results, "test");
logTaskRatio(results, "test");
logFeasibilityLevel(results, "test");
// logFeasibilityRatio(results, "test");
// logTaskRatio(results, "test");
// logFeasibilityLevel(results, "test");
}
}
......@@ -55,34 +53,34 @@ public class ResultLogger {
public <T extends TypeInterface> void logFeasibilityLevel(Set<ResultCollector<T>> results,
String type) {
LocalTime date = LocalTime.now();
Logger log = new Logger("./results/feasibility_level_" + type + "_" + numberOfProcessors
+ "_" + date.getHour() + ":" + date.getMinute() + ".txt");
Table<Long, ResultCollector<T>, Double> resultTable = TreeBasedTable.create();
Set<ResultCollector<T>> resultCollectors = new TreeSet<>();
for (long util = 0; util <= numberOfProcessors * 10; util += numberOfProcessors / 4) {
for (ResultCollector<T> rc : results) {
resultCollectors.add(rc);
final long local_util = util;
Supplier<Stream<SchedulingInfo>> schedulingResults = () -> rc.getResults().stream()
.filter(r -> Math.round(r.getUtilization() * 10 / (numberOfProcessors / 4))
* (numberOfProcessors / 4) == local_util);
if (schedulingResults.get().filter(r -> !r.checkTasksetFeasible()).count() > 0) {
double feasibleTasksets = (double) schedulingResults.get()
.filter(r -> r.checkLevelFail(Level.HIGH)).count()
/ schedulingResults.get().filter(r -> !r.checkTasksetFeasible())
.count();
resultTable.put(util, rc, feasibleTasksets);
} else {
resultTable.put(util, rc, 0.);
}
}
}
logData(log, resultTable, resultCollectors, "Utilization");
// LocalTime date = LocalTime.now();
// Logger log = new Logger("./results/feasibility_level_" + type + "_" + numberOfProcessors
// + "_" + date.getHour() + ":" + date.getMinute() + ".txt");
// Table<Long, ResultCollector<T>, Double> resultTable = TreeBasedTable.create();
// Set<ResultCollector<T>> resultCollectors = new TreeSet<>();
// for (long util = 0; util <= numberOfProcessors * 10; util += numberOfProcessors / 4) {
// for (ResultCollector<T> rc : results) {
// resultCollectors.add(rc);
// final long local_util = util;
// Supplier<Stream<SchedulingInfo>> schedulingResults = () -> rc.getResults().stream()
// .filter(r -> Math.round(r.getUtilization() * 10 / (numberOfProcessors / 4))
// * (numberOfProcessors / 4) == local_util);
// if (schedulingResults.get().filter(r -> !r.checkTasksetFeasible()).count() > 0) {
// double feasibleTasksets = (double) schedulingResults.get()
// .filter(r -> r.checkLevelFail(Level.HIGH)).count()
// / schedulingResults.get().filter(r -> !r.checkTasksetFeasible())
// .count();
// resultTable.put(util, rc, feasibleTasksets);
// } else {
// resultTable.put(util, rc, 0.);
// }
// }
// }
// logData(log, resultTable, resultCollectors, "Utilization");
}
......@@ -93,16 +91,15 @@ public class ResultLogger {
Logger log = new Logger("./results/feasibility_" + type + "_" + numberOfProcessors + "_"
+ date.getHour() + ":" + date.getMinute() + ".txt");
Table<Long, ResultCollector<T>, Long> resultTable = TreeBasedTable.create();
Table<Double, ResultCollector<T>, Long> resultTable = TreeBasedTable.create();
Set<ResultCollector<T>> resultCollectors = new TreeSet<>();
for (long util = 0; util <= numberOfProcessors * 10; util += numberOfProcessors / 4) {
for (double util = 0.25; util <= numberOfProcessors; util += 0.25) {
for (ResultCollector<T> rc : results) {
resultCollectors.add(rc);
final long local_util = util;
final double local_util = util;
long feasibleTasksets = rc.getResults().stream()
.filter(r -> Math.round(r.getUtilization() * 10 / (numberOfProcessors / 4))
* (numberOfProcessors / 4) == local_util)
.filter(r -> DoubleMath.fuzzyEquals(r.getUtilization(), local_util, 0.125))
.filter(r -> r.checkTasksetFeasible()).count();
resultTable.put(util, rc, feasibleTasksets);
}
......@@ -113,59 +110,59 @@ public class ResultLogger {
public <T extends TypeInterface> void logFeasibilityRatio(Set<ResultCollector<T>> results,
String type) {
LocalTime date = LocalTime.now();
Logger log = new Logger("./results/feasibility_ratio_" + type + "_" + numberOfProcessors
+ "_" + date.getHour() + ":" + date.getMinute() + ".txt");
Table<Long, ResultCollector<T>, Double> resultTable = TreeBasedTable.create();
Set<ResultCollector<T>> resultCollectors = new TreeSet<>();
for (long util = 0; util <= numberOfProcessors * 10; util += numberOfProcessors / 4) {
for (ResultCollector<T> rc : results) {
resultCollectors.add(rc);
final long local_util = util;
Supplier<Stream<SchedulingInfo>> schedulingResults = () -> rc.getResults().stream()
.filter(r -> Math.round(r.getUtilization() * 10 / (numberOfProcessors / 4))
* (numberOfProcessors / 4) == local_util);
if (schedulingResults.get().count() > 0) {
double feasibleTasksets =
(double) schedulingResults.get().filter(r -> r.checkTasksetFeasible())
.count() / schedulingResults.get().count();
resultTable.put(util, rc, feasibleTasksets);
} else {
resultTable.put(util, rc, 1.);
}
}
}
logData(log, resultTable, resultCollectors, "Utilization");
// LocalTime date = LocalTime.now();
// Logger log = new Logger("./results/feasibility_ratio_" + type + "_" + numberOfProcessors
// + "_" + date.getHour() + ":" + date.getMinute() + ".txt");
// Table<Long, ResultCollector<T>, Double> resultTable = TreeBasedTable.create();
// Set<ResultCollector<T>> resultCollectors = new TreeSet<>();
// for (long util = 0; util <= numberOfProcessors * 10; util += numberOfProcessors / 4) {
// for (ResultCollector<T> rc : results) {
// resultCollectors.add(rc);
// final long local_util = util;
// Supplier<Stream<SchedulingInfo>> schedulingResults = () -> rc.getResults().stream()
// .filter(r -> Math.round(r.getUtilization() * 10 / (numberOfProcessors / 4))
// * (numberOfProcessors / 4) == local_util);
// if (schedulingResults.get().count() > 0) {
// double feasibleTasksets =
// (double) schedulingResults.get().filter(r -> r.checkTasksetFeasible())
// .count() / schedulingResults.get().count();
// resultTable.put(util, rc, feasibleTasksets);
// } else {
// resultTable.put(util, rc, 1.);
// }
// }
// }
// logData(log, resultTable, resultCollectors, "Utilization");
}
public <T extends TypeInterface> void logTaskRatio(Set<ResultCollector<T>> results,
String type) {
LocalTime date = LocalTime.now();
Logger log = new Logger("./results/task_ratio_" + type + "_" + numberOfProcessors + "_"
+ date.getHour() + ":" + date.getMinute() + ".txt");
Table<Long, ResultCollector<T>, Long> resultTable = TreeBasedTable.create();
Set<ResultCollector<T>> resultCollectors = new TreeSet<>();
for (long ratio = 0; ratio <= 10; ratio += 1) {
for (ResultCollector<T> rc : results) {
resultCollectors.add(rc);
final long local_ratio = ratio;
long feasibleTasksets = rc.getResults().stream()
.filter(r -> Math.ceil(r.getParallelTaskRatio() * 10) == local_ratio)
.filter(r -> r.checkTasksetFeasible()).count();
resultTable.put(ratio, rc, feasibleTasksets);
}
}
logData(log, resultTable, resultCollectors, "TaskRatio");
// LocalTime date = LocalTime.now();
// Logger log = new Logger("./results/task_ratio_" + type + "_" + numberOfProcessors + "_"
// + date.getHour() + ":" + date.getMinute() + ".txt");
// Table<Long, ResultCollector<T>, Long> resultTable = TreeBasedTable.create();
// Set<ResultCollector<T>> resultCollectors = new TreeSet<>();
// for (long ratio = 0; ratio <= 10; ratio += 1) {
// for (ResultCollector<T> rc : results) {
// resultCollectors.add(rc);
// final long local_ratio = ratio;
// long feasibleTasksets = rc.getResults().stream()
// .filter(r -> Math.ceil(r.getParallelTaskRatio() * 10) == local_ratio)
// .filter(r -> r.checkTasksetFeasible()).count();
// resultTable.put(ratio, rc, feasibleTasksets);
// }
// }
// logData(log, resultTable, resultCollectors, "TaskRatio");
}
private <T extends TypeInterface> void logData(Logger log,
Table<Long, ResultCollector<T>, ? extends Number> resultTable,
Table<Double, ResultCollector<T>, ? extends Number> resultTable,
Set<ResultCollector<T>> resultCollectors, String xDataName) {
final Appendable out = new StringBuilder();
try {
......@@ -175,7 +172,7 @@ public class ResultLogger {
final CSVPrinter printer = CSVFormat.DEFAULT.withHeader(header).print(out);
printer.printRecords(resultTable.rowMap().entrySet().stream()
.map(entry -> ImmutableList.builder().add((double) entry.getKey() / 10)
.map(entry -> ImmutableList.builder().add((double) entry.getKey())
.addAll(entry.getValue().values()).build())
.collect(Collectors.toList()));
} catch (final IOException e) {
......
package mvd.jester;
import java.lang.reflect.Constructor;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;
import mvd.jester.model.Task;
import mvd.jester.model.SystemSetup.DagTaskBuilder;
import mvd.jester.model.SystemSetup.SynchronousTaskBuilder;
import mvd.jester.priority.PriorityManager;
import mvd.jester.simulator.AbstractSimulator;
import mvd.jester.tests.AbstractTest;
/**
......@@ -14,106 +18,34 @@ import mvd.jester.tests.AbstractTest;
*/
public class TestEnvironment {
private final long numberOfTaskSets;
private final SystemSetup systemSetup;
private final SystemSetup.Builder builder;
private final Set<Constructor<? extends AbstractTest>> abstractTests;
private final Set<Constructor<? extends AbstractSimulator>> abstractSimulators;
private final Set<PriorityManager> schedulingAlgorithms;
public TestEnvironment(SystemSetup.Builder builder, long numberOfTaskSets) {
this.numberOfTaskSets = numberOfTaskSets;
this.abstractTests = new HashSet<>();
this.abstractSimulators = new HashSet<>();
this.schedulingAlgorithms = new HashSet<>();
this.builder = builder;
this.systemSetup = builder.build();
}
public TestEnvironment registerSchedulingAlgorithm(PriorityManager priorityManager) {
schedulingAlgorithms.add(priorityManager);
return this;
}
public TestEnvironment registerTest(Class<? extends AbstractTest> abstractTest) {
try {
abstractTests.add(abstractTest.getConstructor(SystemSetup.class));
} catch (Exception e) {
System.out.println("Missing constructor for abstract test!");
}
return this;
}
public TestEnvironment registerSimulator(Class<? extends AbstractSimulator> abstractSimulator) {
try {
abstractSimulators.add(abstractSimulator.getConstructor(SystemSetup.class));
} catch (Exception e) {
System.out.println("Missing constructor for abstract simulator!");
}
return this;
public TestEnvironment() {
}
public TestEnvironment registerTestPair(Class<? extends AbstractTest> abstractTest,
Class<? extends AbstractSimulator> abstractSimulator) {
try {
abstractTests.add(abstractTest.getConstructor(SystemSetup.class));
} catch (Exception e) {
System.out.println("Missing constructor for abstract test!");
}
try {
abstractSimulators.add(abstractSimulator.getConstructor(SystemSetup.class));
} catch (Exception e) {
System.out.println("Missing constructor for abstract simulator!");
public <T extends Task> Set<ResultCollector<AbstractTest<T>>> registerTests(
List<AbstractTest<T>> tests) {
Set<ResultCollector<AbstractTest<T>>> testSet = new HashSet<>();
for (AbstractTest<T> t : tests) {
testSet.add(new ResultCollector<AbstractTest<T>>(t.getPriorityManager(), t));
}
return this;
return testSet;
}
public void runExperiments() {
Set<ResultCollector<AbstractTest>> abstractTestInstances = new HashSet<>();
Set<ResultCollector<AbstractSimulator>> abstractSimulatorInstances = new HashSet<>();
for (PriorityManager pm : schedulingAlgorithms) {
for (Constructor<? extends AbstractTest> c : abstractTests) {
try {
if (pm.hasTest(c.getDeclaringClass())) {
abstractTestInstances.add(new ResultCollector<AbstractTest>(pm,
c.newInstance(this.systemSetup)));
}
} catch (Exception e) {
System.out.println("Could not instantiate object of AbstractTest!");
throw new RuntimeException("Could not instantiate object of AbstractTest!");
}
}
for (Constructor<? extends AbstractSimulator> c : abstractSimulators) {
try {
if (pm.hasSimulator(c.getDeclaringClass())) {
abstractSimulatorInstances.add(new ResultCollector<AbstractSimulator>(pm,
c.newInstance(this.systemSetup)));
}
} catch (Exception e) {
System.out.println("Could not instantiate object of AbstractSimulator!");
throw new RuntimeException(
"Could not instantiate object of AbstractSimulator!");
}
}
}
public void runExperiments(SynchronousTaskBuilder builder,
Set<ResultCollector<AbstractTest<SynchronousTask>>> abstractTestInstances,
long numberOfProcessors, long numberOfTaskSets) {
long checkedTasksets = 0;
System.out.print(Math.round((double) checkedTasksets / numberOfTaskSets * 100) + "% of "
+ numberOfTaskSets + " tasksets tested!\r");
while (checkedTasksets < numberOfTaskSets) {
builder.rebuild(this.systemSetup);
Set<SynchronousTask> tasks = builder.generateTaskSet();
double utilization = this.systemSetup.getUtilization();
double utilization = tasks.stream().mapToDouble(SynchronousTask::getUtilization).sum();
while (utilization <= this.systemSetup.getNumberOfProcessors()
&& checkedTasksets < numberOfTaskSets) {
while (utilization <= numberOfProcessors && checkedTasksets < numberOfTaskSets) {
checkedTasksets++;
if (checkedTasksets % 10 == 0) {
......@@ -121,36 +53,63 @@ public class TestEnvironment {
+ "% of " + numberOfTaskSets + " tasksets tested!\r");
}
for (ResultCollector<AbstractTest> testInstance : abstractTestInstances) {
for (ResultCollector<AbstractTest<SynchronousTask>> testInstance : abstractTestInstances) {
PriorityManager priorityManager = testInstance.getPriorityManager();
AbstractTest abstractTest = testInstance.getAbstractValue();
AbstractTest<SynchronousTask> abstractTest = testInstance.getAbstractValue();
SortedTaskSet<SynchronousTask> sortedTaskSet =
new SortedTaskSet<>(priorityManager);
sortedTaskSet.addAll(tasks);
SchedulingInfo schedulingInfo =
abstractTest.runSchedulabilityCheck(priorityManager);
abstractTest.runSchedulabilityCheck(sortedTaskSet);
testInstance.addResult(schedulingInfo);
}
for (ResultCollector<AbstractSimulator> simulatorInstance : abstractSimulatorInstances) {
PriorityManager priorityManager = simulatorInstance.getPriorityManager();
AbstractSimulator abstractSimulator = simulatorInstance.getAbstractValue();
builder.addTask(tasks);
SchedulingInfo schedulingInfo =
abstractSimulator.runSimulation(priorityManager);
simulatorInstance.addResult(schedulingInfo);
}
utilization = tasks.stream().mapToDouble(SynchronousTask::getUtilization).sum();
}
}
System.out.println("");
ResultLogger resultLogger = new ResultLogger(numberOfProcessors);
builder.addTask(systemSetup);
resultLogger.logTests(abstractTestInstances);
}
public void runExperiments(DagTaskBuilder builder,
Set<ResultCollector<AbstractTest<DagTask>>> abstractTestInstances,
long numberOfProcessors, long numberOfTaskSetsPerUtil) {
long checkedTasksets = 0;
long numberOfTaskSets = numberOfProcessors * 4 * numberOfTaskSetsPerUtil;
utilization = this.systemSetup.getUtilization();
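// Sweep the total task-set utilization from 1.0 up to numberOfProcessors in steps of 0.25,
// generating numberOfTaskSetsPerUtil task sets per utilization step.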
for (double util = 1; util <= numberOfProcessors; util += 0.25) {
for (int i = 0; i < numberOfTaskSetsPerUtil; ++i) {
Set<DagTask> taskSet = builder.generateTaskSet(util);
System.out.print(Math.round((double) checkedTasksets / numberOfTaskSets * 100)
+ "% of " + numberOfTaskSets + " tasksets tested!\r");
for (ResultCollector<AbstractTest<DagTask>> testResultCollector : abstractTestInstances) {
AbstractTest<DagTask> testInstance = testResultCollector.getAbstractValue();
PriorityManager priorityManager = testResultCollector.getPriorityManager();
SortedTaskSet<DagTask> sortedTaskSet = new SortedTaskSet<>(priorityManager);
sortedTaskSet.addAll(taskSet);
SchedulingInfo schedulingInfo =
testInstance.runSchedulabilityCheck(sortedTaskSet);
testResultCollector.addResult(schedulingInfo);
}
checkedTasksets++;
}
}
System.out.println("");
ResultLogger resultLogger = new ResultLogger(systemSetup.getNumberOfProcessors());
ResultLogger resultLogger = new ResultLogger(numberOfProcessors);
resultLogger.logTests(abstractTestInstances);
resultLogger.logSimulations(abstractSimulatorInstances);
}
}
package mvd.jester.model;
import java.util.LinkedHashSet;
import java.util.Set;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
public class DagTask implements Task {
private DirectedAcyclicGraph<Job, DefaultEdge> jobDag;
private final Set<Segment> workloadDistribution;
private final long workload;
private final long criticalPath;
private final long period;
private final long deadline;
private final long numberOfThreads;
public DagTask(DirectedAcyclicGraph<Job, DefaultEdge> jobDag, long period,
long numberOfThreads) {
this.jobDag = jobDag;
this.period = period;
this.deadline = period;
this.numberOfThreads = numberOfThreads;
this.workload = DagUtils.calculateWorkload(this.jobDag);
this.criticalPath = DagUtils.calculateCriticalPath(this.jobDag);
this.workloadDistribution =
DagUtils.calculateWorkloadDistribution(this.jobDag, this.criticalPath);
}
public double getUtilization() {
return (double) workload / period;
}
/**
* @return the deadline
*/
public long getDeadline() {
return deadline;
}
/**
* @return the jobDag
*/
public DirectedAcyclicGraph<Job, DefaultEdge> getJobDag() {
return jobDag;
}
/**
* @param jobDag the jobDag to set
*/
public void setJobDag(DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
this.jobDag = jobDag;
}
/**
* @return the period
*/
public long getPeriod() {
return period;
}
/**
* @return the workload
*/
public long getWorkload() {
return workload;
}
/**
* @return the criticalPath
*/
public long getCriticalPath() {
return criticalPath;
}
/**
* @return the workloadDistribution
*/
public Set<Segment> getWorkloadDistribution() {
return workloadDistribution;
}
@Override
public long getMaximumParallelism() {
long max = 0;
for (Segment s : workloadDistribution) {
if (max < s.getNumberOfJobs()) {
max = s.getNumberOfJobs();
}
}
return max;
}
@Override
public long getNumberOfThreads() {
return numberOfThreads;
}
public static class DagUtils {
public static long calculateWorkload(DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
long workload = 0;
for (Job job : jobDag) {
workload += job.getWcet();
}
return workload;
}
public static long calculateCriticalPath(DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
long criticalPath = 0;
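// DirectedAcyclicGraph iterates its vertices in topological order, so each predecessor's
// relativeCompletionTime is already final when a job is visited here.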
for (Job job : jobDag) {
Set<DefaultEdge> edges = jobDag.incomingEdgesOf(job);
long longestRelativeCompletionTime = 0;
for (DefaultEdge e : edges) {
Job source = jobDag.getEdgeSource(e);
longestRelativeCompletionTime =
longestRelativeCompletionTime >= source.getRelativeCompletionTime()
? longestRelativeCompletionTime
: source.getRelativeCompletionTime();
}
job.setRelativeCompletionTime(longestRelativeCompletionTime + job.getWcet());
criticalPath = Math.max(criticalPath, job.getRelativeCompletionTime());
}
return criticalPath;
}
public static LinkedHashSet<Segment> calculateWorkloadDistribution(
DirectedAcyclicGraph<Job, DefaultEdge> jobDag, long criticalPath) {
LinkedHashSet<Segment> segments = new LinkedHashSet<>();
long segmentDuration = 0;
long segmentHeight = 1;
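// Walk the critical path one time unit at a time and count how many jobs are active in that
// unit; consecutive units with the same parallelism are merged into one Segment(duration, height).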
for (long t = 0; t < criticalPath; ++t) {
long currentHeight = 0;
for (Job j : jobDag) {
if (t >= j.getRelativeCompletionTime() - j.getWcet()
&& t < j.getRelativeCompletionTime()) {
currentHeight++;
}
}
if (currentHeight == segmentHeight) {
segmentDuration++;
} else {
segments.add(new Segment(segmentDuration, segmentHeight));
segmentDuration = 1;
segmentHeight = currentHeight;
}
}
segments.add(new Segment(segmentDuration, segmentHeight));
return segments;
}
public static void createNFJGraph(DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
Set<Job> joinNodes = new LinkedHashSet<>();
Set<Job> forkNodes = new LinkedHashSet<>();
for (Job j : jobDag) {
if (jobDag.inDegreeOf(j) > 1) {
joinNodes.add(j);
}
if (jobDag.outDegreeOf(j) > 1) {
forkNodes.add(j);
}
}
for (Job j : joinNodes) {
for (Job f : forkNodes) {
Set<DefaultEdge> edges = jobDag.getAllEdges(f, j);
// jobDag.
}
}
}
}
}
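DagUtils computes the workload as the sum of all job WCETs and the critical path from the longest chain of relative completion times in the DAG. A minimal sketch of both helpers on a small fork/join graph; the DagUtilsExample class and the concrete WCET values are illustrative and not part of this commit.

import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import mvd.jester.model.Job;
import mvd.jester.model.DagTask.DagUtils;

public class DagUtilsExample {
    public static void main(String[] args) throws Exception {
        DirectedAcyclicGraph<Job, DefaultEdge> dag =
                new DirectedAcyclicGraph<>(DefaultEdge.class);
        Job source = new Job(2);
        Job left = new Job(3);
        Job right = new Job(5);
        Job sink = new Job(1);
        for (Job j : new Job[] {source, left, right, sink}) {
            dag.addVertex(j);
        }
        dag.addDagEdge(source, left);   // fork
        dag.addDagEdge(source, right);
        dag.addDagEdge(left, sink);     // join
        dag.addDagEdge(right, sink);
        System.out.println(DagUtils.calculateWorkload(dag));     // 2 + 3 + 5 + 1 = 11
        System.out.println(DagUtils.calculateCriticalPath(dag)); // 2 + 5 + 1 = 8
    }
}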
package mvd.jester.model;
public class Job {
private final long wcet;
private long relativeCompletionTime;
public Job(long wcet) {
this.wcet = wcet;
this.relativeCompletionTime = wcet;
}
public long getWcet() {
return wcet;
}
/**
* @return the relativeCompletionTime
*/
public long getRelativeCompletionTime() {
return relativeCompletionTime;
}
/**
* @param relativeCompletionTime the relativeCompletionTime to set
*/
public void setRelativeCompletionTime(long relativeCompletionTime) {
this.relativeCompletionTime = relativeCompletionTime;
}
}
package mvd.jester.model;
import com.google.common.math.LongMath;
// import com.google.common.math.LongMath;
public class Segment {
private final long jobWcet;
private final long numberOfJobs;
private final long taskletWcet;
private final long numberOfTasklets;
public Segment(long jobWcet, long numberOfJobs) {
this.jobWcet = jobWcet;
this.numberOfJobs = numberOfJobs;
if (numberOfJobs == 1) {
this.taskletWcet = this.jobWcet;
this.numberOfTasklets = this.numberOfJobs;
} else if (this.numberOfJobs > this.jobWcet) {
this.taskletWcet = LongMath.gcd(jobWcet, numberOfJobs);
this.numberOfTasklets = this.jobWcet * this.numberOfJobs / this.taskletWcet;
} else {
this.taskletWcet = numberOfJobs;
this.numberOfTasklets = jobWcet;
}
}
/**
* @return the numberOfJobs
*/
......@@ -42,17 +25,4 @@ public class Segment {
return jobWcet;
}
/**
* @return the numberOfTasklets
*/
public long getNumberOfTasklets() {
return numberOfTasklets;
}
/**
* @return the taskletWcet
*/
public long getTaskletWcet() {
return taskletWcet;
}
}
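The Segment constructor splits the segment's total work (jobWcet * numberOfJobs) into tasklets whose WCET and count multiply back to the same total; when the number of jobs exceeds the job WCET it uses the gcd so both values stay integral. A small check of that invariant; SegmentExample and the chosen values are illustrative, not part of the commit.

import mvd.jester.model.Segment;

public class SegmentExample {
    public static void main(String[] args) {
        Segment s = new Segment(4, 6); // jobWcet = 4, numberOfJobs = 6
        // gcd(4, 6) = 2, so the 24 units of work become 12 tasklets of WCET 2
        System.out.println(s.getTaskletWcet());      // 2
        System.out.println(s.getNumberOfTasklets()); // 12
        System.out.println(s.getTaskletWcet() * s.getNumberOfTasklets()
                == s.getJobWcet() * s.getNumberOfJobs()); // true
    }
}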
......@@ -13,7 +13,7 @@ import mvd.jester.priority.RateMonotonic;
/**
* SortedTaskSet
*/
public class SortedTaskSet extends TreeSet<Task> {
public class SortedTaskSet<T extends Task> extends TreeSet<T> {
private static final long serialVersionUID = 4808544133562675597L;
......@@ -21,16 +21,28 @@ public class SortedTaskSet extends TreeSet<Task> {
super((t1, t2) -> priorityMananger.compare(t1, t2));
}
public static class Deserializer implements JsonDeserializer<SortedTaskSet> {
public double getUtilization() {
return super.stream().mapToDouble(T::getUtilization).sum();
}
public double getParallelTaskRatio() {
long parallelTasks = super.stream().filter(t -> t.getMaximumParallelism() > 1).count();
return (double) parallelTasks / super.size();
}
public static class Deserializer<T extends Task> implements JsonDeserializer<SortedTaskSet<T>> {
@Override
public SortedTaskSet deserialize(JsonElement json, Type typeOfT,
public SortedTaskSet<T> deserialize(JsonElement json, Type typeOfT,
JsonDeserializationContext context) throws JsonParseException {
SortedTaskSet taskSet = new SortedTaskSet(new RateMonotonic());
SortedTaskSet<T> taskSet = new SortedTaskSet<>(new RateMonotonic());
if (json.isJsonArray()) {
JsonArray array = json.getAsJsonArray();
array.forEach(e -> {
taskSet.add(context.deserialize(e, Task.class));
taskSet.add(context.deserialize(e, SynchronousTask.class));
});
}
return taskSet;
......
package mvd.jester.model;
import java.util.Set;
/**
* Task
*/
public class SynchronousTask implements Task {
private final long deadline;
private final long period;
private final Set<Segment> segments;
private final long workload;
private final long criticalPath;
private final long numberOfThreads;
public SynchronousTask(long period, long deadline, long numberOfThreads,
Set<Segment> segments) {
this.deadline = deadline;
this.period = period;
this.numberOfThreads = numberOfThreads;
this.segments = segments;
this.workload = SynchronousUtils.calculateWorkload(segments);
this.criticalPath = SynchronousUtils.calculateCriticalPath(segments);
}
public SynchronousTask(Set<Segment> segments, long period, long numberOfThreads) {
this(period, period, numberOfThreads, segments);
}
public double getUtilization() {
return (double) workload / period;
}
/**
* @return the deadline
*/
public long getDeadline() {
return deadline;
}
/**
* @return the period
*/
public long getPeriod() {
return period;
}
/**
* @return the segments
*/
public Set<Segment> getWorkloadDistribution() {
return segments;
}
/**
* @return the maximumWcet
*/
public long getWorkload() {
return workload;
}
/**
* @return the criticalPath
*/
public long getCriticalPath() {
return criticalPath;
}
@Override
public long getMaximumParallelism() {
long max = 0;
for (Segment s : segments) {
if (max < s.getNumberOfJobs()) {
max = s.getNumberOfJobs();
}
}
return max;
}
@Override
public long getNumberOfThreads() {
return numberOfThreads;
}
public static class SynchronousUtils {
public static long calculateWorkload(Set<Segment> segments) {
long workload = 0;
for (Segment s : segments) {
workload += s.getJobWcet() * s.getNumberOfJobs();
}
return workload;
}
public static long calculateCriticalPath(Set<Segment> segments) {
long criticalPath = 0;
for (Segment s : segments) {
criticalPath += s.getJobWcet();
}
return criticalPath;
}
}
}
package mvd.jester.model;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import mvd.jester.model.DagTask.DagUtils;
/**
* TaskSet
*/
public class SystemSetup {
private Set<Task> tasks;
public class SystemSetup<T extends Task> {
private Set<T> tasks;
private final long numberOfProcessors;
public SystemSetup(Set<Task> tasks, long numberOfProcessors) {
public SystemSetup(final Set<T> tasks, final long numberOfProcessors) {
this.tasks = tasks;
this.numberOfProcessors = numberOfProcessors;
}
......@@ -27,11 +30,11 @@ public class SystemSetup {
/**
* @return the tasks
*/
public Set<Task> getTasks() {
public Set<T> getTasks() {
return tasks;
}
public void setTasks(Set<Task> tasks) {
public void setTasks(final Set<T> tasks) {
this.tasks = tasks;
}
......@@ -42,64 +45,44 @@ public class SystemSetup {
return numberOfProcessors;
}
public double getUtilization() {
double utilization = 0;
for (Task t : tasks) {
utilization += (double) t.getMaximumWcet() / t.getPeriod();
}
return utilization;
}
public double getParallelTaskRatio() {
long parallelTasks = 0;
for (Task t : tasks) {
if (t.getMaximumParallelism() > 1) {
parallelTasks++;
}
}
return (double) parallelTasks / tasks.size();
}
@Override
public String toString() {
Gson gson = new GsonBuilder().setPrettyPrinting().create();
final Gson gson = new GsonBuilder().setPrettyPrinting().create();
return gson.toJson(tasks);
}
public void writeToFile(String path) {
public void writeToFile(final String path) {
try (PrintWriter pw = new PrintWriter(path)) {
pw.write(toString());
} catch (Exception e) {
} catch (final Exception e) {
System.err.println("Something went wrong when writing to file!");
}
}
public static SystemSetup readFromFile(String path, long numberOfProcessors) {
String jsonString;
try {
byte[] encoded = Files.readAllBytes(Paths.get(path));
jsonString = new String(encoded, Charset.defaultCharset());
} catch (IOException e) {
System.out.println(e.getMessage());
jsonString = new String("");
}
return SystemSetup.fromString(jsonString, numberOfProcessors);
}
public static SystemSetup fromString(String json, long numberOfProcessors) {
Gson gson = new GsonBuilder()
.registerTypeAdapter(SortedTaskSet.class, new SortedTaskSet.Deserializer())
.create();
SortedTaskSet tasks = gson.fromJson(json, SortedTaskSet.class);
return new SystemSetup(tasks, numberOfProcessors);
}
public static class Builder {
// public static SystemSetup<SynchronousTask> readFromFile(final String path,
// final long numberOfProcessors) {
// String jsonString;
// try {
// final byte[] encoded = Files.readAllBytes(Paths.get(path));
// jsonString = new String(encoded, Charset.defaultCharset());
// } catch (final IOException e) {
// System.out.println(e.getMessage());
// jsonString = new String("");
// }
// return SystemSetup.fromString(jsonString, numberOfProcessors);
// }
// public static SystemSetup<SynchronousTask> fromString(final String json,
// final long numberOfProcessors) {
// final Gson gson = new GsonBuilder().registerTypeAdapter(SortedTaskSet.class,
// new SortedTaskSet.Deserializer<SynchronousTask>()).create();
// final SortedTaskSet<SynchronousTask> tasks = gson.fromJson(json, SortedTaskSet.class);
// return new SystemSetup<>(tasks, numberOfProcessors);
// }
public static class SynchronousTaskBuilder {
private long numberOfProcessors = 4;
private long minPeriod = 100;
private long maxSequentialPeriod = 1000;
......@@ -108,11 +91,11 @@ public class SystemSetup {
private long maxNumberOfSegments = 7;
private long minNumberOfJobs = 2;
private long maxNumberOfJobs = 3 * numberOfProcessors / 2;
private long minWcet = 1;
private final long minWcet = 1;
private long ratio = randomTaskRatio(this.parallelTaskRatio);
private long parallelTaskRatio = 0;
private final long parallelTaskRatio = 0;
public Builder() {
public SynchronousTaskBuilder() {
}
......@@ -124,7 +107,7 @@ public class SystemSetup {
return ThreadLocalRandom.current().nextLong(minPeriod, maxParallelPeriod);
}
private long randomTaskRatio(long min) {
private long randomTaskRatio(final long min) {
return ThreadLocalRandom.current().nextLong(min, 100);
}
......@@ -137,68 +120,70 @@ public class SystemSetup {
return ThreadLocalRandom.current().nextLong(minNumberOfJobs, maxNumberOfJobs);
}
private long randomWcet(long period, long numberOfSegments) {
long maxWcet = period / numberOfSegments;
private long randomWcet(final long period, final long numberOfSegments) {
final long maxWcet = period / numberOfSegments;
return ThreadLocalRandom.current().nextLong(minWcet, maxWcet);
}
private Task generateTask() {
boolean serial = randomTaskRatio(0) > this.ratio;
long period = serial ? randomSequentialTaskPeriod() : randomParallelTaskPeriod();
long numberOfSegments = serial ? 1 : randomNumberOfSegments();
long parallelNumberOfJobs = serial ? 1 : randomNumberOfJobs();
Set<Segment> segments = new LinkedHashSet<Segment>();
private SynchronousTask generateTask() {
final boolean serial = randomTaskRatio(0) > this.ratio;
final long period = serial ? randomSequentialTaskPeriod() : randomParallelTaskPeriod();
final long numberOfSegments = serial ? 1 : randomNumberOfSegments();
final long parallelNumberOfJobs = serial ? 1 : randomNumberOfJobs();
final Set<Segment> segments = new LinkedHashSet<Segment>();
for (int i = 0; i < numberOfSegments; ++i) {
long numberOfJobs = i % 2 == 1 ? parallelNumberOfJobs : 1;
long wcet = randomWcet(period, numberOfSegments);
final long numberOfJobs = i % 2 == 1 ? parallelNumberOfJobs : 1;
final long wcet = randomWcet(period, numberOfSegments);
segments.add(new Segment(wcet, numberOfJobs));
}
return new Task(period, segments);
return new SynchronousTask(segments, period, numberOfProcessors);
}
private Set<Task> generateTaskSet() {
Set<Task> taskSet = new HashSet<>();
public Set<SynchronousTask> generateTaskSet() {
this.ratio = randomTaskRatio(this.parallelTaskRatio);
final Set<SynchronousTask> taskSet = new HashSet<>();
for (int i = 0; i < numberOfProcessors; ++i) {
Task task = generateTask();
final SynchronousTask task = generateTask();
taskSet.add(task);
}
return taskSet;
}
public SystemSetup build() {
this.ratio = randomTaskRatio(this.parallelTaskRatio);
Set<Task> taskSet = generateTaskSet();
return new SystemSetup(taskSet, numberOfProcessors);
}
// public SystemSetup<SynchronousTask> build() {
// this.ratio = randomTaskRatio(this.parallelTaskRatio);
// final Set<SynchronousTask> taskSet = generateTaskSet();
// return new SystemSetup<>(taskSet, numberOfProcessors);
// }
public void rebuild(SystemSetup systemSetup) {
this.ratio = randomTaskRatio(this.parallelTaskRatio);
systemSetup.tasks = generateTaskSet();
}
// public Set<SynchronousTask> rebuild(final SystemSetup<SynchronousTask> systemSetup) {
// this.ratio = randomTaskRatio(this.parallelTaskRatio);
// return generateTaskSet();
// }
public boolean addTask(SystemSetup systemSetup) {
return systemSetup.tasks.add(generateTask());
public boolean addTask(final Set<SynchronousTask> taskSet) {
return taskSet.add(generateTask());
}
public Builder setNumberOfProcessors(long numberOfProcessors) {
public SynchronousTaskBuilder setNumberOfProcessors(final long numberOfProcessors) {
this.numberOfProcessors = numberOfProcessors;
return this;
}
public Builder setNumberOfSegments(long minNumberOfSegments, long maxNumberOfSegments) {
public SynchronousTaskBuilder setNumberOfSegments(final long minNumberOfSegments,
final long maxNumberOfSegments) {
this.minNumberOfSegments = minNumberOfSegments;
this.maxNumberOfSegments = maxNumberOfSegments;
return this;
}
public Builder setPeriods(long minPeriod, long maxSequentialPeriod,
long maxParallelPeriod) {
public SynchronousTaskBuilder setPeriods(final long minPeriod,
final long maxSequentialPeriod, final long maxParallelPeriod) {
this.minPeriod = minPeriod;
this.maxSequentialPeriod = maxSequentialPeriod;
this.maxParallelPeriod = maxParallelPeriod;
......@@ -209,11 +194,179 @@ public class SystemSetup {
/**
* @param maxNumberOfJobs the maxNumberOfJobs to set
*/
public Builder setNumberOfJobs(long minNumberOfJobs, long maxNumberOfJobs) {
public SynchronousTaskBuilder setNumberOfJobs(final long minNumberOfJobs,
final long maxNumberOfJobs) {
this.minNumberOfJobs = minNumberOfJobs;
this.maxNumberOfJobs = maxNumberOfJobs;
return this;
}
}
public static class DagTaskBuilder {
private long numberOfProcessors = 4;
private long minimumWcet = 1;
private long maximumWcet = 100;
private long maxNumberOfBranches = 5;
private long depth = 2;
private long p_par = 80;
private long p_add = 20;
private double getBeta() {
return 0.035 * numberOfProcessors;
}
private long randomProbability() {
return ThreadLocalRandom.current().nextLong(1, 100);
}
public Set<DagTask> generateTaskSet(double totalUtilization) {
final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();
double currentUtilization = 0;
while (currentUtilization <= totalUtilization) {
final DagTask dagTask = generateTask();
if (currentUtilization + dagTask.getUtilization() < totalUtilization) {
currentUtilization += dagTask.getUtilization();
taskSet.add(dagTask);
} else {
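// Stretch the final task's period so it contributes at most the remaining utilization;
// only accept it if the period still covers the critical path.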
final double remainingUtilization = totalUtilization - currentUtilization;
final long period =
(long) Math.ceil(dagTask.getWorkload() / remainingUtilization);
if (period >= dagTask.getCriticalPath()) {
final DagTask modifiedTask =
new DagTask(dagTask.getJobDag(), period, numberOfProcessors);
taskSet.add(modifiedTask);
break;
}
}
}
return taskSet;
}
public DagTask generateTask() {
final DirectedAcyclicGraph<Job, DefaultEdge> jobDag =
new DirectedAcyclicGraph<>(DefaultEdge.class);
final Job j = fork(jobDag, Optional.empty(), this.depth);
fork(jobDag, Optional.of(j), this.depth);
randomEdges(jobDag);
final long workload = DagUtils.calculateWorkload(jobDag);
final long criticalPath = DagUtils.calculateCriticalPath(jobDag);
final long period = randomTaskPeriod(criticalPath, workload);
return new DagTask(jobDag, period, numberOfProcessors);
}
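// join(): collapse the given child branches into a single successor job; with no children
// the current job is returned unchanged.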
private Job join(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag, final Job current,
final Set<Job> childs) {
if (childs.size() > 0) {
final Job job = new Job(randomWcet());
jobDag.addVertex(job);
for (final Job c : childs) {
try {
jobDag.addDagEdge(c, job);
} catch (final Exception e) {
System.out.println("Failed to join nodes!");
}
}
return job;
}
return current;
}
private Job fork(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag,
final Optional<Job> predecessor, final long depth) {
final Job job = new Job(randomWcet());
jobDag.addVertex(job);
final Set<Job> childs = new HashSet<>();
if (predecessor.isPresent()) {
try {
jobDag.addDagEdge(predecessor.get(), job);
} catch (final Exception e) {
System.out.println("Adding fork edge failed!");
}
}
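// With probability p_par (in percent), expand this job into several parallel branches
// while the depth budget lasts, then join the branches into a single successor.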
if (depth >= 0 && randomProbability() <= p_par) {
final long numberOfJobs = randomNumberOfBranches();
for (int i = 0; i < numberOfJobs; ++i) {
childs.add(fork(jobDag, Optional.of(job), depth - 1));
}
}
return join(jobDag, job, childs);
}
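// randomEdges(): with probability p_add (in percent) per ordered job pair, add an extra
// precedence edge; pairs whose edge would close a cycle are skipped below.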
private void randomEdges(final DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
final Multimap<Job, Job> edgePairs = ArrayListMultimap.create();
for (final Job j1 : jobDag) {
for (final Job j2 : jobDag) {
if (randomProbability() <= p_add) {
edgePairs.put(j1, j2);
}
}
}
for (final Map.Entry<Job, Job> pairs : edgePairs.entries()) {
try {
jobDag.addDagEdge(pairs.getKey(), pairs.getValue());
} catch (final Exception e) {
// addDagEdge throws if this edge would introduce a cycle; skip it to keep the graph acyclic.
}
}
}
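// The period is drawn from [criticalPath, workload / beta), so the task is feasible in
// isolation and its utilization (workload / period) stays above beta = 0.035 * numberOfProcessors.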
private long randomTaskPeriod(final long criticalPathLength, final long workload) {
return ThreadLocalRandom.current().nextLong(criticalPathLength,
(long) (workload / getBeta()));
}
private long randomNumberOfBranches() {
return ThreadLocalRandom.current().nextLong(2, maxNumberOfBranches);
}
private long randomWcet() {
return ThreadLocalRandom.current().nextLong(minimumWcet, maximumWcet);
}
/**
* @param numberOfProcessors the numberOfProcessors to set
*/
public DagTaskBuilder setNumberOfProcessors(final long numberOfProcessors) {
this.numberOfProcessors = numberOfProcessors;
return this;
}
public DagTaskBuilder setWcets(final long minimumWcet, final long maximumWcet) {
this.minimumWcet = minimumWcet;
this.maximumWcet = maximumWcet;
return this;
}
/**
* @param maxNumberOfBranches the maxNumberOfBranches to set
*/
public DagTaskBuilder setMaxNumberOfBranches(final long maxNumberOfBranches) {
this.maxNumberOfBranches = maxNumberOfBranches;
return this;
}
public DagTaskBuilder setPropabilities(final long p_par, final long p_add) {
this.p_par = p_par;
this.p_add = p_add;
return this;
}
/**
* @param depth the depth to set
*/
public DagTaskBuilder setDepth(final long depth) {
this.depth = depth;
return this;
}
}
}
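DagTaskBuilder.generateTaskSet(totalUtilization) keeps generating random DAG tasks until the requested utilization is reached, stretching the last task's period so the sum does not overshoot. A usage sketch; the DagTaskBuilderExample class and the chosen parameters are illustrative only.

import java.util.Set;
import mvd.jester.model.DagTask;
import mvd.jester.model.SystemSetup.DagTaskBuilder;

public class DagTaskBuilderExample {
    public static void main(String[] args) {
        DagTaskBuilder builder = new DagTaskBuilder().setNumberOfProcessors(8);
        Set<DagTask> taskSet = builder.generateTaskSet(4.0);
        double utilization =
                taskSet.stream().mapToDouble(DagTask::getUtilization).sum();
        // The generated set stays at or below the requested utilization,
        // and every task keeps period >= criticalPath.
        System.out.println(utilization + " <= 4.0");
        for (DagTask t : taskSet) {
            System.out.println(t.getPeriod() >= t.getCriticalPath());
        }
    }
}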
......@@ -2,72 +2,21 @@ package mvd.jester.model;
import java.util.Set;
/**
* Task
*/
public class Task {
public interface Task {
public long getWorkload();
private final long deadline;
private final long period;
private final Set<Segment> segments;
private final long maximumWcet;
private final long maximumParallelism;
public long getCriticalPath();
public Task(long period, long deadline, Set<Segment> segments) {
this.deadline = deadline;
this.period = period;
this.segments = segments;
public double getUtilization();
long maxWcet = 0;
long maxParallelism = 0;
for (Segment s : segments) {
maxWcet += s.getJobWcet() * s.getNumberOfJobs();
if (maxParallelism < s.getNumberOfJobs()) {
maxParallelism = s.getNumberOfJobs();
}
}
public long getMaximumParallelism();
this.maximumParallelism = maxParallelism;
this.maximumWcet = maxWcet;
}
public long getDeadline();
public Task(long period, Set<Segment> segments) {
this(period, period, segments);
}
public long getPeriod();
public long getNumberOfThreads();
/**
* @return the deadline
*/
public long getDeadline() {
return deadline;
}
public Set<Segment> getWorkloadDistribution();
/**
* @return the period
*/
public long getPeriod() {
return period;
}
/**
* @return the segments
*/
public Set<Segment> getSegments() {
return segments;
}
/**
* @return the maximumWcet
*/
public long getMaximumWcet() {
return maximumWcet;
}
/**
* @return the maximumParallelism
*/
public long getMaximumParallelism() {
return maximumParallelism;
}
}
......@@ -16,7 +16,7 @@ import mvd.jester.tests.ChwaLee;
*/
public class EarliestDeadlineFirst implements PriorityManager {
final static Set<Class<? extends AbstractTest>> abstractTests =
final static Set<Class<? extends AbstractTest<? extends Task>>> abstractTests =
new HashSet<>(Arrays.asList(ChwaLee.class));
final static Set<Class<? extends AbstractSimulator>> abstractSimulators =
new HashSet<>(Arrays.asList(ParallelSynchronous.class, DynamicForkJoin.class));
......@@ -40,12 +40,12 @@ public class EarliestDeadlineFirst implements PriorityManager {
}
@Override
public boolean hasTest(AbstractTest abstractTest) {
public boolean hasTest(AbstractTest<? extends Task> abstractTest) {
return abstractTests.contains(abstractTest.getClass());
}
@Override
public boolean hasTest(Class<? extends AbstractTest> abstractTestClass) {
public boolean hasTest(Class<? extends AbstractTest<? extends Task>> abstractTestClass) {
return abstractTests.contains(abstractTestClass);
}
......
......@@ -14,9 +14,9 @@ public interface PriorityManager {
public int compare(TaskContextInterface t1, TaskContextInterface t2);
public boolean hasTest(AbstractTest abstractTest);
public boolean hasTest(AbstractTest<? extends Task> abstractTest);
public boolean hasTest(Class<? extends AbstractTest> abstractTestClass);
public boolean hasTest(Class<? extends AbstractTest<? extends Task>> abstractTestClass);
public boolean hasSimulator(AbstractSimulator abstractTest);
......
......@@ -10,8 +10,9 @@ import mvd.jester.tests.AbstractTest;
public class RateMonotonic implements PriorityManager {
final static Set<Class<? extends AbstractTest>> abstractTests = new HashSet<>(Arrays
.asList(mvd.jester.tests.MaiaBertogna.class, mvd.jester.tests.SchmidMottok.class));
final static Set<Class<? extends AbstractTest<? extends Task>>> abstractTests =
new HashSet<>(Arrays.asList(mvd.jester.tests.MaiaBertogna.class,
mvd.jester.tests.SchmidMottok.class));
final static Set<Class<? extends AbstractSimulator>> abstractSimulators =
new HashSet<>(Arrays.asList(mvd.jester.simulator.ParallelSynchronous.class,
mvd.jester.simulator.DynamicForkJoin.class));
......@@ -35,12 +36,12 @@ public class RateMonotonic implements PriorityManager {
}
@Override
public boolean hasTest(AbstractTest abstractTest) {
public boolean hasTest(AbstractTest<? extends Task> abstractTest) {
return abstractTests.contains(abstractTest.getClass());
}
@Override
public boolean hasTest(Class<? extends AbstractTest> abstractTestClass) {
public boolean hasTest(Class<? extends AbstractTest<? extends Task>> abstractTestClass) {
return abstractTests.contains(abstractTestClass);
}
......
......@@ -2,17 +2,15 @@ package mvd.jester.simulator;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import com.google.common.collect.TreeMultiset;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
import mvd.jester.TypeInterface;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.simulator.internals.ProcessorContext;
import mvd.jester.simulator.internals.TaskContextInterface;
......@@ -22,11 +20,11 @@ import mvd.jester.simulator.internals.TaskContextInterface;
*/
public abstract class AbstractSimulator implements SimulatorInterface, TypeInterface {
protected final SystemSetup systemSetup;
protected final SystemSetup<SynchronousTask> systemSetup;
protected final Set<ProcessorContext> processors;
protected TreeMultiset<TaskContextInterface> readyTasks;
AbstractSimulator(SystemSetup systemSetup) {
AbstractSimulator(SystemSetup<SynchronousTask> systemSetup) {
this.systemSetup = systemSetup;
this.readyTasks = TreeMultiset.create((t1, t2) -> new RateMonotonic().compare(t1, t2));
processors = new HashSet<>();
......@@ -40,46 +38,47 @@ public abstract class AbstractSimulator implements SimulatorInterface, TypeInter
@Override
public SchedulingInfo runSimulation(PriorityManager priorityManager) {
SchedulingInfo schedulingInfo = new SchedulingInfo(systemSetup.getParallelTaskRatio(),
systemSetup.getUtilization());
long hyperPeriod = init(priorityManager);
for (int t = 0; t < hyperPeriod; ++t) {
if (!releaseTasks(t)) {
throw new RuntimeException("Could not release a task. This should not happen!");
}
Set<ProcessorContext> sortedProcessors = sortProcessors(processors);
for (ProcessorContext p : sortedProcessors) {
for (TaskContextInterface tc : readyTasks) {
if (p.acceptTask(tc, t)) {
break;
}
}
}
for (ProcessorContext p : processors) {
Optional<TaskContextInterface> optionalTc = p.updateExecution(t);
if (optionalTc.isPresent()) {
TaskContextInterface tc = optionalTc.get();
if (t >= tc.getDeadline()) {
TerminationInfo terminationInfo =
new TerminationInfo(tc.getReleaseTime(), tc.getDeadline(), t);
schedulingInfo.addTerminationInfo(terminationInfo);
EventPrinter.print("Time " + t + ": Task " + tc + " failed its deadline!");
schedulingInfo.setFailedTerminationInfo(terminationInfo);
return schedulingInfo;
}
readyTasks.remove(optionalTc.get());
}
}
}
return schedulingInfo;
// SchedulingInfo schedulingInfo = new SchedulingInfo(systemSetup.getParallelTaskRatio(),
// systemSetup.getUtilization());
// long hyperPeriod = init(priorityManager);
// for (int t = 0; t < hyperPeriod; ++t) {
// if (!releaseTasks(t)) {
// throw new RuntimeException("Could not release a task. This should not happen!");
// }
// Set<ProcessorContext> sortedProcessors = sortProcessors(processors);
// for (ProcessorContext p : sortedProcessors) {
// for (TaskContextInterface tc : readyTasks) {
// if (p.acceptTask(tc, t)) {
// break;
// }
// }
// }
// for (ProcessorContext p : processors) {
// Optional<TaskContextInterface> optionalTc = p.updateExecution(t);
// if (optionalTc.isPresent()) {
// TaskContextInterface tc = optionalTc.get();
// if (t >= tc.getDeadline()) {
// TerminationInfo terminationInfo =
// new TerminationInfo(tc.getReleaseTime(), tc.getDeadline(), t);
// schedulingInfo.addTerminationInfo(terminationInfo);
// EventPrinter.print("Time " + t + ": Task " + tc + " failed its deadline!");
// schedulingInfo.setFailedTerminationInfo(terminationInfo);
// return schedulingInfo;
// }
// readyTasks.remove(optionalTc.get());
// }
// }
// }
// return schedulingInfo;
return null;
}
private long init(PriorityManager priorityManager) {
......@@ -99,8 +98,8 @@ public abstract class AbstractSimulator implements SimulatorInterface, TypeInter
}
private long getHyperPeriod() {
return systemSetup.getTasks().stream().max(Comparator.comparing(Task::getPeriod)).get()
.getPeriod() * 10;
return systemSetup.getTasks().stream().max(Comparator.comparing(SynchronousTask::getPeriod))
.get().getPeriod() * 10;
}
private class ProcessorComparator implements Comparator<ProcessorContext> {
......
......@@ -2,7 +2,7 @@ package mvd.jester.simulator;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.internals.dynamicforkjoin.TaskContext;
/**
......@@ -10,14 +10,14 @@ import mvd.jester.simulator.internals.dynamicforkjoin.TaskContext;
*/
public class DynamicForkJoin extends AbstractSimulator {
public DynamicForkJoin(SystemSetup systemSetup) {
public DynamicForkJoin(SystemSetup<SynchronousTask> systemSetup) {
super(systemSetup);
}
@Override
protected boolean releaseTasks(long timeStep) {
for (Task t : systemSetup.getTasks()) {
for (SynchronousTask t : systemSetup.getTasks()) {
if (timeStep % t.getPeriod() == 0) {
TaskContext tc = new TaskContext(t, systemSetup.getNumberOfProcessors(), timeStep);
if (!readyTasks.add(tc)) {
......
package mvd.jester.simulator;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;
/**
......@@ -9,13 +9,13 @@ import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;
*/
public class ParallelSynchronous extends AbstractSimulator {
public ParallelSynchronous(SystemSetup systemSetup) {
public ParallelSynchronous(SystemSetup<SynchronousTask> systemSetup) {
super(systemSetup);
}
@Override
protected boolean releaseTasks(long timeStep) {
for (Task t : systemSetup.getTasks()) {
for (SynchronousTask t : systemSetup.getTasks()) {
if (timeStep % t.getPeriod() == 0) {
TaskContext tc = new TaskContext(t, timeStep);
if (!readyTasks.add(tc)) {
......
package mvd.jester.simulator.internals;
import java.util.Optional;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
/**
* TaskContextInterface
*/
public interface TaskContextInterface {
public Task getTask();
public SynchronousTask getTask();
public Optional<TaskContextInterface> acceptNotification(long time);
......
......@@ -26,9 +26,9 @@ public class SegmentContext {
jobs.add(new JobContext(taskContext, this));
}
for (int j = 0; j < segment.getNumberOfTasklets(); ++j) {
tasklets.add(new TaskletContext(taskContext, this));
}
// for (int j = 0; j < segment.getNumberOfTasklets(); ++j) {
// tasklets.add(new TaskletContext(taskContext, this));
// }
}
......@@ -72,7 +72,8 @@ public class SegmentContext {
@Override
public String toString() {
return "(nJobs=" + segment.getNumberOfJobs() + ", nTasklets="
+ segment.getNumberOfTasklets() + ", taskletWcet=" + segment.getTaskletWcet() + ")";
return "something";
// return "(nJobs=" + segment.getNumberOfJobs() + ", nTasklets="
// + segment.getNumberOfTasklets() + ", taskletWcet=" + segment.getTaskletWcet() + ")";
}
}
......@@ -3,7 +3,7 @@ package mvd.jester.simulator.internals.dynamicforkjoin;
import java.util.ArrayList;
import java.util.Optional;
import mvd.jester.model.Segment;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.EventPrinter;
import mvd.jester.simulator.internals.JobContextInterface;
import mvd.jester.simulator.internals.TaskContextInterface;
......@@ -13,7 +13,7 @@ import mvd.jester.simulator.internals.TaskContextInterface;
*/
public class TaskContext implements TaskContextInterface {
private final Task task;
private final SynchronousTask task;
private final ArrayList<SegmentContext> segments;
private final long releaseTime;
private final long deadline;
......@@ -21,7 +21,7 @@ public class TaskContext implements TaskContextInterface {
private int segmentCounter;
public TaskContext(Task task, long numberOfProcessors, long releaseTime) {
public TaskContext(SynchronousTask task, long numberOfProcessors, long releaseTime) {
this.task = task;
this.segments = new ArrayList<>();
this.currentSegment = 0;
......@@ -29,7 +29,7 @@ public class TaskContext implements TaskContextInterface {
this.deadline = releaseTime + task.getDeadline();
this.releaseTime = releaseTime;
for (Segment s : task.getSegments()) {
for (Segment s : task.getWorkloadDistribution()) {
segments.add(new SegmentContext(this, s, numberOfProcessors));
}
}
......@@ -37,7 +37,7 @@ public class TaskContext implements TaskContextInterface {
/**
* @return the task
*/
public Task getTask() {
public SynchronousTask getTask() {
return task;
}
......@@ -57,14 +57,14 @@ public class TaskContext implements TaskContextInterface {
public Optional<TaskContextInterface> acceptNotification(long time) {
segmentCounter++;
if (segmentCounter >= segments.get(currentSegment).getSegment().getNumberOfTasklets()) {
currentSegment++;
segmentCounter = 0;
if (currentSegment >= segments.size()) {
EventPrinter.print("Time " + time + ": Task " + this + "finished!");
return Optional.of(this);
}
}
// if (segmentCounter >= segments.get(currentSegment).getSegment().getNumberOfTasklets()) {
// currentSegment++;
// segmentCounter = 0;
// if (currentSegment >= segments.size()) {
// EventPrinter.print("Time " + time + ": Task " + this + "finished!");
// return Optional.of(this);
// }
// }
return Optional.empty();
......
......@@ -15,7 +15,8 @@ public class TaskletContext {
public TaskletContext(TaskContext taskContext, SegmentContext segment) {
this.taskContext = taskContext;
this.wcet = segment.getSegment().getTaskletWcet();
this.wcet = 88;
// segment.getSegment().getTaskletWcet();
this.executionTime = wcet;
currentJob = Optional.empty();
}
......
......@@ -3,7 +3,7 @@ package mvd.jester.simulator.internals.parallelsynchronous;
import java.util.ArrayList;
import java.util.Optional;
import mvd.jester.model.Segment;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.EventPrinter;
import mvd.jester.simulator.internals.JobContextInterface;
import mvd.jester.simulator.internals.TaskContextInterface;
......@@ -13,14 +13,14 @@ import mvd.jester.simulator.internals.TaskContextInterface;
*/
public class TaskContext implements TaskContextInterface {
private final Task task;
private final SynchronousTask task;
private final ArrayList<SegmentContext> segments;
private final long deadline;
private final long releaseTime;
private int currentSegment;
private int segmentCounter;
public TaskContext(Task task, long releaseTime) {
public TaskContext(SynchronousTask task, long releaseTime) {
this.task = task;
this.segments = new ArrayList<>();
this.currentSegment = 0;
......@@ -28,7 +28,7 @@ public class TaskContext implements TaskContextInterface {
this.releaseTime = releaseTime;
this.deadline = releaseTime + task.getDeadline();
for (Segment s : task.getSegments()) {
for (Segment s : task.getWorkloadDistribution()) {
segments.add(new SegmentContext(this, s));
}
}
......@@ -36,7 +36,7 @@ public class TaskContext implements TaskContextInterface {
/**
* @return the task
*/
public Task getTask() {
public SynchronousTask getTask() {
return task;
}
......
package mvd.jester.tests;
import java.util.HashMap;
import java.util.Map;
import mvd.jester.TypeInterface;
import mvd.jester.info.TerminationInfo;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.Task;
/**
* AbstractTest
*/
public abstract class AbstractTest implements TestInterface, TypeInterface {
public abstract class AbstractTest<T extends Task> implements TestInterface<T>, TypeInterface {
protected final Map<Task, TerminationInfo> responseTimes;
protected final SystemSetup systemSetup;
protected final long numberOfProcessors;
public AbstractTest(SystemSetup systemSetup) {
this.systemSetup = systemSetup;
this.responseTimes = new HashMap<>();
public AbstractTest(long numberOfProcessors) {
this.numberOfProcessors = numberOfProcessors;
}
}
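The refactored base class no longer holds the system setup or a shared response-time map; it keeps only the processor count, and each concrete test carries its own state. A minimal sketch of what a subclass looks like under this contract; DummyTest and its placeholder response-time logic are hypothetical, only the constructor and method signatures are taken from this commit:

package mvd.jester.tests;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;

// Hypothetical subclass, not part of the commit: it only illustrates the shape
// a concrete test takes after the AbstractTest refactoring.
public class DummyTest extends AbstractTest<DagTask> {

    private final Map<DagTask, TerminationInfo> responseTimes = new HashMap<>();
    private final PriorityManager priorityManager = new RateMonotonic();

    public DummyTest(long numberOfProcessors) {
        super(numberOfProcessors);
    }

    @Override
    public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
        responseTimes.clear();
        // Placeholder analysis: report every task as finishing exactly at its deadline.
        for (DagTask t : tasks) {
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), t.getDeadline(), Level.HIGH));
        }
        return new SchedulingInfo(new HashSet<>(responseTimes.values()),
                tasks.getParallelTaskRatio(), tasks.getUtilization());
    }

    @Override
    public PriorityManager getPriorityManager() {
        return priorityManager;
    }

    @Override
    public String getName() {
        return "DummyTest";
    }
}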
......@@ -3,8 +3,10 @@ package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
......@@ -12,42 +14,49 @@ import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.priority.EarliestDeadlineFirst;
import mvd.jester.priority.PriorityManager;
/**
* ChwaLee
*/
public class ChwaLee extends AbstractTest {
public class ChwaLee extends AbstractTest<SynchronousTask> {
public ChwaLee(SystemSetup systemSetup) {
super(systemSetup);
private final Map<SynchronousTask, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public ChwaLee(long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new EarliestDeadlineFirst();
}
@Override
public PriorityManager getPriorityManager() {
return priorityManager;
}
@Override
public SchedulingInfo runSchedulabilityCheck(PriorityManager priorityManager) {
SortedTaskSet tasks = new SortedTaskSet(priorityManager);
tasks.addAll(systemSetup.getTasks());
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<SynchronousTask> tasks) {
responseTimes.clear();
for (Task t : tasks) {
for (SynchronousTask t : tasks) {
Level taskLevel = tasks.headSet(t).size() <= tasks.size() / 2 ? Level.HIGH : Level.LOW;
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, taskLevel));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
systemSetup.getParallelTaskRatio(), systemSetup.getUtilization());
tasks.getParallelTaskRatio(), tasks.getUtilization());
}
private long calculateResponseTime(Set<Task> tasks, Task task) {
private long calculateResponseTime(Set<SynchronousTask> tasks, SynchronousTask task) {
long minimumWcet = getMinimumWcet(task);
long deadline = task.getDeadline();
long numberOfProcessors = systemSetup.getNumberOfProcessors();
long taskInterference = 0;
for (Task t : tasks) {
for (SynchronousTask t : tasks) {
if (!t.equals(task)) {
long maxNumberOfJobs = t.getMaximumParallelism();
for (long p = 0; p < maxNumberOfJobs; ++p) {
......@@ -72,10 +81,10 @@ public class ChwaLee extends AbstractTest {
return feasible ? deadline - 1 : deadline + 1;
}
private long getSelfInterference(Task task, long deadline, long p) {
private long getSelfInterference(SynchronousTask task, long deadline, long p) {
long selfInterference = 0;
for (Segment s : task.getSegments()) {
for (Segment s : task.getWorkloadDistribution()) {
if (s.getNumberOfJobs() >= p + 1) {
selfInterference += s.getJobWcet();
}
......@@ -83,12 +92,12 @@ public class ChwaLee extends AbstractTest {
return selfInterference;
}
private long getTaskInterference(Task t, long deadline, long p) {
private long getTaskInterference(SynchronousTask t, long deadline, long p) {
long numberOfBodyJobs = LongMath.divide(deadline, t.getPeriod(), RoundingMode.FLOOR);
long workloadOfBodyJobs = 0;
for (Segment s : t.getSegments()) {
for (Segment s : t.getWorkloadDistribution()) {
if (s.getNumberOfJobs() >= p) {
workloadOfBodyJobs += s.getJobWcet();
}
......@@ -98,7 +107,7 @@ public class ChwaLee extends AbstractTest {
long boundedCarryInWorkload = 0;
long remainingLength = deadline % t.getPeriod();
long carryInLength = 0;
List<Segment> segmentList = new ArrayList<>(t.getSegments());
List<Segment> segmentList = new ArrayList<>(t.getWorkloadDistribution());
Collections.reverse(segmentList);
for (Segment s : segmentList) {
carryInLength += s.getJobWcet();
......@@ -115,9 +124,9 @@ public class ChwaLee extends AbstractTest {
return boundedBodyWorkload + boundedCarryInWorkload;
}
private long getMinimumWcet(Task task) {
private long getMinimumWcet(SynchronousTask task) {
long minWcet = 0;
for (Segment s : task.getSegments()) {
for (Segment s : task.getWorkloadDistribution()) {
minWcet += s.getJobWcet();
}
......
package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.common.collect.Lists;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.DagTask;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
public class FonsecaNelis extends AbstractTest<DagTask> {
private final Map<Task, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public FonsecaNelis(long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
}
@Override
public PriorityManager getPriorityManager() {
return priorityManager;
}
@Override
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
createNFJandDecompositionTree(tasks);
responseTimes.clear();
for (DagTask t : tasks) {
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, Level.HIGH));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
tasks.getParallelTaskRatio(), tasks.getUtilization());
}
private void createNFJandDecompositionTree(SortedTaskSet<DagTask> tasks) {
}
private long calculateResponseTime(SortedTaskSet<DagTask> tasks, DagTask task) {
long criticalPath = task.getCriticalPath();
long responseTime = criticalPath;
long previousResponseTime = 0;
do {
previousResponseTime = responseTime;
double taskInterference = 0;
for (DagTask t : tasks) {
if (t.getPeriod() < task.getPeriod()) {
taskInterference += getTaskInterference(t, responseTime);
}
}
taskInterference /= numberOfProcessors;
double selfInterference = getSelfInterference(task);
long totalInterference = (long) Math.floor(taskInterference + selfInterference);
responseTime = criticalPath + totalInterference;
} while (responseTime != previousResponseTime);
return responseTime;
}
private double getTaskInterference(DagTask task, long interval) {
long period = task.getPeriod();
long criticalPath = task.getCriticalPath();
long numberOfIntervals =
LongMath.divide(interval - criticalPath, period, RoundingMode.FLOOR);
long carryInAndOutInterval = interval - Math.max(0, numberOfIntervals) * period;
long numberOfBodyJobs =
LongMath.divide(interval - carryInAndOutInterval, period, RoundingMode.FLOOR);
long bodyWorkload = Math.max(0, numberOfBodyJobs) * task.getWorkload();
long carryInAndOutWorkload = getCarryInAndOutWorkload(task, task.getWorkloadDistribution(),
new HashSet<>(), carryInAndOutInterval);
return carryInAndOutWorkload + bodyWorkload;
}
private long getCarryInAndOutWorkload(DagTask task, Set<Segment> carryInDistribution,
Set<Segment> carryOutDistribution, long carryInAndOutPeriod) {
long workload = getCarryOutWorkload(task, carryOutDistribution, carryInAndOutPeriod);
long carryInPeriod = task.getPeriod() - responseTimes.get(task).getResponseTime();
long carryOutPeriod = 0;
List<Segment> carryInList = Lists.newArrayList(carryInDistribution);
Collections.reverse(carryInList);
for (Segment s : carryInList) {
carryInPeriod += s.getJobWcet();
carryOutPeriod = carryInAndOutPeriod - carryInPeriod;
long carryInWorkload = getCarryInWorkload(task, carryInDistribution, carryInPeriod);
long carryOutWorkload = getCarryOutWorkload(task, carryOutDistribution, carryOutPeriod);
workload = Math.max(workload, carryInWorkload + carryOutWorkload);
}
workload = Math.max(workload,
getCarryInWorkload(task, carryInDistribution, carryInAndOutPeriod));
carryOutPeriod = 0;
for (Segment s : carryOutDistribution) {
carryOutPeriod += s.getJobWcet();
carryInPeriod = carryInAndOutPeriod - carryOutPeriod;
long carryInWorkload = getCarryInWorkload(task, carryInDistribution, carryInPeriod);
long carryOutWorkload = getCarryOutWorkload(task, carryOutDistribution, carryOutPeriod);
workload = Math.max(workload, carryInWorkload + carryOutWorkload);
}
return workload;
}
private long getCarryOutWorkload(DagTask task, Set<Segment> carryOutDistribution,
long carryOutPeriod) {
long workload = 0;
long period = task.getPeriod();
long responseTime = responseTimes.get(task).getResponseTime();
List<Segment> distributionList = Lists.newArrayList(carryOutDistribution);
for (int i = 0; i < distributionList.size(); ++i) {
Segment s = distributionList.get(i);
long weightOfPreviousSegments = 0;
for (int j = 0; j < i; ++j) {
weightOfPreviousSegments += distributionList.get(j).getJobWcet();
}
long width = carryOutPeriod - weightOfPreviousSegments;
workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs();
}
long improvedWorkload =
Math.max(carryOutPeriod - (period - responseTime), 0) * numberOfProcessors;
return Math.min(improvedWorkload, workload);
}
private long getCarryInWorkload(DagTask task, Set<Segment> carryInDistribution,
long carryInPeriod) {
long workload = 0;
long period = task.getPeriod();
long responseTime = responseTimes.get(task).getResponseTime();
List<Segment> distributionList = Lists.newArrayList(carryInDistribution);
for (int i = 0; i < carryInDistribution.size(); ++i) {
Segment s = distributionList.get(i);
long weightOfRemainingSegments = 0;
for (int j = i + 1; j < carryInDistribution.size(); ++j) {
weightOfRemainingSegments += distributionList.get(j).getJobWcet();
}
long width = carryInPeriod - period + responseTime - weightOfRemainingSegments;
workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs();
}
long improvedWorkload =
Math.max(carryInPeriod - (period - responseTime), 0) * numberOfProcessors;
return Math.min(improvedWorkload, workload);
}
private double getSelfInterference(DagTask task) {
long criticalPath = task.getCriticalPath();
long workload = task.getWorkload();
return (double) (workload - criticalPath) / numberOfProcessors;
}
@Override
public String getName() {
return "FonsecaNelis";
}
}
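The carry-out bound above slides a window of length carryOutPeriod over the task's workload distribution: each segment contributes its job WCET, clamped to the window width left after the preceding segments, times its number of jobs; the sum is then capped by a fully busy machine over the part of the window that exceeds the task's slack (period minus response time), and the smaller of the two bounds is kept. A self-contained sketch of that computation with hypothetical numbers, where Segment is replaced by plain {jobWcet, numberOfJobs} pairs:

// Standalone sketch of the carry-out workload bound, with hypothetical values.
public class CarryOutSketch {

    public static void main(String[] args) {
        long[][] carryOutDistribution = {{3, 2}, {4, 1}}; // {jobWcet, numberOfJobs}
        long period = 20;
        long responseTime = 12;
        long numberOfProcessors = 4;
        long carryOutPeriod = 10;

        long workload = 0;
        long weightOfPreviousSegments = 0;
        for (long[] s : carryOutDistribution) {
            long jobWcet = s[0];
            long numberOfJobs = s[1];
            // Window width still available to this segment after the earlier ones.
            long width = carryOutPeriod - weightOfPreviousSegments;
            workload += Math.max(Math.min(width, jobWcet), 0) * numberOfJobs;
            weightOfPreviousSegments += jobWcet;
        }
        // Alternative bound: all processors busy for the part of the window that
        // exceeds the slack (period - responseTime); return the smaller bound.
        long improvedWorkload =
                Math.max(carryOutPeriod - (period - responseTime), 0) * numberOfProcessors;
        System.out.println(Math.min(improvedWorkload, workload)); // prints 8
    }
}

getCarryInWorkload mirrors this computation, but measures each segment's available width against the segments that follow it rather than those that precede it.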
package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
......@@ -9,32 +11,40 @@ import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.priority.PriorityManager;
import mvd.jester.model.SystemSetup;
import mvd.jester.priority.RateMonotonic;
/**
* MaiaBertogna
*/
public class MaiaBertogna extends AbstractTest {
public class MaiaBertogna extends AbstractTest<SynchronousTask> {
public MaiaBertogna(SystemSetup systemSetup) {
super(systemSetup);
private final Map<SynchronousTask, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public MaiaBertogna(long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
}
@Override
public PriorityManager getPriorityManager() {
return priorityManager;
}
@Override
public SchedulingInfo runSchedulabilityCheck(PriorityManager priorityManager) {
SortedTaskSet tasks = new SortedTaskSet(priorityManager);
tasks.addAll(systemSetup.getTasks());
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<SynchronousTask> tasks) {
responseTimes.clear();
for (Task t : tasks) {
for (SynchronousTask t : tasks) {
Level taskLevel = tasks.headSet(t).size() <= tasks.size() / 2 ? Level.HIGH : Level.LOW;
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, taskLevel));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
systemSetup.getParallelTaskRatio(), systemSetup.getUtilization());
tasks.getParallelTaskRatio(), tasks.getUtilization());
}
@Override
......@@ -42,7 +52,7 @@ public class MaiaBertogna extends AbstractTest {
return "MaiaBertogna";
}
private long calculateResponseTime(Set<Task> tasks, Task task) {
private long calculateResponseTime(Set<SynchronousTask> tasks, SynchronousTask task) {
long minimumWcet = getMinimumWcet(task);
long responseTime = minimumWcet;
long previousResponseTime = 0;
......@@ -51,7 +61,7 @@ public class MaiaBertogna extends AbstractTest {
previousResponseTime = responseTime;
long taskInterference = 0;
for (Task t : tasks) {
for (SynchronousTask t : tasks) {
if (t.getPeriod() < task.getPeriod()) {
long maxNumberOfJobsOfT = t.getMaximumParallelism();
for (int p = 0; p < maxNumberOfJobsOfT; ++p) {
......@@ -70,7 +80,7 @@ public class MaiaBertogna extends AbstractTest {
}
long totalInterference = LongMath.divide(taskInterference + selfInterference,
systemSetup.getNumberOfProcessors(), RoundingMode.FLOOR);
numberOfProcessors, RoundingMode.FLOOR);
responseTime = minimumWcet + totalInterference;
} while (previousResponseTime != responseTime);
......@@ -79,10 +89,10 @@ public class MaiaBertogna extends AbstractTest {
return responseTime;
}
private long getSelfInterference(Task task, long parallelism) {
private long getSelfInterference(SynchronousTask task, long parallelism) {
long interference = 0;
for (Segment s : task.getSegments()) {
for (Segment s : task.getWorkloadDistribution()) {
if (s.getNumberOfJobs() >= parallelism + 1) {
interference += s.getJobWcet();
}
......@@ -91,7 +101,7 @@ public class MaiaBertogna extends AbstractTest {
return interference;
}
private long getTaskInterference(Task task, long interval, long parallelism) {
private long getTaskInterference(SynchronousTask task, long interval, long parallelism) {
if (responseTimes.containsKey(task)) {
long responseTime = responseTimes.get(task).getResponseTime();
long minWcet = getMinimumWcet(task);
......@@ -102,7 +112,7 @@ public class MaiaBertogna extends AbstractTest {
long workload = 0;
for (Segment s : task.getSegments()) {
for (Segment s : task.getWorkloadDistribution()) {
if (s.getNumberOfJobs() >= parallelism) {
workload += s.getJobWcet();
}
......@@ -116,9 +126,9 @@ public class MaiaBertogna extends AbstractTest {
}
}
private long getMinimumWcet(Task task) {
private long getMinimumWcet(SynchronousTask task) {
long minWcet = 0;
for (Segment s : task.getSegments()) {
for (Segment s : task.getWorkloadDistribution()) {
minWcet += s.getJobWcet();
}
......
package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.math.DoubleMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
public class MelaniButtazzo extends AbstractTest<DagTask> {
private final Map<Task, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public MelaniButtazzo(long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
}
@Override
public PriorityManager getPriorityManager() {
return priorityManager;
}
@Override
public String getName() {
return "MelaniButtazzo";
}
@Override
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
responseTimes.clear();
for (DagTask t : tasks) {
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, Level.HIGH));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
tasks.getParallelTaskRatio(), tasks.getUtilization());
}
private long calculateResponseTime(Set<DagTask> tasks, DagTask task) {
long minimumWcet = task.getCriticalPath();
long responseTime = minimumWcet;
long previousResponseTime = 0;
do {
previousResponseTime = responseTime;
double taskInterference = 0;
for (DagTask t : tasks) {
if (t.getPeriod() < task.getPeriod()) {
taskInterference += getTaskInterference(t, responseTime);
}
}
taskInterference /= numberOfProcessors;
double selfInterference = getSelfInterference(task);
long totalInterference = (long) Math.floor(taskInterference + selfInterference);
responseTime = minimumWcet + totalInterference;
} while (previousResponseTime != responseTime);
return responseTime;
}
private double getSelfInterference(DagTask task) {
long criticalPath = task.getCriticalPath();
long workload = task.getWorkload();
return (double) (workload - criticalPath) / numberOfProcessors;
}
private double getTaskInterference(DagTask task, long interval) {
if (responseTimes.containsKey(task)) {
long responseTime = responseTimes.get(task).getResponseTime();
long singleWorkload = task.getWorkload();
long period = task.getPeriod();
double nominator =
(interval + responseTime - (double) singleWorkload / numberOfProcessors);
long amountOfJobs = DoubleMath.roundToLong(nominator / period, RoundingMode.FLOOR);
double carryOutPortion =
Math.min(singleWorkload, numberOfProcessors * (nominator % period));
double interference = amountOfJobs * singleWorkload + carryOutPortion;
return interference;
} else {
throw new RuntimeException("Task was not found in task set!");
}
}
}
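Like FonsecaNelis above, this test searches for the response time by fixed-point iteration: start at the critical path, add the interference computed for the current estimate, and repeat until the value no longer changes. A minimal self-contained sketch of that loop; the interference function here is a made-up stand-in for the per-task and self-interference bounds used by the real tests:

import java.util.function.LongUnaryOperator;

// Generic fixed-point response-time iteration with a hypothetical interference function.
public class ResponseTimeIterationSketch {

    static long iterate(long criticalPath, LongUnaryOperator interferenceFor) {
        long responseTime = criticalPath;
        long previous;
        do {
            previous = responseTime;
            responseTime = criticalPath + interferenceFor.applyAsLong(responseTime);
        } while (responseTime != previous);
        return responseTime;
    }

    public static void main(String[] args) {
        // Toy interference: one interfering job of length 3 per started 10-unit window.
        long r = iterate(5, window -> 3 * ((window + 9) / 10));
        System.out.println(r); // converges to 8
    }
}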
package mvd.jester.tests;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import com.google.common.math.LongMath;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.DagTask;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
import mvd.jester.priority.PriorityManager;
import mvd.jester.model.SystemSetup;
import mvd.jester.priority.RateMonotonic;
/**
* SchmidMottok
*/
public class SchmidMottok extends AbstractTest {
public class SchmidMottok extends AbstractTest<DagTask> {
public SchmidMottok(SystemSetup systemSetup) {
super(systemSetup);
private final Map<Task, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public SchmidMottok(long numberOfProcessors) {
super(numberOfProcessors);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
}
@Override
public SchedulingInfo runSchedulabilityCheck(PriorityManager priorityManager) {
SortedTaskSet tasks = new SortedTaskSet(priorityManager);
tasks.addAll(systemSetup.getTasks());
public PriorityManager getPriorityManager() {
return priorityManager;
}
@Override
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
responseTimes.clear();
for (Task t : tasks) {
Level taskLevel = tasks.headSet(t).size() <= tasks.size() / 2 ? Level.HIGH : Level.LOW;
for (DagTask t : tasks) {
long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, taskLevel));
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, Level.HIGH));
}
return new SchedulingInfo(new HashSet<>(responseTimes.values()),
systemSetup.getParallelTaskRatio(), systemSetup.getUtilization());
tasks.getParallelTaskRatio(), tasks.getUtilization());
}
@Override
......@@ -42,22 +52,19 @@ public class SchmidMottok extends AbstractTest {
return "SchmidMottok";
}
private long calculateResponseTime(Set<Task> tasks, Task task) {
long minimumWcet = getMinimumWcet(task);
private long calculateResponseTime(Set<DagTask> tasks, DagTask task) {
long minimumWcet = task.getCriticalPath();
long responseTime = minimumWcet;
long previousResponseTime = 0;
long numberOfProcessors = systemSetup.getNumberOfProcessors();
do {
previousResponseTime = responseTime;
double taskInterference = 0;
for (Task t : tasks) {
for (DagTask t : tasks) {
if (t.getPeriod() < task.getPeriod()) {
long numberOfJobs =
t.getMaximumParallelism() > numberOfProcessors ? numberOfProcessors
: t.getMaximumParallelism();
for (int p = 0; p < numberOfJobs; ++p) {
long numberOfThreads = t.getNumberOfThreads();
for (int p = 0; p < numberOfThreads; ++p) {
taskInterference += Math.min(getTaskInterference(t, responseTime, p + 1),
responseTime - minimumWcet + 1);
}
......@@ -76,39 +83,35 @@ public class SchmidMottok extends AbstractTest {
}
private double getSelfInterference(Task task) {
private double getSelfInterference(DagTask task) {
double interference = 0;
long numberOfProcessors = systemSetup.getNumberOfProcessors();
long numberOfJobs = task.getMaximumParallelism() > numberOfProcessors ? numberOfProcessors
: task.getMaximumParallelism();
long numberOfThreads = task.getNumberOfThreads();
for (Segment s : task.getSegments()) {
interference += (double) (s.getNumberOfTasklets() - 1) * s.getTaskletWcet();
for (Segment s : task.getWorkloadDistribution()) {
interference += (double) (s.getNumberOfJobs() - 1) * s.getJobWcet();
}
interference /= numberOfJobs;
interference /= numberOfThreads;
return interference;
}
private double getTaskInterference(Task task, long interval, long parallelism) {
private double getTaskInterference(DagTask task, long interval, long parallelism) {
if (responseTimes.containsKey(task)) {
long responseTime = responseTimes.get(task).getResponseTime();
long minWcet = getMinimumWcet(task);
long minWcet = task.getCriticalPath();
long period = task.getPeriod();
long numberOfProcessors = systemSetup.getNumberOfProcessors();
long amountOfJobs =
(LongMath.divide(interval + responseTime - minWcet, period, RoundingMode.FLOOR)
+ 1);
double workload = 0;
for (Segment s : task.getSegments()) {
long numberOfJobs = s.getNumberOfJobs() > numberOfProcessors ? numberOfProcessors
: s.getNumberOfJobs();
if (numberOfJobs >= parallelism) {
workload += s.getNumberOfTasklets() * s.getTaskletWcet() / numberOfJobs;
for (Segment s : task.getWorkloadDistribution()) {
long numberOfThreads = s.getNumberOfJobs() > 1 ? task.getNumberOfThreads() : 1;
if (numberOfThreads >= parallelism) {
workload += s.getNumberOfJobs() * s.getJobWcet() / numberOfThreads;
}
}
......@@ -120,14 +123,4 @@ public class SchmidMottok extends AbstractTest {
}
}
private long getMinimumWcet(Task task) {
long minWcet = 0;
for (Segment s : task.getSegments()) {
minWcet += s.getTaskletWcet();
}
return minWcet;
}
}
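The reworked self-interference in SchmidMottok charges, per segment, the work of every job except the one assumed to lie on the critical path, and spreads that work over the task's thread count rather than its processor-bounded parallelism. A small worked example with hypothetical numbers:

// Worked example (hypothetical values) of the thread-based self-interference.
public class SelfInterferenceSketch {

    public static void main(String[] args) {
        long[][] workloadDistribution = {{4, 1}, {2, 6}, {3, 2}}; // {jobWcet, numberOfJobs}
        long numberOfThreads = 3;

        double interference = 0;
        for (long[] s : workloadDistribution) {
            long jobWcet = s[0];
            long numberOfJobs = s[1];
            // One job per segment is assumed to lie on the critical path; the rest interferes.
            interference += (double) (numberOfJobs - 1) * jobWcet;
        }
        interference /= numberOfThreads;
        System.out.println(interference); // (0 + 10 + 3) / 3 = 4.333...
    }
}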
package mvd.jester.tests;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
import mvd.jester.priority.PriorityManager;
/**
* TestInterface
*/
public interface TestInterface {
public interface TestInterface<T extends Task> {
public SchedulingInfo runSchedulabilityCheck(PriorityManager priorityManager);
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<T> tasks);
public String getName();
public PriorityManager getPriorityManager();
}
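After this interface change, a driver builds the SortedTaskSet itself, ordered by the test's priority manager, and hands the finished set to runSchedulabilityCheck instead of passing a PriorityManager into the test. A hedged usage sketch: the SortedTaskSet constructor follows the pre-refactoring usage and is an assumption, and the utilization value and printing are purely illustrative.

import java.util.Set;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SystemSetup.DagTaskBuilder;
import mvd.jester.tests.SchmidMottok;

// Hedged driver sketch for the refactored TestInterface.
public class TestInterfaceUsageSketch {

    public static void main(String[] args) {
        long numberOfProcessors = 8;
        SchmidMottok test = new SchmidMottok(numberOfProcessors);

        // Generate a DAG task set with target utilization 1.0 (builder defaults assumed).
        Set<DagTask> generated = new DagTaskBuilder().generateTaskSet(1.0);

        // The caller orders the tasks with the test's own priority manager and
        // passes the ready-made set to the schedulability check.
        SortedTaskSet<DagTask> tasks = new SortedTaskSet<>(test.getPriorityManager());
        tasks.addAll(generated);

        SchedulingInfo info = test.runSchedulabilityCheck(tasks);
        System.out.println(test.getName() + " on " + numberOfProcessors + " processors: " + info);
    }
}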
......@@ -18,13 +18,8 @@ public class TestSegment {
assertTrue(s1.getJobWcet() == 100);
assertTrue(s1.getNumberOfJobs() == 10);
assertTrue(s1.getNumberOfTasklets() == 100);
assertTrue(s1.getTaskletWcet() == 10);
assertTrue(s2.getJobWcet() == 9);
assertTrue(s2.getNumberOfJobs() == 10);
assertTrue(s2.getNumberOfTasklets() == 9 * 10 / LongMath.gcd(9, 10));
assertTrue(s2.getTaskletWcet() == LongMath.gcd(9, 10));
}
}
package mvd.jester.model;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import java.awt.Color;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import javax.imageio.ImageIO;
import com.google.common.math.DoubleMath;
import com.mxgraph.layout.mxCircleLayout;
import com.mxgraph.layout.mxIGraphLayout;
import com.mxgraph.util.mxCellRenderer;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.ext.JGraphXAdapter;
import org.jgrapht.graph.DefaultEdge;
import org.junit.Rule;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.rules.TemporaryFolder;
import mvd.jester.model.SystemSetup.DagTaskBuilder;
public class TestSystemSetup {
......@@ -24,42 +34,37 @@ public class TestSystemSetup {
for (int i = 0; i < NUMBER_OF_RUNS; ++i) {
long numberOfProcessors = ThreadLocalRandom.current().nextLong(2, 8);
SystemSetup.Builder systemSetupBuilder = new SystemSetup.Builder()//
.setNumberOfSegments(1, 7)//
.setNumberOfJobs(2, 10)//
.setPeriods(100, 1000, 1000)//
.setNumberOfProcessors(numberOfProcessors);
SystemSetup.SynchronousTaskBuilder systemSetupBuilder =
new SystemSetup.SynchronousTaskBuilder()//
.setNumberOfSegments(1, 7)//
.setNumberOfJobs(2, 10)//
.setPeriods(100, 1000, 1000)//
.setNumberOfProcessors(numberOfProcessors);
SystemSetup systemSetup = systemSetupBuilder.build();
Set<SynchronousTask> taskSet = systemSetupBuilder.generateTaskSet();
assertTrue(systemSetup.getNumberOfProcessors() == numberOfProcessors);
// assertTrue(systemSetup.getNumberOfProcessors() == numberOfProcessors);
for (Task t : systemSetup.getTasks()) {
for (SynchronousTask t : taskSet) {
assertTrue(t.getPeriod() >= 100);
assertTrue(t.getPeriod() <= 1000);
assertTrue(t.getDeadline() == t.getPeriod());
long maxJobWcet = t.getPeriod() / t.getSegments().size();
long maxJobWcet = t.getPeriod() / t.getWorkloadDistribution().size();
for (Segment s : t.getSegments()) {
assertTrue(s.getJobWcet() * s.getNumberOfJobs() == s.getTaskletWcet()
* s.getNumberOfTasklets());
for (Segment s : t.getWorkloadDistribution()) {
assertTrue(s.getJobWcet() >= 1);
assertTrue(s.getJobWcet() <= maxJobWcet);
assertTrue(s.getNumberOfJobs() >= 1);
assertTrue(s.getNumberOfJobs() <= 10);
}
assertTrue(t.getSegments().size() >= 1);
assertTrue(t.getSegments().size() <= 7);
assertTrue(t.getWorkloadDistribution().size() >= 1);
assertTrue(t.getWorkloadDistribution().size() <= 7);
}
assertTrue(systemSetupBuilder.addTask(systemSetup));
Set<Task> tasks = systemSetup.getTasks();
systemSetupBuilder.rebuild(systemSetup);
assertFalse(tasks == systemSetup.getTasks());
assertTrue(systemSetupBuilder.addTask(taskSet));
}
}
......@@ -67,17 +72,57 @@ public class TestSystemSetup {
@DisplayName("Check Getters and Setters.")
void testGettersAndSetters() {
@SuppressWarnings("unchecked")
Set<Task> t1 = mock(Set.class);
Set<SynchronousTask> t1 = mock(Set.class);
@SuppressWarnings("unchecked")
Set<Task> t2 = mock(Set.class);
Set<SynchronousTask> t2 = mock(Set.class);
SystemSetup systemSetup = new SystemSetup(t1, 2);
SystemSetup<SynchronousTask> systemSetup = new SystemSetup<>(t1, 2);
systemSetup.setTasks(t2);
assertTrue(systemSetup.getTasks() == t2);
}
@Test
@DisplayName("Check if DagTaskBuilder works correctly")
void testDagTaskBuilder() {
DagTaskBuilder builder = new DagTaskBuilder();
DirectedAcyclicGraph<Job, DefaultEdge> j = builder.generateTask().getJobDag();
JGraphXAdapter<Job, DefaultEdge> graphAdapter = new JGraphXAdapter<Job, DefaultEdge>(j);
mxIGraphLayout layout = new mxCircleLayout(graphAdapter);
layout.execute(graphAdapter.getDefaultParent());
BufferedImage image =
mxCellRenderer.createBufferedImage(graphAdapter, null, 2, Color.WHITE, true, null);
File imgFile = new File("src/test/resources/graph.png");
try {
ImageIO.write(image, "PNG", imgFile);
} catch (Exception e) {
}
assertTrue(imgFile.exists());
}
@Test
@DisplayName("Check if utilization works correctly.")
void testUtil() {
for (double d = 0.25; d < 4; d += 0.25) {
DagTaskBuilder builder = new DagTaskBuilder();
Set<DagTask> taskSet = builder.generateTaskSet(d);
double taskSetUtil = 0;
for (DagTask t : taskSet) {
taskSetUtil += t.getUtilization();
}
assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, d, 0.002));
}
}
// @Test
// @DisplayName("Check if parser works correclty.")
// void testParser() throws IOException {
......
......@@ -6,8 +6,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.DynamicForkJoin;
import mvd.jester.simulator.ParallelSynchronous;
import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;
......@@ -25,8 +24,8 @@ public class TestEarliestDeadlineFirst {
public void testPriority() {
EarliestDeadlineFirst edf = new EarliestDeadlineFirst();
Task t1 = mock(Task.class);
Task t2 = mock(Task.class);
SynchronousTask t1 = mock(SynchronousTask.class);
SynchronousTask t2 = mock(SynchronousTask.class);
when(t1.getDeadline()).thenReturn((long) 100);
when(t2.getDeadline()).thenReturn((long) 200);
......@@ -54,11 +53,11 @@ public class TestEarliestDeadlineFirst {
assertTrue(edf.hasSimulator(ParallelSynchronous.class));
assertTrue(edf.hasSimulator(DynamicForkJoin.class));
assertTrue(edf.hasTest(new ChwaLee(mock(SystemSetup.class))));
assertFalse(edf.hasTest(new SchmidMottok(mock(SystemSetup.class))));
assertFalse(edf.hasTest(new MaiaBertogna(mock(SystemSetup.class))));
assertTrue(edf.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
assertTrue(edf.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));
assertTrue(edf.hasTest(new ChwaLee(4)));
assertFalse(edf.hasTest(new SchmidMottok(4)));
assertFalse(edf.hasTest(new MaiaBertogna(4)));
// assertTrue(edf.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
// assertTrue(edf.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));
assertTrue(edf.getName().equals("EDF"));
}
......
......@@ -6,8 +6,7 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.DynamicForkJoin;
import mvd.jester.simulator.ParallelSynchronous;
import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;
......@@ -24,8 +23,8 @@ public class TestRateMonotonic {
@DisplayName("Test if priority manager returns the correct priority.")
public void testPriority() {
RateMonotonic rm = new RateMonotonic();
Task t1 = mock(Task.class);
Task t2 = mock(Task.class);
SynchronousTask t1 = mock(SynchronousTask.class);
SynchronousTask t2 = mock(SynchronousTask.class);
when(t1.getPeriod()).thenReturn((long) 100);
when(t2.getPeriod()).thenReturn((long) 200);
......@@ -53,11 +52,11 @@ public class TestRateMonotonic {
assertTrue(rm.hasSimulator(ParallelSynchronous.class));
assertTrue(rm.hasSimulator(DynamicForkJoin.class));
assertFalse(rm.hasTest(new ChwaLee(mock(SystemSetup.class))));
assertTrue(rm.hasTest(new SchmidMottok(mock(SystemSetup.class))));
assertTrue(rm.hasTest(new MaiaBertogna(mock(SystemSetup.class))));
assertTrue(rm.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
assertTrue(rm.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));
assertFalse(rm.hasTest(new ChwaLee(8)));
assertTrue(rm.hasTest(new SchmidMottok(8)));
assertTrue(rm.hasTest(new MaiaBertogna(8)));
// assertTrue(rm.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
// assertTrue(rm.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));
assertTrue(rm.getName().equals("RM"));
}
......
......@@ -14,7 +14,7 @@ import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.internals.ProcessorContext;
import mvd.jester.simulator.internals.TaskContextInterface;
import mvd.jester.simulator.internals.parallelsynchronous.JobContext;
......@@ -64,7 +64,7 @@ public class TestProcessorContext {
JobContext firstJob = mock(JobContext.class);
TaskContext firstTaskContext = mock(TaskContext.class);
Task firstTask = mock(Task.class);
SynchronousTask firstTask = mock(SynchronousTask.class);
when(firstJob.getTaskContext()).thenReturn(firstTaskContext);
when(firstJob.prepareJob(anyLong())).thenReturn(true);
......@@ -75,7 +75,7 @@ public class TestProcessorContext {
JobContext secondJob = mock(JobContext.class);
TaskContext secondTaskContext = mock(TaskContext.class);
Task secondTask = mock(Task.class);
SynchronousTask secondTask = mock(SynchronousTask.class);
when(secondJob.getTaskContext()).thenReturn(secondTaskContext);
when(secondJob.prepareJob(anyLong())).thenReturn(true);
......
......@@ -31,8 +31,6 @@ public class TestSegmentContext {
Segment s = mock(Segment.class);
when(s.getNumberOfJobs()).thenReturn(numberOfJobs);
when(s.getJobWcet()).thenReturn(jobWcet);
when(s.getNumberOfTasklets()).thenReturn(jobWcet);
when(s.getTaskletWcet()).thenReturn(numberOfJobs);
TaskContext tc = mock(TaskContext.class);
SegmentContext sc = new SegmentContext(tc, s, 4);
......@@ -72,8 +70,6 @@ public class TestSegmentContext {
Segment s = mock(Segment.class);
when(s.getNumberOfJobs()).thenReturn(numberOfJobs);
when(s.getJobWcet()).thenReturn(jobWcet);
when(s.getNumberOfTasklets()).thenReturn(jobWcet);
when(s.getTaskletWcet()).thenReturn(numberOfJobs);
TaskContext tc = mock(TaskContext.class);
when(tc.acceptNotification(jobWcet - 1)).thenReturn(Optional.empty());
......
......@@ -10,7 +10,7 @@ import java.util.concurrent.ThreadLocalRandom;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.Segment;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.internals.JobContextInterface;
import mvd.jester.simulator.internals.ProcessorContext;
import mvd.jester.simulator.internals.TaskContextInterface;
......@@ -33,19 +33,13 @@ public class TestTaskContext {
segments.add(new Segment(10, numJobs));
}
Task t = new Task(100, new LinkedHashSet<>(segments));
SynchronousTask t = new SynchronousTask(new LinkedHashSet<>(segments), 100, 8);
TaskContext tc = new TaskContext(t, 1, 0);
for (int i = 0; i < segments.size() - 1; ++i) {
Segment s = segments.get(i);
for (int j = 0; j < s.getNumberOfTasklets(); ++j) {
assertFalse(tc.acceptNotification(0).isPresent());
}
}
for (int i = 0; i < segments.get(segments.size() - 1).getNumberOfTasklets() - 1; ++i) {
assertFalse(tc.acceptNotification(0).isPresent());
}
Optional<TaskContextInterface> tci = tc.acceptNotification(0);
......@@ -64,7 +58,7 @@ public class TestTaskContext {
segments.add(new Segment(5, 1));
segments.add(new Segment(10, 10));
segments.add(new Segment(15, 1));
Task t = new Task(100, segments);
SynchronousTask t = new SynchronousTask(segments, 100, 8);
TaskContext tc = new TaskContext(t, numberOfProcessors, 0);
......
......@@ -28,7 +28,6 @@ public class TestTaskletContext {
long taskletWcet = ThreadLocalRandom.current().nextLong(20, 50);
Segment s = mock(Segment.class);
when(s.getTaskletWcet()).thenReturn(taskletWcet);
SegmentContext sc = mock(SegmentContext.class);
when(sc.getSegment()).thenReturn(s);
TaskContext tc = mock(TaskContext.class);
......
......@@ -10,7 +10,7 @@ import java.util.concurrent.ThreadLocalRandom;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.Segment;
import mvd.jester.model.Task;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.internals.JobContextInterface;
import mvd.jester.simulator.internals.ProcessorContext;
import mvd.jester.simulator.internals.TaskContextInterface;
......@@ -34,7 +34,7 @@ public class TestTaskContext {
segments.add(new Segment(10, numJobs));
}
Task t = new Task(100, new LinkedHashSet<>(segments));
SynchronousTask t = new SynchronousTask(new LinkedHashSet<>(segments), 100, 8);
TaskContext tc = new TaskContext(t, 0);
for (int i = 0; i < segments.size() - 1; ++i) {
......@@ -66,7 +66,7 @@ public class TestTaskContext {
segments.add(new Segment(5, 1));
segments.add(new Segment(10, 10));
segments.add(new Segment(15, 1));
Task t = new Task(100, segments);
SynchronousTask t = new SynchronousTask(segments, 100, 8);
TaskContext tc = new TaskContext(t, 0);
......
package mvd.jester.tests;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.SystemSetup;
import mvd.jester.priority.EarliestDeadlineFirst;
/**
* TestChwaLee
......@@ -16,41 +11,41 @@ public class TestChwaLee {
@Test
@DisplayName("Check if the schedulability check returns the correct values.")
public void testRunSchedulabilityCheck() {
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 4);
ChwaLee cl = new ChwaLee(systemSetup);
SchedulingInfo schedulingInfo = cl.runSchedulabilityCheck(new EarliestDeadlineFirst());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
assertTrue(schedulingInfo.checkTasksetFeasible() == false);
assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
}
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 8);
ChwaLee cl = new ChwaLee(systemSetup);
SchedulingInfo schedulingInfo = cl.runSchedulabilityCheck(new EarliestDeadlineFirst());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
assertTrue(schedulingInfo.checkTasksetFeasible() == false);
assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
}
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 16);
ChwaLee cl = new ChwaLee(systemSetup);
SchedulingInfo schedulingInfo = cl.runSchedulabilityCheck(new EarliestDeadlineFirst());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
assertTrue(schedulingInfo.checkTasksetFeasible() == false);
assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
}
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 4);
// ChwaLee cl = new ChwaLee(systemSetup);
// SchedulingInfo schedulingInfo = cl.runSchedulabilityCheck(new EarliestDeadlineFirst());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
// assertTrue(schedulingInfo.checkTasksetFeasible() == false);
// assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
// }
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 8);
// ChwaLee cl = new ChwaLee(systemSetup);
// SchedulingInfo schedulingInfo = cl.runSchedulabilityCheck(new EarliestDeadlineFirst());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
// assertTrue(schedulingInfo.checkTasksetFeasible() == false);
// assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
// }
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 16);
// ChwaLee cl = new ChwaLee(systemSetup);
// SchedulingInfo schedulingInfo = cl.runSchedulabilityCheck(new EarliestDeadlineFirst());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
// assertTrue(schedulingInfo.checkTasksetFeasible() == false);
// assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
// }
}
}
package mvd.jester.tests;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.SystemSetup;
import mvd.jester.priority.RateMonotonic;
/**
* TestMaiaBertogna
......@@ -16,41 +11,41 @@ public class TestMaiaBertogna {
@Test
@DisplayName("Check if the schedulability check returns the correct values.")
public void testRunSchedulabilityCheck() {
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 4);
MaiaBertogna mb = new MaiaBertogna(systemSetup);
SchedulingInfo schedulingInfo = mb.runSchedulabilityCheck(new RateMonotonic());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
assertTrue(schedulingInfo.checkTasksetFeasible() == false);
assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
}
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 8);
MaiaBertogna mb = new MaiaBertogna(systemSetup);
SchedulingInfo schedulingInfo = mb.runSchedulabilityCheck(new RateMonotonic());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
assertTrue(schedulingInfo.checkTasksetFeasible() == false);
assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
}
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 16);
MaiaBertogna mb = new MaiaBertogna(systemSetup);
SchedulingInfo schedulingInfo = mb.runSchedulabilityCheck(new RateMonotonic());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == false);
assertTrue(schedulingInfo.checkTasksetFeasible() == true);
assertTrue(!schedulingInfo.getFailedTerminationInfo().isPresent());
}
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 4);
// MaiaBertogna mb = new MaiaBertogna(systemSetup);
// SchedulingInfo schedulingInfo = mb.runSchedulabilityCheck(new RateMonotonic());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
// assertTrue(schedulingInfo.checkTasksetFeasible() == false);
// assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
// }
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 8);
// MaiaBertogna mb = new MaiaBertogna(systemSetup);
// SchedulingInfo schedulingInfo = mb.runSchedulabilityCheck(new RateMonotonic());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
// assertTrue(schedulingInfo.checkTasksetFeasible() == false);
// assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
// }
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 16);
// MaiaBertogna mb = new MaiaBertogna(systemSetup);
// SchedulingInfo schedulingInfo = mb.runSchedulabilityCheck(new RateMonotonic());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == false);
// assertTrue(schedulingInfo.checkTasksetFeasible() == true);
// assertTrue(!schedulingInfo.getFailedTerminationInfo().isPresent());
// }
}
}
package mvd.jester.tests;
import static org.junit.jupiter.api.Assertions.assertTrue;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo.Level;
import mvd.jester.model.SystemSetup;
import mvd.jester.priority.RateMonotonic;
/**
* TestSchmidMottok
......@@ -17,41 +12,41 @@ public class TestSchmidMottok {
@Test
@DisplayName("Check if the schedulability check returns the correct values.")
public void testRunSchedulabilityCheck() {
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 4);
SchmidMottok sm = new SchmidMottok(systemSetup);
SchedulingInfo schedulingInfo = sm.runSchedulabilityCheck(new RateMonotonic());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
assertTrue(schedulingInfo.checkTasksetFeasible() == false);
assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
}
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 8);
SchmidMottok sm = new SchmidMottok(systemSetup);
SchedulingInfo schedulingInfo = sm.runSchedulabilityCheck(new RateMonotonic());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
assertTrue(schedulingInfo.checkTasksetFeasible() == false);
assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
}
{
SystemSetup systemSetup =
SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 16);
SchmidMottok sm = new SchmidMottok(systemSetup);
SchedulingInfo schedulingInfo = sm.runSchedulabilityCheck(new RateMonotonic());
assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == false);
assertTrue(schedulingInfo.checkTasksetFeasible() == true);
assertTrue(!schedulingInfo.getFailedTerminationInfo().isPresent());
}
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 4);
// SchmidMottok sm = new SchmidMottok(systemSetup);
// SchedulingInfo schedulingInfo = sm.runSchedulabilityCheck(new RateMonotonic());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
// assertTrue(schedulingInfo.checkTasksetFeasible() == false);
// assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
// }
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 8);
// SchmidMottok sm = new SchmidMottok(systemSetup);
// SchedulingInfo schedulingInfo = sm.runSchedulabilityCheck(new RateMonotonic());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == true);
// assertTrue(schedulingInfo.checkTasksetFeasible() == false);
// assertTrue(schedulingInfo.getFailedTerminationInfo().isPresent());
// }
// {
// SystemSetup<SynchronousTask> systemSetup =
// SystemSetup.readFromFile("src/test/resources/Taskset1.txt", 16);
// SchmidMottok sm = new SchmidMottok(systemSetup);
// SchedulingInfo schedulingInfo = sm.runSchedulabilityCheck(new RateMonotonic());
// assertTrue(schedulingInfo.getTerminationInfos().size() == 4);
// assertTrue(schedulingInfo.checkLevelFail(Level.HIGH) == false);
// assertTrue(schedulingInfo.checkTasksetFeasible() == true);
// assertTrue(!schedulingInfo.getFailedTerminationInfo().isPresent());
// }
}
}
package mvd.jester.utils;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import com.google.common.io.Files;
import org.junit.Rule;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.io.TempDir;
import org.junit.rules.TemporaryFolder;
/**
......