Commit 31cab85d by Michael Schmid

finishing work on tests

parent dbeac016
......@@ -3,7 +3,8 @@ package mvd.jester;
import java.util.Arrays;
import java.util.List;
import mvd.jester.model.DagTask;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.SystemManager;
import mvd.jester.model.SystemManager.DagTaskBuilder;
import mvd.jester.tests.AbstractTest;
import mvd.jester.tests.FonsecaNelis;
import mvd.jester.tests.MelaniButtazzo;
......@@ -16,15 +17,38 @@ import mvd.jester.tests.SchmidMottok;
*/
public class App {
public static void main(String[] args) {
for (int p = 8; p <= 8; p *= 2) {
SystemSetup.DagTaskBuilder builder =
new SystemSetup.DagTaskBuilder().setNumberOfProcessors(p);
{
SystemManager manager = new SystemManager(8);
DagTaskBuilder builder = new DagTaskBuilder();
TestEnvironment te = new TestEnvironment();
List<AbstractTest<DagTask>> tests = te.registerTests(Arrays.asList(new SchmidMottok(p),
/* new MelaniButtazzo(p), */ new FonsecaNelis(p)));
List<AbstractTest<DagTask>> tests =
te.registerTests(Arrays.asList(new SchmidMottok(manager),
new MelaniButtazzo(manager), new FonsecaNelis(manager)));
te.runExperiments(builder, tests, p, 100); // TODO: Change back to 500
te.varyUtilization(builder, tests, 8, 500);
}
{
SystemManager manager = new SystemManager(8);
DagTaskBuilder builder = new DagTaskBuilder();
TestEnvironment te = new TestEnvironment();
List<AbstractTest<DagTask>> tests =
te.registerTests(Arrays.asList(new SchmidMottok(manager),
new MelaniButtazzo(manager), new FonsecaNelis(manager)));
te.varyNumberOfProcessors(builder, tests, manager, 500);
}
{
SystemManager manager = new SystemManager(8);
DagTaskBuilder builder = new DagTaskBuilder();
TestEnvironment te = new TestEnvironment();
List<AbstractTest<DagTask>> tests =
te.registerTests(Arrays.asList(new SchmidMottok(manager),
new MelaniButtazzo(manager), new FonsecaNelis(manager)));
te.varyNumberOfTasks(builder, tests, 8, 500);
}
}
}
......@@ -13,24 +13,31 @@ import mvd.jester.utils.Logger;
public class ResultLogger {
private final Logger logger;
private boolean headerLogged = false;
public ResultLogger(final long numberOfProcessors) {
this.logger = new Logger("./results/feasibility_" + numberOfProcessors + ".txt");
public ResultLogger(final String fileName) {
this.logger = new Logger("./results/" + fileName + ".txt");
}
/**
 * Writes the result-table header: the x-axis label followed by one
 * tab-separated column per registered test, terminated by a newline.
 *
 * @param results map whose keys supply the test names (values are ignored here)
 * @param xAxisName label for the first column (e.g. "Utilization")
 * @throws RuntimeException if appending to the buffer fails
 */
public <T extends Task> void logHeader(final Map<AbstractTest<T>, Long> results,
        String xAxisName) {
    final Appendable out = new StringBuilder();
    try {
        out.append(xAxisName);
        for (final Entry<AbstractTest<T>, Long> rc : results.entrySet()) {
            out.append("\t" + rc.getKey().getName());
        }
        out.append("\n");
    } catch (final Exception e) {
        // Preserve the original failure as the cause instead of swallowing it.
        throw new RuntimeException("Failed to log header!", e);
    }
    logger.log(out);
}
public <T extends Task> void logLine(final double utilization,
final Map<AbstractTest<T>, Long> results) {
final Appendable out = new StringBuilder();
try {
if (!headerLogged) {
headerLogged = true;
out.append("Utilization");
for (final Entry<AbstractTest<T>, Long> rc : results.entrySet()) {
out.append("\t" + rc.getKey().getName());
}
out.append("\n");
}
out.append("" + utilization);
for (final Entry<AbstractTest<T>, Long> rc : results.entrySet()) {
final long numberOfFeasibleTasks = rc.getValue();
......
......@@ -5,14 +5,19 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import com.google.common.base.Stopwatch;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.SchedulingInfo.Feasiblity;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;
import mvd.jester.model.SystemManager;
import mvd.jester.model.Task;
import mvd.jester.model.SystemSetup.DagTaskBuilder;
import mvd.jester.model.SystemSetup.SynchronousTaskBuilder;
import mvd.jester.model.SystemManager.DagTaskBuilder;
import mvd.jester.model.SystemManager.SynchronousTaskBuilder;
import mvd.jester.priority.PriorityManager;
import mvd.jester.tests.AbstractTest;
......@@ -26,10 +31,6 @@ public class TestEnvironment {
}
public <T extends Task> List<AbstractTest<T>> registerTests(final List<AbstractTest<T>> tests) {
// Set<ResultCollector<AbstractTest<T>>> testSet = new HashSet<>();
// for (AbstractTest<T> t : tests) {
// testSet.add(new ResultCollector<AbstractTest<T>>(t.getPriorityManager(), t));
// }
return new ArrayList<>(tests);
}
......@@ -73,22 +74,136 @@ public class TestEnvironment {
}
}
System.out.println("");
final ResultLogger resultLogger = new ResultLogger(numberOfProcessors);
// final ResultLogger resultLogger = new ResultLogger(numberOfProcessors);
// resultLogger.logTests(abstractTestInstances);
}
/**
 * Benchmarks the runtime of each registered schedulability test.
 * For every round a random-utilization task set is generated and fed to every
 * test; per-test wall-clock times are collected and the mean (in microseconds)
 * is printed per test.
 *
 * @param builder task-set generator
 * @param abstractTestInstances tests to benchmark
 * @param numberOfMeasurements how many task sets to measure
 */
public void measureExecutionTimes(final DagTaskBuilder builder,
        final List<AbstractTest<DagTask>> abstractTestInstances,
        final long numberOfMeasurements) {
    final Map<AbstractTest<DagTask>, List<Long>> timings = new LinkedHashMap<>();
    for (final AbstractTest<DagTask> test : abstractTestInstances) {
        timings.put(test, new ArrayList<>());
    }
    for (long round = 0; round < numberOfMeasurements; ++round) {
        final double utilization = ThreadLocalRandom.current().nextDouble(1, 7);
        final Set<DagTask> taskSet = builder.generateTaskSet(utilization);
        for (final AbstractTest<DagTask> test : abstractTestInstances) {
            final SortedTaskSet<DagTask> sorted =
                    new SortedTaskSet<>(test.getPriorityManager());
            sorted.addAll(taskSet);
            final Stopwatch watch = Stopwatch.createStarted();
            test.runSchedulabilityCheck(sorted);
            watch.stop();
            timings.get(test).add(watch.elapsed(TimeUnit.MICROSECONDS));
        }
    }
    for (final Entry<AbstractTest<DagTask>, List<Long>> entry : timings.entrySet()) {
        final List<Long> samples = entry.getValue();
        long total = 0;
        for (final long micros : samples) {
            total += micros;
        }
        System.out.println(
                entry.getKey().getName() + ": " + (double) total / samples.size() + " µs");
    }
}
public void varyNumberOfProcessors(final DagTaskBuilder builder,
final List<AbstractTest<DagTask>> abstractTestInstances, SystemManager manager,
final long numberOfTaskSetsPerStep) {
long checkedTasksets = 0;
final long numberOfTaskSets = 8 * numberOfTaskSetsPerStep;
final ResultLogger resultLogger = new ResultLogger("numberOfProcessors");
final Map<AbstractTest<DagTask>, Long> resultMap = new LinkedHashMap<>();
abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0));
resultLogger.logHeader(resultMap, "NoOfProcessors");
for (long numberOfProcessors = 2; numberOfProcessors <= 16; numberOfProcessors += 2) {
manager.setNumberOfProcessors(numberOfProcessors);
resultMap.replaceAll((k, v) -> (long) 0);
for (int i = 0; i < numberOfTaskSetsPerStep; ++i) {
final Set<DagTask> taskSet = builder.generateUUnifastTaskSet(
(long) (1.5 * numberOfProcessors), (double) numberOfProcessors * 0.5);
System.out.print(Math.round((double) checkedTasksets / numberOfTaskSets * 100)
+ "% of " + numberOfTaskSets + " tasksets tested!\r");
for (final AbstractTest<DagTask> testInstance : abstractTestInstances) {
final PriorityManager priorityManager = testInstance.getPriorityManager();
public void runExperiments(final DagTaskBuilder builder,
final SortedTaskSet<DagTask> sortedTaskSet =
new SortedTaskSet<>(priorityManager);
sortedTaskSet.addAll(taskSet);
final SchedulingInfo schedulingInfo =
testInstance.runSchedulabilityCheck(sortedTaskSet);
if (schedulingInfo.getFeasibility() == Feasiblity.SUCCEEDED) {
resultMap.computeIfPresent(testInstance, (k, v) -> v + 1);
}
}
checkedTasksets++;
}
resultLogger.logLine(numberOfProcessors, resultMap);
resultLogger.newLine();
}
System.out.println("");
resultLogger.finalize();
}
/**
 * Feasibility experiment over the task-set size: for n = 4, 6, ..., 20 it
 * generates {@code numberOfTaskSetsPerStep} UUniFast task sets at utilization
 * 0.5 * numberOfProcessors, counts feasible sets per test, and logs one result
 * line per task count.
 *
 * @param builder task-set generator
 * @param abstractTestInstances tests to evaluate
 * @param numberOfProcessors fixed processor count (also part of the log-file name)
 * @param numberOfTaskSetsPerStep task sets generated per task count
 */
public void varyNumberOfTasks(final DagTaskBuilder builder,
        final List<AbstractTest<DagTask>> abstractTestInstances, final long numberOfProcessors,
        final long numberOfTaskSetsPerStep) {
    // 9 steps: task counts 4..20 in increments of 2.
    final long totalTaskSets = 9 * numberOfTaskSetsPerStep;
    final ResultLogger output = new ResultLogger("numberOfTasks_" + numberOfProcessors);
    final Map<AbstractTest<DagTask>, Long> feasibleCounts = new LinkedHashMap<>();
    for (final AbstractTest<DagTask> test : abstractTestInstances) {
        feasibleCounts.put(test, (long) 0);
    }
    output.logHeader(feasibleCounts, "NoOfTasks");
    long generated = 0;
    for (long taskCount = 4; taskCount <= 20; taskCount += 2) {
        feasibleCounts.replaceAll((k, v) -> (long) 0);
        for (int i = 0; i < numberOfTaskSetsPerStep; ++i) {
            final Set<DagTask> taskSet =
                    builder.generateUUnifastTaskSet(taskCount, (double) numberOfProcessors * 0.5);
            System.out.print(Math.round((double) generated / totalTaskSets * 100) + "% of "
                    + totalTaskSets + " tasksets tested!\r");
            for (final AbstractTest<DagTask> test : abstractTestInstances) {
                final SortedTaskSet<DagTask> sorted =
                        new SortedTaskSet<>(test.getPriorityManager());
                sorted.addAll(taskSet);
                final SchedulingInfo info = test.runSchedulabilityCheck(sorted);
                if (info.getFeasibility() == Feasiblity.SUCCEEDED) {
                    feasibleCounts.computeIfPresent(test, (k, v) -> v + 1);
                }
            }
            generated++;
        }
        output.logLine(taskCount, feasibleCounts);
        output.newLine();
    }
    System.out.println("");
    output.finalize();
}
public void varyUtilization(final DagTaskBuilder builder,
final List<AbstractTest<DagTask>> abstractTestInstances, final long numberOfProcessors,
final long numberOfTaskSetsPerUtil) {
long checkedTasksets = 0;
final long numberOfTaskSets = ((numberOfProcessors * 4) - 3) * numberOfTaskSetsPerUtil;
final ResultLogger resultLogger = new ResultLogger(numberOfProcessors);
final ResultLogger resultLogger = new ResultLogger("utilization_" + numberOfProcessors);
final Map<AbstractTest<DagTask>, Long> resultMap = new LinkedHashMap<>();
abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0));
resultLogger.logHeader(resultMap, "Utilization");
for (double util = 1; util <= numberOfProcessors; util += 0.25) {
resultMap.replaceAll((k, v) -> (long) 0);
for (int i = 0; i < numberOfTaskSetsPerUtil; ++i) {
......
package mvd.jester.model;
import java.io.PrintWriter;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Map;
......@@ -9,8 +8,6 @@ import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.Multimap;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import mvd.jester.utils.DagUtils;
......@@ -18,70 +15,27 @@ import mvd.jester.utils.DagUtils;
/**
* TaskSet
*/
public class SystemSetup<T extends Task> {
private Set<T> tasks;
private final long numberOfProcessors;
public class SystemManager {
private long numberOfProcessors;
public SystemSetup(final Set<T> tasks, final long numberOfProcessors) {
this.tasks = tasks;
public SystemManager(final long numberOfProcessors) {
this.numberOfProcessors = numberOfProcessors;
}
/**
* @return the tasks
*/
public Set<T> getTasks() {
return tasks;
}
public void setTasks(final Set<T> tasks) {
this.tasks = tasks;
}
/**
* @return the numberOfProcessors
*/
public long getNumberOfProcessors() {
return numberOfProcessors;
}
@Override
public String toString() {
final Gson gson = new GsonBuilder().setPrettyPrinting().create();
return gson.toJson(tasks);
}
public void writeToFile(final String path) {
try (PrintWriter pw = new PrintWriter(path)) {
pw.write(toString());
} catch (final Exception e) {
System.err.println("Something went wrong when writing to file!");
}
/**
* @param numberOfProcessors the numberOfProcessors to set
*/
public void setNumberOfProcessors(long numberOfProcessors) {
this.numberOfProcessors = numberOfProcessors;
}
// public static SystemSetup<SynchronousTask> readFromFile(final String path,
// final long numberOfProcessors) {
// String jsonString;
// try {
// final byte[] encoded = Files.readAllBytes(Paths.get(path));
// jsonString = new String(encoded, Charset.defaultCharset());
// } catch (final IOException e) {
// System.out.println(e.getMessage());
// jsonString = new String("");
// }
// return SystemSetup.fromString(jsonString, numberOfProcessors);
// }
// public static SystemSetup<SynchronousTask> fromString(final String json,
// final long numberOfProcessors) {
// final Gson gson = new GsonBuilder().registerTypeAdapter(SortedTaskSet.class,
// new SortedTaskSet.Deserializer<SynchronousTask>()).create();
// final SortedTaskSet<SynchronousTask> tasks = gson.fromJson(json, SortedTaskSet.class);
// return new SystemSetup<>(tasks, numberOfProcessors);
// }
public static class SynchronousTaskBuilder {
private long numberOfProcessors = 4;
private long minPeriod = 100;
......@@ -203,13 +157,13 @@ public class SystemSetup<T extends Task> {
}
public static class DagTaskBuilder {
private long numberOfProcessors = 4;
private long numberOfProcessors = 8;
private long minimumWcet = 1;
private long maximumWcet = 100;
private long maxNumberOfBranches = 5;
private long depth = 2;
private long p_par = 80;
private long p_add = 5; // TODO: Change back to 20
private long p_add = 20;
public DagTaskBuilder() {
}
......@@ -222,6 +176,26 @@ public class SystemSetup<T extends Task> {
return ThreadLocalRandom.current().nextLong(0, 100);
}
/**
 * Generates a task set with the UUniFast algorithm (Bini &amp; Buttazzo):
 * distributes {@code totalUtilization} over exactly {@code numberOfTasks}
 * tasks with a uniform distribution of per-task utilizations.
 *
 * @param numberOfTasks number of tasks to generate; non-positive yields an empty set
 * @param totalUtilization total utilization to distribute over the set
 * @return the generated task set (insertion-ordered)
 */
public Set<DagTask> generateUUnifastTaskSet(final long numberOfTasks,
        final double totalUtilization) {
    final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();
    if (numberOfTasks > 0) {
        // sumU is the utilization still to be distributed (primitive double,
        // no need for the boxed Double the original used).
        double sumU = totalUtilization;
        for (int i = 1; i <= numberOfTasks - 1; i++) {
            // Draw the utilization left for the remaining (numberOfTasks - i) tasks.
            final double nextSumU = sumU * Math.pow(ThreadLocalRandom.current().nextDouble(),
                    (1.0 / (double) (numberOfTasks - i)));
            taskSet.add(generateTask(sumU - nextSumU));
            sumU = nextSumU;
        }
        // The last task receives whatever utilization remains.
        taskSet.add(generateTask(sumU));
    }
    return taskSet;
}
public Set<DagTask> generateTaskSet(final double totalUtilization) {
final LinkedHashSet<DagTask> taskSet = new LinkedHashSet<>();
double currentUtilization = 0;
......@@ -247,6 +221,20 @@ public class SystemSetup<T extends Task> {
return taskSet;
}
/**
 * Builds one random DAG task whose period is chosen so that the task hits the
 * requested utilization (utilization = workload / period).
 *
 * @param utilization target utilization of the generated task
 * @return the generated task
 */
public DagTask generateTask(double utilization) {
    final DirectedAcyclicGraph<Job, DefaultEdge> dag =
            new DirectedAcyclicGraph<>(DefaultEdge.class);
    // Grow two forked subgraphs from a common source, then sprinkle extra edges.
    final Job source = fork(dag, Optional.empty(), this.depth);
    fork(dag, Optional.of(source), this.depth);
    randomEdges(dag);
    final long workload = DagUtils.calculateWorkload(dag);
    final double rawPeriod = workload / utilization;
    return new DagTask(dag, Math.round(rawPeriod), numberOfProcessors);
}
public DagTask generateTask() {
final DirectedAcyclicGraph<Job, DefaultEdge> jobDag =
......@@ -343,6 +331,13 @@ public class SystemSetup<T extends Task> {
return this;
}
/**
* @return the numberOfProcessors
*/
public long getNumberOfProcessors() {
return numberOfProcessors;
}
public DagTaskBuilder setWcets(final long minimumWcet, final long maximumWcet) {
this.minimumWcet = minimumWcet;
this.maximumWcet = maximumWcet;
......
......@@ -5,8 +5,7 @@ import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;
import com.google.common.collect.TreeMultiset;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.SynchronousTask;
import mvd.jester.model.SystemManager;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
import mvd.jester.TypeInterface;
......@@ -20,11 +19,11 @@ import mvd.jester.simulator.internals.TaskContextInterface;
*/
public abstract class AbstractSimulator implements SimulatorInterface, TypeInterface {
protected final SystemSetup<SynchronousTask> systemSetup;
protected final SystemManager systemSetup;
protected final Set<ProcessorContext> processors;
protected TreeMultiset<TaskContextInterface> readyTasks;
AbstractSimulator(SystemSetup<SynchronousTask> systemSetup) {
AbstractSimulator(SystemManager systemSetup) {
this.systemSetup = systemSetup;
this.readyTasks = TreeMultiset.create((t1, t2) -> new RateMonotonic().compare(t1, t2));
processors = new HashSet<>();
......@@ -98,10 +97,14 @@ public abstract class AbstractSimulator implements SimulatorInterface, TypeInter
}
/**
 * Returns the simulation horizon. The real hyper-period computation is
 * disabled because SystemManager no longer exposes the task set.
 * TODO(review): restore a real hyper-period once task access is reintroduced.
 */
private long getHyperPeriod() {
    // return systemSetup.getTasks().stream()
    //         .max(Comparator.comparing(SynchronousTask::getPeriod))
    //         .get().getPeriod() * 10;
    return 10;
}
private class ProcessorComparator implements Comparator<ProcessorContext> {
@Override
......
package mvd.jester.simulator;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.internals.dynamicforkjoin.TaskContext;
import mvd.jester.model.SystemManager;
/**
* SchmidMottok
*/
public class DynamicForkJoin extends AbstractSimulator {
public DynamicForkJoin(SystemSetup<SynchronousTask> systemSetup) {
public DynamicForkJoin(SystemManager systemSetup) {
super(systemSetup);
}
@Override
protected boolean releaseTasks(long timeStep) {
    // Task release is disabled: SystemManager no longer exposes getTasks(),
    // so the former release loop cannot compile against it.
    // TODO(review): restore once task access is available again.
    // for (SynchronousTask t : systemSetup.getTasks()) {
    //     if (timeStep % t.getPeriod() == 0) {
    //         TaskContext tc = new TaskContext(t, systemSetup.getNumberOfProcessors(), timeStep);
    //         if (!readyTasks.add(tc)) {
    //             EventPrinter
    //                     .print("Time " + timeStep + ": Task " + tc + " could not be released!");
    //             return false;
    //         }
    //         EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!");
    //     }
    // }
    return true;
}
......
package mvd.jester.simulator;
import mvd.jester.model.SystemSetup;
import mvd.jester.model.SynchronousTask;
import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;
import mvd.jester.model.SystemManager;
/**
* MaiaBertogna
*/
public class ParallelSynchronous extends AbstractSimulator {
public ParallelSynchronous(SystemSetup<SynchronousTask> systemSetup) {
public ParallelSynchronous(SystemManager systemSetup) {
super(systemSetup);
}
@Override
protected boolean releaseTasks(long timeStep) {
    // Task release is disabled: SystemManager no longer exposes getTasks(),
    // so the former release loop cannot compile against it.
    // TODO(review): restore once task access is available again.
    // for (SynchronousTask t : systemSetup.getTasks()) {
    //     if (timeStep % t.getPeriod() == 0) {
    //         TaskContext tc = new TaskContext(t, timeStep);
    //         if (!readyTasks.add(tc)) {
    //             EventPrinter
    //                     .print("Time " + timeStep + ": Task " + tc + " could not be released!");
    //             return false;
    //         }
    //         EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!");
    //     }
    // }
    return true;
}
......
package mvd.jester.tests;
import mvd.jester.TypeInterface;
import mvd.jester.model.SystemManager;
import mvd.jester.model.Task;
/**
......@@ -8,10 +9,10 @@ import mvd.jester.model.Task;
*/
public abstract class AbstractTest<T extends Task> implements TestInterface<T>, TypeInterface {
protected final long numberOfProcessors;
protected final SystemManager manager;
public AbstractTest(long numberOfProcessors) {
this.numberOfProcessors = numberOfProcessors;
public AbstractTest(final SystemManager manager) {
this.manager = manager;
}
}
......@@ -13,6 +13,7 @@ import mvd.jester.info.TerminationInfo;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;
import mvd.jester.model.SystemManager;
import mvd.jester.priority.EarliestDeadlineFirst;
import mvd.jester.priority.PriorityManager;
......@@ -24,8 +25,8 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
private final Map<SynchronousTask, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public ChwaLee(final long numberOfProcessors) {
super(numberOfProcessors);
public ChwaLee(final SystemManager manager) {
super(manager);
this.responseTimes = new HashMap<>();
this.priorityManager = new EarliestDeadlineFirst();
}
......@@ -72,8 +73,8 @@ public class ChwaLee extends AbstractTest<SynchronousTask> {
Math.min(getSelfInterference(task, deadline, p + 1), deadline - minimumWcet);
}
final boolean feasible = taskInterference + selfInterference <= numberOfProcessors
* (deadline - minimumWcet);
final boolean feasible = taskInterference
+ selfInterference <= manager.getNumberOfProcessors() * (deadline - minimumWcet);
return feasible ? deadline - 1 : deadline + 1;
}
......
......@@ -21,6 +21,7 @@ import mvd.jester.model.DagTask;
import mvd.jester.model.Job;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SystemManager;
import mvd.jester.model.Task;
import mvd.jester.model.TreeJob;
import mvd.jester.utils.DagUtils;
......@@ -36,8 +37,8 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
private final PriorityManager priorityManager;
private final Map<Task, Set<Segment>> carryOutSegments;
public FonsecaNelis(final long numberOfProcessors) {
super(numberOfProcessors);
public FonsecaNelis(final SystemManager manager) {
super(manager);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
this.carryOutSegments = new HashMap<>();
......@@ -76,6 +77,7 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
private Set<Segment> constructCarryOutDistribution(
final DirectedAcyclicGraph<Job, DefaultEdge> nfjDag,
final BinaryDecompositionTree<Job> tree) {
// TODO: use a List instead of a Set here
final Set<Segment> carryOutWorkload = new LinkedHashSet<>();
final BinaryDecompositionTree<TreeJob> modifiedTree = transformTree(tree);
......@@ -156,7 +158,7 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
}
}
taskInterference /= numberOfProcessors;
taskInterference /= manager.getNumberOfProcessors();
final double selfInterference = getSelfInterference(task);
......@@ -242,7 +244,8 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
final long improvedWorkloadFromTask =
taskWorkload - Math.max(0, criticalPath - carryOutPeriod);
final long improvedWorkloadFromProcessors = carryOutPeriod * numberOfProcessors;
final long improvedWorkloadFromProcessors =
carryOutPeriod * manager.getNumberOfProcessors();
return Math.min(Math.min(improvedWorkloadFromTask, improvedWorkloadFromProcessors),
workload);
......@@ -266,8 +269,8 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs();
}
final long improvedWorkload =
Math.max(carryInPeriod - (period - responseTime), 0) * numberOfProcessors;
final long improvedWorkload = Math.max(carryInPeriod - (period - responseTime), 0)
* manager.getNumberOfProcessors();
return Math.min(improvedWorkload, workload);
}
......@@ -276,7 +279,7 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
final long criticalPath = task.getCriticalPath();
final long workload = task.getWorkload();
return (double) (workload - criticalPath) / numberOfProcessors;
return (double) (workload - criticalPath) / manager.getNumberOfProcessors();
}
@Override
......
......@@ -10,6 +10,7 @@ import mvd.jester.info.TerminationInfo;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SynchronousTask;
import mvd.jester.model.SystemManager;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
......@@ -21,8 +22,8 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
private final Map<SynchronousTask, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public MaiaBertogna(final long numberOfProcessors) {
super(numberOfProcessors);
public MaiaBertogna(final SystemManager manager) {
super(manager);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
}
......@@ -77,7 +78,7 @@ public class MaiaBertogna extends AbstractTest<SynchronousTask> {
}
final long totalInterference = LongMath.divide(taskInterference + selfInterference,
numberOfProcessors, RoundingMode.FLOOR);
manager.getNumberOfProcessors(), RoundingMode.FLOOR);
responseTime = minimumWcet + totalInterference;
} while (previousResponseTime != responseTime);
......
......@@ -9,6 +9,7 @@ import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.model.DagTask;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SystemManager;
import mvd.jester.model.Task;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
......@@ -18,8 +19,8 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
private final Map<Task, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public MelaniButtazzo(final long numberOfProcessors) {
super(numberOfProcessors);
public MelaniButtazzo(final SystemManager manager) {
super(manager);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
}
......@@ -46,8 +47,8 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
}
private long calculateResponseTime(final Set<DagTask> tasks, final DagTask task) {
final long minimumWcet = task.getCriticalPath();
long responseTime = minimumWcet;
final long criticalPath = task.getCriticalPath();
long responseTime = criticalPath;
long previousResponseTime = 0;
do {
......@@ -60,12 +61,12 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
}
}
taskInterference /= numberOfProcessors;
taskInterference /= manager.getNumberOfProcessors();
final double selfInterference = getSelfInterference(task);
// TODO: round down each term individually, or round the self-interference down as a long
final long totalInterference = (long) Math.floor(taskInterference + selfInterference);
responseTime = minimumWcet + totalInterference;
responseTime = criticalPath + totalInterference;
} while (previousResponseTime != responseTime);
return responseTime;
......@@ -75,7 +76,7 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
final long criticalPath = task.getCriticalPath();
final long workload = task.getWorkload();
return (double) (workload - criticalPath) / numberOfProcessors;
return (double) (workload - criticalPath) / manager.getNumberOfProcessors();
}
......@@ -85,13 +86,13 @@ public class MelaniButtazzo extends AbstractTest<DagTask> {
final long singleWorkload = task.getWorkload();
final long period = task.getPeriod();
final double nominator =
(interval + responseTime - (double) singleWorkload / numberOfProcessors);
final double nominator = (interval + responseTime
- (double) singleWorkload / manager.getNumberOfProcessors());
final long amountOfJobs =
DoubleMath.roundToLong(nominator / period, RoundingMode.FLOOR);
final double carryOutPortion =
Math.min(singleWorkload, numberOfProcessors * (nominator % period));
final double carryOutPortion = Math.min(singleWorkload,
manager.getNumberOfProcessors() * (nominator % period));
final double interference = amountOfJobs * singleWorkload + carryOutPortion;
......
......@@ -10,6 +10,7 @@ import mvd.jester.info.TerminationInfo;
import mvd.jester.model.DagTask;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.SystemManager;
import mvd.jester.model.Task;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
......@@ -22,8 +23,8 @@ public class SchmidMottok extends AbstractTest<DagTask> {
private final Map<Task, TerminationInfo> responseTimes;
private final PriorityManager priorityManager;
public SchmidMottok(final long numberOfProcessors) {
super(numberOfProcessors);
public SchmidMottok(final SystemManager manager) {
super(manager);
this.responseTimes = new HashMap<>();
this.priorityManager = new RateMonotonic();
}
......@@ -68,7 +69,7 @@ public class SchmidMottok extends AbstractTest<DagTask> {
}
}
taskInterference /= numberOfProcessors;
taskInterference /= manager.getNumberOfProcessors();
final double selfInterference = getSelfInterference(task);
final long totalInterference = (long) Math.floor(taskInterference + selfInterference);
......@@ -81,13 +82,11 @@ public class SchmidMottok extends AbstractTest<DagTask> {
private double getSelfInterference(final DagTask task) {
double interference = 0;
final long numberOfThreads = task.getNumberOfThreads();
for (final Segment s : task.getWorkloadDistribution()) {
interference += (double) (s.getNumberOfJobs() - 1) * s.getJobWcet();
}
// final long numberOfThreads = task.getNumberOfThreads();
// TODO: Change back to number of threads
final long numberOfThreads = manager.getNumberOfProcessors();
double interference = task.getWorkload() - task.getCriticalPath();
interference /= numberOfThreads;
return interference;
......
......@@ -8,7 +8,7 @@ import org.jgrapht.graph.DefaultEdge;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.utils.DagUtils;
import mvd.jester.model.SystemSetup.DagTaskBuilder;
import mvd.jester.model.SystemManager.DagTaskBuilder;
import mvd.jester.utils.BinaryDecompositionTree;
public class TestDagUtils {
......
package mvd.jester.model;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import java.util.Set;
import java.util.concurrent.ThreadLocalRandom;
import com.google.common.math.DoubleMath;
......@@ -9,7 +8,7 @@ import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.SystemSetup.DagTaskBuilder;
import mvd.jester.model.SystemManager.DagTaskBuilder;
public class TestSystemSetup {
......@@ -21,8 +20,8 @@ public class TestSystemSetup {
public void testRandomTaskSetGeneration() {
for (int i = 0; i < NUMBER_OF_RUNS; ++i) {
long numberOfProcessors = ThreadLocalRandom.current().nextLong(2, 8);
SystemSetup.SynchronousTaskBuilder systemSetupBuilder =
new SystemSetup.SynchronousTaskBuilder()//
SystemManager.SynchronousTaskBuilder systemSetupBuilder =
new SystemManager.SynchronousTaskBuilder()//
.setNumberOfSegments(1, 7)//
.setNumberOfJobs(2, 10)//
.setPeriods(100, 1000, 1000)//
......@@ -58,16 +57,16 @@ public class TestSystemSetup {
@Test
@DisplayName("Check Getters and Setters.")
void testGettersAndSetters() {
@SuppressWarnings("unchecked")
Set<SynchronousTask> t1 = mock(Set.class);
// @SuppressWarnings("unchecked")
// Set<SynchronousTask> t1 = mock(Set.class);
@SuppressWarnings("unchecked")
Set<SynchronousTask> t2 = mock(Set.class);
// @SuppressWarnings("unchecked")
// Set<SynchronousTask> t2 = mock(Set.class);
SystemSetup<SynchronousTask> systemSetup = new SystemSetup<>(t1, 2);
// SystemManager<SynchronousTask> systemSetup = new SystemManager<>(t1, 2);
systemSetup.setTasks(t2);
assertTrue(systemSetup.getTasks() == t2);
// systemSetup.setTasks(t2);
// assertTrue(systemSetup.getTasks() == t2);
}
@Test
......@@ -101,7 +100,7 @@ public class TestSystemSetup {
@Test
@DisplayName("Check if utilization works correctly.")
void testUtil() {
for (int i = 0; i < 5; ++i) {
for (int i = 0; i < 10; ++i) {
for (double d = 0.25; d < 4; d += 0.25) {
DagTaskBuilder builder = new DagTaskBuilder();
Set<DagTask> taskSet = builder.generateTaskSet(d);
......@@ -110,18 +109,28 @@ public class TestSystemSetup {
taskSetUtil += t.getUtilization();
}
assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, d, 0.002));
assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, d, 0.1));
}
}
}
// @Test
// @DisplayName("Check if parser works correctly.")
// void testParser() throws IOException {
// SystemSetup systemSetup = new SystemSetup.Builder().setNumberOfSegments(1, 7)
// .setNumberOfJobs(2, 10).setPeriods(100, 1000, 1000).build();
@Test
@DisplayName("Check if UUnifast Algorithm works correctly.")
void testUUnifast() {
    // 100 random runs: the generated set must have exactly the requested size
    // and its summed utilization must match the requested total (within 0.1).
    for (int run = 1; run <= 100; ++run) {
        final long taskCount = ThreadLocalRandom.current().nextLong(1, 20);
        final double targetUtil = ThreadLocalRandom.current().nextDouble(0.25, 8);
        final DagTaskBuilder builder = new DagTaskBuilder();
        final Set<DagTask> tasks = builder.generateUUnifastTaskSet(taskCount, targetUtil);
        double actualUtil = 0;
        for (final DagTask task : tasks) {
            actualUtil += task.getUtilization();
        }
        assertTrue(tasks.size() == taskCount);
        assertTrue(DoubleMath.fuzzyEquals(actualUtil, targetUtil, 0.1));
    }
}
// systemSetup.writeToFile(null);
// systemSetup.writeToFile(tf.getRoot().getAbsolutePath());
// }
}
......@@ -7,6 +7,7 @@ import static org.mockito.Mockito.when;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.SynchronousTask;
import mvd.jester.model.SystemManager;
import mvd.jester.simulator.DynamicForkJoin;
import mvd.jester.simulator.ParallelSynchronous;
import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;
......@@ -47,15 +48,16 @@ public class TestEarliestDeadlineFirst {
@DisplayName("Check Getters, Tests and Simulators.")
void testGettersTestsAndSimulators() {
EarliestDeadlineFirst edf = new EarliestDeadlineFirst();
SystemManager manager = new SystemManager(4);
assertTrue(edf.hasTest(ChwaLee.class));
assertFalse(edf.hasTest(MaiaBertogna.class));
assertFalse(edf.hasTest(SchmidMottok.class));
assertTrue(edf.hasSimulator(ParallelSynchronous.class));
assertTrue(edf.hasSimulator(DynamicForkJoin.class));
assertTrue(edf.hasTest(new ChwaLee(4)));
assertFalse(edf.hasTest(new SchmidMottok(4)));
assertFalse(edf.hasTest(new MaiaBertogna(4)));
assertTrue(edf.hasTest(new ChwaLee(manager)));
assertFalse(edf.hasTest(new SchmidMottok(manager)));
assertFalse(edf.hasTest(new MaiaBertogna(manager)));
// assertTrue(edf.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
// assertTrue(edf.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));
......
......@@ -7,6 +7,7 @@ import static org.mockito.Mockito.when;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import mvd.jester.model.SynchronousTask;
import mvd.jester.model.SystemManager;
import mvd.jester.simulator.DynamicForkJoin;
import mvd.jester.simulator.ParallelSynchronous;
import mvd.jester.simulator.internals.parallelsynchronous.TaskContext;
......@@ -46,15 +47,16 @@ public class TestRateMonotonic {
@DisplayName("Check Tests and Simulators.")
void testTestsAndSimulators() {
RateMonotonic rm = new RateMonotonic();
SystemManager manager = new SystemManager(8);
assertFalse(rm.hasTest(ChwaLee.class));
assertTrue(rm.hasTest(MaiaBertogna.class));
assertTrue(rm.hasTest(SchmidMottok.class));
assertTrue(rm.hasSimulator(ParallelSynchronous.class));
assertTrue(rm.hasSimulator(DynamicForkJoin.class));
assertFalse(rm.hasTest(new ChwaLee(8)));
assertTrue(rm.hasTest(new SchmidMottok(8)));
assertTrue(rm.hasTest(new MaiaBertogna(8)));
assertFalse(rm.hasTest(new ChwaLee(manager)));
assertTrue(rm.hasTest(new SchmidMottok(manager)));
assertTrue(rm.hasTest(new MaiaBertogna(manager)));
// assertTrue(rm.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class))));
// assertTrue(rm.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class))));
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment