From 31cab85def1a788dd5898756586ec8d18dab05ea Mon Sep 17 00:00:00 2001
From: Michael Schmid
Date: Tue, 23 Jun 2020 11:58:58 +0200
Subject: [PATCH] finishing work on tests

---
 src/main/java/mvd/jester/App.java                                |  38 ++++---
 src/main/java/mvd/jester/ResultLogger.java                       |  29 +++---
 src/main/java/mvd/jester/TestEnvironment.java                    | 133 ++++++----
 src/main/java/mvd/jester/model/SystemManager.java                | 369 ++++++++++++
 src/main/java/mvd/jester/model/SystemSetup.java                  | 374 ------------
 src/main/java/mvd/jester/simulator/AbstractSimulator.java        |  15 ++-
 src/main/java/mvd/jester/simulator/DynamicForkJoin.java          |  28 ++---
 src/main/java/mvd/jester/simulator/ParallelSynchronous.java      |  28 ++---
 src/main/java/mvd/jester/tests/AbstractTest.java                 |   7 +--
 src/main/java/mvd/jester/tests/ChwaLee.java                      |   9 +--
 src/main/java/mvd/jester/tests/FonsecaNelis.java                 |  17 ++--
 src/main/java/mvd/jester/tests/MaiaBertogna.java                 |   7 +--
 src/main/java/mvd/jester/tests/MelaniButtazzo.java               |  25 +++--
 src/main/java/mvd/jester/tests/SchmidMottok.java                 |  17 ++--
 src/test/java/mvd/jester/model/TestDagUtils.java                 |   2 +-
 src/test/java/mvd/jester/model/TestSystemSetup.java              |  51 +++++----
 src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java |   8 ++-
 src/test/java/mvd/jester/priority/TestRateMonotonic.java         |   8 ++-
 18 files changed, 662 insertions(+), 503 deletions(-)
 create mode 100644 src/main/java/mvd/jester/model/SystemManager.java
 delete mode 100644 src/main/java/mvd/jester/model/SystemSetup.java

diff --git a/src/main/java/mvd/jester/App.java b/src/main/java/mvd/jester/App.java
index 2aea9e8..a0a7a2d 100644
--- a/src/main/java/mvd/jester/App.java
+++ b/src/main/java/mvd/jester/App.java
@@ -3,7 +3,8 @@ package mvd.jester;
 import java.util.Arrays;
 import java.util.List;
 import mvd.jester.model.DagTask;
-import mvd.jester.model.SystemSetup;
+import mvd.jester.model.SystemManager;
+import mvd.jester.model.SystemManager.DagTaskBuilder;
 import mvd.jester.tests.AbstractTest;
 import mvd.jester.tests.FonsecaNelis;
 import mvd.jester.tests.MelaniButtazzo;
@@ -16,15 +17,38 @@ import mvd.jester.tests.SchmidMottok;
  */
 public class App {
     public static void main(String[] args) {
-        for (int p = 8; p <= 8; p *= 2) {
-            SystemSetup.DagTaskBuilder builder =
-                    new SystemSetup.DagTaskBuilder().setNumberOfProcessors(p);
+        {
+            SystemManager manager = new SystemManager(8);
+            DagTaskBuilder builder = new DagTaskBuilder();
             TestEnvironment te = new TestEnvironment();
-            List<AbstractTest<DagTask>> tests =
te.registerTests(Arrays.asList(new SchmidMottok(p), - /* new MelaniButtazzo(p), */ new FonsecaNelis(p))); + List> tests = + te.registerTests(Arrays.asList(new SchmidMottok(manager), + new MelaniButtazzo(manager), new FonsecaNelis(manager))); - te.runExperiments(builder, tests, p, 100); // TODO: Change back to 500 + te.varyUtilization(builder, tests, 8, 500); + } + { + SystemManager manager = new SystemManager(8); + DagTaskBuilder builder = new DagTaskBuilder(); + TestEnvironment te = new TestEnvironment(); + + List> tests = + te.registerTests(Arrays.asList(new SchmidMottok(manager), + new MelaniButtazzo(manager), new FonsecaNelis(manager))); + + te.varyNumberOfProcessors(builder, tests, manager, 500); + } + { + SystemManager manager = new SystemManager(8); + DagTaskBuilder builder = new DagTaskBuilder(); + TestEnvironment te = new TestEnvironment(); + + List> tests = + te.registerTests(Arrays.asList(new SchmidMottok(manager), + new MelaniButtazzo(manager), new FonsecaNelis(manager))); + + te.varyNumberOfTasks(builder, tests, 8, 500); } } } diff --git a/src/main/java/mvd/jester/ResultLogger.java b/src/main/java/mvd/jester/ResultLogger.java index adf5bc4..87ba8ad 100644 --- a/src/main/java/mvd/jester/ResultLogger.java +++ b/src/main/java/mvd/jester/ResultLogger.java @@ -13,24 +13,31 @@ import mvd.jester.utils.Logger; public class ResultLogger { private final Logger logger; - private boolean headerLogged = false; - public ResultLogger(final long numberOfProcessors) { - this.logger = new Logger("./results/feasibility_" + numberOfProcessors + ".txt"); + public ResultLogger(final String fileName) { + this.logger = new Logger("./results/" + fileName + ".txt"); } + public void logHeader(final Map, Long> results, + String xAxisName) { + final Appendable out = new StringBuilder(); + try { + out.append(xAxisName); + for (final Entry, Long> rc : results.entrySet()) { + out.append("\t" + rc.getKey().getName()); + } + out.append("\n"); + } catch (final Exception e) { + throw new RuntimeException("Failed to log header!"); + } + logger.log(out); + } + + public void logLine(final double utilization, final Map, Long> results) { final Appendable out = new StringBuilder(); try { - if (!headerLogged) { - headerLogged = true; - out.append("Utilization"); - for (final Entry, Long> rc : results.entrySet()) { - out.append("\t" + rc.getKey().getName()); - } - out.append("\n"); - } out.append("" + utilization); for (final Entry, Long> rc : results.entrySet()) { final long numberOfFeasibleTasks = rc.getValue(); diff --git a/src/main/java/mvd/jester/TestEnvironment.java b/src/main/java/mvd/jester/TestEnvironment.java index 92ad2a1..1ba1a20 100644 --- a/src/main/java/mvd/jester/TestEnvironment.java +++ b/src/main/java/mvd/jester/TestEnvironment.java @@ -5,14 +5,19 @@ import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.Map.Entry; +import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; +import com.google.common.base.Stopwatch; import mvd.jester.info.SchedulingInfo; import mvd.jester.info.SchedulingInfo.Feasiblity; import mvd.jester.model.DagTask; import mvd.jester.model.SortedTaskSet; import mvd.jester.model.SynchronousTask; +import mvd.jester.model.SystemManager; import mvd.jester.model.Task; -import mvd.jester.model.SystemSetup.DagTaskBuilder; -import mvd.jester.model.SystemSetup.SynchronousTaskBuilder; +import mvd.jester.model.SystemManager.DagTaskBuilder; +import mvd.jester.model.SystemManager.SynchronousTaskBuilder; 
import mvd.jester.priority.PriorityManager; import mvd.jester.tests.AbstractTest; @@ -26,10 +31,6 @@ public class TestEnvironment { } public List> registerTests(final List> tests) { - // Set>> testSet = new HashSet<>(); - // for (AbstractTest t : tests) { - // testSet.add(new ResultCollector>(t.getPriorityManager(), t)); - // } return new ArrayList<>(tests); } @@ -73,22 +74,136 @@ public class TestEnvironment { } } System.out.println(""); - final ResultLogger resultLogger = new ResultLogger(numberOfProcessors); + // final ResultLogger resultLogger = new ResultLogger(numberOfProcessors); // resultLogger.logTests(abstractTestInstances); } + public void measureExecutionTimes(final DagTaskBuilder builder, + final List> abstractTestInstances, + final long numberOfMeasurements) { + Map, List> results = new LinkedHashMap<>(); + abstractTestInstances.forEach(t -> results.put(t, new ArrayList<>())); + for (int i = 0; i < numberOfMeasurements; ++i) { + double utilization = ThreadLocalRandom.current().nextDouble(1, 7); + Set taskSet = builder.generateTaskSet(utilization); + for (AbstractTest testInstance : abstractTestInstances) { + final PriorityManager priorityManager = testInstance.getPriorityManager(); + + final SortedTaskSet sortedTaskSet = new SortedTaskSet<>(priorityManager); + sortedTaskSet.addAll(taskSet); + Stopwatch w = Stopwatch.createStarted(); + testInstance.runSchedulabilityCheck(sortedTaskSet); + w.stop(); + long micros = w.elapsed(TimeUnit.MICROSECONDS); + results.get(testInstance).add(micros); + } + } + + for (Entry, List> entry : results.entrySet()) { + long size = entry.getValue().size(); + long total = entry.getValue().stream().reduce((long) 0, Long::sum); + System.out.println(entry.getKey().getName() + ": " + (double) total / size + " µs"); + } + + } + + public void varyNumberOfProcessors(final DagTaskBuilder builder, + final List> abstractTestInstances, SystemManager manager, + final long numberOfTaskSetsPerStep) { + long checkedTasksets = 0; + final long numberOfTaskSets = 8 * numberOfTaskSetsPerStep; + + final ResultLogger resultLogger = new ResultLogger("numberOfProcessors"); + final Map, Long> resultMap = new LinkedHashMap<>(); + abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0)); + resultLogger.logHeader(resultMap, "NoOfProcessors"); + + for (long numberOfProcessors = 2; numberOfProcessors <= 16; numberOfProcessors += 2) { + manager.setNumberOfProcessors(numberOfProcessors); + resultMap.replaceAll((k, v) -> (long) 0); + for (int i = 0; i < numberOfTaskSetsPerStep; ++i) { + final Set taskSet = builder.generateUUnifastTaskSet( + (long) (1.5 * numberOfProcessors), (double) numberOfProcessors * 0.5); + + System.out.print(Math.round((double) checkedTasksets / numberOfTaskSets * 100) + + "% of " + numberOfTaskSets + " tasksets tested!\r"); + for (final AbstractTest testInstance : abstractTestInstances) { + final PriorityManager priorityManager = testInstance.getPriorityManager(); - public void runExperiments(final DagTaskBuilder builder, + final SortedTaskSet sortedTaskSet = + new SortedTaskSet<>(priorityManager); + sortedTaskSet.addAll(taskSet); + final SchedulingInfo schedulingInfo = + testInstance.runSchedulabilityCheck(sortedTaskSet); + + if (schedulingInfo.getFeasibility() == Feasiblity.SUCCEEDED) { + resultMap.computeIfPresent(testInstance, (k, v) -> v + 1); + } + } + checkedTasksets++; + } + resultLogger.logLine(numberOfProcessors, resultMap); + resultLogger.newLine(); + } + System.out.println(""); + resultLogger.finalize(); + } + + + public void 
varyNumberOfTasks(final DagTaskBuilder builder, + final List> abstractTestInstances, final long numberOfProcessors, + final long numberOfTaskSetsPerStep) { + long checkedTasksets = 0; + final long numberOfTaskSets = 9 * numberOfTaskSetsPerStep; + + final ResultLogger resultLogger = new ResultLogger("numberOfTasks_" + numberOfProcessors); + final Map, Long> resultMap = new LinkedHashMap<>(); + abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0)); + resultLogger.logHeader(resultMap, "NoOfTasks"); + + for (long numberOfTasks = 4; numberOfTasks <= 20; numberOfTasks += 2) { + resultMap.replaceAll((k, v) -> (long) 0); + for (int i = 0; i < numberOfTaskSetsPerStep; ++i) { + final Set taskSet = builder.generateUUnifastTaskSet(numberOfTasks, + (double) numberOfProcessors * 0.5); + + System.out.print(Math.round((double) checkedTasksets / numberOfTaskSets * 100) + + "% of " + numberOfTaskSets + " tasksets tested!\r"); + for (final AbstractTest testInstance : abstractTestInstances) { + final PriorityManager priorityManager = testInstance.getPriorityManager(); + + final SortedTaskSet sortedTaskSet = + new SortedTaskSet<>(priorityManager); + sortedTaskSet.addAll(taskSet); + final SchedulingInfo schedulingInfo = + testInstance.runSchedulabilityCheck(sortedTaskSet); + + if (schedulingInfo.getFeasibility() == Feasiblity.SUCCEEDED) { + resultMap.computeIfPresent(testInstance, (k, v) -> v + 1); + } + } + checkedTasksets++; + } + resultLogger.logLine(numberOfTasks, resultMap); + resultLogger.newLine(); + } + System.out.println(""); + resultLogger.finalize(); + } + + public void varyUtilization(final DagTaskBuilder builder, final List> abstractTestInstances, final long numberOfProcessors, final long numberOfTaskSetsPerUtil) { long checkedTasksets = 0; final long numberOfTaskSets = ((numberOfProcessors * 4) - 3) * numberOfTaskSetsPerUtil; - final ResultLogger resultLogger = new ResultLogger(numberOfProcessors); + final ResultLogger resultLogger = new ResultLogger("utilization_" + numberOfProcessors); final Map, Long> resultMap = new LinkedHashMap<>(); abstractTestInstances.forEach(t -> resultMap.put(t, (long) 0)); + resultLogger.logHeader(resultMap, "Utilization"); + for (double util = 1; util <= numberOfProcessors; util += 0.25) { resultMap.replaceAll((k, v) -> (long) 0); for (int i = 0; i < numberOfTaskSetsPerUtil; ++i) { diff --git a/src/main/java/mvd/jester/model/SystemManager.java b/src/main/java/mvd/jester/model/SystemManager.java new file mode 100644 index 0000000..97ba7d5 --- /dev/null +++ b/src/main/java/mvd/jester/model/SystemManager.java @@ -0,0 +1,369 @@ +package mvd.jester.model; + +import java.util.HashSet; +import java.util.LinkedHashSet; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ThreadLocalRandom; +import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.Multimap; +import org.jgrapht.experimental.dag.DirectedAcyclicGraph; +import org.jgrapht.graph.DefaultEdge; +import mvd.jester.utils.DagUtils; + +/** + * TaskSet + */ +public class SystemManager { + private long numberOfProcessors; + + public SystemManager(final long numberOfProcessors) { + this.numberOfProcessors = numberOfProcessors; + } + + /** + * @return the numberOfProcessors + */ + public long getNumberOfProcessors() { + return numberOfProcessors; + } + + /** + * @param numberOfProcessors the numberOfProcessors to set + */ + public void setNumberOfProcessors(long numberOfProcessors) { + this.numberOfProcessors = numberOfProcessors; + } + + 
public static class SynchronousTaskBuilder { + private long numberOfProcessors = 4; + private long minPeriod = 100; + private long maxSequentialPeriod = 1000; + private long maxParallelPeriod = 10000; + private long minNumberOfSegments = 3; + private long maxNumberOfSegments = 7; + private long minNumberOfJobs = 2; + private long maxNumberOfJobs = 3 * numberOfProcessors / 2; + private final long minWcet = 1; + private long ratio = randomTaskRatio(this.parallelTaskRatio); + private final long parallelTaskRatio = 0; + + public SynchronousTaskBuilder() { + + } + + private long randomSequentialTaskPeriod() { + return ThreadLocalRandom.current().nextLong(minPeriod, maxSequentialPeriod); + } + + private long randomParallelTaskPeriod() { + return ThreadLocalRandom.current().nextLong(minPeriod, maxParallelPeriod); + } + + private long randomTaskRatio(final long min) { + return ThreadLocalRandom.current().nextLong(min, 100); + } + + private long randomNumberOfSegments() { + return ThreadLocalRandom.current().nextLong(minNumberOfSegments, maxNumberOfSegments); + } + + private long randomNumberOfJobs() { + this.maxNumberOfJobs = 3 * this.numberOfProcessors / 2; + return ThreadLocalRandom.current().nextLong(minNumberOfJobs, maxNumberOfJobs); + } + + private long randomWcet(final long period, final long numberOfSegments) { + final long maxWcet = period / numberOfSegments; + + return ThreadLocalRandom.current().nextLong(minWcet, maxWcet); + } + + private SynchronousTask generateTask() { + final boolean serial = randomTaskRatio(0) > this.ratio; + final long period = serial ? randomSequentialTaskPeriod() : randomParallelTaskPeriod(); + final long numberOfSegments = serial ? 1 : randomNumberOfSegments(); + final long parallelNumberOfJobs = serial ? 1 : randomNumberOfJobs(); + final Set segments = new LinkedHashSet(); + + for (int i = 0; i < numberOfSegments; ++i) { + final long numberOfJobs = i % 2 == 1 ? 
parallelNumberOfJobs : 1; + final long wcet = randomWcet(period, numberOfSegments); + segments.add(new Segment(wcet, numberOfJobs)); + } + return new SynchronousTask(segments, period, numberOfProcessors); + } + + public Set generateTaskSet() { + this.ratio = randomTaskRatio(this.parallelTaskRatio); + final Set taskSet = new HashSet<>(); + + for (int i = 0; i < numberOfProcessors; ++i) { + final SynchronousTask task = generateTask(); + taskSet.add(task); + } + + return taskSet; + } + + // public SystemSetup build() { + // this.ratio = randomTaskRatio(this.parallelTaskRatio); + // final Set taskSet = generateTaskSet(); + // return new SystemSetup<>(taskSet, numberOfProcessors); + // } + + // public Set rebuild(final SystemSetup systemSetup) { + // this.ratio = randomTaskRatio(this.parallelTaskRatio); + // return generateTaskSet(); + // } + + public boolean addTask(final Set taskSet) { + return taskSet.add(generateTask()); + } + + public SynchronousTaskBuilder setNumberOfProcessors(final long numberOfProcessors) { + this.numberOfProcessors = numberOfProcessors; + + return this; + } + + public SynchronousTaskBuilder setNumberOfSegments(final long minNumberOfSegments, + final long maxNumberOfSegments) { + this.minNumberOfSegments = minNumberOfSegments; + this.maxNumberOfSegments = maxNumberOfSegments; + + return this; + } + + public SynchronousTaskBuilder setPeriods(final long minPeriod, + final long maxSequentialPeriod, final long maxParallelPeriod) { + this.minPeriod = minPeriod; + this.maxSequentialPeriod = maxSequentialPeriod; + this.maxParallelPeriod = maxParallelPeriod; + + return this; + } + + /** + * @param maxNumberOfJobs the maxNumberOfJobs to set + */ + public SynchronousTaskBuilder setNumberOfJobs(final long minNumberOfJobs, + final long maxNumberOfJobs) { + this.minNumberOfJobs = minNumberOfJobs; + this.maxNumberOfJobs = maxNumberOfJobs; + return this; + } + } + + public static class DagTaskBuilder { + private long numberOfProcessors = 8; + private long minimumWcet = 1; + private long maximumWcet = 100; + private long maxNumberOfBranches = 5; + private long depth = 2; + private long p_par = 80; + private long p_add = 20; + + public DagTaskBuilder() { + } + + public double getBeta() { + return 0.035 * numberOfProcessors; + } + + private long randomProbability() { + return ThreadLocalRandom.current().nextLong(0, 100); + } + + public Set generateUUnifastTaskSet(final long numberOfTasks, + final double totalUtilization) { + final LinkedHashSet taskSet = new LinkedHashSet<>(); + + if (numberOfTasks > 0) { + double sumU = totalUtilization; + for (int i = 1; i <= numberOfTasks - 1; i++) { + Double nextSumU = sumU * Math.pow(ThreadLocalRandom.current().nextDouble(), + (1.0 / (double) (numberOfTasks - i))); + DagTask task = generateTask(sumU - nextSumU); + taskSet.add(task); + sumU = nextSumU; + } + DagTask task = generateTask(sumU); + taskSet.add(task); + } + + return taskSet; + } + + public Set generateTaskSet(final double totalUtilization) { + final LinkedHashSet taskSet = new LinkedHashSet<>(); + double currentUtilization = 0; + while (currentUtilization <= totalUtilization) { + final DagTask dagTask = generateTask(); + + if (currentUtilization + dagTask.getUtilization() < totalUtilization) { + currentUtilization += dagTask.getUtilization(); + taskSet.add(dagTask); + } else { + final double remainingUtilization = totalUtilization - currentUtilization; + final long period = + (long) Math.ceil(dagTask.getWorkload() / remainingUtilization); + if (period >= dagTask.getCriticalPath()) { + 
final DagTask modifiedTask = + new DagTask(dagTask.getJobDag(), period, numberOfProcessors); + taskSet.add(modifiedTask); + break; + } + } + } + + return taskSet; + } + + public DagTask generateTask(double utilization) { + final DirectedAcyclicGraph jobDag = + new DirectedAcyclicGraph<>(DefaultEdge.class); + + final Job j = fork(jobDag, Optional.empty(), this.depth); + fork(jobDag, Optional.of(j), this.depth); + randomEdges(jobDag); + + final long workload = DagUtils.calculateWorkload(jobDag); + + final long period = Math.round(workload / utilization); + + return new DagTask(jobDag, period, numberOfProcessors); + } + + public DagTask generateTask() { + final DirectedAcyclicGraph jobDag = + new DirectedAcyclicGraph<>(DefaultEdge.class); + + final Job j = fork(jobDag, Optional.empty(), this.depth); + fork(jobDag, Optional.of(j), this.depth); + randomEdges(jobDag); + + final long workload = DagUtils.calculateWorkload(jobDag); + final long criticalPath = DagUtils.calculateCriticalPath(jobDag); + + final long period = randomTaskPeriod(criticalPath, workload); + + return new DagTask(jobDag, period, numberOfProcessors); + } + + private Job join(final DirectedAcyclicGraph jobDag, final Job current, + final Set childs) { + if (childs.size() > 0) { + final Job job = new Job(randomWcet()); + jobDag.addVertex(job); + for (final Job c : childs) { + try { + jobDag.addDagEdge(c, job); + } catch (final Exception e) { + System.out.println("Failed to join nodes!"); + } + } + return job; + } + return current; + } + + private Job fork(final DirectedAcyclicGraph jobDag, + final Optional predecessor, final long depth) { + final Job job = new Job(randomWcet()); + jobDag.addVertex(job); + final Set childs = new HashSet<>(); + if (predecessor.isPresent()) { + try { + jobDag.addDagEdge(predecessor.get(), job); + } catch (final Exception e) { + System.out.println("Adding fork edge failed!"); + } + } + if (depth >= 0 && randomProbability() < p_par) { + final long numberOfJobs = randomNumberOfBranches(); + for (int i = 0; i < numberOfJobs; ++i) { + childs.add(fork(jobDag, Optional.of(job), depth - 1)); + } + } + + return join(jobDag, job, childs); + } + + private void randomEdges(final DirectedAcyclicGraph jobDag) { + final Multimap edgePairs = ArrayListMultimap.create(); + for (final Job j1 : jobDag) { + for (final Job j2 : jobDag) { + if (randomProbability() < p_add) { + edgePairs.put(j1, j2); + } + } + } + + for (final Map.Entry pairs : edgePairs.entries()) { + try { + jobDag.addDagEdge(pairs.getKey(), pairs.getValue()); + } catch (final Exception e) { + // nothing to do here + } + } + } + + private long randomTaskPeriod(final long criticalPathLength, final long workload) { + return ThreadLocalRandom.current().nextLong(criticalPathLength, + (long) (workload / getBeta())); + } + + private long randomNumberOfBranches() { + return ThreadLocalRandom.current().nextLong(2, maxNumberOfBranches); + } + + private long randomWcet() { + return ThreadLocalRandom.current().nextLong(minimumWcet, maximumWcet); + } + + /** + * @param numberOfProcessors the numberOfProcessors to set + */ + public DagTaskBuilder setNumberOfProcessors(final long numberOfProcessors) { + this.numberOfProcessors = numberOfProcessors; + return this; + } + + /** + * @return the numberOfProcessors + */ + public long getNumberOfProcessors() { + return numberOfProcessors; + } + + public DagTaskBuilder setWcets(final long minimumWcet, final long maximumWcet) { + this.minimumWcet = minimumWcet; + this.maximumWcet = maximumWcet; + return this; + } + + /** + * 
@param maxNumberOfBranches the maxNumberOfBranches to set + */ + public DagTaskBuilder setMaxNumberOfBranches(final long maxNumberOfBranches) { + this.maxNumberOfBranches = maxNumberOfBranches; + return this; + } + + public DagTaskBuilder setPropabilities(final long p_par, final long p_add) { + this.p_par = p_par; + this.p_add = p_add; + return this; + } + + /** + * @param depth the depth to set + */ + public DagTaskBuilder setDepth(final long depth) { + this.depth = depth; + return this; + } + } +} diff --git a/src/main/java/mvd/jester/model/SystemSetup.java b/src/main/java/mvd/jester/model/SystemSetup.java deleted file mode 100644 index 03d732f..0000000 --- a/src/main/java/mvd/jester/model/SystemSetup.java +++ /dev/null @@ -1,374 +0,0 @@ -package mvd.jester.model; - -import java.io.PrintWriter; -import java.util.HashSet; -import java.util.LinkedHashSet; -import java.util.Map; -import java.util.Optional; -import java.util.Set; -import java.util.concurrent.ThreadLocalRandom; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.Multimap; -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.jgrapht.experimental.dag.DirectedAcyclicGraph; -import org.jgrapht.graph.DefaultEdge; -import mvd.jester.utils.DagUtils; - -/** - * TaskSet - */ -public class SystemSetup { - private Set tasks; - private final long numberOfProcessors; - - public SystemSetup(final Set tasks, final long numberOfProcessors) { - this.tasks = tasks; - this.numberOfProcessors = numberOfProcessors; - } - - /** - * @return the tasks - */ - public Set getTasks() { - return tasks; - } - - public void setTasks(final Set tasks) { - this.tasks = tasks; - } - - /** - * @return the numberOfProcessors - */ - public long getNumberOfProcessors() { - return numberOfProcessors; - } - - - @Override - public String toString() { - final Gson gson = new GsonBuilder().setPrettyPrinting().create(); - return gson.toJson(tasks); - } - - public void writeToFile(final String path) { - try (PrintWriter pw = new PrintWriter(path)) { - pw.write(toString()); - } catch (final Exception e) { - System.err.println("Something went wrong when writing to file!"); - } - } - - // public static SystemSetup readFromFile(final String path, - // final long numberOfProcessors) { - // String jsonString; - // try { - // final byte[] encoded = Files.readAllBytes(Paths.get(path)); - // jsonString = new String(encoded, Charset.defaultCharset()); - // } catch (final IOException e) { - // System.out.println(e.getMessage()); - // jsonString = new String(""); - // } - // return SystemSetup.fromString(jsonString, numberOfProcessors); - // } - - // public static SystemSetup fromString(final String json, - // final long numberOfProcessors) { - // final Gson gson = new GsonBuilder().registerTypeAdapter(SortedTaskSet.class, - // new SortedTaskSet.Deserializer()).create(); - - // final SortedTaskSet tasks = gson.fromJson(json, SortedTaskSet.class); - // return new SystemSetup<>(tasks, numberOfProcessors); - // } - - public static class SynchronousTaskBuilder { - private long numberOfProcessors = 4; - private long minPeriod = 100; - private long maxSequentialPeriod = 1000; - private long maxParallelPeriod = 10000; - private long minNumberOfSegments = 3; - private long maxNumberOfSegments = 7; - private long minNumberOfJobs = 2; - private long maxNumberOfJobs = 3 * numberOfProcessors / 2; - private final long minWcet = 1; - private long ratio = randomTaskRatio(this.parallelTaskRatio); - private final long parallelTaskRatio = 
0; - - public SynchronousTaskBuilder() { - - } - - private long randomSequentialTaskPeriod() { - return ThreadLocalRandom.current().nextLong(minPeriod, maxSequentialPeriod); - } - - private long randomParallelTaskPeriod() { - return ThreadLocalRandom.current().nextLong(minPeriod, maxParallelPeriod); - } - - private long randomTaskRatio(final long min) { - return ThreadLocalRandom.current().nextLong(min, 100); - } - - private long randomNumberOfSegments() { - return ThreadLocalRandom.current().nextLong(minNumberOfSegments, maxNumberOfSegments); - } - - private long randomNumberOfJobs() { - this.maxNumberOfJobs = 3 * this.numberOfProcessors / 2; - return ThreadLocalRandom.current().nextLong(minNumberOfJobs, maxNumberOfJobs); - } - - private long randomWcet(final long period, final long numberOfSegments) { - final long maxWcet = period / numberOfSegments; - - return ThreadLocalRandom.current().nextLong(minWcet, maxWcet); - } - - private SynchronousTask generateTask() { - final boolean serial = randomTaskRatio(0) > this.ratio; - final long period = serial ? randomSequentialTaskPeriod() : randomParallelTaskPeriod(); - final long numberOfSegments = serial ? 1 : randomNumberOfSegments(); - final long parallelNumberOfJobs = serial ? 1 : randomNumberOfJobs(); - final Set segments = new LinkedHashSet(); - - for (int i = 0; i < numberOfSegments; ++i) { - final long numberOfJobs = i % 2 == 1 ? parallelNumberOfJobs : 1; - final long wcet = randomWcet(period, numberOfSegments); - segments.add(new Segment(wcet, numberOfJobs)); - } - return new SynchronousTask(segments, period, numberOfProcessors); - } - - public Set generateTaskSet() { - this.ratio = randomTaskRatio(this.parallelTaskRatio); - final Set taskSet = new HashSet<>(); - - for (int i = 0; i < numberOfProcessors; ++i) { - final SynchronousTask task = generateTask(); - taskSet.add(task); - } - - return taskSet; - } - - // public SystemSetup build() { - // this.ratio = randomTaskRatio(this.parallelTaskRatio); - // final Set taskSet = generateTaskSet(); - // return new SystemSetup<>(taskSet, numberOfProcessors); - // } - - // public Set rebuild(final SystemSetup systemSetup) { - // this.ratio = randomTaskRatio(this.parallelTaskRatio); - // return generateTaskSet(); - // } - - public boolean addTask(final Set taskSet) { - return taskSet.add(generateTask()); - } - - public SynchronousTaskBuilder setNumberOfProcessors(final long numberOfProcessors) { - this.numberOfProcessors = numberOfProcessors; - - return this; - } - - public SynchronousTaskBuilder setNumberOfSegments(final long minNumberOfSegments, - final long maxNumberOfSegments) { - this.minNumberOfSegments = minNumberOfSegments; - this.maxNumberOfSegments = maxNumberOfSegments; - - return this; - } - - public SynchronousTaskBuilder setPeriods(final long minPeriod, - final long maxSequentialPeriod, final long maxParallelPeriod) { - this.minPeriod = minPeriod; - this.maxSequentialPeriod = maxSequentialPeriod; - this.maxParallelPeriod = maxParallelPeriod; - - return this; - } - - /** - * @param maxNumberOfJobs the maxNumberOfJobs to set - */ - public SynchronousTaskBuilder setNumberOfJobs(final long minNumberOfJobs, - final long maxNumberOfJobs) { - this.minNumberOfJobs = minNumberOfJobs; - this.maxNumberOfJobs = maxNumberOfJobs; - return this; - } - } - - public static class DagTaskBuilder { - private long numberOfProcessors = 4; - private long minimumWcet = 1; - private long maximumWcet = 100; - private long maxNumberOfBranches = 5; - private long depth = 2; - private long p_par = 80; - private 
long p_add = 5; // TODO: Change back to 20 - - public DagTaskBuilder() { - } - - public double getBeta() { - return 0.035 * numberOfProcessors; - } - - private long randomProbability() { - return ThreadLocalRandom.current().nextLong(0, 100); - } - - public Set generateTaskSet(final double totalUtilization) { - final LinkedHashSet taskSet = new LinkedHashSet<>(); - double currentUtilization = 0; - while (currentUtilization <= totalUtilization) { - final DagTask dagTask = generateTask(); - - if (currentUtilization + dagTask.getUtilization() < totalUtilization) { - currentUtilization += dagTask.getUtilization(); - taskSet.add(dagTask); - } else { - final double remainingUtilization = totalUtilization - currentUtilization; - final long period = - (long) Math.ceil(dagTask.getWorkload() / remainingUtilization); - if (period >= dagTask.getCriticalPath()) { - final DagTask modifiedTask = - new DagTask(dagTask.getJobDag(), period, numberOfProcessors); - taskSet.add(modifiedTask); - break; - } - } - } - - return taskSet; - } - - - public DagTask generateTask() { - final DirectedAcyclicGraph jobDag = - new DirectedAcyclicGraph<>(DefaultEdge.class); - - final Job j = fork(jobDag, Optional.empty(), this.depth); - fork(jobDag, Optional.of(j), this.depth); - randomEdges(jobDag); - - final long workload = DagUtils.calculateWorkload(jobDag); - final long criticalPath = DagUtils.calculateCriticalPath(jobDag); - - final long period = randomTaskPeriod(criticalPath, workload); - - return new DagTask(jobDag, period, numberOfProcessors); - } - - private Job join(final DirectedAcyclicGraph jobDag, final Job current, - final Set childs) { - if (childs.size() > 0) { - final Job job = new Job(randomWcet()); - jobDag.addVertex(job); - for (final Job c : childs) { - try { - jobDag.addDagEdge(c, job); - } catch (final Exception e) { - System.out.println("Failed to join nodes!"); - } - } - return job; - } - return current; - } - - private Job fork(final DirectedAcyclicGraph jobDag, - final Optional predecessor, final long depth) { - final Job job = new Job(randomWcet()); - jobDag.addVertex(job); - final Set childs = new HashSet<>(); - if (predecessor.isPresent()) { - try { - jobDag.addDagEdge(predecessor.get(), job); - } catch (final Exception e) { - System.out.println("Adding fork edge failed!"); - } - } - if (depth >= 0 && randomProbability() < p_par) { - final long numberOfJobs = randomNumberOfBranches(); - for (int i = 0; i < numberOfJobs; ++i) { - childs.add(fork(jobDag, Optional.of(job), depth - 1)); - } - } - - return join(jobDag, job, childs); - } - - private void randomEdges(final DirectedAcyclicGraph jobDag) { - final Multimap edgePairs = ArrayListMultimap.create(); - for (final Job j1 : jobDag) { - for (final Job j2 : jobDag) { - if (randomProbability() < p_add) { - edgePairs.put(j1, j2); - } - } - } - - for (final Map.Entry pairs : edgePairs.entries()) { - try { - jobDag.addDagEdge(pairs.getKey(), pairs.getValue()); - } catch (final Exception e) { - // nothing to do here - } - } - } - - private long randomTaskPeriod(final long criticalPathLength, final long workload) { - return ThreadLocalRandom.current().nextLong(criticalPathLength, - (long) (workload / getBeta())); - } - - private long randomNumberOfBranches() { - return ThreadLocalRandom.current().nextLong(2, maxNumberOfBranches); - } - - private long randomWcet() { - return ThreadLocalRandom.current().nextLong(minimumWcet, maximumWcet); - } - - /** - * @param numberOfProcessors the numberOfProcessors to set - */ - public DagTaskBuilder 
setNumberOfProcessors(final long numberOfProcessors) { - this.numberOfProcessors = numberOfProcessors; - return this; - } - - public DagTaskBuilder setWcets(final long minimumWcet, final long maximumWcet) { - this.minimumWcet = minimumWcet; - this.maximumWcet = maximumWcet; - return this; - } - - /** - * @param maxNumberOfBranches the maxNumberOfBranches to set - */ - public DagTaskBuilder setMaxNumberOfBranches(final long maxNumberOfBranches) { - this.maxNumberOfBranches = maxNumberOfBranches; - return this; - } - - public DagTaskBuilder setPropabilities(final long p_par, final long p_add) { - this.p_par = p_par; - this.p_add = p_add; - return this; - } - - /** - * @param depth the depth to set - */ - public DagTaskBuilder setDepth(final long depth) { - this.depth = depth; - return this; - } - } -} diff --git a/src/main/java/mvd/jester/simulator/AbstractSimulator.java b/src/main/java/mvd/jester/simulator/AbstractSimulator.java index a6dba01..54152df 100644 --- a/src/main/java/mvd/jester/simulator/AbstractSimulator.java +++ b/src/main/java/mvd/jester/simulator/AbstractSimulator.java @@ -5,8 +5,7 @@ import java.util.HashSet; import java.util.Set; import java.util.TreeSet; import com.google.common.collect.TreeMultiset; -import mvd.jester.model.SystemSetup; -import mvd.jester.model.SynchronousTask; +import mvd.jester.model.SystemManager; import mvd.jester.priority.PriorityManager; import mvd.jester.priority.RateMonotonic; import mvd.jester.TypeInterface; @@ -20,11 +19,11 @@ import mvd.jester.simulator.internals.TaskContextInterface; */ public abstract class AbstractSimulator implements SimulatorInterface, TypeInterface { - protected final SystemSetup systemSetup; + protected final SystemManager systemSetup; protected final Set processors; protected TreeMultiset readyTasks; - AbstractSimulator(SystemSetup systemSetup) { + AbstractSimulator(SystemManager systemSetup) { this.systemSetup = systemSetup; this.readyTasks = TreeMultiset.create((t1, t2) -> new RateMonotonic().compare(t1, t2)); processors = new HashSet<>(); @@ -98,10 +97,14 @@ public abstract class AbstractSimulator implements SimulatorInterface, TypeInter } private long getHyperPeriod() { - return systemSetup.getTasks().stream().max(Comparator.comparing(SynchronousTask::getPeriod)) - .get().getPeriod() * 10; + // return + // systemSetup.getTasks().stream().max(Comparator.comparing(SynchronousTask::getPeriod)) + // .get().getPeriod() * 10; + return 10; } + + private class ProcessorComparator implements Comparator { @Override diff --git a/src/main/java/mvd/jester/simulator/DynamicForkJoin.java b/src/main/java/mvd/jester/simulator/DynamicForkJoin.java index ed45b8a..a0d6a1b 100644 --- a/src/main/java/mvd/jester/simulator/DynamicForkJoin.java +++ b/src/main/java/mvd/jester/simulator/DynamicForkJoin.java @@ -1,33 +1,31 @@ package mvd.jester.simulator; -import mvd.jester.model.SystemSetup; -import mvd.jester.model.SynchronousTask; -import mvd.jester.simulator.internals.dynamicforkjoin.TaskContext; +import mvd.jester.model.SystemManager; /** * SchmidMottok */ public class DynamicForkJoin extends AbstractSimulator { - public DynamicForkJoin(SystemSetup systemSetup) { + public DynamicForkJoin(SystemManager systemSetup) { super(systemSetup); } @Override protected boolean releaseTasks(long timeStep) { - for (SynchronousTask t : systemSetup.getTasks()) { - if (timeStep % t.getPeriod() == 0) { - TaskContext tc = new TaskContext(t, systemSetup.getNumberOfProcessors(), timeStep); - if (!readyTasks.add(tc)) { - EventPrinter - .print("Time " + 
timeStep + ": Task " + tc + " could not be released!"); - return false; - } - EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!"); - } - } + // // for (SynchronousTask t : systemSetup.getTasks()) { + // if (timeStep % t.getPeriod() == 0) { + // TaskContext tc = new TaskContext(t, systemSetup.getNumberOfProcessors(), timeStep); + // if (!readyTasks.add(tc)) { + // EventPrinter + // .print("Time " + timeStep + ": Task " + tc + " could not be released!"); + // return false; + // } + // EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!"); + // } + // } return true; } diff --git a/src/main/java/mvd/jester/simulator/ParallelSynchronous.java b/src/main/java/mvd/jester/simulator/ParallelSynchronous.java index 2ba8fe9..99f2346 100644 --- a/src/main/java/mvd/jester/simulator/ParallelSynchronous.java +++ b/src/main/java/mvd/jester/simulator/ParallelSynchronous.java @@ -1,31 +1,29 @@ package mvd.jester.simulator; -import mvd.jester.model.SystemSetup; -import mvd.jester.model.SynchronousTask; -import mvd.jester.simulator.internals.parallelsynchronous.TaskContext; +import mvd.jester.model.SystemManager; /** * MaiaBertogna */ public class ParallelSynchronous extends AbstractSimulator { - public ParallelSynchronous(SystemSetup systemSetup) { + public ParallelSynchronous(SystemManager systemSetup) { super(systemSetup); } @Override protected boolean releaseTasks(long timeStep) { - for (SynchronousTask t : systemSetup.getTasks()) { - if (timeStep % t.getPeriod() == 0) { - TaskContext tc = new TaskContext(t, timeStep); - if (!readyTasks.add(tc)) { - EventPrinter - .print("Time " + timeStep + ": Task " + tc + " could not be released!"); - return false; - } - EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!"); - } - } + // for (SynchronousTask t : systemSetup.getTasks()) { + // if (timeStep % t.getPeriod() == 0) { + // TaskContext tc = new TaskContext(t, timeStep); + // if (!readyTasks.add(tc)) { + // EventPrinter + // .print("Time " + timeStep + ": Task " + tc + " could not be released!"); + // return false; + // } + // EventPrinter.print("Time " + timeStep + ": Task " + tc + " released!"); + // } + // } return true; } diff --git a/src/main/java/mvd/jester/tests/AbstractTest.java b/src/main/java/mvd/jester/tests/AbstractTest.java index 4c1caa8..347d9f0 100644 --- a/src/main/java/mvd/jester/tests/AbstractTest.java +++ b/src/main/java/mvd/jester/tests/AbstractTest.java @@ -1,6 +1,7 @@ package mvd.jester.tests; import mvd.jester.TypeInterface; +import mvd.jester.model.SystemManager; import mvd.jester.model.Task; /** @@ -8,10 +9,10 @@ import mvd.jester.model.Task; */ public abstract class AbstractTest implements TestInterface, TypeInterface { - protected final long numberOfProcessors; + protected final SystemManager manager; - public AbstractTest(long numberOfProcessors) { - this.numberOfProcessors = numberOfProcessors; + public AbstractTest(final SystemManager manager) { + this.manager = manager; } } diff --git a/src/main/java/mvd/jester/tests/ChwaLee.java b/src/main/java/mvd/jester/tests/ChwaLee.java index d9c9f89..e363bcf 100644 --- a/src/main/java/mvd/jester/tests/ChwaLee.java +++ b/src/main/java/mvd/jester/tests/ChwaLee.java @@ -13,6 +13,7 @@ import mvd.jester.info.TerminationInfo; import mvd.jester.model.Segment; import mvd.jester.model.SortedTaskSet; import mvd.jester.model.SynchronousTask; +import mvd.jester.model.SystemManager; import mvd.jester.priority.EarliestDeadlineFirst; import mvd.jester.priority.PriorityManager; @@ -24,8 +25,8 @@ public 
class ChwaLee extends AbstractTest { private final Map responseTimes; private final PriorityManager priorityManager; - public ChwaLee(final long numberOfProcessors) { - super(numberOfProcessors); + public ChwaLee(final SystemManager manager) { + super(manager); this.responseTimes = new HashMap<>(); this.priorityManager = new EarliestDeadlineFirst(); } @@ -72,8 +73,8 @@ public class ChwaLee extends AbstractTest { Math.min(getSelfInterference(task, deadline, p + 1), deadline - minimumWcet); } - final boolean feasible = taskInterference + selfInterference <= numberOfProcessors - * (deadline - minimumWcet); + final boolean feasible = taskInterference + + selfInterference <= manager.getNumberOfProcessors() * (deadline - minimumWcet); return feasible ? deadline - 1 : deadline + 1; } diff --git a/src/main/java/mvd/jester/tests/FonsecaNelis.java b/src/main/java/mvd/jester/tests/FonsecaNelis.java index 333fd43..f575783 100644 --- a/src/main/java/mvd/jester/tests/FonsecaNelis.java +++ b/src/main/java/mvd/jester/tests/FonsecaNelis.java @@ -21,6 +21,7 @@ import mvd.jester.model.DagTask; import mvd.jester.model.Job; import mvd.jester.model.Segment; import mvd.jester.model.SortedTaskSet; +import mvd.jester.model.SystemManager; import mvd.jester.model.Task; import mvd.jester.model.TreeJob; import mvd.jester.utils.DagUtils; @@ -36,8 +37,8 @@ public class FonsecaNelis extends AbstractTest { private final PriorityManager priorityManager; private final Map> carryOutSegments; - public FonsecaNelis(final long numberOfProcessors) { - super(numberOfProcessors); + public FonsecaNelis(final SystemManager manager) { + super(manager); this.responseTimes = new HashMap<>(); this.priorityManager = new RateMonotonic(); this.carryOutSegments = new HashMap<>(); @@ -76,6 +77,7 @@ public class FonsecaNelis extends AbstractTest { private Set constructCarryOutDistribution( final DirectedAcyclicGraph nfjDag, final BinaryDecompositionTree tree) { + // List statt Set final Set carryOutWorkload = new LinkedHashSet<>(); final BinaryDecompositionTree modifiedTree = transformTree(tree); @@ -156,7 +158,7 @@ public class FonsecaNelis extends AbstractTest { } } - taskInterference /= numberOfProcessors; + taskInterference /= manager.getNumberOfProcessors(); final double selfInterference = getSelfInterference(task); @@ -242,7 +244,8 @@ public class FonsecaNelis extends AbstractTest { final long improvedWorkloadFromTask = taskWorkload - Math.max(0, criticalPath - carryOutPeriod); - final long improvedWorkloadFromProcessors = carryOutPeriod * numberOfProcessors; + final long improvedWorkloadFromProcessors = + carryOutPeriod * manager.getNumberOfProcessors(); return Math.min(Math.min(improvedWorkloadFromTask, improvedWorkloadFromProcessors), workload); @@ -266,8 +269,8 @@ public class FonsecaNelis extends AbstractTest { workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs(); } - final long improvedWorkload = - Math.max(carryInPeriod - (period - responseTime), 0) * numberOfProcessors; + final long improvedWorkload = Math.max(carryInPeriod - (period - responseTime), 0) + * manager.getNumberOfProcessors(); return Math.min(improvedWorkload, workload); } @@ -276,7 +279,7 @@ public class FonsecaNelis extends AbstractTest { final long criticalPath = task.getCriticalPath(); final long workload = task.getWorkload(); - return (double) (workload - criticalPath) / numberOfProcessors; + return (double) (workload - criticalPath) / manager.getNumberOfProcessors(); } @Override diff --git 
a/src/main/java/mvd/jester/tests/MaiaBertogna.java b/src/main/java/mvd/jester/tests/MaiaBertogna.java index a43789d..3255c37 100644 --- a/src/main/java/mvd/jester/tests/MaiaBertogna.java +++ b/src/main/java/mvd/jester/tests/MaiaBertogna.java @@ -10,6 +10,7 @@ import mvd.jester.info.TerminationInfo; import mvd.jester.model.Segment; import mvd.jester.model.SortedTaskSet; import mvd.jester.model.SynchronousTask; +import mvd.jester.model.SystemManager; import mvd.jester.priority.PriorityManager; import mvd.jester.priority.RateMonotonic; @@ -21,8 +22,8 @@ public class MaiaBertogna extends AbstractTest { private final Map responseTimes; private final PriorityManager priorityManager; - public MaiaBertogna(final long numberOfProcessors) { - super(numberOfProcessors); + public MaiaBertogna(final SystemManager manager) { + super(manager); this.responseTimes = new HashMap<>(); this.priorityManager = new RateMonotonic(); } @@ -77,7 +78,7 @@ public class MaiaBertogna extends AbstractTest { } final long totalInterference = LongMath.divide(taskInterference + selfInterference, - numberOfProcessors, RoundingMode.FLOOR); + manager.getNumberOfProcessors(), RoundingMode.FLOOR); responseTime = minimumWcet + totalInterference; } while (previousResponseTime != responseTime); diff --git a/src/main/java/mvd/jester/tests/MelaniButtazzo.java b/src/main/java/mvd/jester/tests/MelaniButtazzo.java index cfc1c84..a91e7c7 100644 --- a/src/main/java/mvd/jester/tests/MelaniButtazzo.java +++ b/src/main/java/mvd/jester/tests/MelaniButtazzo.java @@ -9,6 +9,7 @@ import mvd.jester.info.SchedulingInfo; import mvd.jester.info.TerminationInfo; import mvd.jester.model.DagTask; import mvd.jester.model.SortedTaskSet; +import mvd.jester.model.SystemManager; import mvd.jester.model.Task; import mvd.jester.priority.PriorityManager; import mvd.jester.priority.RateMonotonic; @@ -18,8 +19,8 @@ public class MelaniButtazzo extends AbstractTest { private final Map responseTimes; private final PriorityManager priorityManager; - public MelaniButtazzo(final long numberOfProcessors) { - super(numberOfProcessors); + public MelaniButtazzo(final SystemManager manager) { + super(manager); this.responseTimes = new HashMap<>(); this.priorityManager = new RateMonotonic(); } @@ -46,8 +47,8 @@ public class MelaniButtazzo extends AbstractTest { } private long calculateResponseTime(final Set tasks, final DagTask task) { - final long minimumWcet = task.getCriticalPath(); - long responseTime = minimumWcet; + final long criticalPath = task.getCriticalPath(); + long responseTime = criticalPath; long previousResponseTime = 0; do { @@ -60,12 +61,12 @@ public class MelaniButtazzo extends AbstractTest { } } - taskInterference /= numberOfProcessors; + taskInterference /= manager.getNumberOfProcessors(); final double selfInterference = getSelfInterference(task); - + // TODO: Einzeln abrunden oder self interference als long abrunden final long totalInterference = (long) Math.floor(taskInterference + selfInterference); - responseTime = minimumWcet + totalInterference; + responseTime = criticalPath + totalInterference; } while (previousResponseTime != responseTime); return responseTime; @@ -75,7 +76,7 @@ public class MelaniButtazzo extends AbstractTest { final long criticalPath = task.getCriticalPath(); final long workload = task.getWorkload(); - return (double) (workload - criticalPath) / numberOfProcessors; + return (double) (workload - criticalPath) / manager.getNumberOfProcessors(); } @@ -85,13 +86,13 @@ public class MelaniButtazzo extends AbstractTest { final long 
singleWorkload = task.getWorkload(); final long period = task.getPeriod(); - final double nominator = - (interval + responseTime - (double) singleWorkload / numberOfProcessors); + final double nominator = (interval + responseTime + - (double) singleWorkload / manager.getNumberOfProcessors()); final long amountOfJobs = DoubleMath.roundToLong(nominator / period, RoundingMode.FLOOR); - final double carryOutPortion = - Math.min(singleWorkload, numberOfProcessors * (nominator % period)); + final double carryOutPortion = Math.min(singleWorkload, + manager.getNumberOfProcessors() * (nominator % period)); final double interference = amountOfJobs * singleWorkload + carryOutPortion; diff --git a/src/main/java/mvd/jester/tests/SchmidMottok.java b/src/main/java/mvd/jester/tests/SchmidMottok.java index 590c052..3d29ac3 100644 --- a/src/main/java/mvd/jester/tests/SchmidMottok.java +++ b/src/main/java/mvd/jester/tests/SchmidMottok.java @@ -10,6 +10,7 @@ import mvd.jester.info.TerminationInfo; import mvd.jester.model.DagTask; import mvd.jester.model.Segment; import mvd.jester.model.SortedTaskSet; +import mvd.jester.model.SystemManager; import mvd.jester.model.Task; import mvd.jester.priority.PriorityManager; import mvd.jester.priority.RateMonotonic; @@ -22,8 +23,8 @@ public class SchmidMottok extends AbstractTest { private final Map responseTimes; private final PriorityManager priorityManager; - public SchmidMottok(final long numberOfProcessors) { - super(numberOfProcessors); + public SchmidMottok(final SystemManager manager) { + super(manager); this.responseTimes = new HashMap<>(); this.priorityManager = new RateMonotonic(); } @@ -68,7 +69,7 @@ public class SchmidMottok extends AbstractTest { } } - taskInterference /= numberOfProcessors; + taskInterference /= manager.getNumberOfProcessors(); final double selfInterference = getSelfInterference(task); final long totalInterference = (long) Math.floor(taskInterference + selfInterference); @@ -81,13 +82,11 @@ public class SchmidMottok extends AbstractTest { private double getSelfInterference(final DagTask task) { - double interference = 0; - final long numberOfThreads = task.getNumberOfThreads(); - - for (final Segment s : task.getWorkloadDistribution()) { - interference += (double) (s.getNumberOfJobs() - 1) * s.getJobWcet(); - } + // final long numberOfThreads = task.getNumberOfThreads(); + // TODO: Change back to number of threads + final long numberOfThreads = manager.getNumberOfProcessors(); + double interference = task.getWorkload() - task.getCriticalPath(); interference /= numberOfThreads; return interference; diff --git a/src/test/java/mvd/jester/model/TestDagUtils.java b/src/test/java/mvd/jester/model/TestDagUtils.java index f1fa9b8..bbf3de6 100644 --- a/src/test/java/mvd/jester/model/TestDagUtils.java +++ b/src/test/java/mvd/jester/model/TestDagUtils.java @@ -8,7 +8,7 @@ import org.jgrapht.graph.DefaultEdge; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import mvd.jester.utils.DagUtils; -import mvd.jester.model.SystemSetup.DagTaskBuilder; +import mvd.jester.model.SystemManager.DagTaskBuilder; import mvd.jester.utils.BinaryDecompositionTree; public class TestDagUtils { diff --git a/src/test/java/mvd/jester/model/TestSystemSetup.java b/src/test/java/mvd/jester/model/TestSystemSetup.java index 7648e4c..bffc2bb 100644 --- a/src/test/java/mvd/jester/model/TestSystemSetup.java +++ b/src/test/java/mvd/jester/model/TestSystemSetup.java @@ -1,7 +1,6 @@ package mvd.jester.model; import static 
org.junit.jupiter.api.Assertions.assertTrue; -import static org.mockito.Mockito.mock; import java.util.Set; import java.util.concurrent.ThreadLocalRandom; import com.google.common.math.DoubleMath; @@ -9,7 +8,7 @@ import org.jgrapht.experimental.dag.DirectedAcyclicGraph; import org.jgrapht.graph.DefaultEdge; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -import mvd.jester.model.SystemSetup.DagTaskBuilder; +import mvd.jester.model.SystemManager.DagTaskBuilder; public class TestSystemSetup { @@ -21,8 +20,8 @@ public class TestSystemSetup { public void testRandomTaskSetGeneration() { for (int i = 0; i < NUMBER_OF_RUNS; ++i) { long numberOfProcessors = ThreadLocalRandom.current().nextLong(2, 8); - SystemSetup.SynchronousTaskBuilder systemSetupBuilder = - new SystemSetup.SynchronousTaskBuilder()// + SystemManager.SynchronousTaskBuilder systemSetupBuilder = + new SystemManager.SynchronousTaskBuilder()// .setNumberOfSegments(1, 7)// .setNumberOfJobs(2, 10)// .setPeriods(100, 1000, 1000)// @@ -58,16 +57,16 @@ public class TestSystemSetup { @Test @DisplayName("Check Getters and Setters.") void testGettersAndSetters() { - @SuppressWarnings("unchecked") - Set t1 = mock(Set.class); + // @SuppressWarnings("unchecked") + // Set t1 = mock(Set.class); - @SuppressWarnings("unchecked") - Set t2 = mock(Set.class); + // @SuppressWarnings("unchecked") + // Set t2 = mock(Set.class); - SystemSetup systemSetup = new SystemSetup<>(t1, 2); + // SystemManager systemSetup = new SystemManager<>(t1, 2); - systemSetup.setTasks(t2); - assertTrue(systemSetup.getTasks() == t2); + // systemSetup.setTasks(t2); + // assertTrue(systemSetup.getTasks() == t2); } @Test @@ -101,7 +100,7 @@ public class TestSystemSetup { @Test @DisplayName("Check if utilization works correctly.") void testUtil() { - for (int i = 0; i < 5; ++i) { + for (int i = 0; i < 10; ++i) { for (double d = 0.25; d < 4; d += 0.25) { DagTaskBuilder builder = new DagTaskBuilder(); Set taskSet = builder.generateTaskSet(d); @@ -110,18 +109,28 @@ public class TestSystemSetup { taskSetUtil += t.getUtilization(); } - assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, d, 0.002)); + assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, d, 0.1)); } } } - // @Test - // @DisplayName("Check if parser works correclty.") - // void testParser() throws IOException { - // SystemSetup systemSetup = new SystemSetup.Builder().setNumberOfSegments(1, 7) - // .setNumberOfJobs(2, 10).setPeriods(100, 1000, 1000).build(); + @Test + @DisplayName("Check if UUnifast Algorithm works correctly.") + void testUUnifast() { + for (int i = 1; i <= 100; ++i) { + long numberOfTasks = ThreadLocalRandom.current().nextLong(1, 20); + double utilization = ThreadLocalRandom.current().nextDouble(0.25, 8); + DagTaskBuilder builder = new DagTaskBuilder(); + Set taskSet = builder.generateUUnifastTaskSet(numberOfTasks, utilization); + + double taskSetUtil = 0; + for (DagTask t : taskSet) { + taskSetUtil += t.getUtilization(); + } + + assertTrue(taskSet.size() == numberOfTasks); + assertTrue(DoubleMath.fuzzyEquals(taskSetUtil, utilization, 0.1)); + } + } - // systemSetup.writeToFile(null); - // systemSetup.writeToFile(tf.getRoot().getAbsolutePath()); - // } } diff --git a/src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java b/src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java index 17ac2c1..cd8a721 100644 --- a/src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java +++ b/src/test/java/mvd/jester/priority/TestEarliestDeadlineFirst.java @@ -7,6 +7,7 @@ import 
static org.mockito.Mockito.when; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import mvd.jester.model.SynchronousTask; +import mvd.jester.model.SystemManager; import mvd.jester.simulator.DynamicForkJoin; import mvd.jester.simulator.ParallelSynchronous; import mvd.jester.simulator.internals.parallelsynchronous.TaskContext; @@ -47,15 +48,16 @@ public class TestEarliestDeadlineFirst { @DisplayName("Check Getters, Tests and Simulators.") void testGettersTestsAndSimulators() { EarliestDeadlineFirst edf = new EarliestDeadlineFirst(); + SystemManager manager = new SystemManager(4); assertTrue(edf.hasTest(ChwaLee.class)); assertFalse(edf.hasTest(MaiaBertogna.class)); assertFalse(edf.hasTest(SchmidMottok.class)); assertTrue(edf.hasSimulator(ParallelSynchronous.class)); assertTrue(edf.hasSimulator(DynamicForkJoin.class)); - assertTrue(edf.hasTest(new ChwaLee(4))); - assertFalse(edf.hasTest(new SchmidMottok(4))); - assertFalse(edf.hasTest(new MaiaBertogna(4))); + assertTrue(edf.hasTest(new ChwaLee(manager))); + assertFalse(edf.hasTest(new SchmidMottok(manager))); + assertFalse(edf.hasTest(new MaiaBertogna(manager))); // assertTrue(edf.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class)))); // assertTrue(edf.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class)))); diff --git a/src/test/java/mvd/jester/priority/TestRateMonotonic.java b/src/test/java/mvd/jester/priority/TestRateMonotonic.java index 62d978a..7b616b4 100644 --- a/src/test/java/mvd/jester/priority/TestRateMonotonic.java +++ b/src/test/java/mvd/jester/priority/TestRateMonotonic.java @@ -7,6 +7,7 @@ import static org.mockito.Mockito.when; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; import mvd.jester.model.SynchronousTask; +import mvd.jester.model.SystemManager; import mvd.jester.simulator.DynamicForkJoin; import mvd.jester.simulator.ParallelSynchronous; import mvd.jester.simulator.internals.parallelsynchronous.TaskContext; @@ -46,15 +47,16 @@ public class TestRateMonotonic { @DisplayName("Check Tests and Simulators.") void testTestsAndSimulators() { RateMonotonic rm = new RateMonotonic(); + SystemManager manager = new SystemManager(8); assertFalse(rm.hasTest(ChwaLee.class)); assertTrue(rm.hasTest(MaiaBertogna.class)); assertTrue(rm.hasTest(SchmidMottok.class)); assertTrue(rm.hasSimulator(ParallelSynchronous.class)); assertTrue(rm.hasSimulator(DynamicForkJoin.class)); - assertFalse(rm.hasTest(new ChwaLee(8))); - assertTrue(rm.hasTest(new SchmidMottok(8))); - assertTrue(rm.hasTest(new MaiaBertogna(8))); + assertFalse(rm.hasTest(new ChwaLee(manager))); + assertTrue(rm.hasTest(new SchmidMottok(manager))); + assertTrue(rm.hasTest(new MaiaBertogna(manager))); // assertTrue(rm.hasSimulator(new ParallelSynchronous(mock(SystemSetup.class)))); // assertTrue(rm.hasSimulator(new DynamicForkJoin(mock(SystemSetup.class)))); -- libgit2 0.26.0
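
The progress counters in TestEnvironment.java are easiest to sanity-check with the step counts written out. varyUtilization sweeps util from 1.0 to numberOfProcessors in steps of 0.25, i.e. 4*m - 3 steps, so the m = 8 run started in App.java performs 29 steps x 500 task sets = 14500 task sets, which matches the ((numberOfProcessors * 4) - 3) * numberOfTaskSetsPerUtil denominator used for the percentage output. Likewise, varyNumberOfProcessors iterates over 8 processor counts (2, 4, ..., 16) and varyNumberOfTasks over 9 task counts (4, 6, ..., 20), matching the 8 * and 9 * factors in their respective numberOfTaskSets computations.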
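
ResultLogger now writes one tab-separated file per experiment under ./results/<fileName>.txt: logHeader emits the x-axis label followed by one column per registered test (whatever each test's getName() returns), and logLine emits the current x value followed by the number of feasible task sets per test. Assuming getName() returns the class names, and with purely made-up counts to illustrate the shape only, a utilization_8 file would look roughly like this (columns separated by tabs):

Utilization  SchmidMottok  MelaniButtazzo  FonsecaNelis
1.0          500           500             499
1.25         496           491             488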
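
generateUUnifastTaskSet in the new SystemManager.java follows the UUnifast scheme (Bini/Buttazzo): the total utilization is split into numberOfTasks shares by repeatedly drawing the next remaining sum as sumU * rand^(1/(n-i)), and each share is then realised as a DagTask. The class below is a minimal standalone sketch of just the splitting step; the names UUnifastSketch and uunifast are illustrative and not part of the patch.

import java.util.concurrent.ThreadLocalRandom;

// Sketch of the UUnifast utilization split used by generateUUnifastTaskSet:
// returns n utilization shares that sum (up to floating-point error) to totalUtilization.
public final class UUnifastSketch {
    static double[] uunifast(int n, double totalUtilization) {
        double[] u = new double[n];
        double sumU = totalUtilization;
        for (int i = 1; i <= n - 1; i++) {
            // keep the remaining sum uniformly distributed over the feasible region
            double nextSumU = sumU
                    * Math.pow(ThreadLocalRandom.current().nextDouble(), 1.0 / (n - i));
            u[i - 1] = sumU - nextSumU; // share handed to task i
            sumU = nextSumU;
        }
        u[n - 1] = sumU; // last task receives the remainder
        return u;
    }

    public static void main(String[] args) {
        double sum = 0;
        for (double share : uunifast(5, 4.0)) {
            sum += share;
        }
        System.out.println("sum of shares = " + sum); // ~4.0
    }
}

Each share u is then turned into a task by generateTask(u), which sets period = Math.round(workload / u), so the resulting task utilization matches its share only up to rounding. That is presumably why the new testUUnifast in TestSystemSetup.java compares the summed utilization against the target with a 0.1 tolerance instead of exact equality.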
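
The older generateTaskSet(totalUtilization) variant (still used by measureExecutionTimes) fills the set greedily and then stretches the period of the final task so the target utilization is hit exactly: with, say, 0.4 utilization left over and a freshly generated task of workload 500, the period becomes ceil(500 / 0.4) = 1250, giving exactly 500 / 1250 = 0.4. The stretched task is only accepted if that period is at least as long as its critical path; otherwise the loop discards it and draws a new random task.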
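
The SchmidMottok and MelaniButtazzo tests touched here share the same response-time fixed-point pattern: start at the task's critical path, add the interference from higher-priority tasks divided by the processor count plus a self-interference term of (workload - criticalPath) / m, and iterate until the value stops changing. The toy class below shows only that iteration skeleton under simplified assumptions; RtaSketch and its interferenceFromOthers parameter are illustrative names, and the real tests replace the constant-rate interference function with the carry-in/body/carry-out workload bounds visible in the diff.

import java.util.function.LongToDoubleFunction;

// Toy version of the fixed-point iteration in calculateResponseTime
// (SchmidMottok / MelaniButtazzo); not the actual test implementation.
public final class RtaSketch {
    static long responseTime(long criticalPath, long workload, long m,
            LongToDoubleFunction interferenceFromOthers) {
        long rt = criticalPath; // lower bound: the critical path length L
        long prev;
        do {
            prev = rt;
            double interTask = interferenceFromOthers.applyAsDouble(rt) / m;
            double self = (double) (workload - criticalPath) / m; // (C - L) / m
            rt = criticalPath + (long) Math.floor(interTask + self);
        } while (rt != prev);
        return rt;
    }

    public static void main(String[] args) {
        // toy numbers: L = 200, C = 800, m = 8, other tasks inject 0.5 units of work
        // per time unit of the analysis window
        long rt = responseTime(200, 800, 8, window -> 0.5 * window);
        System.out.println("response time = " + rt);
        // the test then declares the task schedulable if rt does not exceed its deadline
    }
}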