package mvd.jester.tests;

import java.math.RoundingMode;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.math.LongMath;
import org.jgrapht.Graphs;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.model.DagTask;
import mvd.jester.model.Job;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
import mvd.jester.model.TreeJob;
import mvd.jester.utils.DagUtils;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
import mvd.jester.utils.BinaryDecompositionTree;
import mvd.jester.utils.BinaryDecompositionTree.Node;
import mvd.jester.utils.BinaryDecompositionTree.NodeType;

public class FonsecaNelis extends AbstractTest<DagTask> {

    private final Map<Task, TerminationInfo> responseTimes;
    private final PriorityManager priorityManager;
    private final Map<DagTask, Set<Segment>> sortedSegments;

    public FonsecaNelis(final long numberOfProcessors) {
        super(numberOfProcessors);
        this.responseTimes = new HashMap<>();
        this.priorityManager = new RateMonotonic();
        this.sortedSegments = new HashMap<>();
    }

    @Override
    public PriorityManager getPriorityManager() {
        return priorityManager;
    }

    @Override
    public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<DagTask> tasks) {
        sortedSegments.clear();
        responseTimes.clear();
        createNFJandDecompositionTree(tasks);
        for (final DagTask t : tasks) {
            final long responseTime = calculateResponseTime(tasks, t);
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
        }
        return new SchedulingInfo(responseTimes.values());
    }

    private void createNFJandDecompositionTree(final SortedTaskSet<DagTask> tasks) {
        for (final DagTask t : tasks) {
            final DirectedAcyclicGraph<Job, DefaultEdge> jobDag = t.getJobDag();
            final DirectedAcyclicGraph<Job, DefaultEdge> nfjJobDag =
                    DagUtils.createNFJGraph(jobDag);
            final BinaryDecompositionTree<Job> tree = DagUtils.createDecompositionTree(nfjJobDag);
            // TODO: remove this
            if (!DagUtils.checkNFJProperty(nfjJobDag) || !checkTree(nfjJobDag, tree)) {
                throw new RuntimeException("Graph is not NFJ in test!");
            }
            sortedSegments.put(t, constructCarryOutDistribution(nfjJobDag, tree));
        }
    }

    // TODO: Remove this function
    private boolean checkTree(DirectedAcyclicGraph<Job, DefaultEdge> nfjJobDag,
            BinaryDecompositionTree<Job> tree) {
        DirectedAcyclicGraph<Job, DefaultEdge> jobDagFromTree =
                DagUtils.createNFJfromDecompositionTree(tree);
        if (nfjJobDag.vertexSet().size() != jobDagFromTree.vertexSet().size()) {
            return false;
        }
        if (jobDagFromTree.edgeSet().size() != nfjJobDag.edgeSet().size()) {
            return false;
        }
        for (DefaultEdge e : nfjJobDag.edgeSet()) {
            Job target = nfjJobDag.getEdgeTarget(e);
            Job source = nfjJobDag.getEdgeSource(e);
            if (!jobDagFromTree.containsEdge(source, target)) {
                return false;
            }
        }
        for (Job j : nfjJobDag) {
            for (Job n : nfjJobDag) {
                if (n == j) {
                    continue;
                }
                if (nfjJobDag.containsEdge(n, j) && !jobDagFromTree.containsEdge(n, j)) {
                    return false;
                }
            }
            if (nfjJobDag.inDegreeOf(j) != jobDagFromTree.inDegreeOf(j)) {
                return false;
            }
            if (nfjJobDag.outDegreeOf(j) != jobDagFromTree.outDegreeOf(j)) {
                return false;
            }
            for (Job p : Graphs.predecessorListOf(nfjJobDag, j)) {
                if (!Graphs.predecessorListOf(jobDagFromTree, j).contains(p)) {
                    return false;
                }
            }
            for (Job s : Graphs.successorListOf(nfjJobDag, j)) {
                if (!Graphs.successorListOf(jobDagFromTree, j).contains(s)) {
                    return false;
                }
            }
            for (Job a : nfjJobDag.getAncestors(nfjJobDag, j)) {
                if (!jobDagFromTree.getAncestors(jobDagFromTree, j).contains(a)) {
                    return false;
                }
            }
            for (Job d : nfjJobDag.getDescendants(nfjJobDag, j)) {
                if (!jobDagFromTree.getDescendants(jobDagFromTree, j).contains(d)) {
                    return false;
                }
            }
        }
        return true;
    }
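    /*
     * Carry-out distribution construction (sketch of the idea implemented below): repeatedly
     * take the set of jobs that may execute in parallel according to the decomposition tree,
     * cut off a segment whose width equals the smallest remaining WCET in that set, and
     * subtract that width from every job in the set until no work remains.
     *
     * Illustrative example (hypothetical task): three jobs under a single PAR node with WCETs
     * 2, 3 and 5 yield the segments (width 2, 3 jobs), (width 1, 2 jobs), (width 2, 1 job),
     * which together cover the full workload of 10.
     */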
    private Set<Segment> constructCarryOutDistribution(
            final DirectedAcyclicGraph<Job, DefaultEdge> nfjDag,
            final BinaryDecompositionTree<Job> tree) {
        final Set<Segment> carryOutWorkload = new LinkedHashSet<>();
        final BinaryDecompositionTree<TreeJob> modifiedTree = transformTree(tree);
        boolean isEmpty = false;
        do {
            final Set<TreeJob> parallelJobs = getMaximumParallelism(modifiedTree.getRootNode());
            final Optional<TreeJob> min = parallelJobs.stream()
                    .min((p1, p2) -> Long.compare(p1.getWcet(), p2.getWcet()));
            if (min.isPresent()) {
                final long width = min.get().getWcet();
                carryOutWorkload.add(new Segment(width, parallelJobs.size()));
                for (final TreeJob p : parallelJobs) {
                    p.setWcet(p.getWcet() - width);
                }
            } else {
                break;
            }
            isEmpty = parallelJobs.isEmpty();
        } while (!isEmpty);
        return carryOutWorkload;
    }

    private BinaryDecompositionTree<TreeJob> transformTree(
            final BinaryDecompositionTree<Job> tree) {
        final BinaryDecompositionTree<TreeJob> modifiedTree = new BinaryDecompositionTree<>();
        final Node<TreeJob> root = transformNode(null, tree.getRootNode());
        modifiedTree.setRoot(root);
        return modifiedTree;
    }

    private Node<TreeJob> transformNode(final Node<TreeJob> parent, final Node<Job> node) {
        if (node.getNodeType().equals(NodeType.LEAF)) {
            return new Node<>(parent, new TreeJob(node.getObject()));
        } else {
            final Node<TreeJob> modifiedNode = new Node<>(null, node.getNodeType());
            modifiedNode.setLeftNode(transformNode(modifiedNode, node.getLeftNode()));
            modifiedNode.setRightNode(transformNode(modifiedNode, node.getRightNode()));
            return modifiedNode;
        }
    }

    private Set<TreeJob> getMaximumParallelism(final Node<TreeJob> node) {
        final NodeType nodeType = node.getNodeType();
        if (nodeType.equals(NodeType.PAR)) {
            final Set<TreeJob> left = getMaximumParallelism(node.getLeftNode());
            final Set<TreeJob> right = getMaximumParallelism(node.getRightNode());
            return Sets.union(left, right);
        } else if (nodeType.equals(NodeType.SEQ)) {
            final Set<TreeJob> left = getMaximumParallelism(node.getLeftNode());
            final Set<TreeJob> right = getMaximumParallelism(node.getRightNode());
            if (left.size() >= right.size()) {
                return left;
            } else {
                return right;
            }
        } else {
            if (node.getObject().getWcet() > 0) {
                return new HashSet<>(Arrays.asList(node.getObject()));
            } else {
                return new HashSet<>();
            }
        }
    }

    private long calculateResponseTime(final SortedTaskSet<DagTask> tasks, final DagTask task) {
        final long criticalPath = task.getCriticalPath();
        long responseTime = criticalPath;
        long previousResponseTime = 0;
        do {
            previousResponseTime = responseTime;
            double taskInterference = 0;
            for (final DagTask t : tasks) {
                if (t.getPeriod() < task.getPeriod()) {
                    taskInterference += getTaskInterference(t, responseTime);
                }
            }
            taskInterference /= numberOfProcessors;
            final double selfInterference = getSelfInterference(task);
            final long totalInterference = (long) Math.floor(taskInterference + selfInterference);
            responseTime = criticalPath + totalInterference;
        } while (responseTime != previousResponseTime);
        return responseTime;
    }
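    /*
     * Inter-task interference (summary of the recurrence implemented in calculateResponseTime
     * above): for a task k with critical path L_k, workload C_k and m processors,
     *
     *     R_k = L_k + floor( (1/m) * sum_{i in hp(k)} I_i(R_k) + (C_k - L_k) / m )
     *
     * where hp(k) are the tasks with shorter period and I_i(R_k) is the bound returned by
     * getTaskInterference below: the full workload of the "body" jobs of task i that lie
     * entirely inside the interval, plus the carry-in/carry-out workload of the two boundary
     * jobs.
     */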
    private double getTaskInterference(final DagTask task, final long interval) {
        final long period = task.getPeriod();
        final long criticalPath = task.getCriticalPath();
        final long numberOfIntervals =
                LongMath.divide(interval - criticalPath, period, RoundingMode.FLOOR);
        final long carryInAndOutInterval = interval - Math.max(0, numberOfIntervals) * period;
        final long numberOfBodyJobs =
                LongMath.divide(interval - carryInAndOutInterval, period, RoundingMode.FLOOR);
        final long bodyWorkload = Math.max(0, numberOfBodyJobs) * task.getWorkload();
        final long carryInAndOutWorkload = getCarryInAndOutWorkload(task,
                task.getWorkloadDistribution(), sortedSegments.get(task), carryInAndOutInterval);
        return carryInAndOutWorkload + bodyWorkload;
    }

    private long getCarryInAndOutWorkload(final DagTask task,
            final Set<Segment> carryInDistribution, final Set<Segment> carryOutDistribution,
            final long carryInAndOutPeriod) {
        long workload = getCarryOutWorkload(task, carryOutDistribution, carryInAndOutPeriod);
        long carryInPeriod = task.getPeriod() - responseTimes.get(task).getResponseTime();
        long carryOutPeriod = 0;
        final List<Segment> carryInList = Lists.newArrayList(carryInDistribution);
        Collections.reverse(carryInList);
        for (final Segment s : carryInList) {
            carryInPeriod += s.getJobWcet();
            carryOutPeriod = carryInAndOutPeriod - carryInPeriod;
            final long carryInWorkload =
                    getCarryInWorkload(task, carryInDistribution, carryInPeriod);
            final long carryOutWorkload =
                    getCarryOutWorkload(task, carryOutDistribution, carryOutPeriod);
            workload = Math.max(workload, carryInWorkload + carryOutWorkload);
        }
        workload = Math.max(workload,
                getCarryInWorkload(task, carryInDistribution, carryInAndOutPeriod));
        carryOutPeriod = 0;
        for (final Segment s : carryOutDistribution) {
            carryOutPeriod += s.getJobWcet();
            carryInPeriod = carryInAndOutPeriod - carryOutPeriod;
            final long carryInWorkload =
                    getCarryInWorkload(task, carryInDistribution, carryInPeriod);
            final long carryOutWorkload =
                    getCarryOutWorkload(task, carryOutDistribution, carryOutPeriod);
            workload = Math.max(workload, carryInWorkload + carryOutWorkload);
        }
        return workload;
    }

    private long getCarryOutWorkload(final DagTask task, final Set<Segment> carryOutDistribution,
            final long carryOutPeriod) {
        long workload = 0;
        final long period = task.getPeriod();
        final long responseTime = responseTimes.get(task).getResponseTime();
        final List<Segment> distributionList = Lists.newArrayList(carryOutDistribution);
        for (int i = 0; i < distributionList.size(); ++i) {
            final Segment s = distributionList.get(i);
            long weightOfPreviousSegments = 0;
            for (int j = 0; j < i; ++j) {
                weightOfPreviousSegments += distributionList.get(j).getJobWcet();
            }
            final long width = carryOutPeriod - weightOfPreviousSegments;
            workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs();
        }
        final long improvedWorkload =
                Math.max(carryOutPeriod - (period - responseTime), 0) * numberOfProcessors;
        return Math.min(improvedWorkload, workload);
    }

    private long getCarryInWorkload(final DagTask task, final Set<Segment> carryInDistribution,
            final long carryInPeriod) {
        long workload = 0;
        final long period = task.getPeriod();
        final long responseTime = responseTimes.get(task).getResponseTime();
        final List<Segment> distributionList = Lists.newArrayList(carryInDistribution);
        for (int i = 0; i < carryInDistribution.size(); ++i) {
            final Segment s = distributionList.get(i);
            long weightOfRemainingSegments = 0;
            for (int j = i + 1; j < carryInDistribution.size(); ++j) {
                weightOfRemainingSegments += distributionList.get(j).getJobWcet();
            }
            final long width = carryInPeriod - period + responseTime - weightOfRemainingSegments;
            workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs();
        }
        final long improvedWorkload =
                Math.max(carryInPeriod - (period - responseTime), 0) * numberOfProcessors;
        return Math.min(improvedWorkload, workload);
    }
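    /*
     * Self-interference of a task on m processors is the workload that is not on the critical
     * path, spread over all processors: (C_i - L_i) / m. For example (hypothetical numbers),
     * a task with workload 20 and critical path 8 on 4 processors contributes (20 - 8) / 4 = 3.
     */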
    private double getSelfInterference(final DagTask task) {
        final long criticalPath = task.getCriticalPath();
        final long workload = task.getWorkload();
        return (double) (workload - criticalPath) / numberOfProcessors;
    }

    @Override
    public String getName() {
        return "FonsecaNelis";
    }
}
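// Usage sketch (hypothetical, not part of the test suite): the check is driven by a
// SortedTaskSet<DagTask> that has been populated elsewhere, e.g.
//
//     final FonsecaNelis test = new FonsecaNelis(8);
//     final SchedulingInfo info = test.runSchedulabilityCheck(taskSet);
//
// where taskSet is assumed to be a SortedTaskSet<DagTask> built to match the test's
// RateMonotonic priority ordering.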