package mvd.jester.tests;

import java.math.RoundingMode;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.math.LongMath;
import org.jgrapht.Graphs;
import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;
import mvd.jester.info.SchedulingInfo;
import mvd.jester.info.TerminationInfo;
import mvd.jester.model.DagTask;
import mvd.jester.model.Job;
import mvd.jester.model.Segment;
import mvd.jester.model.SortedTaskSet;
import mvd.jester.model.Task;
import mvd.jester.model.TreeJob;
import mvd.jester.utils.DagUtils;
import mvd.jester.priority.PriorityManager;
import mvd.jester.priority.RateMonotonic;
import mvd.jester.utils.BinaryDecompositionTree;
import mvd.jester.utils.BinaryDecompositionTree.Node;
import mvd.jester.utils.BinaryDecompositionTree.NodeType;

public class FonsecaNelis extends AbstractTest<DagTask> {

    private final Map<Task, TerminationInfo> responseTimes;
    private final PriorityManager priorityManager;
Michael Schmid committed
38
    private final Map<Task, Set<Segment>> sortedSegments;
39

40
    public FonsecaNelis(final long numberOfProcessors) {
41 42 43
        super(numberOfProcessors);
        this.responseTimes = new HashMap<>();
        this.priorityManager = new RateMonotonic();
Michael Schmid committed
44
        this.sortedSegments = new HashMap<>();
45 46 47 48 49 50 51 52
    }

    @Override
    public PriorityManager getPriorityManager() {
        return priorityManager;
    }

    @Override
53
    public SchedulingInfo runSchedulabilityCheck(final SortedTaskSet<DagTask> tasks) {
Michael Schmid committed
54
        sortedSegments.clear();
55
        responseTimes.clear();
Michael Schmid committed
56
        createNFJandDecompositionTree(tasks);
57 58 59
        for (final DagTask t : tasks) {
            final long responseTime = calculateResponseTime(tasks, t);
            responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime));
60 61
        }

62
        return new SchedulingInfo(responseTimes.values());
63 64
    }

65 66 67
    private void createNFJandDecompositionTree(final SortedTaskSet<DagTask> tasks) {
        for (final DagTask t : tasks) {
            final DirectedAcyclicGraph<Job, DefaultEdge> jobDag = t.getJobDag();
68

69 70 71
            final DirectedAcyclicGraph<Job, DefaultEdge> nfjJobDag =
                    DagUtils.createNFJGraph(jobDag);
            final BinaryDecompositionTree<Job> tree = DagUtils.createDecompositionTree(nfjJobDag);
72 73 74 75
            // TODO: remove this
            if (!DagUtils.checkNFJProperty(nfjJobDag) || !checkTree(nfjJobDag, tree)) {
                throw new RuntimeException("Nicht NFJ in Test!");
            }
76 77 78 79 80

            sortedSegments.put(t, constructCarryOutDistribution(nfjJobDag, tree));
        }
    }

81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145
    // TODO: Remove this function
    private boolean checkTree(DirectedAcyclicGraph<Job, DefaultEdge> nfjJobDag,
            BinaryDecompositionTree<Job> tree) {
        DirectedAcyclicGraph<Job, DefaultEdge> jobDagFromTree =
                DagUtils.createNFJfromDecompositionTree(tree);

        if (nfjJobDag.vertexSet().size() != jobDagFromTree.vertexSet().size()) {
            return false;
        }
        if (jobDagFromTree.edgeSet().size() != nfjJobDag.edgeSet().size()) {
            return false;
        }


        for (DefaultEdge e : nfjJobDag.edgeSet()) {
            Job target = nfjJobDag.getEdgeTarget(e);
            Job source = nfjJobDag.getEdgeSource(e);

            if (!jobDagFromTree.containsEdge(source, target)) {
                return false;
            }
        }

        for (Job j : nfjJobDag) {
            for (Job n : nfjJobDag) {
                if (n == j) {
                    continue;
                }
                if (nfjJobDag.containsEdge(n, j) && !jobDagFromTree.containsEdge(n, j)) {
                    return false;
                }
            }
            if (nfjJobDag.inDegreeOf(j) != jobDagFromTree.inDegreeOf(j))
                return false;
            if (nfjJobDag.outDegreeOf(j) != jobDagFromTree.outDegreeOf(j))
                return false;

            for (Job p : Graphs.predecessorListOf(nfjJobDag, j)) {
                if (!Graphs.predecessorListOf(jobDagFromTree, j).contains(p)) {
                    return false;
                }
            }

            for (Job s : Graphs.successorListOf(nfjJobDag, j)) {
                if (!Graphs.successorListOf(jobDagFromTree, j).contains(s)) {
                    return false;
                }
            }

            for (Job a : nfjJobDag.getAncestors(nfjJobDag, j)) {
                if (!jobDagFromTree.getAncestors(jobDagFromTree, j).contains(a)) {
                    return false;
                }
            }

            for (Job d : nfjJobDag.getDescendants(nfjJobDag, j)) {
                if (!jobDagFromTree.getDescendants(jobDagFromTree, j).contains(d)) {
                    return false;
                }
            }
        }

        return true;
    }

146
    private Set<Segment> constructCarryOutDistribution(
147 148 149 150
            final DirectedAcyclicGraph<Job, DefaultEdge> nfjDag,
            final BinaryDecompositionTree<Job> tree) {
        final Set<Segment> carryOutWorkload = new LinkedHashSet<>();
        final BinaryDecompositionTree<TreeJob> modifiedTree = transformTree(tree);
151 152 153

        boolean isEmpty = false;
        do {
154 155
            final Set<TreeJob> parallelJobs = getMaximumParallelism(modifiedTree.getRootNode());
            final Optional<TreeJob> min =
156 157
                    parallelJobs.stream().min((p1, p2) -> Long.compare(p1.getWcet(), p2.getWcet()));
            if (min.isPresent()) {
158
                final long width = min.get().getWcet();
159
                carryOutWorkload.add(new Segment(width, parallelJobs.size()));
160
                for (final TreeJob p : parallelJobs) {
161
                    p.setWcet(p.getWcet() - width);
162
                }
163 164 165 166 167 168 169 170 171 172
            } else {
                break;
            }
            isEmpty = parallelJobs.isEmpty();
        } while (!isEmpty);

        return carryOutWorkload;
    }


173 174 175 176
    private BinaryDecompositionTree<TreeJob> transformTree(
            final BinaryDecompositionTree<Job> tree) {
        final BinaryDecompositionTree<TreeJob> modifiedTree = new BinaryDecompositionTree<>();
        final Node<TreeJob> root = transformNode(null, tree.getRootNode());
177 178 179 180
        modifiedTree.setRoot(root);
        return modifiedTree;
    }

181
    private Node<TreeJob> transformNode(final Node<TreeJob> parent, final Node<Job> node) {
182 183 184
        if (node.getNodeType().equals(NodeType.LEAF)) {
            return new Node<TreeJob>(parent, new TreeJob(node.getObject()));
        } else {
185
            final Node<TreeJob> modifiedNode = new Node<TreeJob>(null, node.getNodeType());
186 187 188 189 190 191
            modifiedNode.setLeftNode(transformNode(modifiedNode, node.getLeftNode()));
            modifiedNode.setRightNode(transformNode(modifiedNode, node.getRightNode()));
            return modifiedNode;
        }
    }

192 193
    private Set<TreeJob> getMaximumParallelism(final Node<TreeJob> node) {
        final NodeType nodeType = node.getNodeType();
194
        if (nodeType.equals(NodeType.PAR)) {
195 196
            final Set<TreeJob> left = getMaximumParallelism(node.getLeftNode());
            final Set<TreeJob> right = getMaximumParallelism(node.getRightNode());
197 198
            return Sets.union(left, right);
        } else if (nodeType.equals(NodeType.SEQ)) {
199 200
            final Set<TreeJob> left = getMaximumParallelism(node.getLeftNode());
            final Set<TreeJob> right = getMaximumParallelism(node.getRightNode());
201 202 203 204 205 206 207 208 209 210
            if (left.size() >= right.size()) {
                return left;
            } else {
                return right;
            }
        } else {
            if (node.getObject().getWcet() > 0) {
                return new HashSet<>(Arrays.asList(node.getObject()));
            } else {
                return new HashSet<>();
211
            }
Michael Schmid committed
212
        }
213 214
    }

215 216
    private long calculateResponseTime(final SortedTaskSet<DagTask> tasks, final DagTask task) {
        final long criticalPath = task.getCriticalPath();
217 218 219 220 221 222
        long responseTime = criticalPath;
        long previousResponseTime = 0;

        do {
            previousResponseTime = responseTime;
            double taskInterference = 0;
223
            for (final DagTask t : tasks) {
224 225 226 227 228 229 230
                if (t.getPeriod() < task.getPeriod()) {
                    taskInterference += getTaskInterference(t, responseTime);
                }
            }

            taskInterference /= numberOfProcessors;

231
            final double selfInterference = getSelfInterference(task);
232

233
            final long totalInterference = (long) Math.floor(taskInterference + selfInterference);
234 235 236 237 238 239 240
            responseTime = criticalPath + totalInterference;

        } while (responseTime != previousResponseTime);

        return responseTime;
    }

241 242 243 244
    private double getTaskInterference(final DagTask task, final long interval) {
        final long period = task.getPeriod();
        final long criticalPath = task.getCriticalPath();
        final long numberOfIntervals =
245
                LongMath.divide(interval - criticalPath, period, RoundingMode.FLOOR);
246 247
        final long carryInAndOutInterval = interval - Math.max(0, numberOfIntervals) * period;
        final long numberOfBodyJobs =
248 249
                LongMath.divide(interval - carryInAndOutInterval, period, RoundingMode.FLOOR);

250
        final long bodyWorkload = Math.max(0, numberOfBodyJobs) * task.getWorkload();
251 252


253 254
        final long carryInAndOutWorkload = getCarryInAndOutWorkload(task,
                task.getWorkloadDistribution(), sortedSegments.get(task), carryInAndOutInterval);
255 256 257 258

        return carryInAndOutWorkload + bodyWorkload;
    }

259 260 261
    private long getCarryInAndOutWorkload(final DagTask task,
            final Set<Segment> carryInDistribution, final Set<Segment> carryOutDistribution,
            final long carryInAndOutPeriod) {
262 263 264
        long workload = getCarryOutWorkload(task, carryOutDistribution, carryInAndOutPeriod);
        long carryInPeriod = task.getPeriod() - responseTimes.get(task).getResponseTime();
        long carryOutPeriod = 0;
265
        final List<Segment> carryInList = Lists.newArrayList(carryInDistribution);
266 267
        Collections.reverse(carryInList);

268
        for (final Segment s : carryInList) {
269 270
            carryInPeriod += s.getJobWcet();
            carryOutPeriod = carryInAndOutPeriod - carryInPeriod;
271 272 273 274
            final long carryInWorkload =
                    getCarryInWorkload(task, carryInDistribution, carryInPeriod);
            final long carryOutWorkload =
                    getCarryOutWorkload(task, carryOutDistribution, carryOutPeriod);
275 276 277 278 279 280 281
            workload = Math.max(workload, carryInWorkload + carryOutWorkload);
        }

        workload = Math.max(workload,
                getCarryInWorkload(task, carryInDistribution, carryInAndOutPeriod));
        carryOutPeriod = 0;

282
        for (final Segment s : carryOutDistribution) {
283 284
            carryOutPeriod += s.getJobWcet();
            carryInPeriod = carryInAndOutPeriod - carryOutPeriod;
285 286 287 288
            final long carryInWorkload =
                    getCarryInWorkload(task, carryInDistribution, carryInPeriod);
            final long carryOutWorkload =
                    getCarryOutWorkload(task, carryOutDistribution, carryOutPeriod);
289 290 291 292 293 294
            workload = Math.max(workload, carryInWorkload + carryOutWorkload);
        }

        return workload;
    }

295 296
    private long getCarryOutWorkload(final DagTask task, final Set<Segment> carryOutDistribution,
            final long carryOutPeriod) {
297
        long workload = 0;
298 299 300
        final long period = task.getPeriod();
        final long responseTime = responseTimes.get(task).getResponseTime();
        final List<Segment> distributionList = Lists.newArrayList(carryOutDistribution);
301 302

        for (int i = 0; i < distributionList.size(); ++i) {
303
            final Segment s = distributionList.get(i);
304 305 306 307
            long weightOfPreviousSegments = 0;
            for (int j = 0; j < i; ++j) {
                weightOfPreviousSegments += distributionList.get(j).getJobWcet();
            }
308
            final long width = carryOutPeriod - weightOfPreviousSegments;
309 310 311 312

            workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs();
        }

313
        final long improvedWorkload =
314 315 316 317 318
                Math.max(carryOutPeriod - (period - responseTime), 0) * numberOfProcessors;

        return Math.min(improvedWorkload, workload);
    }

319 320
    private long getCarryInWorkload(final DagTask task, final Set<Segment> carryInDistribution,
            final long carryInPeriod) {
321
        long workload = 0;
322 323 324
        final long period = task.getPeriod();
        final long responseTime = responseTimes.get(task).getResponseTime();
        final List<Segment> distributionList = Lists.newArrayList(carryInDistribution);
325 326

        for (int i = 0; i < carryInDistribution.size(); ++i) {
327
            final Segment s = distributionList.get(i);
328 329 330 331
            long weightOfRemainingSegments = 0;
            for (int j = i + 1; j < carryInDistribution.size(); ++j) {
                weightOfRemainingSegments += distributionList.get(j).getJobWcet();
            }
332
            final long width = carryInPeriod - period + responseTime - weightOfRemainingSegments;
333 334 335 336

            workload += Math.max(Math.min(width, s.getJobWcet()), 0) * s.getNumberOfJobs();
        }

337
        final long improvedWorkload =
338 339 340 341 342
                Math.max(carryInPeriod - (period - responseTime), 0) * numberOfProcessors;

        return Math.min(improvedWorkload, workload);
    }

343 344 345
    private double getSelfInterference(final DagTask task) {
        final long criticalPath = task.getCriticalPath();
        final long workload = task.getWorkload();
346 347 348 349 350 351 352 353 354 355 356

        return (double) (workload - criticalPath) / numberOfProcessors;
    }

    @Override
    public String getName() {
        return "FonsecaNelis";
    }


}