Commit cdffd7ee by Michael Schmid

Creation of NFJ DAG

parent 4d9fd939
App.java

@@ -16,7 +16,7 @@ import mvd.jester.tests.SchmidMottok;
  */
 public class App {
     public static void main(String[] args) {
-        for (int p = 4; p <= 4; p *= 2) {
+        for (int p = 8; p <= 8; p *= 2) {
             SystemSetup.DagTaskBuilder builder =
                     new SystemSetup.DagTaskBuilder().setNumberOfProcessors(p);
             TestEnvironment te = new TestEnvironment();
...
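Note on the change above: with both loop bounds set to the same value, the body runs exactly once (previously for p = 4, now for p = 8); the p *= 2 step only matters once the bounds are widened to sweep several processor counts. A minimal, self-contained sketch of such a sweep follows; the bounds 1 and 8 are illustrative assumptions, not part of this commit.

public class ProcessorSweepExample {
    public static void main(String[] args) {
        // With equal bounds (p = 8; p <= 8) the loop body executes exactly once.
        // Widening the bounds sweeps the power-of-two processor counts 1, 2, 4, 8.
        for (int p = 1; p <= 8; p *= 2) {
            System.out.println("number of processors: " + p);
        }
    }
}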
DagTask.java

 package mvd.jester.model;

+import java.util.HashSet;
 import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
 import java.util.Set;
+import org.jgrapht.Graphs;
 import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
 import org.jgrapht.graph.DefaultEdge;
+import org.jgrapht.traverse.BreadthFirstIterator;

 public class DagTask implements Task {
@@ -154,11 +159,70 @@ public class DagTask implements Task {
         return segments;
     }
 
-    public static void createNFJGraph(DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
-        Set<Job> joinNodes = new LinkedHashSet<>();
-        Set<Job> forkNodes = new LinkedHashSet<>();
-
-        for (Job j : jobDag) {
+    public static DirectedAcyclicGraph<Job, DefaultEdge> createNFJGraph(
+            DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
+        DirectedAcyclicGraph<Job, DefaultEdge> modifiedJobDag =
+                new DirectedAcyclicGraph<>(DefaultEdge.class);
+        Graphs.addGraph(modifiedJobDag, jobDag);
+
+        LinkedList<Job> joinNodes = new LinkedList<>();
+        List<Job> forkNodes = new LinkedList<>();
+
+        BreadthFirstIterator<Job, DefaultEdge> breadthFirstIterator =
+                new BreadthFirstIterator<>(modifiedJobDag);
+        while (breadthFirstIterator.hasNext()) {
+            Job j = breadthFirstIterator.next();
+            if (modifiedJobDag.inDegreeOf(j) > 1) {
+                joinNodes.add(j);
+            }
+            if (modifiedJobDag.outDegreeOf(j) > 1) {
+                forkNodes.add(j);
+            }
+        }
+
+        Job sink = joinNodes.getLast();
+
+        for (Job j : joinNodes) {
+            Set<DefaultEdge> edgeSet = new HashSet<>(modifiedJobDag.incomingEdgesOf(j));
+            for (DefaultEdge e : edgeSet) {
+                Job predecessor = modifiedJobDag.getEdgeSource(e);
+                boolean satisfiesProposition =
+                        DagUtils.checkForFork(modifiedJobDag, j, forkNodes, predecessor);
+                if (!satisfiesProposition) {
+                    modifiedJobDag.removeEdge(e);
+                    if (modifiedJobDag.outgoingEdgesOf(predecessor).isEmpty()) {
+                        try {
+                            modifiedJobDag.addDagEdge(predecessor, sink);
+                        } catch (Exception ex) {
+                        }
+                    }
+                }
+                if (modifiedJobDag.inDegreeOf(j) == 1) {
+                    break;
+                }
+            }
+            // Find the fork node f by following the path along this edge e.
+            // If f has a successor that is not an ancestor of j -> e is a conflicting edge:
+            //   get the source of e,
+            //   remove e,
+            //   if the source has no successor -> connect the source to the sink,
+            //   if the in-degree of j == 1 -> break.
+        }
+
+        // if (!DagUtils.checkProperty1(modifiedJobDag)) {
+        //     throw new RuntimeException("abs");
+        // }
+        return modifiedJobDag;
+    }
+
+    private static boolean checkProperty1(DirectedAcyclicGraph<Job, DefaultEdge> jobDag) {
+        LinkedList<Job> joinNodes = new LinkedList<>();
+        List<Job> forkNodes = new LinkedList<>();
+
+        BreadthFirstIterator<Job, DefaultEdge> breadthFirstIterator =
+                new BreadthFirstIterator<>(jobDag);
+        while (breadthFirstIterator.hasNext()) {
+            Job j = breadthFirstIterator.next();
             if (jobDag.inDegreeOf(j) > 1) {
                 joinNodes.add(j);
             }
@@ -168,11 +232,49 @@ public class DagTask implements Task {
         }
 
         for (Job j : joinNodes) {
-            for (Job f : forkNodes) {
-                Set<DefaultEdge> edges = jobDag.getAllEdges(f, j);
-                // jobDag.
+            nextFork: for (Job f : forkNodes) {
+                Set<DefaultEdge> edgeSet = jobDag.getAllEdges(f, j);
+                for (DefaultEdge e : edgeSet) {
+                    Job a = jobDag.getEdgeSource(e);
+                    if (a != f) {
+                        Set<Job> succAndPred = new HashSet<>();
+                        succAndPred.addAll(Graphs.predecessorListOf(jobDag, a));
+                        succAndPred.addAll(Graphs.successorListOf(jobDag, a));
+                        for (Job b : succAndPred) {
+                            if (!((jobDag.getAncestors(jobDag, j).contains(b) || b == j)
+                                    && (jobDag.getDescendants(jobDag, f).contains(b)
+                                            || b == f))) {
+                                continue nextFork;
+                            }
+                        }
+                    }
+                }
+                return true;
+            }
+        }
+        return false;
+    }
+
+    private static boolean checkForFork(DirectedAcyclicGraph<Job, DefaultEdge> jobDag,
+            Job joinNode, List<Job> forkNodes, Job job) {
+        List<Job> pred = Graphs.predecessorListOf(jobDag, job);
+
+        for (Job p : pred) {
+            if (forkNodes.contains(p)) {
+                for (DefaultEdge successorEdge : jobDag.outgoingEdgesOf(p)) {
+                    Job successor = jobDag.getEdgeSource(successorEdge);
+                    if (jobDag.getAncestors(jobDag, joinNode).contains(successor)) {
+                        return false;
+                    }
+                }
+            } else {
+                return checkForFork(jobDag, joinNode, forkNodes, p);
             }
         }
+        return true;
     }
 }
...
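To make the transformation in createNFJGraph concrete, the sketch below replays the edge-removal step on a toy DAG of strings, using the same (older, experimental) JGraphT DirectedAcyclicGraph API that DagTask imports. The graph shape and the choice of which incoming edge counts as conflicting are assumptions made for illustration; in the commit that decision is delegated to DagUtils.checkForFork on Job vertices.

import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
import org.jgrapht.graph.DefaultEdge;

public class NfjEdgeRemovalSketch {
    public static void main(String[] args) throws Exception {
        // Toy DAG whose join node "c" has two incoming edges that do not form
        // a nested fork-join structure. Vertex names are illustrative only.
        DirectedAcyclicGraph<String, DefaultEdge> dag =
                new DirectedAcyclicGraph<>(DefaultEdge.class);
        for (String v : new String[] {"source", "a", "b", "c", "sink"}) {
            dag.addVertex(v);
        }
        dag.addDagEdge("source", "a");
        dag.addDagEdge("source", "b");
        dag.addDagEdge("a", "c");
        dag.addDagEdge("b", "c");
        dag.addDagEdge("b", "sink");
        dag.addDagEdge("c", "sink");

        // Mechanics of the removal step in createNFJGraph, applied by hand:
        // assume the edge a -> c was flagged as conflicting.
        DefaultEdge conflicting = dag.getEdge("a", "c");
        String predecessor = dag.getEdgeSource(conflicting); // "a"
        dag.removeEdge(conflicting);
        if (dag.outgoingEdgesOf(predecessor).isEmpty()) {
            // Reattach the orphaned predecessor to the sink, as the commit does
            // with addDagEdge(predecessor, sink).
            dag.addDagEdge(predecessor, "sink");
        }

        System.out.println("in-degree of c after removal: " + dag.inDegreeOf("c")); // 1
        System.out.println("edges leaving a: " + dag.outgoingEdgesOf(predecessor)); // [(a : sink)]
    }
}

After all conflicting incoming edges of a join are removed, its in-degree drops to 1 and the surrounding fork/join pair nests properly, which the (currently commented-out) checkProperty1 call appears intended to verify.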
FonsecaNelis.java

@@ -9,13 +9,17 @@ import java.util.Map;
 import java.util.Set;
 import com.google.common.collect.Lists;
 import com.google.common.math.LongMath;
+import org.jgrapht.experimental.dag.DirectedAcyclicGraph;
+import org.jgrapht.graph.DefaultEdge;
 import mvd.jester.info.SchedulingInfo;
 import mvd.jester.info.TerminationInfo;
 import mvd.jester.info.TerminationInfo.Level;
 import mvd.jester.model.DagTask;
+import mvd.jester.model.Job;
 import mvd.jester.model.Segment;
 import mvd.jester.model.SortedTaskSet;
 import mvd.jester.model.Task;
+import mvd.jester.model.DagTask.DagUtils;
 import mvd.jester.priority.PriorityManager;
 import mvd.jester.priority.RateMonotonic;
@@ -23,11 +27,13 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
     private final Map<Task, TerminationInfo> responseTimes;
     private final PriorityManager priorityManager;
+    private final Map<Task, Set<Segment>> sortedSegments;
 
     public FonsecaNelis(long numberOfProcessors) {
         super(numberOfProcessors);
         this.responseTimes = new HashMap<>();
         this.priorityManager = new RateMonotonic();
+        this.sortedSegments = new HashMap<>();
     }
 
     @Override
...@@ -37,8 +43,9 @@ public class FonsecaNelis extends AbstractTest<DagTask> { ...@@ -37,8 +43,9 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
@Override @Override
public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) { public SchedulingInfo runSchedulabilityCheck(SortedTaskSet<DagTask> tasks) {
createNFJandDecompositionTree(tasks); sortedSegments.clear();
responseTimes.clear(); responseTimes.clear();
createNFJandDecompositionTree(tasks);
for (DagTask t : tasks) { for (DagTask t : tasks) {
long responseTime = calculateResponseTime(tasks, t); long responseTime = calculateResponseTime(tasks, t);
responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, Level.HIGH)); responseTimes.put(t, new TerminationInfo(t.getDeadline(), responseTime, Level.HIGH));
@@ -49,6 +56,16 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
     }
 
     private void createNFJandDecompositionTree(SortedTaskSet<DagTask> tasks) {
+        for (DagTask t : tasks) {
+            DirectedAcyclicGraph<Job, DefaultEdge> modifiedJobDag =
+                    DagUtils.createNFJGraph(t.getJobDag());
+
+            // List<Segment> sortedSegment = new LinkedList<>(t.getWorkloadDistribution());
+            // Collections.sort(sortedSegment,
+            //         (s1, s2) -> (int) (s2.getNumberOfJobs() - s1.getNumberOfJobs()));
+            // Set<Segment> sortedSet = new LinkedHashSet<>(sortedSegment);
+            // sortedSegments.put(t, sortedSet);
+        }
     }
 
     private long calculateResponseTime(SortedTaskSet<DagTask> tasks, DagTask task) {
@@ -90,7 +107,7 @@ public class FonsecaNelis extends AbstractTest<DagTask> {
         long carryInAndOutWorkload = getCarryInAndOutWorkload(task, task.getWorkloadDistribution(),
-                new HashSet<>(), carryInAndOutInterval);
+                sortedSegments.get(task), carryInAndOutInterval);
 
         return carryInAndOutWorkload + bodyWorkload;
     }
...
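The commented-out block in createNFJandDecompositionTree outlines the intended follow-up: sort each task's workload-distribution segments by decreasing number of jobs and store that order in sortedSegments, which getCarryInAndOutWorkload now reads instead of an empty set. A self-contained sketch of that sorting step, using a stand-in segment class because the project's Segment type is not shown in this diff:

import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

public class SegmentSortSketch {
    // Stand-in for mvd.jester.model.Segment; only getNumberOfJobs() is assumed.
    static class FakeSegment {
        private final long numberOfJobs;

        FakeSegment(long numberOfJobs) {
            this.numberOfJobs = numberOfJobs;
        }

        long getNumberOfJobs() {
            return numberOfJobs;
        }

        @Override
        public String toString() {
            return "segment(" + numberOfJobs + " jobs)";
        }
    }

    public static void main(String[] args) {
        List<FakeSegment> sortedSegment = new LinkedList<>(
                Arrays.asList(new FakeSegment(1), new FakeSegment(4), new FakeSegment(2)));

        // Same comparator as the commented-out code: descending by number of jobs.
        Collections.sort(sortedSegment,
                (s1, s2) -> (int) (s2.getNumberOfJobs() - s1.getNumberOfJobs()));

        // A LinkedHashSet keeps the sorted order while dropping duplicates; this is
        // what sortedSegments.put(t, sortedSet) would store per task.
        Set<FakeSegment> sortedSet = new LinkedHashSet<>(sortedSegment);
        System.out.println(sortedSet); // [segment(4 jobs), segment(2 jobs), segment(1 jobs)]
    }
}

Until that block is enabled, sortedSegments.get(task) returns null for every task, so the new getCarryInAndOutWorkload call would need either a guard or the uncommented code before the test can run.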