/* $Id$ $Revision$ */
/* vim:set shiftwidth=4 ts=8: */

/*************************************************************************
 * Copyright (c) 2011 AT&T Intellectual Property
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors: See CVS logs. Details at http://www.graphviz.org/
 *************************************************************************/


#include "dot.h"

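/* map_interclust_node:
 * If n is in a cluster that has not yet been expanded, return the
 * cluster's rank leader on n's rank; otherwise return n itself.
 */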
static node_t*
map_interclust_node(node_t * n)
{
    node_t *rv;

    if ((ND_clust(n) == NULL) || (GD_expanded(ND_clust(n))))
        rv = n;
    else
        rv = GD_rankleader(ND_clust(n))[ND_rank(n)];
    return rv;
}

/* make_slots:
 * Make d slots in rank r of root, starting at position pos (where one
 * slot already exists).  Nodes to the right of pos are shifted and their
 * ND_order updated; a non-positive d removes slots instead of adding them.
 */
static void
make_slots(graph_t * root, int r, int pos, int d)
{
    int i;
    node_t *v, **vlist;
    vlist = GD_rank(root)[r].v;
    if (d <= 0) {
        for (i = pos - d + 1; i < GD_rank(root)[r].n; i++) {
            v = vlist[i];
            ND_order(v) = i + d - 1;
            vlist[ND_order(v)] = v;
        }
        for (i = GD_rank(root)[r].n + d - 1; i < GD_rank(root)[r].n; i++)
            vlist[i] = NULL;
    } else {
        /* assert(GD_rank(root)[r].n + d - 1 <= GD_rank(root)[r].an); */
        for (i = GD_rank(root)[r].n - 1; i > pos; i--) {
            v = vlist[i];
            ND_order(v) = i + d - 1;
            vlist[ND_order(v)] = v;
        }
        for (i = pos + 1; i < pos + d; i++)
            vlist[i] = NULL;
    }
    GD_rank(root)[r].n += d - 1;
}

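/* clone_vn:
 * Make a copy of virtual node vn on the same rank and install it in the
 * slot immediately to vn's right (opened by make_slots).
 */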
static node_t*
clone_vn(graph_t * g, node_t * vn)
{
    node_t *rv;
    int r;

    r = ND_rank(vn);
    make_slots(g, r, ND_order(vn), 2);
    rv = virtual_node(g);
    ND_lw(rv) = ND_lw(vn);
    ND_rw(rv) = ND_rw(vn);
    ND_rank(rv) = ND_rank(vn);
    ND_order(rv) = ND_order(vn) + 1;
    GD_rank(g)[r].v[ND_order(rv)] = rv;
    return rv;
}

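/* map_path:
 * Install the chain of virtual nodes and edges representing orig between
 * from and to, given the existing virtual edge ve.  If ve is shared
 * (ED_count(ve) > 1), a parallel chain is built by cloning its virtual
 * nodes; otherwise the existing chain is rerouted so that it starts at
 * from and ends at to.  New virtual edges are given edge type 'type'.
 */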
static void
map_path(node_t * from, node_t * to, edge_t * orig, edge_t * ve, int type)
{
    int r;
    node_t *u, *v;
    edge_t *e;

    assert(ND_rank(from) < ND_rank(to));

    if ((agtail(ve) == from) && (aghead(ve) == to))
        return;

    if (ED_count(ve) > 1) {
        ED_to_virt(orig) = NULL;
        if (ND_rank(to) - ND_rank(from) == 1) {
            if ((e = find_fast_edge(from, to)) && (ports_eq(orig, e))) {
                merge_oneway(orig, e);
                if ((ND_node_type(from) == NORMAL)
                    && (ND_node_type(to) == NORMAL))
                    other_edge(orig);
                return;
            }
        }
        u = from;
        for (r = ND_rank(from); r < ND_rank(to); r++) {
            if (r < ND_rank(to) - 1)
                v = clone_vn(dot_root(from), aghead(ve));
            else
                v = to;
            e = virtual_edge(u, v, orig);
            ED_edge_type(e) = type;
            u = v;
            ED_count(ve)--;
            ve = ND_out(aghead(ve)).list[0];
        }
    } else {
        if (ND_rank(to) - ND_rank(from) == 1) {
            if ((ve = find_fast_edge(from, to)) && (ports_eq(orig, ve))) {
                /* ED_to_orig(ve) = orig; */
                ED_to_virt(orig) = ve;
                ED_edge_type(ve) = type;
                ED_count(ve)++;
                if ((ND_node_type(from) == NORMAL)
                    && (ND_node_type(to) == NORMAL))
                    other_edge(orig);
            } else {
                ED_to_virt(orig) = NULL;
                ve = virtual_edge(from, to, orig);
                ED_edge_type(ve) = type;
            }
        }
        if (ND_rank(to) - ND_rank(from) > 1) {
            e = ve;
            if (agtail(ve) != from) {
                ED_to_virt(orig) = NULL;
                e = ED_to_virt(orig) = virtual_edge(from, aghead(ve), orig);
                delete_fast_edge(ve);
            }
            while (ND_rank(aghead(e)) != ND_rank(to))
                e = ND_out(aghead(e)).list[0];
            if (aghead(e) != to) {
                ve = e;
                e = virtual_edge(agtail(e), to, orig);
                ED_edge_type(e) = type;
                delete_fast_edge(ve);
            }
        }
    }
}

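/* make_interclust_chain:
 * Map the endpoints of orig to their cluster representatives (rank
 * leaders of unexpanded clusters) and build the corresponding virtual
 * chain, marking it as a cluster edge if either endpoint was remapped.
 */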
static void
make_interclust_chain(graph_t * g, node_t * from, node_t * to, edge_t * orig)
{
    int newtype;
    node_t *u, *v;

    u = map_interclust_node(from);
    v = map_interclust_node(to);
    if ((u == from) && (v == to))
        newtype = VIRTUAL;
    else
        newtype = CLUSTER_EDGE;
    map_path(u, v, orig, ED_to_virt(orig), newtype);
}

/* interclexp:
 * Attach and install edges between clusters.
 * Essentially, class2() for interclust edges.
 */
void interclexp(graph_t * subg)
{
    graph_t *g;
    node_t *n;
    edge_t *e, *prev, *next;

    g = dot_root(subg);
    for (n = agfstnode(subg); n; n = agnxtnode(subg, n)) {

        /* N.B. n may be in a sub-cluster of subg */
        prev = NULL;
        for (e = agfstedge(g, n); e; e = next) {
            next = agnxtedge(g, e, n);
            if (agcontains(subg, e))
                continue;

            /* canonicalize edge */
            e = AGMKOUT(e);
            /* short/flat multi edges */
            if (mergeable(prev, e)) {
                if (ND_rank(agtail(e)) == ND_rank(aghead(e)))
                    ED_to_virt(e) = prev;
                else
                    ED_to_virt(e) = NULL;
                if (ED_to_virt(prev) == NULL)
                    continue;        /* internal edge */
                merge_chain(subg, e, ED_to_virt(prev), FALSE);
                safe_other_edge(e);
                continue;
            }

            /* flat edges */
            if (ND_rank(agtail(e)) == ND_rank(aghead(e))) {
                edge_t *fe;
                if ((fe = find_flat_edge(agtail(e), aghead(e))) == NULL) {
                    flat_edge(g, e);
                    prev = e;
                } else if (e != fe) {
                    safe_other_edge(e);
                    if (!ED_to_virt(e))
                        merge_oneway(e, fe);
                }
                continue;
            }

            /* forward edges */
            if (ND_rank(aghead(e)) > ND_rank(agtail(e))) {
                make_interclust_chain(g, agtail(e), aghead(e), e);
                prev = e;
                continue;
            }

            /* backward edges */
            else {
                /*
                 * make_interclust_chain should probably call other_edge(e)
                 * here anyway:
                 * if (agcontains(subg, agtail(e))
                 *     && agfindedge(g, aghead(e), agtail(e))) other_edge(e);
                 */
                make_interclust_chain(g, aghead(e), agtail(e), e);
                prev = e;
            }
        }
    }
}

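/* merge_ranks:
 * Splice the nodes of each rank of subg into the corresponding rank of
 * the root graph, opening slots at the rank leader's position, and mark
 * the affected root ranks as needing reordering.
 */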
static void
merge_ranks(graph_t * subg)
{
    int i, d, r, pos, ipos;
    node_t *v;
    graph_t *root;

    root = dot_root(subg);
    if (GD_minrank(subg) > 0)
        GD_rank(root)[GD_minrank(subg) - 1].valid = FALSE;
    for (r = GD_minrank(subg); r <= GD_maxrank(subg); r++) {
        d = GD_rank(subg)[r].n;
        ipos = pos = ND_order(GD_rankleader(subg)[r]);
        make_slots(root, r, pos, d);
        for (i = 0; i < GD_rank(subg)[r].n; i++) {
            v = GD_rank(root)[r].v[pos] = GD_rank(subg)[r].v[i];
            ND_order(v) = pos++;
            /* real nodes automatically have v->root = root graph */
            if (ND_node_type(v) == VIRTUAL)
                v->root = agroot(root);
            delete_fast_node(subg, v);
            fast_node(root, v);
            GD_n_nodes(root)++;
        }
        GD_rank(subg)[r].v = GD_rank(root)[r].v + ipos;
        GD_rank(root)[r].valid = FALSE;
    }
    if (r < GD_maxrank(root))
        GD_rank(root)[r].valid = FALSE;
    GD_expanded(subg) = TRUE;
}

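/* remove_rankleaders:
 * Delete the cluster skeleton: remove each rank leader's virtual edges
 * and then the leader itself from the root graph.
 */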
static void
remove_rankleaders(graph_t * g)
{
    int r;
    node_t *v;
    edge_t *e;

    for (r = GD_minrank(g); r <= GD_maxrank(g); r++) {
        v = GD_rankleader(g)[r];

        /* remove the entire chain */
        while ((e = ND_out(v).list[0]))
            delete_fast_edge(e);
        while ((e = ND_in(v).list[0]))
            delete_fast_edge(e);
        delete_fast_node(dot_root(g), v);
        GD_rankleader(g)[r] = NULL;
    }
}

/* expand_cluster:
 * Delete the virtual nodes of a cluster and install its real nodes or
 * sub-clusters.
 */
void expand_cluster(graph_t * subg)
{
    /* build internal structure of the cluster */
    class2(subg);
    GD_comp(subg).size = 1;
    GD_comp(subg).list[0] = GD_nlist(subg);
    allocate_ranks(subg);
    build_ranks(subg, 0);
    merge_ranks(subg);

    /* build external structure of the cluster */
    interclexp(subg);
    remove_rankleaders(subg);
}

/* mark_clusters:
 * Mark every node in g with its top-level cluster under g.
 */
void mark_clusters(graph_t * g)
{
    int c;
    node_t *n, *nn, *vn;
    edge_t *orig, *e;
    graph_t *clust;

    /* remove sub-clusters below this level */
    for (n = agfstnode(g); n; n = agnxtnode(g, n)) {
        if (ND_ranktype(n) == CLUSTER)
            UF_singleton(n);
        ND_clust(n) = NULL;
    }

    for (c = 1; c <= GD_n_cluster(g); c++) {
        clust = GD_clust(g)[c];
        for (n = agfstnode(clust); n; n = nn) {
            nn = agnxtnode(clust, n);
            if (ND_ranktype(n) != NORMAL) {
                agerr(AGWARN,
                      "%s was already in a rankset, deleted from cluster %s\n",
                      agnameof(n), agnameof(g));
                agdelete(clust, n);
                continue;
            }
            UF_setname(n, GD_leader(clust));
            ND_clust(n) = clust;
            ND_ranktype(n) = CLUSTER;

            /* mark the virtual nodes of edges inside the cluster */
            for (orig = agfstout(clust, n); orig;
                 orig = agnxtout(clust, orig)) {
                if ((e = ED_to_virt(orig))) {
                    while (e && ND_node_type(vn = aghead(e)) == VIRTUAL) {
                        ND_clust(vn) = clust;
                        e = ND_out(aghead(e)).list[0];
                        /* trouble if concentrators and clusters are mixed */
                    }
                }
            }
        }
    }
}

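/* build_skeleton:
 * Build the cluster skeleton: a chain of virtual "rank leader" nodes in g,
 * one per rank of subg, connected by virtual edges with a high crossing
 * penalty.  ND_UF_size of each leader and ED_count of each skeleton edge
 * record how many cluster nodes and edges they stand for.
 */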
void build_skeleton(graph_t * g, graph_t * subg)
{
    int r;
    node_t *v, *prev, *rl;
    edge_t *e;

    prev = NULL;
    GD_rankleader(subg) = N_NEW(GD_maxrank(subg) + 2, node_t *);
    for (r = GD_minrank(subg); r <= GD_maxrank(subg); r++) {
        v = GD_rankleader(subg)[r] = virtual_node(g);
        ND_rank(v) = r;
        ND_ranktype(v) = CLUSTER;
        ND_clust(v) = subg;
        if (prev) {
            e = virtual_edge(prev, v, NULL);
            ED_xpenalty(e) *= CL_CROSS;
        }
        prev = v;
    }

    /* set the counts on virtual edges of the cluster skeleton */
    for (v = agfstnode(subg); v; v = agnxtnode(subg, v)) {
        rl = GD_rankleader(subg)[ND_rank(v)];
        ND_UF_size(rl)++;
        for (e = agfstout(subg, v); e; e = agnxtout(subg, e)) {
            for (r = ND_rank(agtail(e)); r < ND_rank(aghead(e)); r++) {
                ED_count(ND_out(rl).list[0])++;
            }
        }
    }
    for (r = GD_minrank(subg); r <= GD_maxrank(subg); r++) {
        rl = GD_rankleader(subg)[r];
        if (ND_UF_size(rl) > 1)
            ND_UF_size(rl)--;
    }
}

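/* install_cluster:
 * On the given pass, install the cluster containing n into the order by
 * placing its rank leaders in their ranks and queueing their neighbors,
 * doing so at most once per cluster per pass.
 */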
void install_cluster(graph_t * g, node_t * n, int pass, nodequeue * q)
{
    int r;
    graph_t *clust;

    clust = ND_clust(n);
    if (GD_installed(clust) != pass + 1) {
        for (r = GD_minrank(clust); r <= GD_maxrank(clust); r++)
            install_in_rank(g, GD_rankleader(clust)[r]);
        for (r = GD_minrank(clust); r <= GD_maxrank(clust); r++)
            enqueue_neighbors(q, GD_rankleader(clust)[r], pass);
        GD_installed(clust) = pass + 1;
    }
}

static void mark_lowcluster_basic(Agraph_t * g);
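/* mark_lowclusters:
 * Mark nodes and virtual nodes with their lowest (innermost) containing
 * cluster, first clearing any previous cluster labeling.
 */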
void mark_lowclusters(Agraph_t * root)
{
    Agnode_t *n, *vn;
    Agedge_t *orig, *e;

    /* first, zap any previous cluster labelings */
    for (n = agfstnode(root); n; n = agnxtnode(root, n)) {
        ND_clust(n) = NULL;
        for (orig = agfstout(root, n); orig; orig = agnxtout(root, orig)) {
            if ((e = ED_to_virt(orig))) {
                while (e && (ND_node_type(vn = aghead(e))) == VIRTUAL) {
                    ND_clust(vn) = NULL;
                    e = ND_out(aghead(e)).list[0];
                }
            }
        }
    }

    /* do the recursion */
    mark_lowcluster_basic(root);
}

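/* mark_lowcluster_basic:
 * Recursive helper for mark_lowclusters: clusters are visited bottom-up,
 * so whatever is still unmarked at this level belongs to g itself.
 */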
static void mark_lowcluster_basic(Agraph_t * g)
{
    Agraph_t *clust;
    Agnode_t *n, *vn;
    Agedge_t *orig, *e;
    int c;

    for (c = 1; c <= GD_n_cluster(g); c++) {
        clust = GD_clust(g)[c];
        mark_lowcluster_basic(clust);
    }
    /* see what belongs to this graph that wasn't already marked */
    for (n = agfstnode(g); n; n = agnxtnode(g, n)) {
        if (ND_clust(n) == NULL)
            ND_clust(n) = g;
        for (orig = agfstout(g, n); orig; orig = agnxtout(g, orig)) {
            if ((e = ED_to_virt(orig))) {
                while (e && (ND_node_type(vn = aghead(e))) == VIRTUAL) {
                    if (ND_clust(vn) == NULL)
                        ND_clust(vn) = g;
                    e = ND_out(aghead(e)).list[0];
                }
            }
        }
    }
}
443 | |