hybrid.go
package main
// Hybrid algorithm of log-k-decomp and det-k-decomp.
import (
"fmt"
"log"
"reflect"
"runtime"
"github.com/cem-okulmus/BalancedGo/lib"
"github.com/cem-okulmus/disjoint"
logk "github.com/cem-okulmus/log-k-decomp/lib"
)
// HybridPredicate is used to determine when to switch from LogKDecomp to DetKDecomp
type HybridPredicate = func(H lib.Graph, K int) bool
type recursiveCall = func(H lib.Graph, Conn []int, allowed lib.Edges, recDepth int) lib.Decomp
// LogKHybrid implements a hybridised algorithm, using LogKDecomp and DetKDecomp in tandem
type LogKHybrid struct {
Graph lib.Graph
K int
cache lib.Cache
BalFactor int
Predicate HybridPredicate // used to determine when to switch to DetK
Size int
level int // keep track of the current recursion level
Generator lib.SearchGenerator
}
// SetGenerator defines the type of Search to use
func (l *LogKHybrid) SetGenerator(Gen lib.SearchGenerator) {
l.Generator = Gen
}
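// A minimal usage sketch (illustrative only): obtaining the input hypergraph
// and a concrete lib.SearchGenerator is assumed to happen elsewhere.
//
//	hybrid := &LogKHybrid{Graph: parsedGraph, K: 3, BalFactor: 2, Size: 100}
//	hybrid.Predicate = hybrid.NumberEdgesPred // switch to DetK once fewer than Size edges remain
//	hybrid.SetGenerator(gen)                  // gen: some lib.SearchGenerator implementation
//	decomp := hybrid.FindDecomp()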
// OneRoundPred will match the behaviour of BalDetK, with Depth 1
func (l *LogKHybrid) OneRoundPred(H lib.Graph, K int) bool {
// log.Println("One Round Predicate")
return true
}
// NumberEdgesPred checks the number of edges of the subgraph
func (l *LogKHybrid) NumberEdgesPred(H lib.Graph, K int) bool {
output := H.Edges.Len() < l.Size
if output {
// log.Println("Predicate NumberEdgesPred")
// log.Println("Current Graph: ", H.Edges.Len(), " Edges / ", l.Size)
}
return output
}
// SumEdgesPred checks the sum of edge sizes (number of vertices per edge) of the subgraph
func (l *LogKHybrid) SumEdgesPred(H lib.Graph, K int) bool {
count := 0
for i := range H.Edges.Slice() {
count = count + len(H.Edges.Slice()[i].Vertices)
}
output := count < l.Size
if output {
// log.Println("Predicate SumEdgesPred")
// log.Println("Current Graph: ", count, " Sum of Edges / ", l.Size)
}
return output
}
// ETimesKDivAvgEdgePred checks the value (|E(H)| * K) / avgEdgeSize of the subgraph against the threshold Size
func (l *LogKHybrid) ETimesKDivAvgEdgePred(H lib.Graph, K int) bool {
count := 0
for i := range H.Edges.Slice() {
count = count + len(H.Edges.Slice()[i].Vertices)
}
avgEdgeSize := count / H.Edges.Len()
output := ((H.Edges.Len() * l.K) / avgEdgeSize) < l.Size
if output {
// log.Println("Predicate ETimesKDivAvgEdgePred")
// log.Println("Current Graph: ", ((H.Edges.Len() * l.K) / avgEdgeSize), " / ", l.Size)
}
return output
}
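// Worked example for ETimesKDivAvgEdgePred (illustrative numbers): a subgraph
// with 20 edges whose vertex counts sum to 80 has an average edge size of
// 80/20 = 4; with K = 3 the tested value is (20*3)/4 = 15, so the predicate
// switches to DetKDecomp whenever Size is set to anything above 15.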
// SetWidth sets the current width parameter of the algorithm
func (l *LogKHybrid) SetWidth(K int) {
l.cache.Reset() // reset the cache as the new width might invalidate any old results
l.K = K
}
// Name returns the name of the algorithm
func (l *LogKHybrid) Name() string {
return "LogKHybrid"
}
// FindDecomp finds a decomp
func (l *LogKHybrid) FindDecomp() lib.Decomp {
l.cache.Init()
return l.findDecomp(l.Graph, []int{}, l.Graph.Edges, 0)
}
// FindDecompGraph finds a decomp, for an explicit graph
func (l *LogKHybrid) FindDecompGraph(Graph lib.Graph) lib.Decomp {
l.Graph = Graph
return l.FindDecomp()
}
func (l *LogKHybrid) detKWrapper(H lib.Graph, Conn []int, allowed lib.Edges, recDepth int) lib.Decomp {
det := DetKDecomp{K: l.K, Graph: lib.Graph{Edges: allowed}, BalFactor: l.BalFactor, SubEdge: false}
l.cache.CopyRef(&det.cache) // reuse the same cache as log-k
return det.findDecomp(H, Conn, recDepth)
}
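// Note on the shared cache: CopyRef above hands DetKDecomp a reference to the
// same cache instance used by the log-k part, so negative separator results
// recorded by one algorithm can be skipped by the other. This reading is
// inferred from the CheckNegative/AddNegative calls in findDecomp below.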
// determine whether we have reached a (positive or negative) base case
func (l *LogKHybrid) baseCaseCheck(lenE int, lenSp int, lenAE int) bool {
if lenE <= l.K && lenSp == 0 {
return true
}
if lenE == 0 && lenSp == 1 {
return true
}
if lenE == 0 && lenSp > 1 {
return true
}
if lenAE == 0 && (lenE+lenSp) >= 0 {
return true
}
return false
}
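// The four conditions above correspond to: (1) H fits into a single bag (at
// most K edges, no special edges), a positive case; (2) exactly one special
// edge left, positive; (3) only special edges left, but more than one,
// negative; (4) no allowed edges left to cover the remainder, negative.
// baseCase below builds the decomposition for the positive cases and returns
// the empty Decomp for the negative ones.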
func (l *LogKHybrid) baseCase(H lib.Graph, lenAE int) lib.Decomp {
// log.Printf("Base case reached. Number of Special Edges %d\n", len(Sp))
var output lib.Decomp
// cover failure cases
if H.Edges.Len() == 0 && len(H.Special) > 1 {
return lib.Decomp{}
}
if lenAE == 0 && (H.Len()) >= 0 {
return lib.Decomp{}
}
// construct a decomp in the remaining two (positive) cases
if H.Edges.Len() <= l.K && len(H.Special) == 0 {
output = lib.Decomp{Graph: H, Root: lib.Node{Bag: H.Vertices(), Cover: H.Edges}}
}
if H.Edges.Len() == 0 && len(H.Special) == 1 {
sp1 := H.Special[0]
output = lib.Decomp{Graph: H,
Root: lib.Node{Bag: sp1.Vertices(), Cover: sp1}}
}
return output
}
func (l *LogKHybrid) findDecomp(H lib.Graph, Conn []int, allowedFull lib.Edges, recDepth int) lib.Decomp {
recDepth = recDepth + 1 // increase the recursive depth
// log.Printf("\n\nCurrent SubGraph: %v\n", H)
// log.Printf("Current Allowed Edges: %v\n", allowedFull)
// log.Println("Conn: ", PrintVertices(Conn), "\n\n")
if !lib.Subset(Conn, H.Vertices()) {
log.Panicln("Conn invariant violated.")
}
// Base Case
if l.baseCaseCheck(H.Edges.Len(), len(H.Special), allowedFull.Len()) {
return l.baseCase(H, allowedFull.Len())
}
// Determine the function to use for the recursive calls
var recCall recursiveCall
if l.Predicate(H, l.K) {
recCall = l.detKWrapper
} else {
recCall = l.findDecomp
}
// all vertices within (H ∪ Sp)
verticesH := H.Vertices()
allowed := lib.FilterVertices(allowedFull, verticesH)
// Set up iterator for child
genChild := lib.SplitCombin(allowed.Len(), l.K, runtime.GOMAXPROCS(-1), false)
parallelSearch := l.Generator.GetSearch(&H, &allowed, l.BalFactor, genChild)
// parallelSearch := lib.Search{H: &H, Edges: &allowed, BalFactor: l.BalFactor, Generators: genChild}
pred := lib.BalancedCheck{}
parallelSearch.FindNext(pred) // initial Search
var Vertices = make(map[int]*disjoint.Element)
// checks all possible child nodes in H; together with the PARENT loop below, this covers all parent-child pairings
CHILD:
for ; !parallelSearch.SearchEnded(); parallelSearch.FindNext(pred) {
childλ := lib.GetSubset(allowed, parallelSearch.GetResult())
compsε, _, _ := H.GetComponents(childλ, Vertices)
// log.Println("Balanced Child found, ", childλ)
// Check if child is possible root
if lib.Subset(Conn, childλ.Vertices()) {
// log.Printf("Child-Root cover chosen: %v\n", Graph{Edges: childλ})
// log.Printf("Comps of Child-Root: %v\n", comps_c)
childχ := lib.Inter(childλ.Vertices(), verticesH)
// check cache for previous encounters
if l.cache.CheckNegative(childλ, compsε) {
// log.Println("Skipping a child sep", childχ)
continue CHILD
}
var subtrees []lib.Node
for y := range compsε {
VCompε := compsε[y].Vertices()
Connγ := lib.Inter(VCompε, childχ)
decomp := recCall(compsε[y], Connγ, allowedFull, recDepth)
if reflect.DeepEqual(decomp, lib.Decomp{}) {
// log.Println("Rejecting child-root")
// log.Printf("\nCurrent SubGraph: %v\n", H)
// log.Printf("Current Allowed Edges: %v\n", allowed)
// log.Println("Conn: ", PrintVertices(Conn), "\n\n")
l.cache.AddNegative(childλ, compsε[y])
continue CHILD
}
// log.Printf("Produced Decomp w Child-Root: %+v\n", decomp)
subtrees = append(subtrees, decomp.Root)
}
root := lib.Node{Bag: childχ, Cover: childλ, Children: subtrees}
return lib.Decomp{Graph: H, Root: root}
}
allowedParent := lib.FilterVertices(allowed, append(Conn, childλ.Vertices()...))
genParent := lib.SplitCombin(allowedParent.Len(), l.K, runtime.GOMAXPROCS(-1), false)
parentalSearch := l.Generator.GetSearch(&H, &allowedParent, l.BalFactor, genParent)
// parentalSearch := lib.Search{H: &H, Edges: &allowedParent, BalFactor: l.BalFactor, Generators: genParent}
predPar := logk.ParentCheck{Conn: Conn, Child: childλ.Vertices()}
parentalSearch.FindNext(predPar)
// parentFound := false
PARENT:
for ; !parentalSearch.SearchEnded(); parentalSearch.FindNext(predPar) {
parentλ := lib.GetSubset(allowedParent, parentalSearch.GetResult())
// log.Println("Looking at parent ", parentλ)
compsπ, _, isolatedEdges := H.GetComponents(parentλ, Vertices)
// log.Println("Parent components ", comps_p)
foundLow := false
var compLowIndex int
var compLow lib.Graph
// Check if the parent is un-balanced, i.e. find the single component compLow that contains more than half of H
for i := range compsπ {
if compsπ[i].Len() > H.Len()/2 {
foundLow = true
compLowIndex = i //keep track of the index for composing comp_up later
compLow = compsπ[i]
}
}
if !foundLow {
fmt.Println("Current SubGraph, ", H)
fmt.Println("Conn ", lib.PrintVertices(Conn))
fmt.Printf("Current Allowed Edges: %v\n", allowed)
// fmt.Printf("Current Allowed Edges in Parent Search: %v\n", parentalSearch.Edges)
fmt.Println("Child ", childλ, " ", lib.PrintVertices(childλ.Vertices()))
fmt.Println("Comps of child ", compsε)
fmt.Println("parent ", parentλ, "( ", parentalSearch.GetResult(), " ) from the set: ", allowedParent)
fmt.Println("Comps of p: ")
for i := range compsπ {
fmt.Println("Component: ", compsπ[i], " Len: ", compsπ[i].Len())
}
log.Panicln("the parallel search didn't actually find a valid parent")
}
vertCompLow := compLow.Vertices()
childχ := lib.Inter(childλ.Vertices(), vertCompLow)
// determine which components of child are inside comp_low
compsε, _, _ := compLow.GetComponents(childλ, Vertices)
// omitting the check for balancedness, as it is guaranteed to still hold at this point
// check cache for previous encounters
if l.cache.CheckNegative(childλ, compsε) {
// log.Println("Skipping a child sep", childχ)
continue PARENT
}
// log.Printf("Parent Found: %v (%s) \n", parentλ, PrintVertices(parentλ.Vertices()))
// parentFound = true
// log.Println("Comp low: ", comp_low, "Vertices of comp_low", PrintVertices(vertCompLow))
// log.Printf("Child chosen: %v (%s) for H %v \n", childλ, PrintVertices(childχ), H)
// log.Printf("Comps of Child: %v\n", comps_c)
//Computing subcomponents of Child
// 1. CREATE GOROUTINES
// ---------------------
//Computing upper component in parallel
chanUp := make(chan lib.Decomp)
var compUp lib.Graph
var decompUp lib.Decomp
var specialChild lib.Edges
tempEdgeSlice := []lib.Edge{}
tempSpecialSlice := []lib.Edges{}
tempEdgeSlice = append(tempEdgeSlice, isolatedEdges...)
for i := range compsπ {
if i != compLowIndex {
tempEdgeSlice = append(tempEdgeSlice, compsπ[i].Edges.Slice()...)
tempSpecialSlice = append(tempSpecialSlice, compsπ[i].Special...)
}
}
// specialChild = NewEdges([]Edge{Edge{Vertices: Inter(childχ, comp_up.Vertices())}})
specialChild = lib.NewEdges([]lib.Edge{{Vertices: childχ}})
// if no comps_p, other than comp_low, just use parent as is
if len(compsπ) == 1 {
compUp.Edges = parentλ
// adding new Special Edge to connect Child to comp_up
compUp.Special = append(compUp.Special, specialChild)
decompTemp := lib.Decomp{Graph: compUp, Root: lib.Node{Bag: lib.Inter(parentλ.Vertices(), verticesH),
Cover: parentλ, Children: []lib.Node{{Bag: childχ, Cover: childλ}}}}
go func(decomp lib.Decomp) {
chanUp <- decomp
}(decompTemp)
} else if len(tempEdgeSlice) > 0 { // otherwise compute decomp for comp_up
compUp.Edges = lib.NewEdges(tempEdgeSlice)
compUp.Special = tempSpecialSlice
// adding new Special Edge to connect Child to comp_up
compUp.Special = append(compUp.Special, specialChild)
// log.Println("Upper component:", comp_up)
//Reducing the allowed edges
allowedReduced := allowedFull.Diff(compLow.Edges)
go func(comp_up lib.Graph, Conn []int, allowedReduced lib.Edges) {
chanUp <- recCall(comp_up, Conn, allowedReduced, recDepth)
}(compUp, Conn, allowedReduced)
}
// Parallel Recursive Calls:
ch := make(chan decompInt)
var subtrees []lib.Node
for x := range compsε {
Connχ := lib.Inter(compsε[x].Vertices(), childχ)
go func(x int, comps_c []lib.Graph, Conn_x []int, allowedFull lib.Edges) {
var out decompInt
out.Decomp = recCall(comps_c[x], Conn_x, allowedFull, recDepth)
out.Int = x
ch <- out
}(x, compsε, Connχ, allowedFull)
}
// 2. WAIT ON GOROUTINES TO FINISH
// ---------------------
for i := 0; i < len(compsε)+1; i++ {
select {
case decompInt := <-ch:
if reflect.DeepEqual(decompInt.Decomp, lib.Decomp{}) {
// l.cache.AddNegative(childλ, comps_c[x])
// log.Println("Rejecting child")
continue PARENT
}
// log.Printf("Produced Decomp: %+v\n", decomp)
subtrees = append(subtrees, decompInt.Decomp.Root)
case decompUpChan := <-chanUp:
if reflect.DeepEqual(decompUpChan, lib.Decomp{}) {
// l.addNegative(childχ, comp_up, Sp)
// log.Println("Rejecting comp_up ", comp_up, " of H ", H)
continue PARENT
}
if !lib.Subset(Conn, decompUpChan.Root.Bag) {
fmt.Println("Current SubGraph, ", H)
fmt.Println("Conn ", lib.PrintVertices(Conn))
fmt.Printf("Current Allowed Edges: %v\n", allowed)
// fmt.Printf("Current Allowed Edges in Parent Search: %v\n", parentalSearch.Edges)
fmt.Println("Child ", childλ, " ", lib.PrintVertices(childλ.Vertices()))
fmt.Println("Comps of child ", compsε)
fmt.Println("parent ", parentλ, "( ", parentalSearch.GetResult(), " ) from the set: ", allowedParent)
fmt.Println("comp_up ", compUp, " V(comp_up) ", lib.PrintVertices(compUp.Vertices()))
fmt.Println("Decomp up: ", decompUpChan)
fmt.Println("Comps of p", compsπ)
fmt.Println("Compare against PredSearch: ", predPar.Check(&H, &parentλ, l.BalFactor, Vertices))
log.Panicln("Conn not covered in parent, Wait, what?")
}
decompUp = decompUpChan
}
}
// 3. POST-PROCESSING (sequentially)
// ---------------------
// rearrange subtrees to form one that covers all of H
rootChild := lib.Node{Bag: childχ, Cover: childλ, Children: subtrees}
var finalRoot lib.Node
if len(tempEdgeSlice) > 0 {
finalRoot = attachingSubtrees(decompUp.Root, rootChild, specialChild)
} else {
finalRoot = rootChild
}
// log.Printf("Produced Decomp: %v\n", finalRoot)
return lib.Decomp{Graph: H, Root: finalRoot}
}
// if parentFound {
// log.Println("Rejecting child ", childλ, " for H ", H)
// log.Printf("\nCurrent SubGraph: %v\n", H)
// log.Printf("Current Allowed Edges: %v\n", allowed)
// log.Println("Conn: ", PrintVertices(Conn), "\n\n")
// }
}
// exhausted search space
return lib.Decomp{}
}