diff --git a/Makefile b/Makefile index 007aadd..62696db 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ VERSION := $(shell git tag | sort -V | tail -1) DATE := $(shell git log -1 --format=%cd --date=short) BUILD := $(shell git rev-parse --short HEAD) -PROJECTNAME := $(shell basename "$(PWD)") +PROJECTNAME := "BalancedGo" # Go related variables. GOBASE := $(shell pwd) diff --git a/algorithms/algorithms.go b/algorithms/algorithms.go index 9de327f..7715f01 100644 --- a/algorithms/algorithms.go +++ b/algorithms/algorithms.go @@ -1,8 +1,11 @@ +// This package implements various algorithms to compute Generalized Hypertree Decompositions as well as +// the more restricted set of Hypertree Decompositions. + package algorithms import "github.com/cem-okulmus/BalancedGo/lib" -// Algorithm serves as the common interfacea of all hypergraph decomposition algorithms +// Algorithm serves as the common interface of all hypergraph decomposition algorithms type Algorithm interface { // A Name is useful to identify the individual algorithms in the result Name() string diff --git a/algorithms/balsepHybrid.go b/algorithms/balsepHybrid.go index 007afbd..4615862 100644 --- a/algorithms/balsepHybrid.go +++ b/algorithms/balsepHybrid.go @@ -99,7 +99,6 @@ func (b BalSepHybrid) findDecomp(currentDepth int, H lib.Graph) lib.Decomp { ch := make(chan lib.Decomp) var subtrees []lib.Decomp - //var outDecomp []Decomp for i := range comps { if currentDepth > 0 { @@ -114,24 +113,18 @@ func (b BalSepHybrid) findDecomp(currentDepth int, H lib.Graph) lib.Decomp { //stop if there are at most two special edges left if comps[i].Len() <= 2 { ch <- baseCaseSmart(b.Graph, comps[i]) - //outDecomp = append(outDecomp, baseCaseSmart(b.Graph, comps[i], Sp)) return } //Early termination if comps[i].Edges.Len() <= b.K && len(comps[i].Special) == 1 { ch <- earlyTermination(comps[i]) - //outDecomp = append(outDecomp, earlyTermination(comps[i], Sp[0])) return } det := DetKDecomp{K: b.K, Graph: b.Graph, BalFactor: 
b.BalFactor, SubEdge: true} - - // edgesFromSpecial := EdgesSpecial(Sp) - // comps[i].Edges.Append(edgesFromSpecial...) - - // det.cache = make(map[uint64]*CompCache) det.cache.Init() + result := det.findDecomp(comps[i], balsep.Vertices()) if !reflect.DeepEqual(result, lib.Decomp{}) && currentDepth == 0 { result.SkipRerooting = true @@ -146,14 +139,12 @@ func (b BalSepHybrid) findDecomp(currentDepth int, H lib.Graph) lib.Decomp { // } } ch <- result - // outDecomp = append(outDecomp, result) }(i, comps, SepSpecial) } } for i := 0; i < len(comps); i++ { - // decomp := outDecomp[i] decomp := <-ch if reflect.DeepEqual(decomp, lib.Decomp{}) { // log.Printf("balDet REJECTING %v: couldn't decompose a component of H %v \n", diff --git a/algorithms/balsepHybridSeq.go b/algorithms/balsepHybridSeq.go index bb14e43..f714642 100644 --- a/algorithms/balsepHybridSeq.go +++ b/algorithms/balsepHybridSeq.go @@ -179,7 +179,6 @@ func (s BalSepHybridSeq) findDecomp(currentDepth int, H lib.Graph) lib.Decomp { output := lib.Node{Bag: balsep.Vertices(), Cover: balsep} for _, s := range subtrees { - //TODO: Reroot only after all subtrees received if currentDepth == 0 && s.SkipRerooting { // log.Println("\nFrom detK on", decomp.Graph, ":\n", decomp) // local := BalSepGlobal{Graph: b.Graph, BalFactor: b.BalFactor} diff --git a/algorithms/detKDecomp.go b/algorithms/detKDecomp.go index e269f8f..bf1be55 100644 --- a/algorithms/detKDecomp.go +++ b/algorithms/detKDecomp.go @@ -221,9 +221,7 @@ OUTER: continue OUTER } } - //d.Cache.AddPositive(sepActual, comps[i]) - // log.Printf("Produced Decomp: %v\n", decomp) subtrees = append(subtrees, decomp.Root) } @@ -231,9 +229,7 @@ OUTER: return lib.Decomp{Graph: H, Root: lib.Node{Bag: bag, Cover: sepActual, Children: subtrees}} } } - } - } return lib.Decomp{} // Reject if no separator could be found diff --git a/algorithms/logkHybrid.go b/algorithms/logkHybrid.go index b3d8fde..1ba04a0 100644 --- a/algorithms/logkHybrid.go +++ b/algorithms/logkHybrid.go 
@@ -100,7 +100,6 @@ func (l *LogKHybrid) Name() string { // FindDecomp finds a decomp func (l *LogKHybrid) FindDecomp() lib.Decomp { - // l.cache = make(map[uint32]*CompCache) l.cache.Init() return l.findDecomp(l.Graph, []int{}, l.Graph.Edges) } @@ -112,12 +111,9 @@ func (l *LogKHybrid) FindDecompGraph(Graph lib.Graph) lib.Decomp { } func (l *LogKHybrid) detKWrapper(H lib.Graph, Conn []int, allwowed lib.Edges) lib.Decomp { - det := DetKDecomp{K: l.K, Graph: lib.Graph{Edges: allwowed}, BalFactor: l.BalFactor, SubEdge: false} - // TODO: reuse the same cache as for Logk? - // det.Cache.Init() - l.cache.CopyRef(&det.cache) + l.cache.CopyRef(&det.cache) // reuse the same cache as log-k return det.findDecomp(H, Conn) } @@ -154,7 +150,6 @@ func (l *LogKHybrid) baseCase(H lib.Graph, lenAE int) lib.Decomp { } // construct a decomp in the remaining two - if H.Edges.Len() <= l.K && len(H.Special) == 0 { output = lib.Decomp{Graph: H, Root: lib.Node{Bag: H.Vertices(), Cover: H.Edges}} } @@ -183,7 +178,7 @@ func (l *LogKHybrid) findDecomp(H lib.Graph, Conn []int, allowedFull lib.Edges) return l.baseCase(H, allowedFull.Len()) } - // Deterime the function to use for the recursive calls + // Determine the function to use for the recursive calls var recCall recursiveCall if l.Predicate(H, l.K) { diff --git a/balanced.go b/balanced.go index 96d165c..6d6a0b7 100644 --- a/balanced.go +++ b/balanced.go @@ -1,3 +1,18 @@ +// BalancedGo - A research prototype to compute structural decompositions of Conjunctive Queries and CSPs +// via the use of Balanced Separators with a focus on parallelism using the programming language Go. +// +// For more detailed information, see "Fast and Parallel Decomposition of Constraint Satisfaction Problems", +// Georg Gottlob, Cem Okulmus, Reinhard Pichler, released in Proc. IJCAI 2020. +// https://www.ijcai.org/Proceedings/2020/161 +// +// The tool is split into three packages. 
main is responsible to actually run the various algorithms supported +// by the tool, lib is used to implement various functionality used by the algorithms and lastly algorithms which +// implements the actual algorithms to compute various decompositions. +// +// In addition to this, there is also a tool subdirectory in the repository which is intended to support functionality +// not directly related to the computation of decompositions, such as changing the formatting of hypergraphs, or fixing +// a faulty decomposition. + package main import ( @@ -16,13 +31,13 @@ import ( "github.com/cem-okulmus/BalancedGo/lib" ) -// Decomp used to improve readabilty +// Decomp used to improve readability type Decomp = lib.Decomp -// Edge used to improve readabilty +// Edge used to improve readability type Edge = lib.Edge -// Graph used to improve readabilty +// Graph used to improve readability type Graph = lib.Graph func logActive(b bool) { diff --git a/lib/base.go b/lib/base.go index 1779d1e..60cf3a8 100644 --- a/lib/base.go +++ b/lib/base.go @@ -1,3 +1,6 @@ +// This package provides various functions, data structures and methods to aid in the design of algorithms to +// compute structural decomposition methods. 
+ package lib import ( @@ -56,7 +59,7 @@ OUTER: return output } -// mem64 is the same as Mem, but for uint64 +// mem64 is the same as mem, but for uint64 func mem64(as []uint64, b uint64) bool { for _, a := range as { if a == b { @@ -74,7 +77,6 @@ func diffEdges(a Edges, e ...Edge) Edges { for i := range e { hashes = append(hashes, e[i].Hash()) } - for i := range a.Slice() { if !mem64(hashes, a.Slice()[i].Hash()) { output = append(output, a.Slice()[i]) diff --git a/lib/cache.go b/lib/cache.go index ece98c5..879f9ca 100644 --- a/lib/cache.go +++ b/lib/cache.go @@ -22,7 +22,6 @@ func (c *Cache) CopyRef(other *Cache) { if c.cache == nil { // to be sure only an initialised cache is copied c.Init() } - c.cacheMux.RLock() defer c.cacheMux.RUnlock() @@ -74,12 +73,10 @@ func (c *Cache) AddNegative(sep Edges, comp Graph) { defer c.cacheMux.Unlock() _, ok := c.cache[sep.Hash()] - if !ok { var newCache compCache c.cache[sep.Hash()] = &newCache } - // fmt.Println("Adding negative, current length of cache", len(c.cache)) c.cache[sep.Hash()].Fail = append(c.cache[sep.Hash()].Fail, comp.Hash()) } @@ -90,7 +87,6 @@ func (c *Cache) CheckNegative(sep Edges, comps []Graph) bool { defer c.cacheMux.RUnlock() //check cache for previous encounters - compCachePrev, ok := c.cache[sep.Hash()] if !ok { // sep not encountered before @@ -100,7 +96,6 @@ func (c *Cache) CheckNegative(sep Edges, comps []Graph) bool { for j := range comps { for i := range compCachePrev.Fail { if comps[j].Hash() == compCachePrev.Fail[i] { - // log.Println("Comp ", comp, "(hash ", comp.Edges.Hash(), ") known as negative for sep ", sep) return true } } @@ -124,7 +119,6 @@ func (c *Cache) CheckPositive(sep Edges, comps []Graph) bool { for j := range comps { for i := range compCachePrev.Succ { if comps[j].Hash() == compCachePrev.Succ[i] { - // log.Println("Comp ", comp, " known as negative for sep ", sep) return true } } diff --git a/lib/combin.go b/lib/combin.go index fc4ae40..fcc1b45 100644 --- a/lib/combin.go +++ 
b/lib/combin.go @@ -25,7 +25,7 @@ func binomial(n, k int) int { if n < k { panic("combin: n < k") } - // (n,k) = (n, n-k) + if k > n/2 { k = n - k } @@ -165,15 +165,12 @@ func SplitCombin(n int, k int, split int, unextended bool) []*CombinationIterato var output []*CombinationIterator initial := CombinationIterator{n: n, k: k, stepSize: split, extended: !unextended, confirmed: true} - output = append(output, &initial) for i := 1; i < split; i++ { tempIter := CombinationIterator{n: n, k: k, stepSize: split, extended: !unextended, confirmed: true} - tempIter.hasNext() nextCombinationStep(tempIter.combination, n, k, i) - output = append(output, &tempIter) } diff --git a/lib/cover.go b/lib/cover.go index 2b865c6..a8fe98a 100644 --- a/lib/cover.go +++ b/lib/cover.go @@ -69,7 +69,6 @@ func NewCover(K int, vertices []int, bound Edges, compVertices []int) Cover { func (c *Cover) NextSubset() int { if !c.first { if !c.backtrack() { - // log.Println("No more covers possible.") return -1 // no more backtracking possible } c.pos++ diff --git a/lib/decomp.go b/lib/decomp.go index 5af3d83..5fb9f6b 100644 --- a/lib/decomp.go +++ b/lib/decomp.go @@ -32,7 +32,6 @@ func (d *Decomp) RestoreSubedges() { // It also checks for the special condition of HDs, though it merely prints a warning if it is not satisfied, // the output is not affected by this additional check. func (d Decomp) Correct(g Graph) bool { - if reflect.DeepEqual(d, Decomp{}) { // empty Decomp is always false return false } @@ -63,7 +62,6 @@ func (d Decomp) Correct(g Graph) bool { //connectedness for _, i := range d.Graph.Edges.Vertices() { - nodeCheck, _ := d.Root.connected(i, false) if !nodeCheck { mutex.RLock() @@ -71,14 +69,9 @@ func (d Decomp) Correct(g Graph) bool { mutex.RUnlock() return false } - // if d.connected(i) != nodeCheck { - // log.Panicln("Node based connectedness check not working!") - // } - } //special condition (optionally) - if !d.Root.noSCViolation() { fmt.Println("SCV found!. 
Not a valid hypertree decomposition!") } @@ -99,7 +92,6 @@ func (d Decomp) CheckWidth() int { if n.Cover.Len() > output { output = n.Cover.Len() } - for _, c := range n.Children { children = append(children, c) // build up the next level of the tree } diff --git a/lib/edge.go b/lib/edge.go index f12151f..a0a897c 100644 --- a/lib/edge.go +++ b/lib/edge.go @@ -189,8 +189,8 @@ func removeDuplicateEdges(elementsSlice []Edge) Edges { return NewEdges(elements.slice[:j+1]) } -// subedges computes all subdges for an Edges slice. -// TODO: Unnessarily adds empty edge +// subedges computes all subedges for an Edges slice. +// TODO: Unnecessarily adds empty edge func (e Edge) subedges() []Edge { var output []Edge diff --git a/lib/heuristics.go b/lib/heuristics.go index b2d838c..fd1b933 100644 --- a/lib/heuristics.go +++ b/lib/heuristics.go @@ -10,7 +10,7 @@ import ( "time" ) -// GetMSCOrder produes the Maximal Cardinality Search Ordering. +// GetMSCOrder produces the Maximal Cardinality Search Ordering. 
// Implementation is based det-k-decomp of Samer and Gottlob '09 func GetMSCOrder(edges Edges) Edges { rand.Seed(time.Now().UTC().UnixNano()) @@ -158,7 +158,7 @@ func diffDistances(old, new [][]int) int { for i := 0; i < len(old[j]); i++ { if isInf(old[j][i]) && !isInf(new[j][i]) { // disconnected a path output = output + SepWeight - } else if !isInf(old[j][i]) && !isInf(new[j][i]) { // check if parth shortened + } else if !isInf(old[j][i]) && !isInf(new[j][i]) { // check if path shortened diff := old[j][i] - new[j][i] output = output + diff } diff --git a/lib/node.go b/lib/node.go index d997e37..a39b7ee 100644 --- a/lib/node.go +++ b/lib/node.go @@ -280,7 +280,6 @@ func (n *Node) CombineNodes(subtree Node, connecting Edges) *Node { // leaf that covers the connecting vertices if Subset(n.Bag, connecting.Vertices()) && len(n.Children) == 0 { n.Children = subtree.Children - // log.Println("Base case activated at node Bag: ", PrintVertices(n.Bag), " Cover: ", n.Cover) return &subtree } @@ -288,7 +287,6 @@ func (n *Node) CombineNodes(subtree Node, connecting Edges) *Node { result := n.Children[i].CombineNodes(subtree, connecting) if result != nil { - // log.Println("Child of node Bag: ", PrintVertices(n.Bag), " Cover: ", n.Cover, " activated") n.Children[i] = *result return n } diff --git a/lib/parser.go b/lib/parser.go index ee78f73..cd95259 100644 --- a/lib/parser.go +++ b/lib/parser.go @@ -29,7 +29,7 @@ type ParseGraph struct { Encoding map[string]int } -// GetGraph parses a string in Hyperbench format into a graph +// GetGraph parses a string in HyperBench format into a graph func GetGraph(s string) (Graph, ParseGraph) { graphLexer := lexer.Must(ebnf.New(` diff --git a/lib/preprocessing.go b/lib/preprocessing.go index 5c69c10..bcd3d17 100644 --- a/lib/preprocessing.go +++ b/lib/preprocessing.go @@ -264,7 +264,6 @@ func (g Graph) TypeCollapse() (Graph, map[int][]int, int) { for _, v := range g.Vertices() { typeString := g.getType(v).String() - // fmt.Println("Type 
of ", m[v], "is ", typeString) if _, ok := encountered[typeString]; ok { // already seen this type before @@ -272,7 +271,7 @@ func (g Graph) TypeCollapse() (Graph, map[int][]int, int) { substituteMap[v] = encountered[typeString] restorationMap[encountered[typeString]] = append(restorationMap[encountered[typeString]], v) } else { - // Record thie type as a new element + // Record this type as a new element encountered[typeString] = v substituteMap[v] = v } diff --git a/test/hash_test.go b/test/hash_test.go index 0bd2c08..5f23343 100644 --- a/test/hash_test.go +++ b/test/hash_test.go @@ -1,3 +1,5 @@ +// This package implements various black box unit tests + package tests import ( @@ -22,15 +24,12 @@ func check(e error) { // TestIntHash provides a basic test for hashes of integers func TestIntHash(t *testing.T) { - s := rand.NewSource(time.Now().UnixNano()) r := rand.New(s) for x := 0; x < 1000; x++ { arity := r.Intn(100) + 1 - var vertices []int - for i := 0; i < arity; i++ { vertices = append(vertices, r.Intn(1000)+i) } @@ -38,13 +37,11 @@ func TestIntHash(t *testing.T) { hash1 := lib.IntHash(vertices) r.Shuffle(len(vertices), func(i, j int) { vertices[i], vertices[j] = vertices[j], vertices[i] }) - hash2 := lib.IntHash(vertices) newVal := r.Intn(100) + len(vertices) different := vertices[len(vertices)/2] != newVal vertices[len(vertices)/2] = newVal - hash3 := lib.IntHash(vertices) if hash1 != hash2 { @@ -52,20 +49,15 @@ func TestIntHash(t *testing.T) { } if different && hash3 == hash2 { - fmt.Println("vertex", vertices) t.Errorf("hash collision 1") } - } // Collision Test // generate two different integers and see if their hashs collide - for x := 0; x < 1000; x++ { - arity := r.Intn(20) + 1 - var temp1 []int var temp2 []int @@ -77,11 +69,9 @@ func TestIntHash(t *testing.T) { temp2 = append(temp2, r.Intn(100)) } - hash1 := lib.IntHash(temp1) hash2 := lib.IntHash(temp2) - temp1 = lib.RemoveDuplicates(temp1) temp2 = lib.RemoveDuplicates(temp2) @@ -89,18 +79,15 @@ 
func TestIntHash(t *testing.T) { continue } - if hash1 == hash2 { fmt.Println("Collision", temp1, temp2) t.Errorf("hash collision 2") } } - } // BenchmarkSeparator uses a specific hypergraph func BenchmarkSeparator(b *testing.B) { - // Get the data resp, err := http.Get("http://hyperbench.dbai.tuwien.ac.at/download/hypergraph/655") if err != nil { @@ -118,13 +105,10 @@ func BenchmarkSeparator(b *testing.B) { r := rand.New(s) parsedGraph, _ := lib.GetGraph(buf.String()) - pred := lib.BalancedCheck{} for i := 0; i < b.N; i++ { - var edges []int - k := 20 for i := 0; i < k; i++ { @@ -132,22 +116,18 @@ func BenchmarkSeparator(b *testing.B) { } sep := lib.GetSubset(parsedGraph.Edges, edges) - pred.Check(&parsedGraph, &sep, 1) } } // TestEdgeHash tests the hash function of Edge against collisions and stability under permutation func TestEdgeHash(t *testing.T) { - s := rand.NewSource(time.Now().UnixNano()) r := rand.New(s) for x := 0; x < 100; x++ { arity := r.Intn(100) + 1 - var vertices []int - name := r.Intn(1000) for i := 0; i < arity; i++ { @@ -155,21 +135,16 @@ func TestEdgeHash(t *testing.T) { } edge := lib.Edge{Name: name, Vertices: vertices} - hash1 := edge.Hash() r.Shuffle(len(vertices), func(i, j int) { vertices[i], vertices[j] = vertices[j], vertices[i] }) - edge2 := lib.Edge{Name: name, Vertices: vertices} - hash2 := edge2.Hash() newVal := r.Intn(100) + len(vertices) different := vertices[len(vertices)/2] != newVal vertices[len(vertices)/2] = newVal - edge3 := lib.Edge{Name: name, Vertices: vertices} - hash3 := edge3.Hash() if hash1 != hash2 { @@ -177,20 +152,15 @@ func TestEdgeHash(t *testing.T) { } if different && hash3 == hash2 { - fmt.Println("vertex", vertices) t.Errorf("hash collision 3") } - } // Collision Test // generate two different edges and see if their hashs collide - for x := 0; x < 1000; x++ { - arity := r.Intn(20) + 1 - var temp1 []int var temp2 []int @@ -207,11 +177,9 @@ func TestEdgeHash(t *testing.T) { } edge := lib.Edge{Name: 0, Vertices: 
lib.RemoveDuplicates(temp1)} - edge2 := lib.Edge{Name: 0, Vertices: lib.RemoveDuplicates(temp2)} hash1 := edge.Hash() - hash2 := edge2.Hash() if hash1 == hash2 { @@ -219,54 +187,40 @@ func TestEdgeHash(t *testing.T) { t.Errorf("hash collision 4") } } - } // TestEdgesHash tests the hash function of Edges against collisions and stability under permutation func TestEdgesHash(t *testing.T) { - s := rand.NewSource(time.Now().UnixNano()) r := rand.New(s) for x := 0; x < 100; x++ { - length := r.Intn(20) + 1 var temp []lib.Edge for c := 0; c < length; c++ { - arity := r.Intn(100) + 1 - var vertices []int - name := r.Intn(1000) - for i := 0; i < arity; i++ { vertices = append(vertices, r.Intn(1000)+i) } edge := lib.Edge{Name: name, Vertices: vertices} - temp = append(temp, edge) - } edges := lib.NewEdges(temp) - hash1 := edges.Hash() r.Shuffle(len(temp), func(i, j int) { temp[i], temp[j] = temp[j], temp[i] }) - edges = lib.NewEdges(temp) - hash2 := edges.Hash() index := r.Intn(len(temp)) index2 := r.Intn(len(temp[index].Vertices)) temp[index].Vertices[index2] = temp[index].Vertices[index2] + 1 - edges = lib.NewEdges(temp) - hash3 := edges.Hash() if hash1 != hash2 { @@ -281,20 +235,14 @@ func TestEdgesHash(t *testing.T) { // Collision Test // generate two different edges and see if their hashs collide - for x := 0; x < 1000; x++ { - length := r.Intn(20) + 1 - var temp []lib.Edge var temp2 []lib.Edge for j := 0; j < length; j++ { - arity := r.Intn(20) + 1 - var temp1a []int - for i := 0; i < arity; i++ { temp1a = append(temp1a, r.Intn(100)) } @@ -303,11 +251,8 @@ func TestEdgesHash(t *testing.T) { } for j := 0; j < length; j++ { - arity := r.Intn(20) + 1 - var temp1a []int - for i := 0; i < arity; i++ { temp1a = append(temp1a, r.Intn(100)) } @@ -320,11 +265,9 @@ func TestEdgesHash(t *testing.T) { } edges := lib.NewEdges(temp) - edges2 := lib.NewEdges(temp2) hash1 := edges.Hash() - hash2 := edges2.Hash() if hash1 == hash2 { @@ -332,12 +275,10 @@ func TestEdgesHash(t 
*testing.T) { t.Errorf("hash collision 6") } } - } -// TestGraphHash tests the hash function of Graph against collisions and stability under permutation -func TestGraphHash(t *testing.T) { - +// TestGraphHash1 tests the hash function of Graph against stability under permutation +func TestGraphHash1(t *testing.T) { s := rand.NewSource(time.Now().UnixNano()) r := rand.New(s) @@ -348,13 +289,10 @@ func TestGraphHash(t *testing.T) { lengthK := r.Intn(5) + 1 for c := 0; c < lengthSpeciale; c++ { - var slice []lib.Edge for o := 0; o < lengthK; o++ { - arity := r.Intn(100) + 1 - var vertices []int for i := 0; i < arity; i++ { @@ -365,20 +303,15 @@ func TestGraphHash(t *testing.T) { } Sp = append(Sp, lib.NewEdges(slice)) - } for x := 0; x < 100; x++ { - length := r.Intn(20) + 1 var temp []lib.Edge for c := 0; c < length; c++ { - arity := r.Intn(100) + 1 - var vertices []int - name := r.Intn(1000) for i := 0; i < arity; i++ { @@ -386,31 +319,25 @@ func TestGraphHash(t *testing.T) { } edge := lib.Edge{Name: name, Vertices: vertices} - temp = append(temp, edge) } edges := lib.NewEdges(temp) - graph := lib.Graph{Edges: edges, Special: Sp} - hash1 := graph.Hash() r.Shuffle(len(temp), func(i, j int) { temp[i], temp[j] = temp[j], temp[i] }) edges = lib.NewEdges(temp) graph2 := lib.Graph{Edges: edges, Special: Sp} - hash2 := graph2.Hash() index := r.Intn(len(temp)) index2 := r.Intn(len(temp[index].Vertices)) temp[index].Vertices[index2] = temp[index].Vertices[index2] + 1 - edges = lib.NewEdges(temp) graph3 := lib.Graph{Edges: edges, Special: Sp} - hash3 := graph3.Hash() if hash1 != hash2 { @@ -420,23 +347,45 @@ func TestGraphHash(t *testing.T) { if hash3 == hash2 { t.Errorf("hash collision 7") } + } +} + +//TestGraphHash2 is a Collision Test for hashes of Graphs +func TestGraphHash2(t *testing.T) { + s := rand.NewSource(time.Now().UnixNano()) + r := rand.New(s) + + var Sp []lib.Edges + lengthSpeciale := r.Intn(20) + 1 + + lengthK := r.Intn(5) + 1 + + for c := 0; c < lengthSpeciale; 
c++ { + var slice []lib.Edge + + for o := 0; o < lengthK; o++ { + arity := r.Intn(100) + 1 + var vertices []int + + for i := 0; i < arity; i++ { + vertices = append(vertices, r.Intn(1000)+i) + } + + slice = append(slice, lib.Edge{Vertices: vertices}) + } + + Sp = append(Sp, lib.NewEdges(slice)) } - // Collision Test // generate two different edges and see if their hashs collide - for x := 0; x < 1000; x++ { - length := r.Intn(20) + 1 - var temp []lib.Edge var temp2 []lib.Edge for j := 0; j < length; j++ { - arity := r.Intn(20) + 1 - var temp1a []int for i := 0; i < arity; i++ { @@ -447,16 +396,12 @@ func TestGraphHash(t *testing.T) { } for j := 0; j < length; j++ { - arity := r.Intn(20) + 1 - var temp1a []int - for i := 0; i < arity; i++ { temp1a = append(temp1a, r.Intn(100)) } temp2 = append(temp2, lib.Edge{Name: r.Intn(100) + 1, Vertices: temp1a}) - } if cmp.Equal(temp, temp2) { @@ -470,7 +415,6 @@ func TestGraphHash(t *testing.T) { graph5 := lib.Graph{Edges: edges2, Special: Sp} hash1 := graph4.Hash() - hash2 := graph5.Hash() if hash1 == hash2 { @@ -478,5 +422,4 @@ func TestGraphHash(t *testing.T) { t.Errorf("hash collision 9") } } - } diff --git a/test/search_test.go b/test/search_test.go new file mode 100644 index 0000000..3a3cf29 --- /dev/null +++ b/test/search_test.go @@ -0,0 +1,123 @@ +package tests + +import ( + "math/rand" + "runtime" + "testing" + "time" + + "github.com/cem-okulmus/BalancedGo/lib" +) + +// a unit test for the parallel search using the struct Search + +//TestSearchBal ensures that the parallel search for balanced separators always returns the same results, +// no matter how many splits are generated and run in parallel +func TestSearchBal(t *testing.T) { + s := rand.NewSource(time.Now().UnixNano()) + r := rand.New(s) + + randGraph := getRandomGraph(20) + k := r.Intn(5) + 1 + + combinParallel := lib.SplitCombin(randGraph.Edges.Len(), k, runtime.GOMAXPROCS(-1), false) + combinSeq := lib.SplitCombin(randGraph.Edges.Len(), k, 1, false) + + 
parallelSearch := lib.Search{ + H: &randGraph, + Edges: &randGraph.Edges, + BalFactor: 2, + Generators: combinParallel, + } + seqSearch := lib.Search{ + H: &randGraph, + Edges: &randGraph.Edges, + BalFactor: 2, + Generators: combinSeq, + } + pred := lib.BalancedCheck{} + + var allSepsSeq []lib.Edges + var allSepsPar []lib.Edges + + for ; !parallelSearch.ExhaustedSearch; parallelSearch.FindNext(pred) { + sep := lib.GetSubset(randGraph.Edges, parallelSearch.Result) + allSepsPar = append(allSepsPar, sep) + } + + for ; !seqSearch.ExhaustedSearch; seqSearch.FindNext(pred) { + sep := lib.GetSubset(randGraph.Edges, seqSearch.Result) + allSepsSeq = append(allSepsSeq, sep) + } + +OUTER: + for i := range allSepsSeq { + sep := allSepsSeq[i] + + for j := range allSepsPar { + other := allSepsPar[j] + if other.Hash() == sep.Hash() { + continue OUTER // found matching sep + } + } + + t.Errorf("Mismatch in returned seps between sequential and parallel Search") + } +} + +//TestSearchPar ensures that the parallel search for good parents always returns the same results, +// no matter how many splits are generated and run in parallel +func TestSearchPar(t *testing.T) { + s := rand.NewSource(time.Now().UnixNano()) + r := rand.New(s) + + randGraph := getRandomGraph(30) + k := r.Intn(5) + 1 + prevSep := getRandomSep(randGraph, k) + + allowedParent := lib.FilterVertices(randGraph.Edges, prevSep.Vertices()) + + combinParallel := lib.SplitCombin(allowedParent.Len(), k, runtime.GOMAXPROCS(-1), false) + combinSeq := lib.SplitCombin(allowedParent.Len(), k, 1, false) + + parallelSearch := lib.Search{ + H: &randGraph, + Edges: &allowedParent, + BalFactor: 2, + Generators: combinParallel, + } + seqSearch := lib.Search{ + H: &randGraph, + Edges: &allowedParent, + BalFactor: 2, + Generators: combinSeq, + } + predPar := lib.ParentCheck{Conn: lib.Inter(prevSep.Vertices(), randGraph.Vertices()), Child: prevSep.Vertices()} + + var allSepsSeq []lib.Edges + var allSepsPar []lib.Edges + + for ; 
!parallelSearch.ExhaustedSearch; parallelSearch.FindNext(predPar) { + sep := lib.GetSubset(randGraph.Edges, parallelSearch.Result) + allSepsPar = append(allSepsPar, sep) + } + + for ; !seqSearch.ExhaustedSearch; seqSearch.FindNext(predPar) { + sep := lib.GetSubset(randGraph.Edges, seqSearch.Result) + allSepsSeq = append(allSepsSeq, sep) + } + +OUTER: + for i := range allSepsSeq { + sep := allSepsSeq[i] + + for j := range allSepsPar { + other := allSepsPar[j] + if other.Hash() == sep.Hash() { + continue OUTER // found matching sep + } + } + + t.Errorf("Mismatch in returned seps between sequential and parallel Search") + } +} diff --git a/tools/HyperParse/hyperParse.go b/tools/HyperParse/hyperParse.go index 13ccd29..355cb27 100644 --- a/tools/HyperParse/hyperParse.go +++ b/tools/HyperParse/hyperParse.go @@ -1,3 +1,6 @@ +// This package implements a basic tool to change the formatting of hypergraphs, as well as restore +// various errors in an input GHD and also compute various statistics for an input hypergraph. + package main import (