Skip to content

Commit

Permalink
Added a unit test for parallel search
Browse files Browse the repository at this point in the history
  • Loading branch information
cem-okulmus committed Feb 13, 2021
1 parent 352eb75 commit ff0a1de
Show file tree
Hide file tree
Showing 20 changed files with 195 additions and 146 deletions.
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
VERSION := $(shell git tag | sort -V | tail -1)
DATE := $(shell git log -1 --format=%cd --date=short)
BUILD := $(shell git rev-parse --short HEAD)
PROJECTNAME := $(shell basename "$(PWD)")
PROJECTNAME := "BalancedGo"

# Go related variables.
GOBASE := $(shell pwd)
Expand Down
5 changes: 4 additions & 1 deletion algorithms/algorithms.go
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
// This package implements various algorithms to compute Generalized Hypertree Decompositions as well as
// the more restricted set of Hypertree Decompositions.

package algorithms

import "github.com/cem-okulmus/BalancedGo/lib"

// Algorithm serves as the common interfacea of all hypergraph decomposition algorithms
// Algorithm serves as the common interface of all hypergraph decomposition algorithms
type Algorithm interface {
// A Name is useful to identify the individual algorithms in the result
Name() string
Expand Down
11 changes: 1 addition & 10 deletions algorithms/balsepHybrid.go
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,6 @@ func (b BalSepHybrid) findDecomp(currentDepth int, H lib.Graph) lib.Decomp {
ch := make(chan lib.Decomp)
var subtrees []lib.Decomp

//var outDecomp []Decomp
for i := range comps {

if currentDepth > 0 {
Expand All @@ -114,24 +113,18 @@ func (b BalSepHybrid) findDecomp(currentDepth int, H lib.Graph) lib.Decomp {
//stop if there are at most two special edges left
if comps[i].Len() <= 2 {
ch <- baseCaseSmart(b.Graph, comps[i])
//outDecomp = append(outDecomp, baseCaseSmart(b.Graph, comps[i], Sp))
return
}

//Early termination
if comps[i].Edges.Len() <= b.K && len(comps[i].Special) == 1 {
ch <- earlyTermination(comps[i])
//outDecomp = append(outDecomp, earlyTermination(comps[i], Sp[0]))
return
}

det := DetKDecomp{K: b.K, Graph: b.Graph, BalFactor: b.BalFactor, SubEdge: true}

// edgesFromSpecial := EdgesSpecial(Sp)
// comps[i].Edges.Append(edgesFromSpecial...)

// det.cache = make(map[uint64]*CompCache)
det.cache.Init()

result := det.findDecomp(comps[i], balsep.Vertices())
if !reflect.DeepEqual(result, lib.Decomp{}) && currentDepth == 0 {
result.SkipRerooting = true
Expand All @@ -146,14 +139,12 @@ func (b BalSepHybrid) findDecomp(currentDepth int, H lib.Graph) lib.Decomp {
// }
}
ch <- result
// outDecomp = append(outDecomp, result)
}(i, comps, SepSpecial)
}

}

for i := 0; i < len(comps); i++ {
// decomp := outDecomp[i]
decomp := <-ch
if reflect.DeepEqual(decomp, lib.Decomp{}) {
// log.Printf("balDet REJECTING %v: couldn't decompose a component of H %v \n",
Expand Down
1 change: 0 additions & 1 deletion algorithms/balsepHybridSeq.go
Original file line number Diff line number Diff line change
Expand Up @@ -179,7 +179,6 @@ func (s BalSepHybridSeq) findDecomp(currentDepth int, H lib.Graph) lib.Decomp {
output := lib.Node{Bag: balsep.Vertices(), Cover: balsep}

for _, s := range subtrees {
//TODO: Reroot only after all subtrees received
if currentDepth == 0 && s.SkipRerooting {
// log.Println("\nFrom detK on", decomp.Graph, ":\n", decomp)
// local := BalSepGlobal{Graph: b.Graph, BalFactor: b.BalFactor}
Expand Down
4 changes: 0 additions & 4 deletions algorithms/detKDecomp.go
Original file line number Diff line number Diff line change
Expand Up @@ -221,19 +221,15 @@ OUTER:
continue OUTER
}
}

//d.Cache.AddPositive(sepActual, comps[i])

// log.Printf("Produced Decomp: %v\n", decomp)
subtrees = append(subtrees, decomp.Root)
}

return lib.Decomp{Graph: H, Root: lib.Node{Bag: bag, Cover: sepActual, Children: subtrees}}
}
}

}

}

return lib.Decomp{} // Reject if no separator could be found
Expand Down
9 changes: 2 additions & 7 deletions algorithms/logkHybrid.go
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,6 @@ func (l *LogKHybrid) Name() string {

// FindDecomp finds a decomp
func (l *LogKHybrid) FindDecomp() lib.Decomp {
// l.cache = make(map[uint32]*CompCache)
l.cache.Init()
return l.findDecomp(l.Graph, []int{}, l.Graph.Edges)
}
Expand All @@ -112,12 +111,9 @@ func (l *LogKHybrid) FindDecompGraph(Graph lib.Graph) lib.Decomp {
}

func (l *LogKHybrid) detKWrapper(H lib.Graph, Conn []int, allwowed lib.Edges) lib.Decomp {

det := DetKDecomp{K: l.K, Graph: lib.Graph{Edges: allwowed}, BalFactor: l.BalFactor, SubEdge: false}

// TODO: reuse the same cache as for Logk?
// det.Cache.Init()
l.cache.CopyRef(&det.cache)
l.cache.CopyRef(&det.cache) // reuse the same cache as log-k

return det.findDecomp(H, Conn)
}
Expand Down Expand Up @@ -154,7 +150,6 @@ func (l *LogKHybrid) baseCase(H lib.Graph, lenAE int) lib.Decomp {
}

// construct a decomp in the remaining two

if H.Edges.Len() <= l.K && len(H.Special) == 0 {
output = lib.Decomp{Graph: H, Root: lib.Node{Bag: H.Vertices(), Cover: H.Edges}}
}
Expand Down Expand Up @@ -183,7 +178,7 @@ func (l *LogKHybrid) findDecomp(H lib.Graph, Conn []int, allowedFull lib.Edges)
return l.baseCase(H, allowedFull.Len())
}

// Deterime the function to use for the recursive calls
// Determine the function to use for the recursive calls
var recCall recursiveCall

if l.Predicate(H, l.K) {
Expand Down
21 changes: 18 additions & 3 deletions balanced.go
Original file line number Diff line number Diff line change
@@ -1,3 +1,18 @@
// BalancedGo - A research prototype to compute structural decompositions of Conjunctive Queries and CSPs
// via the use of Balanced Separators with a focus on parallelism using the programming language Go.
//
// For more detailed information, see "Fast and Parallel Decomposition of Constraint Satisfaction Problems",
// Georg Gottlob, Cem Okulmus, Reinhard Pichler, released in Proc. IJCAI 2020.
// https://www.ijcai.org/Proceedings/2020/161
//
// The tool is split into three packages. main is responsible to actually run the various algorithms supported
// by the tool, lib is used to implement various functionality used by the algorithms and lastly algorithms which
// implements the actual algorithms to compute various decompositions.
//
// In addition to this, there is also a tool subdirectory in the repository which is intended to support functionality
// not directly related to the computation of decompositions, such as changing the formatting of hypergraphs, or fixing
// a faulty decomposition.

package main

import (
Expand All @@ -16,13 +31,13 @@ import (
"github.com/cem-okulmus/BalancedGo/lib"
)

// Decomp used to improve readabilty
// Decomp used to improve readability
type Decomp = lib.Decomp

// Edge used to improve readabilty
// Edge used to improve readability
type Edge = lib.Edge

// Graph used to improve readabilty
// Graph used to improve readability
type Graph = lib.Graph

func logActive(b bool) {
Expand Down
6 changes: 4 additions & 2 deletions lib/base.go
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
// This package provides various functions, data structures and methods to aid in the design of algorithms to
// compute structural decomposition methods.

package lib

import (
Expand Down Expand Up @@ -56,7 +59,7 @@ OUTER:
return output
}

// mem64 is the same as Mem, but for uint64
// mem64 is the same as mem, but for uint64
func mem64(as []uint64, b uint64) bool {
for _, a := range as {
if a == b {
Expand All @@ -74,7 +77,6 @@ func diffEdges(a Edges, e ...Edge) Edges {
for i := range e {
hashes = append(hashes, e[i].Hash())
}

for i := range a.Slice() {
if !mem64(hashes, a.Slice()[i].Hash()) {
output = append(output, a.Slice()[i])
Expand Down
6 changes: 0 additions & 6 deletions lib/cache.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ func (c *Cache) CopyRef(other *Cache) {
if c.cache == nil { // to be sure only an initialised cache is copied
c.Init()
}

c.cacheMux.RLock()
defer c.cacheMux.RUnlock()

Expand Down Expand Up @@ -74,12 +73,10 @@ func (c *Cache) AddNegative(sep Edges, comp Graph) {
defer c.cacheMux.Unlock()

_, ok := c.cache[sep.Hash()]

if !ok {
var newCache compCache
c.cache[sep.Hash()] = &newCache
}
// fmt.Println("Adding negative, current length of cache", len(c.cache))

c.cache[sep.Hash()].Fail = append(c.cache[sep.Hash()].Fail, comp.Hash())
}
Expand All @@ -90,7 +87,6 @@ func (c *Cache) CheckNegative(sep Edges, comps []Graph) bool {
defer c.cacheMux.RUnlock()

//check cache for previous encounters

compCachePrev, ok := c.cache[sep.Hash()]

if !ok { // sep not encountered before
Expand All @@ -100,7 +96,6 @@ func (c *Cache) CheckNegative(sep Edges, comps []Graph) bool {
for j := range comps {
for i := range compCachePrev.Fail {
if comps[j].Hash() == compCachePrev.Fail[i] {
// log.Println("Comp ", comp, "(hash ", comp.Edges.Hash(), ") known as negative for sep ", sep)
return true
}
}
Expand All @@ -124,7 +119,6 @@ func (c *Cache) CheckPositive(sep Edges, comps []Graph) bool {
for j := range comps {
for i := range compCachePrev.Succ {
if comps[j].Hash() == compCachePrev.Succ[i] {
// log.Println("Comp ", comp, " known as negative for sep ", sep)
return true
}
}
Expand Down
5 changes: 1 addition & 4 deletions lib/combin.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ func binomial(n, k int) int {
if n < k {
panic("combin: n < k")
}
// (n,k) = (n, n-k)

if k > n/2 {
k = n - k
}
Expand Down Expand Up @@ -165,15 +165,12 @@ func SplitCombin(n int, k int, split int, unextended bool) []*CombinationIterato
var output []*CombinationIterator

initial := CombinationIterator{n: n, k: k, stepSize: split, extended: !unextended, confirmed: true}

output = append(output, &initial)

for i := 1; i < split; i++ {
tempIter := CombinationIterator{n: n, k: k, stepSize: split, extended: !unextended, confirmed: true}

tempIter.hasNext()
nextCombinationStep(tempIter.combination, n, k, i)

output = append(output, &tempIter)
}

Expand Down
1 change: 0 additions & 1 deletion lib/cover.go
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@ func NewCover(K int, vertices []int, bound Edges, compVertices []int) Cover {
func (c *Cover) NextSubset() int {
if !c.first {
if !c.backtrack() {
// log.Println("No more covers possible.")
return -1 // no more backtracking possible
}
c.pos++
Expand Down
8 changes: 0 additions & 8 deletions lib/decomp.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,6 @@ func (d *Decomp) RestoreSubedges() {
// It also checks for the special condition of HDs, though it merely prints a warning if it is not satisfied,
// the output is not affected by this additional check.
func (d Decomp) Correct(g Graph) bool {

if reflect.DeepEqual(d, Decomp{}) { // empty Decomp is always false
return false
}
Expand Down Expand Up @@ -63,22 +62,16 @@ func (d Decomp) Correct(g Graph) bool {

//connectedness
for _, i := range d.Graph.Edges.Vertices() {

nodeCheck, _ := d.Root.connected(i, false)
if !nodeCheck {
mutex.RLock()
fmt.Printf("Vertex %v doesn't span connected subtree\n", m[i])
mutex.RUnlock()
return false
}
// if d.connected(i) != nodeCheck {
// log.Panicln("Node based connectedness check not working!")
// }

}

//special condition (optionally)

if !d.Root.noSCViolation() {
fmt.Println("SCV found!. Not a valid hypertree decomposition!")
}
Expand All @@ -99,7 +92,6 @@ func (d Decomp) CheckWidth() int {
if n.Cover.Len() > output {
output = n.Cover.Len()
}

for _, c := range n.Children {
children = append(children, c) // build up the next level of the tree
}
Expand Down
4 changes: 2 additions & 2 deletions lib/edge.go
Original file line number Diff line number Diff line change
Expand Up @@ -189,8 +189,8 @@ func removeDuplicateEdges(elementsSlice []Edge) Edges {
return NewEdges(elements.slice[:j+1])
}

// subedges computes all subdges for an Edges slice.
// TODO: Unnessarily adds empty edge
// subedges computes all subedges for an Edges slice.
// TODO: Unnecessarily adds empty edge
func (e Edge) subedges() []Edge {
var output []Edge

Expand Down
4 changes: 2 additions & 2 deletions lib/heuristics.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import (
"time"
)

// GetMSCOrder produes the Maximal Cardinality Search Ordering.
// GetMSCOrder produces the Maximal Cardinality Search Ordering.
// Implementation is based det-k-decomp of Samer and Gottlob '09
func GetMSCOrder(edges Edges) Edges {
rand.Seed(time.Now().UTC().UnixNano())
Expand Down Expand Up @@ -158,7 +158,7 @@ func diffDistances(old, new [][]int) int {
for i := 0; i < len(old[j]); i++ {
if isInf(old[j][i]) && !isInf(new[j][i]) { // disconnected a path
output = output + SepWeight
} else if !isInf(old[j][i]) && !isInf(new[j][i]) { // check if parth shortened
} else if !isInf(old[j][i]) && !isInf(new[j][i]) { // check if path shortened
diff := old[j][i] - new[j][i]
output = output + diff
}
Expand Down
2 changes: 0 additions & 2 deletions lib/node.go
Original file line number Diff line number Diff line change
Expand Up @@ -280,15 +280,13 @@ func (n *Node) CombineNodes(subtree Node, connecting Edges) *Node {
// leaf that covers the connecting vertices
if Subset(n.Bag, connecting.Vertices()) && len(n.Children) == 0 {
n.Children = subtree.Children
// log.Println("Base case activated at node Bag: ", PrintVertices(n.Bag), " Cover: ", n.Cover)
return &subtree
}

for i := range n.Children {
result := n.Children[i].CombineNodes(subtree, connecting)

if result != nil {
// log.Println("Child of node Bag: ", PrintVertices(n.Bag), " Cover: ", n.Cover, " activated")
n.Children[i] = *result
return n
}
Expand Down
2 changes: 1 addition & 1 deletion lib/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ type ParseGraph struct {
Encoding map[string]int
}

// GetGraph parses a string in Hyperbench format into a graph
// GetGraph parses a string in HyperBench format into a graph
func GetGraph(s string) (Graph, ParseGraph) {

graphLexer := lexer.Must(ebnf.New(`
Expand Down
3 changes: 1 addition & 2 deletions lib/preprocessing.go
Original file line number Diff line number Diff line change
Expand Up @@ -264,15 +264,14 @@ func (g Graph) TypeCollapse() (Graph, map[int][]int, int) {

for _, v := range g.Vertices() {
typeString := g.getType(v).String()
// fmt.Println("Type of ", m[v], "is ", typeString)

if _, ok := encountered[typeString]; ok {
// already seen this type before
count++
substituteMap[v] = encountered[typeString]
restorationMap[encountered[typeString]] = append(restorationMap[encountered[typeString]], v)
} else {
// Record thie type as a new element
// Record this type as a new element
encountered[typeString] = v
substituteMap[v] = v
}
Expand Down
Loading

0 comments on commit ff0a1de

Please sign in to comment.