From dbdd2f77a688914540dce9601d457b40aeb75ae2 Mon Sep 17 00:00:00 2001 From: Jean-Baptiste Guerraz <861556+jbguerraz@users.noreply.github.com> Date: Tue, 24 Nov 2020 13:15:31 +0100 Subject: [PATCH] groupby debug --- builder/query/group_by.go | 56 +++++++++++++++++++++++++++---------- builder/query/timeseries.go | 2 -- examples/main.go | 7 ++++- 3 files changed, 48 insertions(+), 17 deletions(-) diff --git a/builder/query/group_by.go b/builder/query/group_by.go index a32e10e..7035444 100644 --- a/builder/query/group_by.go +++ b/builder/query/group_by.go @@ -5,6 +5,7 @@ import ( "github.com/grafadruid/go-druid/builder" "github.com/grafadruid/go-druid/builder/aggregation" + "github.com/grafadruid/go-druid/builder/dimension" "github.com/grafadruid/go-druid/builder/filter" "github.com/grafadruid/go-druid/builder/granularity" "github.com/grafadruid/go-druid/builder/havingspec" @@ -16,6 +17,7 @@ import ( type GroupBy struct { Base + Dimensions []builder.Dimension `json:"dimensions"` VirtualColumns []builder.VirtualColumn `json:"virtualColumns"` Filter builder.Filter `json:"filter"` Granularity builder.Granularity `json:"granularity"` @@ -23,7 +25,7 @@ type GroupBy struct { PostAggregations []builder.PostAggregator `json:"postAggregations"` Having builder.HavingSpec `json:"having"` LimitSpec builder.LimitSpec `json:"limitSpec"` - SubtotalsSpec [][]string `json:"subtotalsSpec"` + SubtotalsSpec [][]string `json:"subtotalsSpec,omitempty"` } func NewGroupBy() *GroupBy { @@ -47,6 +49,11 @@ func (g *GroupBy) SetContext(context map[string]interface{}) *GroupBy { return g } +func (g *GroupBy) SetDimensions(dimensions []builder.Dimension) *GroupBy { + g.Dimensions = dimensions + return g +} + func (g *GroupBy) SetVirtualColumns(virtualColumns []builder.VirtualColumn) *GroupBy { g.VirtualColumns = virtualColumns return g @@ -88,8 +95,9 @@ func (g *GroupBy) SetSubtotalsSpec(subtotalsSpec [][]string) *GroupBy { } func (g *GroupBy) UnmarshalJSON(data []byte) error { + var err error var 
tmp struct { - Base + Dimensions []json.RawMessage `json:"dimensions"` VirtualColumns []json.RawMessage `json:"virtualColumns"` Filter json.RawMessage `json:"filter"` Granularity json.RawMessage `json:"granularity"` @@ -99,10 +107,17 @@ func (g *GroupBy) UnmarshalJSON(data []byte) error { LimitSpec json.RawMessage `json:"limitSpec"` SubtotalsSpec [][]string `json:"subtotalsSpec"` } - if err := json.Unmarshal(data, &tmp); err != nil { + if err = json.Unmarshal(data, &tmp); err != nil { return err } - var err error + var d builder.Dimension + dd := make([]builder.Dimension, len(tmp.Dimensions)) + for i := range tmp.Dimensions { + if d, err = dimension.Load(tmp.Dimensions[i]); err != nil { + return err + } + dd[i] = d + } var v builder.VirtualColumn vv := make([]builder.VirtualColumn, len(tmp.VirtualColumns)) for i := range tmp.VirtualColumns { @@ -111,9 +126,12 @@ func (g *GroupBy) UnmarshalJSON(data []byte) error { } vv[i] = v } - f, err := filter.Load(tmp.Filter) - if err != nil { - return err + var f builder.Filter + if tmp.Filter != nil { + f, err = filter.Load(tmp.Filter) + if err != nil { + return err + } } gr, err := granularity.Load(tmp.Granularity) if err != nil { @@ -135,15 +153,25 @@ func (g *GroupBy) UnmarshalJSON(data []byte) error { } pp[i] = p } - h, err := havingspec.Load(tmp.Having) - if err != nil { - return err + var h builder.HavingSpec + if tmp.Having != nil { + h, err = havingspec.Load(tmp.Having) + if err != nil { + return err + } } - l, err := limitspec.Load(tmp.LimitSpec) - if err != nil { - return err + var l builder.LimitSpec + if tmp.LimitSpec != nil { + l, err = limitspec.Load(tmp.LimitSpec) + if err != nil { + return err + } + } + if len(tmp.SubtotalsSpec) == 0 { + tmp.SubtotalsSpec = nil } - g.Base = tmp.Base + if err = g.Base.UnmarshalJSON(data); err != nil { return err } + g.Dimensions = dd g.VirtualColumns = vv g.Filter = f g.Granularity = gr diff --git a/builder/query/timeseries.go b/builder/query/timeseries.go index b88c723..90316fe 100644 --- 
a/builder/query/timeseries.go +++ b/builder/query/timeseries.go @@ -4,7 +4,6 @@ import ( "encoding/json" "errors" - "github.com/davecgh/go-spew/spew" "github.com/grafadruid/go-druid/builder" "github.com/grafadruid/go-druid/builder/aggregation" "github.com/grafadruid/go-druid/builder/filter" @@ -139,6 +138,5 @@ func (t *Timeseries) UnmarshalJSON(data []byte) error { t.Aggregations = aa t.PostAggregations = pp t.Limit = tmp.Limit - spew.Dump(t) return nil } diff --git a/examples/main.go b/examples/main.go index 4ee5207..80ab2d1 100644 --- a/examples/main.go +++ b/examples/main.go @@ -57,7 +57,12 @@ func main() { d.Query().Execute(q, &results) spew.Dump(results) - q, err = d.Query().Load([]byte("{\"aggregations\":[{\"fieldName\":\"sum_delta\",\"name\":\"delta\",\"type\":\"longSum\"}],\"context\":{\"plop\":\"plep\"},\"dataSource\":{\"name\":\"wikipedia\",\"type\":\"table\"},\"dimension\":{\"dimension\":\"regionName\",\"outputName\":\"region\",\"outputType\":\"STRING\",\"type\":\"default\"},\"filter\":{\"dimension\":\"countryName\",\"extractionFn\":null,\"type\":\"selector\",\"value\":\"France\"},\"granularity\":\"day\",\"intervals\":[\"2016-06-26T23:46:21.573Z/2016-06-27T16:03:59.999Z\"],\"metric\":{\"metric\":\"delta\",\"type\":\"numeric\"},\"postAggregations\":[],\"queryType\":\"topN\",\"threshold\":50,\"virtualColumns\":[]}")) + q, err = 
d.Query().Load([]byte("{\"aggregations\":[{\"fieldName\":\"sum_delta\",\"name\":\"delta\",\"type\":\"longSum\"}],\"context\":{\"plop\":\"plep\"},\"dataSource\":{\"name\":\"wikipedia\",\"type\":\"table\"},\"dimension\":{\"dimension\":\"regionName\",\"outputName\":\"region\",\"outputType\":\"STRING\",\"type\":\"default\"},\"filter\":{\"dimension\":\"countryName\",\"extractionFn\":null,\"type\":\"selector\",\"value\":\"France\"},\"granularity\":\"hour\",\"intervals\":[\"2016-06-26T23:46:21.573Z/2016-07-27T16:03:59.999Z\"],\"metric\":{\"metric\":\"delta\",\"type\":\"numeric\"},\"postAggregations\":[],\"queryType\":\"topN\",\"threshold\":50,\"virtualColumns\":[]}")) + spew.Dump(q, err) + d.Query().Execute(q, &results) + spew.Dump(results) + + q, err = d.Query().Load([]byte("{\"aggregations\":[{\"fieldName\":\"sum_delta\",\"name\":\"delta\",\"type\":\"longSum\"}],\"context\":{\"plop\":\"plep\"},\"dataSource\":{\"name\":\"wikipedia\",\"type\":\"table\"},\"dimensions\":[{\"dimension\":\"countryName\",\"outputName\":\"country\",\"outputType\":\"STRING\",\"type\":\"default\"},{\"dimension\":\"regionName\",\"outputName\":\"region\",\"outputType\":\"STRING\",\"type\":\"default\"}],\"filter\":{\"dimension\":\"countryName\",\"extractionFn\":null,\"type\":\"selector\",\"value\":\"France\"},\"granularity\":\"minute\",\"intervals\":[\"2016-06-26T23:58:44.369Z/2016-06-27T21:02:53.165Z\"],\"postAggregations\":[],\"queryType\":\"groupBy\",\"subtotalsSpec\":[],\"virtualColumns\":[]}")) spew.Dump(q, err) d.Query().Execute(q, &results) spew.Dump(results)