Fix 'having' condition on RDBMS DAL aggregation
parent f1557e451f
commit 2fc9392367
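Summary of the change: the RDBMS DAL no longer receives the HAVING condition as a raw string. `Aggregate` and `aggregateSql` now take a pre-parsed `*ql.ASTNode`, the empty-string check becomes a nil check, and `parseQuery` is swapped for `convertQuery`. A minimal sketch of the new calling convention, assuming a model `m`, a filter `f`, and `groupBy`/`aggrExpr` slices set up as in the tests below (these names are placeholders, not part of the diff):

    // parse the HAVING expression up front; Aggregate no longer accepts raw strings
    having, err := ql.NewParser().Parse("SUM(quantity) > 0")
    if err != nil {
        return err
    }

    // pass the parsed AST as the last argument; use nil when no HAVING condition is needed
    i, err := m.Aggregate(f, groupBy, aggrExpr, having)
    if err != nil {
        return err
    }
    defer i.Close() // error from Close ignored in this sketch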
@@ -248,7 +248,7 @@ func (d *model) Search(f filter.Filter) (i *iterator, err error) {
 // expressions when constructing expressions & columns to select from.
 //
 // Passing in filter with cursor, empty groupBy or aggrExpr slice will result in an error
-func (d *model) Aggregate(f filter.Filter, groupBy []*dal.AggregateAttr, aggrExpr []*dal.AggregateAttr, having string) (i *iterator, err error) {
+func (d *model) Aggregate(f filter.Filter, groupBy []*dal.AggregateAttr, aggrExpr []*dal.AggregateAttr, having *ql.ASTNode) (i *iterator, err error) {
     if f.Cursor() != nil {
         return nil, fmt.Errorf("can not use cursors when aggregating")
     }
@@ -491,7 +491,7 @@ func (d *model) searchSql(f filter.Filter) *goqu.SelectDataset {
     return base.Where(cnd...)
 }
 
-func (d *model) aggregateSql(f filter.Filter, groupBy []*dal.AggregateAttr, out []*dal.AggregateAttr, having string) (q *goqu.SelectDataset) {
+func (d *model) aggregateSql(f filter.Filter, groupBy []*dal.AggregateAttr, out []*dal.AggregateAttr, having *ql.ASTNode) (q *goqu.SelectDataset) {
     // get SELECT query based on
     // the given filter
     q = d.searchSql(f)
@@ -541,8 +541,8 @@ func (d *model) aggregateSql(f filter.Filter, groupBy []*dal.AggregateAttr, out
         q = q.SelectAppend(expr)
     }
 
-    if len(having) > 0 {
-        if expr, err = d.parseQuery(having); err != nil {
+    if having != nil {
+        if expr, err = d.convertQuery(having); err != nil {
             return q.SetError(err)
         }
 
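The success path of this block sits just outside the hunk above; assuming the converted expression is attached with goqu's `Having` (an assumption, since that line is not part of the diff), the reworked block reads roughly like this:

    if having != nil {
        // convert the pre-parsed ql AST into a goqu-compatible expression
        if expr, err = d.convertQuery(having); err != nil {
            return q.SetError(err)
        }

        // attaching the condition via goqu's Having is assumed here; that line
        // falls outside the hunk shown above
        q = q.Having(expr)
    }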
@@ -6,6 +6,7 @@ import (
     "github.com/cortezaproject/corteza-server/pkg/dal"
     "github.com/cortezaproject/corteza-server/pkg/filter"
     "github.com/cortezaproject/corteza-server/pkg/logger"
+    "github.com/cortezaproject/corteza-server/pkg/ql"
     . "github.com/cortezaproject/corteza-server/store/adapters/rdbms/dal"
     "github.com/spf13/cast"
     "github.com/stretchr/testify/require"
@@ -155,7 +156,7 @@ func TestModel_Aggregate(t *testing.T) {
             {Identifier: "avg", RawExpr: "AVG(price)", Type: &dal.TypeNumber{}},
             {Identifier: "stock", RawExpr: "SUM(quantity)", Type: &dal.TypeNumber{}},
         },
-        "", // <== here be having condition
+        nil, // <== here be having condition
     )
     req.NoError(err)
     req.NotNil(i)
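Existing call sites that aggregate without a HAVING condition now pass `nil` in place of the former empty string; the new `TestModel_AggregateWithHaving` below exercises a non-nil condition end to end.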
@@ -186,6 +187,104 @@ func TestModel_Aggregate(t *testing.T) {
     req.Equal("avg=2000.00 count=2 date=2022-10-06 group=g1 max=3000 min=1000 stock=40.00", rows[2].String())
 }
 
+func TestModel_AggregateWithHaving(t *testing.T) {
+    _ = logger.Default()
+
+    var (
+        req = require.New(t)
+
+        ctx = context.Background()
+
+        baseModel = &dal.Model{
+            Ident: "test_dal_having",
+            Attributes: []*dal.Attribute{
+                {Ident: "item", Type: &dal.TypeText{}},
+                {Ident: "date", Type: &dal.TypeDate{}, Store: &dal.CodecRecordValueSetJSON{Ident: "values"}, Sortable: true},
+                {Ident: "group", Type: &dal.TypeText{}, Store: &dal.CodecRecordValueSetJSON{Ident: "values"}, Sortable: true},
+                {Ident: "quantity", Type: &dal.TypeNumber{}, Store: &dal.CodecRecordValueSetJSON{Ident: "values"}, Filterable: true},
+                {Ident: "price", Type: &dal.TypeNumber{}, Filterable: true},
+                {Ident: "published", Type: &dal.TypeBoolean{}, Filterable: true},
+            },
+        }
+
+        m = Model(baseModel, s.DB, s.Dialect)
+
+        i dal.Iterator
+
+        table, err = s.DataDefiner.ConvertModel(baseModel)
+        row        kv
+    )
+
+    ctx = logger.ContextWithValue(context.Background(), logger.MakeDebugLogger())
+
+    t.Logf("Creating temporary table %q", table.Ident)
+    table.Temporary = true
+    req.NoError(s.DataDefiner.TableCreate(ctx, table))
+
+    req.NoError(m.Create(ctx, &kv{"item": "i1", "date": "2022-10-06", "group": "g1", "price": "1000", "quantity": "0", "published": true}))
+    req.NoError(m.Create(ctx, &kv{"item": "i2", "date": "2022-10-06", "group": "g1", "price": "3000", "quantity": "0", "published": true}))
+    req.NoError(m.Create(ctx, &kv{"item": "i3", "date": "2022-10-06", "group": "g2", "price": "4000", "quantity": "40", "published": false}))
+    req.NoError(m.Create(ctx, &kv{"item": "i4", "date": "2022-10-06", "group": "g2", "price": "1000", "quantity": "10", "published": true}))
+    req.NoError(m.Create(ctx, &kv{"item": "i5", "date": "2022-10-07", "group": "g2", "price": "1000", "quantity": "10", "published": false}))
+    req.NoError(m.Create(ctx, &kv{"item": "i6", "date": "2022-10-07", "group": "g2", "price": "5000", "quantity": "50", "published": true}))
+
+    t.Log("Aggregating all records, calculating min & max price per group, ignoring empty quantity")
+    i, err = m.Aggregate(
+        filter.Generic(
+            filter.WithExpression("published"),
+            filter.WithOrderBy(filter.SortExprSet{
+                &filter.SortExpr{Column: "group", Descending: true},
+                &filter.SortExpr{Column: "date", Descending: false},
+            }),
+        ),
+        // group-by
+        []*dal.AggregateAttr{
+            {Identifier: "date", Type: &dal.TypeDate{}, Store: &dal.CodecRecordValueSetJSON{Ident: "values"}},
+            {Identifier: "group", Type: &dal.TypeText{}, Store: &dal.CodecRecordValueSetJSON{Ident: "values"}},
+        },
+        // aggregation expressions
+        []*dal.AggregateAttr{
+            {Identifier: "count", RawExpr: "COUNT(*)", Type: &dal.TypeNumber{}},
+            {Identifier: "max", RawExpr: "MAX(price)", Type: &dal.TypeNumber{}},
+            {Identifier: "min", RawExpr: "MIN(price)", Type: &dal.TypeNumber{}},
+            {Identifier: "avg", RawExpr: "AVG(price)", Type: &dal.TypeNumber{}},
+            {Identifier: "stock", RawExpr: "SUM(quantity)", Type: &dal.TypeNumber{}},
+        },
+        // having condition
+        func() *ql.ASTNode {
+            n, err := ql.NewParser().Parse("SUM(quantity) > 0")
+            req.NoError(err)
+            return n
+        }(),
+    )
+    req.NoError(err)
+    req.NotNil(i)
+
+    defer func() { req.NoError(i.Close()) }()
+
+    // uncomment to see generated query
+    ctx = logger.ContextWithValue(context.Background(), logger.MakeDebugLogger())
+
+    t.Log("Iterating over results")
+    rows := make([]kv, 0, 3)
+    for i.Next(ctx) {
+        row = kv{}
+        req.NoError(i.Scan(row))
+
+        // due to differences in the number of decimal digits across DBs, we need to do this
+        // to make sure we get the same result
+        row["avg"] = fmt.Sprintf("%.2f", cast.ToFloat64(row["avg"]))
+        row["stock"] = fmt.Sprintf("%.2f", cast.ToFloat64(row["stock"]))
+
+        rows = append(rows, row)
+    }
+
+    req.NoError(i.Err())
+    req.Len(rows, 2)
+    req.Equal("avg=1000.00 count=1 date=2022-10-06 group=g2 max=1000 min=1000 stock=10.00", rows[0].String())
+    req.Equal("avg=5000.00 count=1 date=2022-10-07 group=g2 max=5000 min=5000 stock=50.00", rows[1].String())
+}
+
 func TestModel_Distinct(t *testing.T) {
     _ = logger.Default()
 
@@ -236,7 +335,7 @@ func TestModel_Distinct(t *testing.T) {
             {Identifier: "group", Type: &dal.TypeText{}, Store: &dal.CodecRecordValueSetJSON{Ident: "values"}},
         },
         nil,
-        "", // <== here be having condition
+        nil, // <== here be having condition
     )
     req.NoError(err)
     req.NotNil(i)
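For reference, the expected rows in `TestModel_AggregateWithHaving` follow directly from the seeded data: the `published` filter keeps i1, i2, i4 and i6; grouping by date and group yields (2022-10-06, g1), (2022-10-06, g2) and (2022-10-07, g2); and `SUM(quantity) > 0` drops the g1 group, whose published records both have quantity 0. Each remaining group contains a single record, which is why both expected rows report count=1 with avg, min and max equal to that record's price.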