3
0

Port compose record builder

This commit is contained in:
Denis Arh
2020-08-29 14:57:01 +02:00
parent 68525e823b
commit 720dc38da6
22 changed files with 676 additions and 49 deletions

View File

@@ -304,8 +304,7 @@ func (svc record) Report(namespaceID, moduleID uint64, metrics, dimensions, filt
aProps.setNamespace(ns)
aProps.setModule(m)
panic("refactor")
//out, err = store.ComposeRecordReport(m, svc.store, metrics, dimensions, filter)
out, err = store.ComposeRecordReport(svc.ctx, svc.store, m, metrics, dimensions, filter)
return err
}()

View File

@@ -2,7 +2,6 @@ package ql
import (
"fmt"
"github.com/Masterminds/squirrel"
)
@@ -16,7 +15,7 @@ type (
}
ASTSet []ASTNode // Stream of comma delimited nodes
ASTNodes []ASTNode // Stream of un-delimited nodes
ASTNodes []ASTNode // Stream of space delimited nodes
Columns []Column
@@ -59,6 +58,11 @@ type (
Name string
Arguments ASTSet
}
NodeF struct {
Expr string
Arguments []ASTNode
}
)
func (n LNull) Validate() (err error) { return }
@@ -128,7 +132,11 @@ func (nn ASTNodes) Validate() (err error) {
}
func (nn ASTNodes) String() (out string) {
for _, n := range nn {
for i, n := range nn {
if i > 0 {
out = out + " "
}
out = out + n.String()
}
@@ -171,6 +179,13 @@ func (nn Columns) Strings() (out []string) {
return
}
// MakeFormattedNode builds a NodeF whose final SQL is produced by applying
// expr as a Sprintf template over the rendered argument nodes (see NodeF.ToSql).
func MakeFormattedNode(expr string, nn ...ASTNode) *NodeF {
	return &NodeF{Expr: expr, Arguments: nn}
}

// Validate implements ASTNode; a formatted node is always considered valid.
func (n NodeF) Validate() (err error) { return }

// String returns the raw (unformatted) expression template.
func (n NodeF) String() string { return n.Expr }
func validate(nn []ASTNode) (err error) {
if len(nn) == 0 {
return fmt.Errorf("empty node set")

View File

@@ -19,14 +19,14 @@ type (
}
IdentHandler func(ident Ident) (Ident, error)
FunctionHandler func(ident Function) (Function, error)
FunctionHandler func(ident Function) (ASTNode, error)
)
// NewParser returns a new instance of Parser.
func NewParser() *Parser {
p := &Parser{
OnIdent: func(ident Ident) (Ident, error) { return ident, nil },
OnFunction: func(ident Function) (Function, error) { return ident, nil },
OnFunction: func(ident Function) (ASTNode, error) { return ident, nil },
}
return p

View File

@@ -95,6 +95,18 @@ func TestAstParser_Parser(t *testing.T) {
LNumber{Value: "2010"},
},
},
//{
// parser: NewParser().ParseExpression,
// in: `EXTRACT(QUARTER FROM created_at)`,
// tree: Function{
// Name: "EXTRACT",
// Arguments: ASTSet{
// Keyword{Keyword: "QUARTER"},
// Keyword{Keyword: "FROM"},
// Ident{Value: "created_at"},
// },
// },
//},
{
parser: NewParser().ParseExpression,
in: `year(created_at) != 2010 AND month(created_at) = 6`,

View File

@@ -116,3 +116,25 @@ func (n LString) ToSql() (string, []interface{}, error) {
func (n LNumber) ToSql() (string, []interface{}, error) {
return n.Value, nil, nil
}
// ToSql renders the formatted node: each argument node is rendered first,
// its SQL fragment is substituted into the expression template via Sprintf,
// and its bind arguments are collected and passed through to the caller.
func (n NodeF) ToSql() (string, []interface{}, error) {
	var (
		// rendered SQL of each argument, fed to Sprintf to complete the expression
		fragments []interface{}

		// accumulated bind arguments from all argument ToSql() calls,
		// passed on to the caller
		bindArgs []interface{}
	)

	for _, arg := range n.Arguments {
		sql, args, err := arg.ToSql()
		if err != nil {
			return "", nil, err
		}

		fragments = append(fragments, sql)
		bindArgs = append(bindArgs, args...)
	}

	return fmt.Sprintf(n.Expr, fragments...), bindArgs, nil
}

View File

@@ -29,6 +29,11 @@ type (
DeleteComposeRecordByID(ctx context.Context, _mod *types.Module, ID uint64) error
TruncateComposeRecords(ctx context.Context, _mod *types.Module) error
// Additional custom functions
// ComposeRecordReport (custom function)
ComposeRecordReport(ctx context.Context, _mod *types.Module, _metrics string, _dimensions string, _filters string) ([]map[string]interface{}, error)
}
)
@@ -80,3 +85,7 @@ func DeleteComposeRecordByID(ctx context.Context, s ComposeRecords, _mod *types.
func TruncateComposeRecords(ctx context.Context, s ComposeRecords, _mod *types.Module) error {
return s.TruncateComposeRecords(ctx, _mod)
}
// ComposeRecordReport generates a report for the given module by delegating
// to the store's custom ComposeRecordReport implementation.
func ComposeRecordReport(ctx context.Context, s ComposeRecords, _mod *types.Module, _metrics string, _dimensions string, _filters string) ([]map[string]interface{}, error) {
	return s.ComposeRecordReport(ctx, _mod, _metrics, _dimensions, _filters)
}

View File

@@ -16,6 +16,15 @@ fields:
- { field: UpdatedAt, sortable: true }
- { field: DeletedAt, sortable: true }
functions:
- name: ComposeRecordReport
arguments:
- { name: mod, type: "*types.Module" }
- { name: metrics, type: string }
- { name: dimensions, type: string }
- { name: filters, type: string }
return: [ ComposeRecordsReport, error ]
lookups:
- fields: [ ID ]
export: false

View File

@@ -1,7 +1,9 @@
package mysql
import (
"github.com/Masterminds/squirrel"
"github.com/cortezaproject/corteza-server/store"
"github.com/cortezaproject/corteza-server/store/rdbms"
"github.com/stretchr/testify/require"
"testing"
)
@@ -9,9 +11,10 @@ import (
func TestBuilder(t *testing.T) {
var (
req = require.New(t)
cfg = &rdbms.Config{PlaceholderFormat: squirrel.Question}
)
upsert, err := UpsertBuilder("tbl", store.Payload{"c1": "v1", "c2": "v2"}, "c1")
upsert, err := UpsertBuilder(cfg, "tbl", store.Payload{"c1": "v1", "c2": "v2"}, "c1")
req.NoError(err)
sql, args, err := upsert.ToSql()
req.NoError(err)

View File

@@ -29,6 +29,7 @@ func New(ctx context.Context, dsn string) (s *Store, err error) {
cfg.TxRetryErrHandler = txRetryErrHandler
cfg.ErrorHandler = errorHandler
cfg.UpsertBuilder = UpsertBuilder
cfg.CastModuleFieldToColumnType = fieldToColumnTypeCaster
s = new(Store)
if s.Store, err = rdbms.New(ctx, cfg); err != nil {

View File

@@ -0,0 +1,24 @@
package mysql
import (
"fmt"
"github.com/cortezaproject/corteza-server/pkg/ql"
"github.com/cortezaproject/corteza-server/store/rdbms"
)
// fieldToColumnTypeCaster wraps the record-value column reference in a
// MySQL-specific cast expression matching the module field's type, so that
// comparisons, grouping and ordering behave according to the field kind.
func fieldToColumnTypeCaster(field rdbms.ModuleFieldTypeDetector, i ql.Ident) (ql.Ident, error) {
	var tpl string

	switch {
	case field.IsBoolean():
		tpl = "(rv_%s.value NOT IN ('', '0', 'false', 'f', 'FALSE', 'F', false))"
	case field.IsNumeric():
		tpl = "CAST(rv_%s.value AS SIGNED)"
	case field.IsDateTime():
		tpl = "CAST(rv_%s.value AS DATETIME)"
	case field.IsRef():
		tpl = "rv_%s.ref "
	default:
		tpl = "rv_%s.value "
	}

	i.Value = fmt.Sprintf(tpl, i.Value)
	return i, nil
}

View File

@@ -27,6 +27,8 @@ func New(ctx context.Context, dsn string) (s *Store, err error) {
cfg.PlaceholderFormat = squirrel.Dollar
cfg.ErrorHandler = errorHandler
cfg.SqlFunctionHandler = sqlFunctionHandler
cfg.CastModuleFieldToColumnType = fieldToColumnTypeCaster
s = new(Store)
if s.Store, err = rdbms.New(ctx, cfg); err != nil {

View File

@@ -0,0 +1,16 @@
package pgsql
import (
"fmt"
"github.com/cortezaproject/corteza-server/pkg/ql"
"strings"
)
// sqlFunctionHandler translates QL function calls into their PostgreSQL
// equivalents: QUARTER/YEAR become EXTRACT(... FROM ...::date) expressions;
// any other function is returned untouched.
func sqlFunctionHandler(f ql.Function) (ql.ASTNode, error) {
	name := strings.ToUpper(f.Name)

	if name == "QUARTER" || name == "YEAR" {
		expr := fmt.Sprintf("EXTRACT(%s FROM %%s::date)", f.Name)
		return ql.MakeFormattedNode(expr, f.Arguments...), nil
	}

	return f, nil
}

View File

@@ -0,0 +1,24 @@
package pgsql
import (
"fmt"
"github.com/cortezaproject/corteza-server/pkg/ql"
"github.com/cortezaproject/corteza-server/store/rdbms"
)
// fieldToColumnTypeCaster wraps the record-value column reference in a
// PostgreSQL-specific cast expression matching the module field's type,
// so that comparisons, grouping and ordering behave according to the
// field kind.
func fieldToColumnTypeCaster(field rdbms.ModuleFieldTypeDetector, i ql.Ident) (ql.Ident, error) {
	switch {
	case field.IsBoolean():
		// Only string literals may appear in the IN list: rv_%s.value is a
		// text column and PostgreSQL refuses to compare text with an unquoted
		// boolean literal ("operator does not exist: text = boolean"), so the
		// bare `false` the MySQL caster uses must not be carried over here.
		i.Value = fmt.Sprintf("rv_%s.value NOT IN ('', '0', 'false', 'f', 'FALSE', 'F')", i.Value)
	case field.IsNumeric():
		i.Value = fmt.Sprintf("rv_%s.value::NUMERIC", i.Value)
	case field.IsDateTime():
		i.Value = fmt.Sprintf("rv_%s.value::TIMESTAMP", i.Value)
	case field.IsRef():
		i.Value = fmt.Sprintf("rv_%s.ref ", i.Value)
	default:
		i.Value = fmt.Sprintf("rv_%s.value ", i.Value)
	}

	return i, nil
}

View File

@@ -0,0 +1,263 @@
package rdbms
import (
"context"
"database/sql"
"fmt"
"github.com/Masterminds/squirrel"
"github.com/cortezaproject/corteza-server/compose/types"
"github.com/cortezaproject/corteza-server/pkg/handle"
"github.com/cortezaproject/corteza-server/pkg/ql"
"github.com/cortezaproject/corteza-server/pkg/rh"
"github.com/cortezaproject/corteza-server/pkg/slice"
"github.com/jmoiron/sqlx"
"strconv"
"strings"
)
type (
ComposeRecordsReport []map[string]interface{}
recordReportBuilder struct {
// This is set by metric/column building to assist Cast()
numerics []string
parser *ql.Parser
module *types.Module
metrics string
dimensions string
filters string
store recordReportBuilderStoreQuerier
supportedAggregationFunctions map[string]bool
supportedFilterFunctions map[string]bool
}
recordReportBuilderStoreQuerier interface {
SelectBuilder(string, ...string) squirrel.SelectBuilder
Query(context.Context, squirrel.SelectBuilder) (*sql.Rows, error)
SqlFunctionHandler(f ql.Function) (ql.ASTNode, error)
FieldToColumnTypeCaster(f ModuleFieldTypeDetector, i ql.Ident) (ql.Ident, error)
}
)
// ComposeRecordReportBuilder returns a report builder bound to the given
// store and module, preloaded with the raw metrics, dimensions and filters
// expressions and the sets of SQL functions accepted in each context.
func ComposeRecordReportBuilder(s *Store, module *types.Module, metrics, dimensions, filters string) *recordReportBuilder {
	var (
		// aggregation functions allowed in metric expressions
		aggregates = []string{
			"COUNT",
			"SUM",
			"MAX",
			"MIN",
			"AVG",
			"STD",
		}

		// scalar functions allowed in dimension & filter expressions
		filterFns = []string{
			"CONCAT",
			"QUARTER",
			"YEAR",
			"DATE",
			"NOW",
			"DATE_ADD",
			"DATE_SUB",
			"DATE_FORMAT",
		}
	)

	return &recordReportBuilder{
		parser:     ql.NewParser(),
		store:      s,
		module:     module,
		metrics:    metrics,
		dimensions: dimensions,
		filters:    filters,

		supportedAggregationFunctions: slice.ToStringBoolMap(aggregates),
		supportedFilterFunctions:      slice.ToStringBoolMap(filterFns),
	}
}
// Run builds the report query, executes it against the store and casts every
// returned row into a generic map (see Cast for value coercion rules).
func (b *recordReportBuilder) Run(ctx context.Context) (ComposeRecordsReport, error) {
	var (
		result = make(ComposeRecordsReport, 0)
	)

	sb, err := b.Build()
	if err != nil {
		return nil, fmt.Errorf("can not generate report query: %w", err)
	}

	rows, err := b.store.Query(ctx, sb)
	if err != nil {
		return nil, fmt.Errorf("can not execute report query: %w", err)
	}
	defer rows.Close()

	for rows.Next() {
		r, err := b.Cast(rows)
		if err != nil {
			// BUGFIX: this error was previously assigned to the outer err and
			// then immediately overwritten by rows.Err() without being checked,
			// silently discarding row-casting failures.
			return nil, err
		}

		result = append(result, r)
	}

	if err = rows.Err(); err != nil {
		return nil, err
	}

	return result, nil
}
// ToSql builds the report query and renders it to SQL with bind arguments,
// satisfying squirrel's Sqlizer interface.
func (b recordReportBuilder) ToSql() (string, []interface{}, error) {
	sb, err := b.Build()
	if err != nil {
		return "", nil, err
	}

	return sb.ToSql()
}
// stdAggregationHandler rejects aggregation functions outside the supported
// set and hands accepted ones to the store backend for translation.
//
// Identifiers should be names of the fields (physical table columns OR json fields, defined in module)
func (b *recordReportBuilder) stdAggregationHandler(f ql.Function) (ql.ASTNode, error) {
	if name := strings.ToUpper(f.Name); !b.supportedAggregationFunctions[name] {
		return f, fmt.Errorf("unsupported aggregate function %q", f.Name)
	}

	return b.store.SqlFunctionHandler(f)
}
// stdFilterFuncHandler rejects filter functions outside the supported set
// and hands accepted ones to the store backend for translation.
//
// Identifiers should be names of the fields (physical table columns OR json fields, defined in module)
func (b *recordReportBuilder) stdFilterFuncHandler(f ql.Function) (ql.ASTNode, error) {
	if name := strings.ToUpper(f.Name); !b.supportedFilterFunctions[name] {
		return f, fmt.Errorf("unsupported filter function %q", f.Name)
	}

	return b.store.SqlFunctionHandler(f)
}
// Build assembles the report SELECT statement over compose_record: one LEFT
// JOIN per referenced module field, metric columns cast to DECIMAL, dimension
// columns grouped & ordered, and an optional parsed filter expression appended
// to the base WHERE clause.
//
// Note: parsing order matters — parser callbacks (OnIdent/OnFunction) mutate
// the builder state (report, joinedFields, numerics) as expressions are parsed.
func (b *recordReportBuilder) Build() (sb squirrel.SelectBuilder, err error) {
	var (
		// join template; every %s is replaced with the (validated) field name
		joinTpl = "compose_record_value AS rv_%s ON (rv_%s.record_id = crd.id AND rv_%s.name = '%s' AND rv_%s.deleted_at IS NULL)"

		report = b.store.SelectBuilder("compose_record AS crd").
			Column(squirrel.Alias(squirrel.Expr("COUNT(*)"), "count")).
			Where("crd.deleted_at IS NULL").
			Where("crd.module_id = ?", b.module.ID)

		joinedFields = []string{}

		// alreadyJoined reports whether the field was seen before; on a miss
		// it also records the field as joined (append happens inside).
		alreadyJoined = func(f string) bool {
			for _, a := range joinedFields {
				if a == f {
					return true
				}
			}

			joinedFields = append(joinedFields, f)
			return false
		}
	)

	// Resolve every identifier found while parsing expressions: real record
	// columns pass through; module fields are validated, joined (once) and
	// cast to a backend-specific column expression.
	b.parser.OnIdent = func(i ql.Ident) (ql.Ident, error) {
		var is bool
		if i.Value, is = isRealRecordCol(i.Value); is {
			return i, nil
		}

		if !b.module.Fields.HasName(i.Value) {
			return i, fmt.Errorf("unknown field %q", i.Value)
		}

		if !handle.IsValid(i.Value) {
			return i, fmt.Errorf("invalid field name: %q", i.Value)
		}

		if !alreadyJoined(i.Value) {
			report = report.LeftJoin(strings.ReplaceAll(joinTpl, "%s", i.Value))
		}

		return b.store.FieldToColumnTypeCaster(b.module.Fields.FindByName(i.Value), i)
	}

	var columns ql.Columns

	// Metric expressions may only use aggregation functions.
	b.parser.OnFunction = b.stdAggregationHandler
	if columns, err = b.parser.ParseColumns(b.metrics); err != nil {
		err = fmt.Errorf("could not parse metrics %q: %w", b.metrics, err)
		return
	}

	// Add all metrics to columns
	for i, m := range columns {
		if m.Alias == "" {
			// Generate alias
			m.Alias = fmt.Sprintf("metric_%d", i)
		}

		// Wrap to cast func to ensure numeric output
		col := squirrel.Alias(rh.SquirrelConcatExpr("CAST(", m.Expr, " AS DECIMAL(14,2))"), m.Alias)
		report = report.Column(col)

		// Remember metric aliases so Cast() can coerce them to float64.
		b.numerics = append(b.numerics, m.Alias)
	}

	// Dimension expressions may use the (non-aggregating) filter function set.
	b.parser.OnFunction = b.stdFilterFuncHandler
	if columns, err = b.parser.ParseColumns(b.dimensions); err != nil {
		err = fmt.Errorf("could not parse dimensions %q: %w", b.dimensions, err)
		return
	}

	// Add dimensions
	for i, d := range columns {
		if d.Alias == "" {
			d.Alias = fmt.Sprintf("dimension_%d", i)
		}

		report = report.
			Column(d).
			GroupBy(d.Alias).
			OrderBy(d.Alias)
	}

	// Use a different handler for filter functions for this
	b.parser.OnFunction = b.stdFilterFuncHandler
	if len(b.filters) > 0 {
		var filter ql.ASTNode

		if filter, err = b.parser.ParseExpression(b.filters); err != nil {
			err = fmt.Errorf("could not parse filters %q: %w", b.filters, err)
			return
		}

		// We need to wrap this one level deeper, since additional filters should
		// be evaluated as a whole.
		// For example A AND B OR C =should be> (A AND B OR C)
		// so the output becomes BASE AND (ADDITIONAL)
		report = report.Where(ql.ASTNodes{filter})
	}

	return report, nil
}
// Cast scans a single result row into a generic map, normalizing raw byte
// slices to strings and parsing every metric column into a float64.
func (b recordReportBuilder) Cast(row sqlx.ColScanner) (out map[string]interface{}, err error) {
	out = map[string]interface{}{}
	if err = sqlx.MapScan(row, out); err != nil {
		return nil, err
	}

	// Drivers commonly return textual columns as raw bytes; convert to string.
	for key, val := range out {
		if raw, ok := val.([]uint8); ok {
			out[key] = string(raw)
		}
	}

	// Metric columns are wrapped in a DECIMAL cast (see Build) and therefore
	// arrive as strings — parse them into float64 for the caller.
	for _, name := range b.numerics {
		if s, ok := out[name].(string); ok {
			if out[name], err = strconv.ParseFloat(s, 64); err != nil {
				return nil, err
			}
		}
	}

	return out, nil
}

View File

@@ -143,7 +143,7 @@ func (s Store) UpsertComposeRecord(ctx context.Context, m *types.Module, rr ...*
// DeleteComposeRecordByID Deletes ComposeRecord from store
func (s Store) DeleteComposeRecordByID(ctx context.Context, _ *types.Module, ID uint64) (err error) {
err = s.DeleteComposeRecordByID(ctx, nil, ID)
err = s.deleteComposeRecordByID(ctx, nil, ID)
if err != nil {
return
}
@@ -171,6 +171,10 @@ func (s Store) TruncateComposeRecords(ctx context.Context, _ *types.Module) (err
return
}
// ComposeRecordReport generates a report for the given module from the
// metrics, dimensions and filter expressions via the record report builder.
func (s Store) ComposeRecordReport(ctx context.Context, m *types.Module, metrics, dimensions, filter string) ([]map[string]interface{}, error) {
	return ComposeRecordReportBuilder(&s, m, metrics, dimensions, filter).Run(ctx)
}
func (s Store) convertComposeRecordFilter(m *types.Module, f types.RecordFilter) (query squirrel.SelectBuilder, err error) {
if m == nil {
err = fmt.Errorf("module not provided")
@@ -224,22 +228,7 @@ func (s Store) convertComposeRecordFilter(m *types.Module, f types.RecordFilter)
), i.Value)
}
field := m.Fields.FindByName(i.Value)
switch true {
case field.IsBoolean():
i.Value = fmt.Sprintf("(rv_%s.value NOT IN ('', '0', 'false', 'f', 'FALSE', 'F', false))", i.Value)
case field.IsNumeric():
i.Value = fmt.Sprintf("CAST(rv_%s.value AS SIGNED)", i.Value)
case field.IsDateTime():
i.Value = fmt.Sprintf("CAST(rv_%s.value AS DATETIME)", i.Value)
case field.IsRef():
i.Value = fmt.Sprintf("rv_%s.ref ", i.Value)
default:
i.Value = fmt.Sprintf("rv_%s.value ", i.Value)
}
return i, nil
return s.FieldToColumnTypeCaster(m.Fields.FindByName(i.Value), i)
}
)

View File

@@ -63,6 +63,10 @@ func (g genericUpgrades) Upgrade(ctx context.Context, t *ddl.Table) error {
g.AlterUsersDropOrganisation,
g.AlterUsersDropRelatedUser,
)
//case "compose_attachment_binds":
// return g.all(ctx,
// g.MigrateComposeAttachmentsToBindsTable,
// )
}
return nil
@@ -264,3 +268,71 @@ func (g genericUpgrades) RenameTable(ctx context.Context, old, new string) error
return nil
}
//func (g genericUpgrades) MigrateComposeAttachmentsToLinksTable(ctx context.Context) error {
// var (
// err error
// tt = []struct {
// tbl string
// }{
// {tbl: "sys_permission_rules"},
// {tbl: "compose_permission_rules"},
// {tbl: "messaging_permission_rules"},
// }
//
// // Are there entries in the attachment_binds table?
// check = `SELECT COUNT(*) > 0 FROM compose_attachment_links LIMIT 1`
//
// splitRecordAttachments = `
// INSERT INTO compose_attachment_links (
// rel_namespace, rel_attachment, kind,
// ref,
// owned_by
// created_by, updated_by, deleted_by,
// created_at, updated_at, deleted_at
// )
// SELECT rel_namespace, rel_attachment, kind,
// CASE WHEN kind = 'page' THEN
//(SELECT
// WHEN kind = 'record' THEN 2
// ELSE 0 END,
// owned_by, 0, 0,
// created_at, updated_at, deleted_at
// FROM compose_attachment
// INNER JOIN compose_record_Value`
//
// splitPageAttachments = `
// INSERT INTO compose_attachment_links (
// rel_namespace, rel_attachment, kind,
// ref,
// owned_by
// created_by, updated_by, deleted_by,
// created_at, updated_at, deleted_at
// )
// SELECT rel_namespace, rel_attachment, kind,
// CASE WHEN kind = 'page' THEN
//(SELECT
// WHEN kind = 'record' THEN 2
// ELSE 0 END,
// owned_by, 0, 0,
// created_at, updated_at, deleted_at
// FROM compose_attachment`
// )
//
// g.log.Debug("splitting parts of compose_attachment to compose_attachment_links")
// err = g.u.Exec(ctx, split)
// if err != nil {
// return fmt.Errorf("failed to split compose_attachment: %w", err)
// }
//
// for _, col := range []string{"rel_namespace", "kind"} {
// _, err = g.u.DropColumn(ctx, "compose_attachment", col)
// if err != nil {
// return fmt.Errorf("could not drop column compose_attachment.%s: %w", col, err)
// }
// }
//
// g.log.Debug("compose_attachment split to compose_attachment_links")
//
// return nil
//}

View File

@@ -5,6 +5,7 @@ import (
"database/sql"
"fmt"
"github.com/Masterminds/squirrel"
"github.com/cortezaproject/corteza-server/pkg/ql"
"github.com/cortezaproject/corteza-server/store"
"github.com/cortezaproject/corteza-server/store/rdbms/ddl"
"github.com/jmoiron/sqlx"
@@ -87,6 +88,14 @@ type (
//
//
UniqueConstraintCheck bool
// FunctionHandler takes care of translation & transformation of (sql) functions
// and their parameters
//
// Functions are used in filters and aggregations
SqlFunctionHandler func(f ql.Function) (ql.ASTNode, error)
CastModuleFieldToColumnType func(field ModuleFieldTypeDetector, ident ql.Ident) (ql.Ident, error)
}
Store struct {
@@ -107,6 +116,13 @@ type (
QueryContext(context.Context, string, ...interface{}) (*sql.Rows, error)
}
ModuleFieldTypeDetector interface {
IsBoolean() bool
IsNumeric() bool
IsDateTime() bool
IsRef() bool
}
dbTransactionMaker interface {
BeginTxx(ctx context.Context, opts *sql.TxOptions) (*sqlx.Tx, error)
}
@@ -310,6 +326,22 @@ func (s Store) preprocessValue(val interface{}, p string) interface{} {
}
}
// SqlFunctionHandler delegates to the configured SQL function handler when
// one is set; without a handler the function node is returned unchanged.
func (s Store) SqlFunctionHandler(f ql.Function) (ql.ASTNode, error) {
	if h := s.config.SqlFunctionHandler; h != nil {
		return h(f)
	}

	return f, nil
}
// FieldToColumnTypeCaster calls configured field type caster if set
// otherwise returns passed arguments directly
func (s Store) FieldToColumnTypeCaster(f ModuleFieldTypeDetector, i ql.Ident) (ql.Ident, error) {
	if s.config.CastModuleFieldToColumnType == nil {
		// Mirror SqlFunctionHandler's behavior: a store without a configured
		// caster passes identifiers through instead of panicking on a nil
		// function value.
		return i, nil
	}

	return s.config.CastModuleFieldToColumnType(f, i)
}
// tx begins a new db transaction and handles it's retries when possible
//
// It utilizes configured transaction error handlers and max-retry limits

View File

@@ -238,6 +238,34 @@ func (Schema) ComposeAttachment() *Table {
)
}
//func (Schema) ComposeRecordAttachment() *Table {
// return TableDef("compose_record_attachments",
// ColumnDef("rel_attachment", ColumnTypeIdentifier),
// ColumnDef("rel_namespace", ColumnTypeIdentifier),
// ColumnDef("rel_module", ColumnTypeIdentifier),
// ColumnDef("rel_record", ColumnTypeIdentifier),
// ColumnDef("field", ColumnTypeIdentifier),
// ColumnDef("owned_by", ColumnTypeIdentifier),
// CUDTimestamps,
// CUDUsers,
//
// PrimaryKey(IColumn("rel_attachment", "rel_namespace", "rel_module", "field")),
// )
//}
//func (Schema) ComposePageAttachment() *Table {
// return TableDef("compose_record_attachments",
// ColumnDef("rel_attachment", ColumnTypeIdentifier),
// ColumnDef("rel_namespace", ColumnTypeIdentifier),
// ColumnDef("rel_page", ColumnTypeIdentifier),
// ColumnDef("owned_by", ColumnTypeIdentifier),
// CUDTimestamps,
// CUDUsers,
//
// PrimaryKey(IColumn("rel_attachment", "rel_namespace", "rel_page")),
// )
//}
func (Schema) ComposeChart() *Table {
return TableDef("compose_chart",
ID,
@@ -423,7 +451,6 @@ func (Schema) MessagingMessageAttachment() *Table {
ColumnDef("rel_attachment", ColumnTypeIdentifier),
PrimaryKey(IColumn("rel_message")),
)
}
func (Schema) MessagingMessageFlag() *Table {

View File

@@ -0,0 +1,28 @@
package sqlite
import (
"fmt"
"github.com/cortezaproject/corteza-server/pkg/ql"
"strings"
)
// sqlFunctionHandler translates QL function calls into SQLite-compatible
// expressions (SQLite has no QUARTER/YEAR/NOW/DATE_FORMAT builtins) and
// rejects functions with no reasonable SQLite equivalent.
func sqlFunctionHandler(f ql.Function) (ql.ASTNode, error) {
	switch strings.ToUpper(f.Name) {
	case "QUARTER":
		return ql.MakeFormattedNode("(CAST(STRFTIME('%%m', %s) AS INTEGER) + 2) / 3", f.Arguments...), nil
	case "YEAR":
		// BUGFIX: SQLite's strftime() has no %y verb; %Y is the 4-digit year.
		return ql.MakeFormattedNode("STRFTIME('%%Y', %s)", f.Arguments...), nil
	case "NOW":
		return ql.MakeFormattedNode("DATE('now')", f.Arguments...), nil
	case "DATE_FORMAT":
		if len(f.Arguments) != 2 {
			return nil, fmt.Errorf("expecting exactly two arguments for DATE_FORMAT function")
		}

		return ql.MakeFormattedNode("STRFTIME('%s', %s)", f.Arguments[0], f.Arguments[1]), nil
	case "DATE_ADD", "DATE_SUB", "STD":
		return nil, fmt.Errorf("%q function is currently unsupported in SQLite store backend", f.Name)
	}

	return f, nil
}

View File

@@ -0,0 +1,22 @@
package sqlite
import (
"fmt"
"github.com/cortezaproject/corteza-server/pkg/ql"
"github.com/cortezaproject/corteza-server/store/rdbms"
)
// fieldToColumnTypeCaster wraps the record-value column reference in a cast
// expression matching the module field's type for the SQLite backend.
func fieldToColumnTypeCaster(field rdbms.ModuleFieldTypeDetector, i ql.Ident) (ql.Ident, error) {
	var tpl string

	switch {
	case field.IsBoolean():
		tpl = "(rv_%s.value NOT IN ('', '0', 'false', 'f', 'FALSE', 'F', false))"
	case field.IsNumeric():
		// NOTE(review): "SIGNED" is not a SQLite type name (it degrades to
		// NUMERIC affinity under SQLite's cast rules) — confirm intent.
		tpl = "CAST(rv_%s.value AS SIGNED)"
	case field.IsRef():
		tpl = "rv_%s.ref "
	default:
		// NOTE(review): unlike the MySQL/PgSQL casters there is no
		// IsDateTime() case here — presumably relying on ISO-8601 text
		// ordering; verify against the report builder's date functions.
		tpl = "rv_%s.value "
	}

	i.Value = fmt.Sprintf(tpl, i.Value)
	return i, nil
}

View File

@@ -29,6 +29,8 @@ func New(ctx context.Context, dsn string) (s *Store, err error) {
cfg.TxRetryErrHandler = txRetryErrHandler
cfg.ErrorHandler = errorHandler
//cfg.TxDisabled = true
cfg.SqlFunctionHandler = sqlFunctionHandler
cfg.CastModuleFieldToColumnType = fieldToColumnTypeCaster
s = new(Store)
if s.Store, err = rdbms.New(ctx, cfg); err != nil {

View File

@@ -22,9 +22,13 @@ func testComposeRecords(t *testing.T, s store.ComposeRecords) {
Name: "testComposeRecords",
CreatedAt: time.Now(),
Fields: types.ModuleFieldSet{
&types.ModuleField{Kind: "string", Name: "f1"},
&types.ModuleField{Kind: "string", Name: "f2"},
&types.ModuleField{Kind: "string", Name: "f3"},
&types.ModuleField{Kind: "String", Name: "str1"},
&types.ModuleField{Kind: "String", Name: "str2"},
&types.ModuleField{Kind: "String", Name: "str3"},
&types.ModuleField{Kind: "Number", Name: "num1"},
&types.ModuleField{Kind: "Number", Name: "num2"},
&types.ModuleField{Kind: "Number", Name: "num3"},
&types.ModuleField{Kind: "DateTime", Name: "dt1"},
},
}
@@ -69,9 +73,9 @@ func testComposeRecords(t *testing.T, s store.ComposeRecords) {
t.Run("lookup by ID", func(t *testing.T) {
req, rr := truncAndCreate(t, makeNew(
&types.RecordValue{Name: "f1", Value: "v1", Ref: 1},
&types.RecordValue{Name: "f2", Value: "v2", Ref: 2},
&types.RecordValue{Name: "f3", Value: "v3", Ref: 3},
&types.RecordValue{Name: "str1", Value: "v1", Ref: 1},
&types.RecordValue{Name: "str2", Value: "v2", Ref: 2},
&types.RecordValue{Name: "str3", Value: "v3", Ref: 3},
))
rec := rr[0]
@@ -82,7 +86,7 @@ func testComposeRecords(t *testing.T, s store.ComposeRecords) {
req.Nil(fetched.UpdatedAt)
req.Nil(fetched.DeletedAt)
req.Len(fetched.Values, len(rec.Values))
req.Equal("f2", fetched.Values[1].Name)
req.Equal("str2", fetched.Values[1].Name)
req.Equal("v2", fetched.Values[1].Value)
req.Equal(uint64(2), fetched.Values[1].Ref)
})
@@ -126,8 +130,8 @@ func testComposeRecords(t *testing.T, s store.ComposeRecords) {
t.Run("update values", func(t *testing.T) {
req, rr := truncAndCreate(t, makeNew(
&types.RecordValue{Name: "f1", Value: "v1", Ref: 1},
&types.RecordValue{Name: "f2", Value: "v2", Ref: 2},
&types.RecordValue{Name: "str1", Value: "v1", Ref: 1},
&types.RecordValue{Name: "str2", Value: "v2", Ref: 2},
))
rec := rr[0]
@@ -142,7 +146,7 @@ func testComposeRecords(t *testing.T, s store.ComposeRecords) {
rec.Values[0].Value = "vv10"
rec.Values[1].Value = "vv20"
rec.Values = append(rec.Values, &types.RecordValue{Name: "f3", Value: "vv30", Ref: 3})
rec.Values = append(rec.Values, &types.RecordValue{Name: "str3", Value: "vv30", Ref: 3})
rec.Values.SetRecordID(rec.ID)
req.NoError(s.UpdateComposeRecord(ctx, mod, rec))
@@ -151,14 +155,14 @@ func testComposeRecords(t *testing.T, s store.ComposeRecords) {
req.NoError(err)
req.Equal(rec.OwnedBy, updated.OwnedBy)
req.Len(updated.Values, len(rec.Values))
req.Equal("f2", updated.Values[1].Name)
req.Equal("str2", updated.Values[1].Name)
req.Equal("vv20", updated.Values[1].Value)
})
t.Run("soft delete values", func(t *testing.T) {
req, rr := truncAndCreate(t, makeNew(
&types.RecordValue{Name: "f1", Value: "v1", Ref: 1},
&types.RecordValue{Name: "f2", Value: "v2", Ref: 2},
&types.RecordValue{Name: "str1", Value: "v1", Ref: 1},
&types.RecordValue{Name: "str2", Value: "v2", Ref: 2},
))
rec := rr[0]
rec.DeletedAt = &rec.CreatedAt
@@ -214,14 +218,14 @@ func testComposeRecords(t *testing.T, s store.ComposeRecords) {
set types.RecordSet
req, _ = truncAndCreate(t,
makeNew(&types.RecordValue{Name: "f1", Value: "v1"}, &types.RecordValue{Name: "f2", Value: "same"}, &types.RecordValue{Name: "f3", Value: "three"}),
makeNew(&types.RecordValue{Name: "f1", Value: "v2"}, &types.RecordValue{Name: "f2", Value: "same"}, &types.RecordValue{Name: "f3", Value: "three"}),
makeNew(&types.RecordValue{Name: "f1", Value: "v3"}, &types.RecordValue{Name: "f2", Value: "same"}, &types.RecordValue{Name: "f3", Value: "three"}),
makeNew(&types.RecordValue{Name: "f1", Value: "v4"}, &types.RecordValue{Name: "f2", Value: "same"}),
makeNew(&types.RecordValue{Name: "f1", Value: "v5"}, &types.RecordValue{Name: "f2", Value: "same"}),
makeNew(&types.RecordValue{Name: "str1", Value: "v1"}, &types.RecordValue{Name: "str2", Value: "same"}, &types.RecordValue{Name: "str3", Value: "three"}),
makeNew(&types.RecordValue{Name: "str1", Value: "v2"}, &types.RecordValue{Name: "str2", Value: "same"}, &types.RecordValue{Name: "str3", Value: "three"}),
makeNew(&types.RecordValue{Name: "str1", Value: "v3"}, &types.RecordValue{Name: "str2", Value: "same"}, &types.RecordValue{Name: "str3", Value: "three"}),
makeNew(&types.RecordValue{Name: "str1", Value: "v4"}, &types.RecordValue{Name: "str2", Value: "same"}),
makeNew(&types.RecordValue{Name: "str1", Value: "v5"}, &types.RecordValue{Name: "str2", Value: "same"}),
// Add one additional record with deleted values
makeNew(&types.RecordValue{Name: "f1", Value: "v6", DeletedAt: now()}, &types.RecordValue{Name: "f2", Value: "deleted", DeletedAt: now()}),
makeNew(&types.RecordValue{Name: "str1", Value: "v6", DeletedAt: now()}, &types.RecordValue{Name: "str2", Value: "deleted", DeletedAt: now()}),
)
f = types.RecordFilter{
@@ -230,25 +234,77 @@ func testComposeRecords(t *testing.T, s store.ComposeRecords) {
}
)
f.Query = `f1 = 'v1'`
f.Query = `str1 = 'v1'`
set, _, err = s.SearchComposeRecords(ctx, mod, f)
req.NoError(err)
req.Len(set, 1)
f.Query = `f2 = 'same'`
f.Query = `str2 = 'same'`
set, _, err = s.SearchComposeRecords(ctx, mod, f)
req.NoError(err)
req.Len(set, 5)
f.Query = `f2 = 'different'`
f.Query = `str2 = 'different'`
set, _, err = s.SearchComposeRecords(ctx, mod, f)
req.NoError(err)
req.Len(set, 0)
f.Query = `f3 = 'three' AND f1 = 'v1'`
f.Query = `str3 = 'three' AND str1 = 'v1'`
set, _, err = s.SearchComposeRecords(ctx, mod, f)
req.NoError(err)
req.Len(set, 1)
})
})
t.Run("report", func(t *testing.T) {
var (
err error
req, _ = truncAndCreate(t,
makeNew(&types.RecordValue{Name: "dt1", Value: "2020-01-01"}, &types.RecordValue{Name: "num1", Value: "1"}, &types.RecordValue{Name: "str3", Value: "three"}),
makeNew(&types.RecordValue{Name: "dt1", Value: "2020-01-01"}, &types.RecordValue{Name: "num1", Value: "2"}, &types.RecordValue{Name: "str3", Value: "three"}),
makeNew(&types.RecordValue{Name: "dt1", Value: "2020-01-01"}, &types.RecordValue{Name: "num1", Value: "3"}, &types.RecordValue{Name: "str3", Value: "three"}),
makeNew(&types.RecordValue{Name: "dt1", Value: "2020-05-01"}, &types.RecordValue{Name: "num1", Value: "4"}),
makeNew(&types.RecordValue{Name: "dt1", Value: "2020-05-01"}, &types.RecordValue{Name: "num1", Value: "5"}),
// Add one additional record with deleted values
makeNew(&types.RecordValue{Name: "dt1", Value: "2020-05-01", DeletedAt: now()}, &types.RecordValue{Name: "num1", Value: "6", DeletedAt: now()}, &types.RecordValue{Name: "str2", Value: "deleted", DeletedAt: now()}),
)
report []map[string]interface{}
)
report, err = s.ComposeRecordReport(ctx, mod, "MAX(num1)", "QUARTER(dt1)", "")
req.NoError(err)
req.Len(report, 3)
// @todo find a way to compare the results
//expected := []map[string]interface{}{
// {"count": 3, "dimension_0": 1, "metric_0": 3},
// {"count": 2, "dimension_0": 2, "metric_0": 5},
// {"count": 1, "dimension_0": nil, "metric_0": nil},
//}
//
////spew.Dump(report, expected)
//req.True(
// reflect.DeepEqual(report, expected),
// "report does not match expected results:\n%#v\n%#v", report, expected)
report, err = s.ComposeRecordReport(ctx, mod, "COUNT(num1)", "YEAR(dt1)", "")
req.NoError(err)
report, err = s.ComposeRecordReport(ctx, mod, "SUM(num1)", "DATE(dt1)", "")
req.NoError(err)
report, err = s.ComposeRecordReport(ctx, mod, "MIN(num1)", "DATE(NOW())", "")
req.NoError(err)
report, err = s.ComposeRecordReport(ctx, mod, "AVG(num1)", "DATE(NOW())", "")
req.NoError(err)
// Note that not all functions are compatible across all backends
})
}