3
0

Post test tweaks and fixes

This commit is contained in:
Tomaž Jerman 2023-06-23 15:32:41 +02:00
parent 7c6fb592bc
commit 3eb709b90a
11 changed files with 331 additions and 100 deletions

View File

@ -81,42 +81,33 @@ func (aa AlterationSet) Merge(bb AlterationSet) (cc AlterationSet) {
// * attribute A renamed to A' and then renamed to A''
// * attribute A deleted and then created
cc = make(AlterationSet, 0, len(aa)+len(bb))
skip := make(map[int]bool, (len(aa)+len(bb))/2)
aux := append(aa, bb...)
seen := make(map[int]bool, len(aux)/2)
// For each item in aa, check if it has a matching element in bb.
// If it does, mark the bb index as skipped, if it doesn't use the aa element.
// If a duplicate is found, the bb element is used (considered newer),
//
// This is sub-optimal but the slices are expected to be small and this
// won't be run often.
for _, a := range aa {
found := false
for j, b := range bb {
if skip[j] {
for i, a := range aux {
if seen[i] {
continue
}
if a.compare(*b) {
skip[j] = true
cc = append(cc, b)
found := false
for j := i + 1; j < len(aux); j++ {
if seen[j] {
continue
}
if a.compare(*aux[j]) {
seen[j] = true
found = true
cc = append(cc, aux[j])
break
}
}
if !found {
cc = append(cc, a)
}
}
for j, b := range bb {
if skip[j] {
continue
}
cc = append(cc, b)
}
return
}

View File

@ -69,6 +69,38 @@ func TestMerge(t *testing.T) {
},
},
},
{
name: "remove duplicates",
aa: AlterationSet{},
bb: AlterationSet{
&Alteration{
AttributeAdd: &AttributeAdd{
Attr: &Attribute{
Ident: "foo",
Type: &TypeJSON{Nullable: false},
},
},
},
&Alteration{
AttributeAdd: &AttributeAdd{
Attr: &Attribute{
Ident: "foo",
Type: &TypeJSON{Nullable: false},
},
},
},
},
cc: AlterationSet{
&Alteration{
AttributeAdd: &AttributeAdd{
Attr: &Attribute{
Ident: "foo",
Type: &TypeJSON{Nullable: false},
},
},
},
},
},
{
name: "un matching types",
aa: AlterationSet{

View File

@ -2,15 +2,13 @@ package dal
type (
modelDiffType string
ModelModification string
// ModelDiff defines one identified mismatch between two models
ModelDiff struct {
Type modelDiffType
Modification ModelModification
// Original will be nil when a new attribute is being added
Original *Attribute
// Inserted will be nil wen an existing attribute is being removed
Inserted *Attribute
// Asserted will be nil when an existing attribute is being removed
Asserted *Attribute
}
ModelDiffSet []*ModelDiff
@ -21,9 +19,6 @@ const (
AttributeTypeMissmatch modelDiffType = "typeMissmatch"
AttributeSensitivityMismatch modelDiffType = "sensitivityMismatch"
AttributeCodecMismatch modelDiffType = "codecMismatch"
AttributeDeleted ModelModification = "deleted"
AttributeAdded ModelModification = "added"
AttributeChanged ModelModification = "changed"
)
// Diff calculates the diff between models a and b where a is used as base
@ -76,7 +71,6 @@ func (a *Model) Diff(b *Model) (out ModelDiffSet) {
if !ok {
out = append(out, &ModelDiff{
Type: AttributeMissing,
Modification: AttributeDeleted,
Original: attrA,
})
continue
@ -86,9 +80,8 @@ func (a *Model) Diff(b *Model) (out ModelDiffSet) {
if attrA.Type.Type() != attrBAux.attr.Type.Type() {
out = append(out, &ModelDiff{
Type: AttributeTypeMissmatch,
Modification: AttributeChanged,
Original: attrA,
Inserted: attrBAux.attr,
Asserted: attrBAux.attr,
})
}
@ -97,17 +90,15 @@ func (a *Model) Diff(b *Model) (out ModelDiffSet) {
if attrA.SensitivityLevelID != attrBAux.attr.SensitivityLevelID {
out = append(out, &ModelDiff{
Type: AttributeSensitivityMismatch,
Modification: AttributeChanged,
Original: attrA,
Inserted: attrBAux.attr,
Asserted: attrBAux.attr,
})
}
if attrA.Store.Type() != attrBAux.attr.Store.Type() {
out = append(out, &ModelDiff{
Type: AttributeCodecMismatch,
Modification: AttributeChanged,
Original: attrA,
Inserted: attrBAux.attr,
Asserted: attrBAux.attr,
})
}
}
@ -121,9 +112,8 @@ func (a *Model) Diff(b *Model) (out ModelDiffSet) {
if !ok {
out = append(out, &ModelDiff{
Type: AttributeMissing,
Modification: AttributeAdded,
Original: nil,
Inserted: attrB,
Asserted: attrB,
})
continue
}
@ -140,7 +130,7 @@ func (dd ModelDiffSet) Alterations() (out []*Alteration) {
for _, d := range dd {
switch d.Type {
case AttributeMissing:
if d.Inserted == nil {
if d.Asserted == nil {
// @todo if this was the last attribute we can consider dropping this column
if d.Original.Store.Type() == AttributeCodecRecordValueSetJSON {
break
@ -152,11 +142,11 @@ func (dd ModelDiffSet) Alterations() (out []*Alteration) {
},
})
} else {
if d.Inserted.Store.Type() == AttributeCodecRecordValueSetJSON {
if d.Asserted.Store.Type() == AttributeCodecRecordValueSetJSON {
add(&Alteration{
AttributeAdd: &AttributeAdd{
Attr: &Attribute{
Ident: d.Inserted.StoreIdent(),
Ident: d.Asserted.StoreIdent(),
Type: &TypeJSON{Nullable: false},
Store: &CodecPlain{},
},
@ -165,7 +155,7 @@ func (dd ModelDiffSet) Alterations() (out []*Alteration) {
} else {
add(&Alteration{
AttributeAdd: &AttributeAdd{
Attr: d.Inserted,
Attr: d.Asserted,
},
})
}
@ -180,16 +170,16 @@ func (dd ModelDiffSet) Alterations() (out []*Alteration) {
add(&Alteration{
AttributeReType: &AttributeReType{
Attr: d.Inserted,
To: d.Inserted.Type,
Attr: d.Asserted,
To: d.Asserted.Type,
},
})
case AttributeCodecMismatch:
add(&Alteration{
AttributeReEncode: &AttributeReEncode{
Attr: d.Inserted,
To: d.Inserted.Store,
Attr: d.Asserted,
To: d.Asserted.Store,
},
})
}

View File

@ -38,7 +38,6 @@ func TestDiff_wrongAttrType(t *testing.T) {
dd := a.Diff(b)
require.Len(t, dd, 1)
require.Equal(t, AttributeTypeMissmatch, dd[0].Type)
require.Equal(t, AttributeChanged, dd[0].Modification)
}
func TestDiff_removedAttr(t *testing.T) {
@ -64,9 +63,8 @@ func TestDiff_removedAttr(t *testing.T) {
dd := a.Diff(b)
require.Len(t, dd, 1)
require.Equal(t, AttributeMissing, dd[0].Type)
require.Equal(t, AttributeDeleted, dd[0].Modification)
require.NotNil(t, dd[0].Original)
require.Nil(t, dd[0].Inserted)
require.Nil(t, dd[0].Asserted)
}
func TestDiff_addedAttr(t *testing.T) {
@ -92,9 +90,8 @@ func TestDiff_addedAttr(t *testing.T) {
dd := a.Diff(b)
require.Len(t, dd, 1)
require.Equal(t, AttributeMissing, dd[0].Type)
require.Equal(t, AttributeAdded, dd[0].Modification)
require.Nil(t, dd[0].Original)
require.NotNil(t, dd[0].Inserted)
require.NotNil(t, dd[0].Asserted)
}
func TestDiff_changedCodec(t *testing.T) {
@ -116,5 +113,4 @@ func TestDiff_changedCodec(t *testing.T) {
dd := a.Diff(b)
require.Len(t, dd, 1)
require.Equal(t, AttributeCodecMismatch, dd[0].Type)
require.Equal(t, AttributeChanged, dd[0].Modification)
}

View File

@ -2,6 +2,7 @@ package mssql
import (
"context"
"fmt"
"github.com/cortezaproject/corteza/server/pkg/dal"
"github.com/cortezaproject/corteza/server/store/adapters/rdbms/ddl"
@ -16,6 +17,29 @@ type (
is *informationSchema
d *mssqlDialect
}
// Custom ddl commands tweaked to SQL server specifics.
// It might be cleaner to solve this with goqu or some custom string templates
// but this will do for now.
addColumn struct {
Dialect *mssqlDialect
Table string
Column *ddl.Column
}
renameColumn struct {
Dialect *mssqlDialect
Table string
Old string
New string
}
reTypeColumn struct {
Dialect *mssqlDialect
Table string
Column string
Type *ddl.ColumnType
}
)
var (
@ -78,7 +102,7 @@ func (dd *dataDefiner) TableLookup(ctx context.Context, t string) (*ddl.Table, e
}
func (dd *dataDefiner) ColumnAdd(ctx context.Context, t string, c *ddl.Column) error {
return ddl.Exec(ctx, dd.conn, &ddl.AddColumn{
return ddl.Exec(ctx, dd.conn, &addColumn{
Dialect: dd.d,
Table: t,
Column: c,
@ -94,7 +118,7 @@ func (dd *dataDefiner) ColumnDrop(ctx context.Context, t, col string) error {
}
func (dd *dataDefiner) ColumnRename(ctx context.Context, t string, o string, n string) error {
return ddl.Exec(ctx, dd.conn, &ddl.RenameColumn{
return ddl.Exec(ctx, dd.conn, &renameColumn{
Dialect: dd.d,
Table: t,
Old: o,
@ -103,7 +127,7 @@ func (dd *dataDefiner) ColumnRename(ctx context.Context, t string, o string, n s
}
func (dd *dataDefiner) ColumnReType(ctx context.Context, t string, col string, tp *ddl.ColumnType) error {
return ddl.Exec(ctx, dd.conn, &ddl.ReTypeColumn{
return ddl.Exec(ctx, dd.conn, &reTypeColumn{
Dialect: dd.d,
Table: t,
Column: col,
@ -133,3 +157,42 @@ func (dd *dataDefiner) IndexDrop(ctx context.Context, t, i string) error {
Ident: i,
})
}
// ToSQL renders the SQL-server-specific ALTER TABLE ... ADD statement for a
// single column: quoted table and column idents, the raw type name, an
// optional NOT NULL constraint and an optional DEFAULT clause.
func (c *addColumn) ToSQL() (sql string, aa []interface{}, err error) {
	sql = "ALTER TABLE " + c.Dialect.QuoteIdent(c.Table) +
		" ADD " + c.Dialect.QuoteIdent(c.Column.Ident) +
		" " + c.Column.Type.Name

	// Columns are nullable unless explicitly constrained.
	if !c.Column.Type.Null {
		sql += " NOT NULL"
	}

	if c.Column.Default != "" {
		// @todo right now we can (and need to) trust that default
		// values are unharmful!
		sql += " DEFAULT " + c.Column.Default
	}

	return
}
// ToSQL renders the SQL-server-specific column rename as a call to the
// sp_RENAME system stored procedure (T-SQL has no ALTER TABLE ... RENAME
// COLUMN form).
//
// NOTE(review): table and column names are interpolated into the quoted
// literal without escaping; this assumes idents originate from trusted
// model definitions — confirm, a single quote in an ident would break
// (or alter) the statement.
func (c *renameColumn) ToSQL() (sql string, aa []interface{}, err error) {
	return fmt.Sprintf(
		`EXEC sp_RENAME '%s.%s' , '%s', 'COLUMN'`,
		c.Table,
		c.Old,
		c.New,
	), nil, nil
}
// ToSQL renders the SQL-server-specific statement for changing a column's
// data type.
//
// Fixed: the original emitted `MODIFY COLUMN`, which is MySQL syntax and is
// not valid T-SQL — SQL server uses `ALTER COLUMN`. Additionally, T-SQL's
// ALTER COLUMN redefines nullability (omitting it falls back to the session
// default, typically NULL), so the NOT NULL constraint is re-stated here the
// same way addColumn does.
func (c *reTypeColumn) ToSQL() (sql string, aa []interface{}, err error) {
	sql = fmt.Sprintf(
		`ALTER TABLE %s ALTER COLUMN %s %s`,
		c.Dialect.QuoteIdent(c.Table),
		c.Dialect.QuoteIdent(c.Column),
		c.Type.Name,
	)

	if !c.Type.Null {
		sql += " NOT NULL"
	}

	return
}

View File

@ -6,6 +6,7 @@ import (
"github.com/cortezaproject/corteza/server/store/adapters/rdbms/ddl"
"github.com/cortezaproject/corteza/server/store/adapters/rdbms/ql"
"github.com/spf13/cast"
"github.com/cortezaproject/corteza/server/pkg/cast2"
"github.com/cortezaproject/corteza/server/pkg/dal"
@ -264,17 +265,46 @@ func (mssqlDialect) ColumnFits(target, assert *ddl.Column) bool {
},
}
baseMatch := matches[assertName][targetName]
baseMatch := assertName == targetName || matches[assertName][targetName]
// Special cases
switch {
case assertName == "varchar" && targetName == "varchar":
// Check varchar size
return baseMatch && assertMeta[0] <= targetMeta[0]
for i := len(assertMeta); i < 1; i++ {
assertMeta = append(assertMeta, "0")
}
if assertMeta[0] == "max" {
assertMeta[0] = "-1"
}
assertA := cast.ToInt(assertMeta[0])
for i := len(targetMeta); i < 1; i++ {
targetMeta = append(targetMeta, "0")
}
if targetMeta[0] == "max" {
targetMeta[0] = "-1"
}
targetA := cast.ToInt(targetMeta[0])
// -1 means no limit so it can fit any length
if targetA == -1 {
return baseMatch
}
return baseMatch && assertA <= targetA
case assertName == "decimal" && targetName == "decimal":
// Check decimal size and precision
return baseMatch && assertMeta[0] <= targetMeta[0] && assertMeta[1] <= targetMeta[1]
for i := len(assertMeta); i < 2; i++ {
assertMeta = append(assertMeta, "0")
}
for i := len(targetMeta); i < 2; i++ {
targetMeta = append(targetMeta, "0")
}
return baseMatch && cast.ToInt(assertMeta[0]) <= cast.ToInt(targetMeta[0]) && cast.ToInt(assertMeta[1]) <= cast.ToInt(targetMeta[1])
}
return baseMatch

View File

@ -11,6 +11,8 @@ import (
"github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp"
"github.com/jmoiron/sqlx"
"github.com/modern-go/reflect2"
"github.com/spf13/cast"
)
type (
@ -48,6 +50,9 @@ func (i *informationSchema) columnSelect(dbname string) *goqu.SelectDataset {
"COLUMN_NAME",
"IS_NULLABLE",
"DATA_TYPE",
"CHARACTER_MAXIMUM_LENGTH",
"NUMERIC_SCALE",
"DATETIME_PRECISION",
).
// @note this goqu.I is a cheat; try to figure out if we have something nicer available (same applies to lower code)
From(goqu.I(fmt.Sprintf("%s.information_schema.columns", dbname))).
@ -69,6 +74,11 @@ func (i *informationSchema) scanColumns(ctx context.Context, sd *goqu.SelectData
Column string `db:"COLUMN_NAME"`
IsNullable string `db:"IS_NULLABLE"`
Type string `db:"DATA_TYPE"`
// We'll use these two to get numeric's precision and scale
Precision any `db:"DATETIME_PRECISION"`
Scale any `db:"NUMERIC_SCALE"`
// We'll use this one to get varchar length
MaxLength any `db:"CHARACTER_MAXIMUM_LENGTH"`
}, 0)
)
@ -85,6 +95,35 @@ func (i *informationSchema) scanColumns(ctx context.Context, sd *goqu.SelectData
out = append(out, &ddl.Table{Ident: v.Table})
}
if v.Type == "decimal" {
var (
p, s int
)
if !reflect2.IsNil(v.Precision) {
p = cast.ToInt(v.Precision)
}
if !reflect2.IsNil(v.Scale) {
s = cast.ToInt(v.Scale)
}
if p+s > 0 {
v.Type = fmt.Sprintf("%s(%d,%d)", v.Type, p, s)
}
}
if v.Type == "varchar" {
var (
l int
)
if !reflect2.IsNil(v.MaxLength) {
l = cast.ToInt(v.MaxLength)
}
v.Type = fmt.Sprintf("%s(%d)", v.Type, l)
}
out[at].Columns = append(out[at].Columns, &ddl.Column{
Ident: v.Column,
Type: &ddl.ColumnType{

View File

@ -233,6 +233,7 @@ func (mysqlDialect) AttributeToColumn(attr *dal.Attribute) (col *ddl.Column, err
return
}
// target is the existing one
func (mysqlDialect) ColumnFits(target, assert *ddl.Column) bool {
targetType, targetName, targetMeta := ddl.ParseColumnTypes(target)
assertType, assertName, assertMeta := ddl.ParseColumnTypes(assert)
@ -303,7 +304,7 @@ func (mysqlDialect) ColumnFits(target, assert *ddl.Column) bool {
},
}
baseMatch := matches[assertName][targetName]
baseMatch := assertName == targetName || matches[assertName][targetName]
// Special cases
switch {
@ -312,8 +313,15 @@ func (mysqlDialect) ColumnFits(target, assert *ddl.Column) bool {
return baseMatch && assertMeta[0] <= targetMeta[0]
case assertName == "decimal" && targetName == "decimal":
// Check decimal size and precision
return baseMatch && assertMeta[0] <= targetMeta[0] && assertMeta[1] <= targetMeta[1]
// Check numeric size and precision
for i := len(assertMeta); i < 2; i++ {
assertMeta = append(assertMeta, "0")
}
for i := len(targetMeta); i < 2; i++ {
targetMeta = append(targetMeta, "0")
}
return baseMatch && cast.ToInt(assertMeta[0]) <= cast.ToInt(targetMeta[0]) && cast.ToInt(assertMeta[1]) <= cast.ToInt(targetMeta[1])
}
return baseMatch

View File

@ -228,27 +228,36 @@ func (postgresDialect) ColumnFits(target, assert *ddl.Column) bool {
matches := map[string]map[string]bool{
"numeric": {
"text": true,
"varchar": true,
},
"timestamp": {
"text": true,
"varchar": true,
"timestamptz": true,
},
"timestamptz": {
"text": true,
"varchar": true,
},
"time": {
"text": true,
"varchar": true,
"timetz": true,
},
"timetz": {
"text": true,
"varchar": true,
},
"date": {
"text": true,
"varchar": true,
},
"text": {},
"varchar": {
"text": true,
},
"jsonb": {},
"bytea": {},
"boolean": {
@ -256,15 +265,32 @@ func (postgresDialect) ColumnFits(target, assert *ddl.Column) bool {
},
"uuid": {
"text": true,
"varchar": true,
},
}
baseMatch := matches[assertName][targetName]
baseMatch := assertName == targetName || matches[assertName][targetName]
// Special cases
switch {
case assertName == "varchar" && targetName == "varchar":
// Check varchar size
for i := len(assertMeta); i < 1; i++ {
assertMeta = append(assertMeta, "0")
}
for i := len(targetMeta); i < 1; i++ {
targetMeta = append(targetMeta, "0")
}
case assertName == "numeric" && targetName == "numeric":
// Check numeric size and precision
for i := len(assertMeta); i < 2; i++ {
assertMeta = append(assertMeta, "0")
}
for i := len(targetMeta); i < 2; i++ {
targetMeta = append(targetMeta, "0")
}
return baseMatch && assertMeta[0] <= targetMeta[0] && assertMeta[1] <= targetMeta[1]
}

View File

@ -2,12 +2,16 @@ package postgres
import (
"context"
"fmt"
"strings"
"github.com/cortezaproject/corteza/server/pkg/errors"
"github.com/cortezaproject/corteza/server/store/adapters/rdbms/ddl"
"github.com/doug-martin/goqu/v9"
"github.com/doug-martin/goqu/v9/exp"
"github.com/jmoiron/sqlx"
"strings"
"github.com/modern-go/reflect2"
"github.com/spf13/cast"
)
type (
@ -46,6 +50,11 @@ func (i *informationSchema) columnSelect() *goqu.SelectDataset {
"column_name",
"is_nullable",
"data_type",
// We'll use these two to get numeric's precision and scale
"numeric_precision",
"numeric_scale",
// We'll use this one to get varchar length
"character_maximum_length",
).
From("information_schema.columns").
Order(
@ -59,6 +68,12 @@ func (i *informationSchema) scanColumns(ctx context.Context, sd *goqu.SelectData
at int
has bool
n2p = make(map[string]int)
// Making column type naming consistent between different databases
typeMapping = map[string]string{
"timestamp with time zone": "timestamptz",
"timestamp without time zone": "timestamp",
"character varying": "varchar",
}
// https://dev.mysql.com/doc/mysql-infoschema-excerpt/5.7/en/information-schema-statistics-table.html
aux = make([]struct {
@ -66,6 +81,9 @@ func (i *informationSchema) scanColumns(ctx context.Context, sd *goqu.SelectData
Column string `db:"column_name"`
IsNullable string `db:"is_nullable"`
Type string `db:"data_type"`
Precision any `db:"numeric_precision"`
Scale any `db:"numeric_scale"`
MaxLength any `db:"character_maximum_length"`
}, 0)
)
@ -82,10 +100,44 @@ func (i *informationSchema) scanColumns(ctx context.Context, sd *goqu.SelectData
out = append(out, &ddl.Table{Ident: v.Table})
}
tn, has := typeMapping[v.Type]
if !has {
tn = v.Type
}
if tn == "numeric" {
var (
p, s int
)
if !reflect2.IsNil(v.Precision) {
p = cast.ToInt(v.Precision)
}
if !reflect2.IsNil(v.Scale) {
s = cast.ToInt(v.Scale)
}
if p+s > 0 {
tn = fmt.Sprintf("%s(%d,%d)", tn, p, s)
}
}
if tn == "varchar" {
var (
l int
)
if !reflect2.IsNil(v.MaxLength) {
l = cast.ToInt(v.MaxLength)
}
tn = fmt.Sprintf("%s(%d)", tn, l)
}
out[at].Columns = append(out[at].Columns, &ddl.Column{
Ident: v.Column,
Type: &ddl.ColumnType{
Name: v.Type,
Name: tn,
Null: v.IsNullable == "YES",
},
})

View File

@ -159,9 +159,13 @@ func (svc dalSchemaAlteration) UndeleteByID(ctx context.Context, dalSchemaAltera
// ModelAlterations returns all non deleted, non completed, and non dismissed alterations for the given model
func (svc dalSchemaAlteration) ModelAlterations(ctx context.Context, m *dal.Model) (out []*dal.Alteration, err error) {
return svc.modelAlterations(ctx, svc.store, m)
}
func (svc dalSchemaAlteration) modelAlterations(ctx context.Context, s store.Storer, m *dal.Model) (out []*dal.Alteration, err error) {
// @todo boilerplate code around this
aux, _, err := store.SearchDalSchemaAlterations(ctx, svc.store, types.DalSchemaAlterationFilter{
aux, _, err := store.SearchDalSchemaAlterations(ctx, s, types.DalSchemaAlterationFilter{
Resource: []string{m.Resource},
Deleted: filter.StateExcluded,
Completed: filter.StateExcluded,
@ -333,7 +337,7 @@ func (svc dalSchemaAlteration) Dismiss(ctx context.Context, ids ...uint64) (err
// originally, we wanted to hook into ComposeModule resource (or any resource that defined a model)
return store.Tx(ctx, svc.store, func(ctx context.Context, s store.Storer) (err error) {
alt, _, err := store.SearchDalSchemaAlterations(ctx, svc.store, types.DalSchemaAlterationFilter{
alt, _, err := store.SearchDalSchemaAlterations(ctx, s, types.DalSchemaAlterationFilter{
AlterationID: id.Strings(ids...),
})
if err != nil {
@ -341,11 +345,11 @@ func (svc dalSchemaAlteration) Dismiss(ctx context.Context, ids ...uint64) (err
}
alt = svc.appliableAlterations(alt...)
identity := intAuth.GetIdentityFromContext(ctx).Identity()
for _, a := range alt {
a.Error = ""
a.DismissedAt = now()
a.DismissedBy = intAuth.GetIdentityFromContext(ctx).Identity()
a.DismissedBy = identity
}
err = store.UpdateDalSchemaAlteration(ctx, s, alt...)
@ -474,7 +478,7 @@ func (svc dalSchemaAlteration) reloadAlteredModels(ctx context.Context, s store.
return
}
currentAlts, err := svc.ModelAlterations(ctx, model)
currentAlts, err := svc.modelAlterations(ctx, s, model)
if err != nil {
return
}