
Add missing bits from V1, slight touchup

Tomaž Jerman 2023-02-25 11:34:52 +01:00
parent a8b60c6525
commit 2e9ba97dda
37 changed files with 1602 additions and 72 deletions

View File

@ -84,6 +84,12 @@ func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullSer
if err != nil {
return nil, err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return nil, fmt.Errorf("invalid reference %v", ref)
}
@ -124,6 +130,16 @@ func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullSer
}
out = append(out, aux...)
default:
aux, err = d.extendDecoder(ctx, s, dl, wf.rt, refNodes[i], wf.f)
if err != nil {
return
}
for _, a := range aux {
a.Identifiers = a.Identifiers.Merge(wf.f.Identifiers)
a.References = envoyx.MergeRefs(a.References, refRefs[i])
}
out = append(out, aux...)
}
}
@ -187,6 +203,12 @@ func (d StoreDecoder) decodeWorkflow(ctx context.Context, s store.Storer, dl dal
})
}
aux, err := d.extendedWorkflowDecoder(ctx, s, dl, f, out)
if err != nil {
return
}
out = append(out, aux...)
return
}

View File

@ -1,10 +1,22 @@
package envoy
import (
"context"
"github.com/cortezaproject/corteza/server/automation/types"
"github.com/cortezaproject/corteza/server/pkg/dal"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/store"
)
func (e StoreEncoder) prepare(ctx context.Context, p envoyx.EncodeParams, s store.Storer, rt string, nn envoyx.NodeSet) (err error) {
return
}
func (d StoreDecoder) extendDecoder(ctx context.Context, s store.Storer, dl dal.FullService, rt string, nodes map[string]*envoyx.Node, f envoyx.ResourceFilter) (out envoyx.NodeSet, err error) {
return
}
func (d StoreDecoder) makeWorkflowFilter(scope *envoyx.Node, refs map[string]*envoyx.Node, auxf envoyx.ResourceFilter) (out types.WorkflowFilter) {
out.Limit = auxf.Limit
@ -32,3 +44,20 @@ func (d StoreDecoder) makeTriggerFilter(scope *envoyx.Node, refs map[string]*env
return
}
func (d StoreDecoder) extendedWorkflowDecoder(ctx context.Context, s store.Storer, dl dal.FullService, f types.WorkflowFilter, base envoyx.NodeSet) (out envoyx.NodeSet, err error) {
for _, b := range base {
wf := b.Resource.(*types.Workflow)
filters, err := d.decodeTrigger(ctx, s, dl, types.TriggerFilter{
WorkflowID: []uint64{wf.ID},
})
if err != nil {
return nil, err
}
out = append(out, filters...)
}
return
}

View File

@ -206,6 +206,7 @@ func (d *auxYamlDoc) unmarshalWorkflowNode(dctx documentContext, n *yaml.Node, m
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -318,6 +319,8 @@ func (d *auxYamlDoc) unmarshalWorkflowNode(dctx documentContext, n *yaml.Node, m
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -400,6 +403,8 @@ func (d *auxYamlDoc) unmarshalWorkflowNode(dctx documentContext, n *yaml.Node, m
ResourceType: types.WorkflowResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -478,6 +483,7 @@ func (d *auxYamlDoc) unmarshalTriggerNode(dctx documentContext, n *yaml.Node, me
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
)
_ = auxOut
_ = refs
@ -563,6 +569,8 @@ func (d *auxYamlDoc) unmarshalTriggerNode(dctx documentContext, n *yaml.Node, me
break
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -637,6 +645,8 @@ func (d *auxYamlDoc) unmarshalTriggerNode(dctx documentContext, n *yaml.Node, me
ResourceType: types.TriggerResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Put it all together...
@ -762,6 +772,25 @@ func unmarshalLocaleNode(n *yaml.Node) (out envoyx.NodeSet, err error) {
})
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Envoy config unmarshal logic
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d *auxYamlDoc) decodeEnvoyConfig(n *yaml.Node) (out envoyx.NodeConfig) {
y7s.EachMap(n, func(k, v *yaml.Node) (err error) {
switch strings.ToLower(k.Value) {
case "skipif", "skip":
return y7s.DecodeScalar(v, "decode skip if", &out.SkipIf)
case "onexisting", "mergealg":
out.MergeAlg = envoyx.CastMergeAlg(v.Value)
}
return nil
})
return
}
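For reference, a standalone sketch of the node shape this decoder consumes. The YAML keys are inferred from the switch above; the sample expression and the gopkg.in/yaml.v3 plumbing are assumptions, not part of this commit.
package main

import (
    "fmt"
    "strings"

    "gopkg.in/yaml.v3"
)

// nodeConfig mirrors envoyx.NodeConfig for illustration only.
type nodeConfig struct {
    SkipIf   string
    MergeAlg string
}

func main() {
    src := `
skipIf: "some expression"
onExisting: skip
`
    var doc yaml.Node
    if err := yaml.Unmarshal([]byte(src), &doc); err != nil {
        panic(err)
    }

    // The document node wraps a single mapping node; its Content alternates
    // between key and value nodes, much like y7s.EachMap walks them.
    m := doc.Content[0]
    out := nodeConfig{}
    for i := 0; i < len(m.Content); i += 2 {
        k, v := m.Content[i], m.Content[i+1]
        switch strings.ToLower(k.Value) {
        case "skipif", "skip":
            out.SkipIf = v.Value
        case "onexisting", "mergealg":
            // the real decoder runs this through envoyx.CastMergeAlg
            out.MergeAlg = v.Value
        }
    }
    fmt.Printf("%+v\n", out) // {SkipIf:some expression MergeAlg:skip}
}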
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //

View File

@ -147,7 +147,6 @@ func (e YamlEncoder) encodeWorkflow(ctx context.Context, p envoyx.EncodeParams,
"enabled", res.Enabled,
"handle", res.Handle,
"id", res.ID,
"issues", res.Issues,
"keepSessions", res.KeepSessions,
"meta", res.Meta,
"ownedBy", auxOwnedBy,
@ -167,6 +166,17 @@ func (e YamlEncoder) encodeWorkflow(ctx context.Context, p envoyx.EncodeParams,
var aux *yaml.Node
_ = aux
aux, err = e.encodeTriggers(ctx, p, tt.ChildrenForResourceType(node, types.TriggerResourceType), tt)
if err != nil {
return
}
out, err = y7s.AddMap(out,
"trigger", aux,
)
if err != nil {
return
}
return
}

View File

@ -52,6 +52,11 @@ workflow: {
dal: { type: "JSON", defaultEmptyObject: true }
omitSetter: true
omitGetter: true
envoy: {
yaml: {
omitEncoder: true
}
}
}
run_as: schema.AttributeUserRef
@ -81,9 +86,15 @@ workflow: {
supportMappedInput: false
identKeys: ["triggers"]
}]
extendedResourceEncoders: [{
ident: "trigger"
expIdent: "Trigger"
identKey: "trigger"
}]
}
store: {
customFilterBuilder: true
extendedDecoder: true
}
}

View File

@ -245,6 +245,7 @@ func (d *auxYamlDoc) unmarshal{{ .expIdent }}Node(dctx documentContext, n *yaml.
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
{{- if .rbac }}
rbacNodes envoyx.NodeSet
{{- end }}
@ -332,7 +333,7 @@ func (d *auxYamlDoc) unmarshal{{ .expIdent }}Node(dctx documentContext, n *yaml.
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = unmarshal{{ $resource.expIdent }}{{ $attr.expIdent }}Node(r, n)
auxRefs, auxIdents, err = d.unmarshal{{ $resource.expIdent }}{{ $attr.expIdent }}Node(r, n)
if err != nil {
return err
}
@ -359,6 +360,8 @@ func (d *auxYamlDoc) unmarshal{{ .expIdent }}Node(dctx documentContext, n *yaml.
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
{{- end }}
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -537,6 +540,7 @@ func (d *auxYamlDoc) unmarshal{{ .expIdent }}Node(dctx documentContext, n *yaml.
{{if or .envoy.scoped}}
Scope: scope,
{{end}}
Config: envoyConfig,
}
{{- if .rbac }}
// Update RBAC resource nodes with references regarding the resource
@ -686,6 +690,25 @@ func unmarshalLocaleNode(n *yaml.Node) (out envoyx.NodeSet, err error) {
})
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Envoy config unmarshal logic
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d *auxYamlDoc) decodeEnvoyConfig(n *yaml.Node) (out envoyx.NodeConfig) {
y7s.EachMap(n, func(k, v *yaml.Node) (err error) {
switch strings.ToLower(k.Value) {
case "skipif", "skip":
return y7s.DecodeScalar(v, "decode skip if", &out.SkipIf)
case "onexisting", "mergealg":
out.MergeAlg = envoyx.CastMergeAlg(v.Value)
}
return nil
})
return
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //

View File

@ -123,23 +123,29 @@ func (e YamlEncoder) encode{{.expIdent}}(ctx context.Context, p envoyx.EncodePar
if err != nil {
return
}
{{- else if .envoy.yaml.customEncoder -}}
aux{{.expIdent}}, err := e.encode{{$res}}{{.expIdent}}C(ctx, p, tt, node, res, res.{{.expIdent}})
if err != nil {
return
}
{{- end }}
{{end}}
out, err = y7s.AddMap(out,
{{ range .model.attributes -}}
{{- if .envoy.yaml.omitEncoder }}{{continue}}{{ end -}}
{{- if .envoy.yaml.customEncoder -}}
"{{.ident}}", e.encode{{$res}}{{.expIdent}}(p, res.{{.expIdent}}),
"{{.envoy.yaml.identKeyEncode}}", aux{{.expIdent}},
{{- else if eq .dal.type "Timestamp" -}}
{{- if .dal.nullable -}}
"{{.ident}}", aux{{.expIdent}},
"{{.envoy.yaml.identKeyEncode}}", aux{{.expIdent}},
{{- else -}}
"{{.ident}}", aux{{.expIdent}},
"{{.envoy.yaml.identKeyEncode}}", aux{{.expIdent}},
{{- end -}}
{{- else if eq .dal.type "Ref" -}}
"{{.ident}}", aux{{.expIdent}},
"{{.envoy.yaml.identKeyEncode}}", aux{{.expIdent}},
{{- else -}}
"{{.ident}}", res.{{.expIdent}},
"{{.envoy.yaml.identKeyEncode}}", res.{{.expIdent}},
{{- end }}
{{end}}
)
@ -176,6 +182,20 @@ func (e YamlEncoder) encode{{.expIdent}}(ctx context.Context, p envoyx.EncodePar
{{ end }}
{{- end }}
{{- range .envoy.yaml.extendedResourceEncoders }}
aux, err = e.encode{{.expIdent}}s(ctx, p, tt.ChildrenForResourceType(node, types.{{.expIdent}}ResourceType), tt)
if err != nil {
return
}
out, err = y7s.AddMap(out,
"{{.identKey}}", aux,
)
if err != nil {
return
}
{{- end }}
return
}

View File

@ -100,6 +100,10 @@ import (
customDecoder: bool | *false
customEncoder: bool | *false
identKeyEncode: string | *$attrIdent
omitEncoder: bool | *false
}
// store decode/encode configs

View File

@ -186,6 +186,12 @@ import (
supportMappedInput: bool | *true
mappedField: string | *""
}] | *[]
extendedResourceEncoders: [...{
ident: string
expIdent: string
identKey: string | *""
}] | *[]
}
// store decode/encode configs

View File

@ -12,7 +12,13 @@ chart: {
model: {
ident: "compose_chart"
attributes: {
id: schema.IdField
id: schema.IdField & {
envoy: {
yaml: {
identKeyEncode: "chartID"
}
}
}
handle: schema.HandleField
namespace_id: {
ident: "namespaceID",
@ -37,6 +43,7 @@ chart: {
envoy: {
yaml: {
customDecoder: true
customEncoder: true
}
}
}

View File

@ -2,6 +2,7 @@ package envoy
import (
"context"
"strings"
"github.com/cortezaproject/corteza/server/pkg/dal"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
@ -43,7 +44,7 @@ func (rd *RecordDatasource) SetProvider(s envoyx.Provider) bool {
return true
}
func (rd *RecordDatasource) Next(ctx context.Context, out map[string]string) (ident string, more bool, err error) {
func (rd *RecordDatasource) Next(ctx context.Context, out map[string]string) (ident []string, more bool, err error) {
if rd.rowCache == nil {
rd.rowCache = make(map[string]string)
}
@ -55,7 +56,9 @@ func (rd *RecordDatasource) Next(ctx context.Context, out map[string]string) (id
rd.applyMapping(rd.rowCache, out)
ident = out[rd.mapping.KeyField]
for _, k := range rd.mapping.KeyField {
ident = append(ident, out[k])
}
return
}
@ -66,32 +69,83 @@ func (rd *RecordDatasource) Reset(ctx context.Context) (err error) {
func (rd *RecordDatasource) applyMapping(in, out map[string]string) {
if len(rd.mapping.Mapping.m) == 0 {
if !rd.mapping.Defaultable {
return
}
for k, v := range in {
out[k] = v
}
return
}
if rd.mapping.Defaultable {
rd.applyMappingWithDefaults(in, out)
} else {
rd.applyMappingWoDefaults(in, out)
}
}
func (rd *RecordDatasource) applyMappingWithDefaults(in, out map[string]string) {
maps := make(map[string]mapEntry)
for k, v := range rd.mapping.Mapping.m {
maps[k] = v
}
for k, v := range in {
if m, ok := maps[k]; ok {
if m.Skip {
continue
}
out[m.Field] = v
} else {
out[k] = v
}
}
}
func (rd *RecordDatasource) applyMappingWoDefaults(in, out map[string]string) {
for _, m := range rd.mapping.Mapping.m {
if m.Skip {
continue
}
// @todo expand when needed (expressions and such)
out[m.Field] = in[m.Column]
}
}
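A rough usage sketch of the two mapping modes; it assumes it sits in the same package, with the field and type names exactly as referenced above, and the column/field names are made up.
func exampleApplyMapping() {
    in := map[string]string{"first_name": "Jane", "age": "42"}

    rd := &RecordDatasource{mapping: datasourceMapping{
        Defaultable: true,
        Mapping: fieldMapping{m: map[string]mapEntry{
            "first_name": {Column: "first_name", Field: "FirstName"},
        }},
    }}

    out := make(map[string]string)
    rd.applyMapping(in, out)
    // With Defaultable: out == {"FirstName": "Jane", "age": "42"}
    // Without Defaultable only the mapped column survives: {"FirstName": "Jane"}
}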
func (rd *RecordDatasource) ResolveRef(ref any) (out uint64, err error) {
r, err := cast.ToStringE(ref)
func (rd *RecordDatasource) ResolveRef(ref ...any) (out uint64, err error) {
idents, err := cast.ToStringSliceE(ref)
if err != nil {
return
}
out = rd.refToID[r]
for i, ident := range idents {
idents[i] = strings.Replace(ident, "-", "_", -1)
}
out = rd.refToID[strings.Join(idents, "-")]
return
}
func (rd *RecordDatasource) ResolveRefS(ref ...string) (out uint64, err error) {
aux := make([]any, len(ref))
for i, r := range ref {
aux[i] = r
}
return rd.ResolveRef(aux...)
}
// @todo this should be replaced by some smarter structure
func (rd *RecordDatasource) AddRef(id uint64, idents ...string) {
for i, ident := range idents {
idents[i] = strings.Replace(ident, "-", "_", -1)
}
rd.refToID[strings.Join(idents, "-")] = id
}
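A minimal sketch of how the composite keys behave, assuming it runs in the same package; the identifiers are made up.
func exampleCompositeRef() {
    rd := &RecordDatasource{refToID: make(map[string]uint64)}

    // Dashes inside each ident become underscores, then the parts are joined
    // with "-", so this is stored under "lead_module-row_1".
    rd.AddRef(42, "lead-module", "row-1")

    // ResolveRefS applies the same normalisation, so the lookup matches.
    id, _ := rd.ResolveRefS("lead-module", "row-1")
    _ = id // 42
}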
func (ar auxRecord) SetValue(name string, pos uint, value any) (err error) {
ar[name] = cast.ToString(value)
return

View File

@ -395,6 +395,8 @@ func (d StoreDecoder) decodeModuleField(ctx context.Context, s store.Storer, dl
},
}
refs = envoyx.MergeRefs(refs, d.decodeModuleFieldRefs(r))
var scope envoyx.Scope
scope = envoyx.Scope{
@ -524,6 +526,8 @@ func (d StoreDecoder) decodePage(ctx context.Context, s store.Storer, dl dal.Ful
},
}
refs = envoyx.MergeRefs(refs, d.decodePageRefs(r))
var scope envoyx.Scope
scope = envoyx.Scope{

View File

@ -87,38 +87,94 @@ func (d StoreDecoder) makeModuleFieldFilter(scope *envoyx.Node, refs map[string]
}
func (d StoreDecoder) extendedModuleDecoder(ctx context.Context, s store.Storer, dl dal.FullService, f types.ModuleFilter, base envoyx.NodeSet) (out envoyx.NodeSet, err error) {
var ff types.ModuleFieldSet
var ff envoyx.NodeSet
for _, b := range base {
mod := b.Resource.(*types.Module)
ff, _, err = store.SearchComposeModuleFields(ctx, s, types.ModuleFieldFilter{ModuleID: []uint64{b.Resource.GetID()}})
// Get all of the related module fields, append them to the output and
// the original module (so other code can have access to the related fields)
ff, err = d.decodeModuleField(ctx, s, dl, types.ModuleFieldFilter{ModuleID: []uint64{mod.ID}})
if err != nil {
return
}
// No need to assign them under the module since we're working with nodes now
for _, f := range ff {
out = append(out, &envoyx.Node{
Resource: f,
ResourceType: types.ModuleFieldResourceType,
Identifiers: envoyx.MakeIdentifiers(f.ID, f.Name),
References: envoyx.MergeRefs(b.References, map[string]envoyx.Ref{
"ModuleID": b.ToRef(),
}),
Scope: b.Scope,
f.Scope = b.Scope
f.References = envoyx.MergeRefs(b.References, map[string]envoyx.Ref{
"ModuleID": b.ToRef(),
})
mod.Fields = append(mod.Fields, f)
mod.Fields = append(mod.Fields, f.Resource.(*types.ModuleField))
}
out = append(out, ff...)
}
return
}
func (d StoreDecoder) decodeChartRefs(c *types.Chart) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref, len(c.Config.Reports))
for i, r := range c.Config.Reports {
if r.ModuleID == 0 {
continue
}
refs[fmt.Sprintf("Config.Reports.%d.ModuleID", i)] = envoyx.Ref{
ResourceType: types.ModuleResourceType,
Identifiers: envoyx.MakeIdentifiers(r.ModuleID),
}
}
return
}
func (d StoreDecoder) decodeModuleFieldRefs(c *types.ModuleField) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref, 1)
id := c.Options.UInt64("moduleID")
if id == 0 {
return
}
refs["Options.ModuleID"] = envoyx.Ref{
ResourceType: types.ModuleResourceType,
Identifiers: envoyx.MakeIdentifiers(id),
}
return
}
func (d StoreDecoder) decodePageRefs(p *types.Page) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref, len(p.Blocks)/2)
for index, b := range p.Blocks {
switch b.Kind {
case "RecordList":
refs = envoyx.MergeRefs(refs, getPageBlockRecordListRefs(b, index))
case "Automation":
refs = envoyx.MergeRefs(refs, getPageBlockAutomationRefs(b, index))
case "RecordOrganizer":
refs = envoyx.MergeRefs(refs, getPageBlockRecordOrganizerRefs(b, index))
case "Chart":
refs = envoyx.MergeRefs(refs, getPageBlockChartRefs(b, index))
case "Calendar":
refs = envoyx.MergeRefs(refs, getPageBlockCalendarRefs(b, index))
case "Metric":
refs = envoyx.MergeRefs(refs, getPageBlockMetricRefs(b, index))
case "Comment":
refs = envoyx.MergeRefs(refs, getPageBlockCommentRefs(b, index))
}
}
// @todo
return
}

View File

@ -3,6 +3,7 @@ package envoy
import (
"context"
"fmt"
"time"
"github.com/cortezaproject/corteza/server/compose/service"
"github.com/cortezaproject/corteza/server/compose/types"
@ -12,6 +13,9 @@ import (
)
func (e StoreEncoder) setChartDefaults(res *types.Chart) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -20,6 +24,9 @@ func (e StoreEncoder) validateChart(*types.Chart) (err error) {
}
func (e StoreEncoder) setModuleDefaults(res *types.Module) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -28,6 +35,25 @@ func (e StoreEncoder) validateModule(*types.Module) (err error) {
}
func (e StoreEncoder) setModuleFieldDefaults(res *types.ModuleField) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
// Update validator ID
maxValidatorID := uint64(0)
for _, v := range res.Expressions.Validators {
if v.ValidatorID > maxValidatorID {
maxValidatorID = v.ValidatorID
}
}
for _, v := range res.Expressions.Validators {
if v.ValidatorID == 0 {
v.ValidatorID = maxValidatorID + 1
maxValidatorID++
}
}
return
}
@ -36,6 +62,9 @@ func (e StoreEncoder) validateModuleField(*types.ModuleField) (err error) {
}
func (e StoreEncoder) setNamespaceDefaults(res *types.Namespace) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -44,6 +73,29 @@ func (e StoreEncoder) validateNamespace(*types.Namespace) (err error) {
}
func (e StoreEncoder) setPageDefaults(res *types.Page) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
if res.Title == "" {
res.Title = res.Handle
}
// Update pageblock ID
maxPageBlockID := uint64(0)
for _, b := range res.Blocks {
if b.BlockID > maxPageBlockID {
maxPageBlockID = b.BlockID
}
}
for _, b := range res.Blocks {
if b.BlockID == 0 {
b.BlockID = maxPageBlockID + 1
maxPageBlockID++
}
}
return
}

View File

@ -44,7 +44,7 @@ func (e StoreEncoder) prepareRecords(ctx context.Context, p envoyx.EncodeParams,
var (
aux = make(map[string]string)
more bool
ident string
ident []string
rec types.Record
)
@ -56,7 +56,7 @@ func (e StoreEncoder) prepareRecords(ctx context.Context, p envoyx.EncodeParams,
return
}
ds.refToID[ident] = id.Next()
ds.AddRef(id.Next(), ident...)
rec, err = e.auxToRecord(aux)
if err != nil {
@ -93,7 +93,7 @@ func (e StoreEncoder) encodeRecordDatasource(ctx context.Context, p envoyx.Encod
var (
auxRec = make(map[string]string)
more bool
ident string
ident []string
rec types.Record
nsNode *envoyx.Node
@ -173,7 +173,11 @@ func (e StoreEncoder) encodeRecordDatasource(ctx context.Context, p envoyx.Encod
rec.CreatedAt = time.Now()
// Values and refs
rec.ID = ds.refToID[ident]
rec.ID, err = ds.ResolveRefS(ident...)
if err != nil {
return err
}
for i, v := range rec.Values {
if getters[v.Name] == nil {
continue

View File

@ -231,6 +231,7 @@ func (d *auxYamlDoc) unmarshalChartNode(dctx documentContext, n *yaml.Node, meta
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -252,7 +253,7 @@ func (d *auxYamlDoc) unmarshalChartNode(dctx documentContext, n *yaml.Node, meta
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = unmarshalChartConfigNode(r, n)
auxRefs, auxIdents, err = d.unmarshalChartConfigNode(r, n)
if err != nil {
return err
}
@ -317,6 +318,8 @@ func (d *auxYamlDoc) unmarshalChartNode(dctx documentContext, n *yaml.Node, meta
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -415,6 +418,8 @@ func (d *auxYamlDoc) unmarshalChartNode(dctx documentContext, n *yaml.Node, meta
References: refs,
Scope: scope,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -528,6 +533,7 @@ func (d *auxYamlDoc) unmarshalModuleNode(dctx documentContext, n *yaml.Node, met
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -595,6 +601,8 @@ func (d *auxYamlDoc) unmarshalModuleNode(dctx documentContext, n *yaml.Node, met
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -659,7 +667,7 @@ func (d *auxYamlDoc) unmarshalModuleNode(dctx documentContext, n *yaml.Node, met
}
break
case "source", "datasource":
case "source", "datasource", "records":
if y7s.IsSeq(n) {
nestedNodes, err = d.unmarshalExtendedSourceSeq(dctx, n)
if err != nil {
@ -720,6 +728,8 @@ func (d *auxYamlDoc) unmarshalModuleNode(dctx documentContext, n *yaml.Node, met
References: refs,
Scope: scope,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -817,6 +827,7 @@ func (d *auxYamlDoc) unmarshalModuleFieldNode(dctx documentContext, n *yaml.Node
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -828,6 +839,44 @@ func (d *auxYamlDoc) unmarshalModuleFieldNode(dctx documentContext, n *yaml.Node
switch strings.ToLower(k.Value) {
case "defaultvalue":
// Handle custom node decoder
//
// The decoder may update the passed resource with arbitrary values
// as well as provide additional references and identifiers for the node.
var (
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = d.unmarshalModuleFieldDefaultValueNode(r, n)
if err != nil {
return err
}
refs = envoyx.MergeRefs(refs, auxRefs)
ii = ii.Merge(auxIdents)
break
case "expressions":
// Handle custom node decoder
//
// The decoder may update the passed resource with arbitrary values
// as well as provide additional references and identifiers for the node.
var (
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = d.unmarshalModuleFieldExpressionsNode(r, n)
if err != nil {
return err
}
refs = envoyx.MergeRefs(refs, auxRefs)
ii = ii.Merge(auxIdents)
break
case "id":
// Handle identifiers
err = y7s.DecodeScalar(n, "id", &auxNodeValue)
@ -871,7 +920,7 @@ func (d *auxYamlDoc) unmarshalModuleFieldNode(dctx documentContext, n *yaml.Node
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = unmarshalModuleFieldOptionsNode(r, n)
auxRefs, auxIdents, err = d.unmarshalModuleFieldOptionsNode(r, n)
if err != nil {
return err
}
@ -896,6 +945,8 @@ func (d *auxYamlDoc) unmarshalModuleFieldNode(dctx documentContext, n *yaml.Node
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -994,6 +1045,8 @@ func (d *auxYamlDoc) unmarshalModuleFieldNode(dctx documentContext, n *yaml.Node
References: refs,
Scope: scope,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -1091,6 +1144,7 @@ func (d *auxYamlDoc) unmarshalNamespaceNode(dctx documentContext, n *yaml.Node,
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -1138,6 +1192,8 @@ func (d *auxYamlDoc) unmarshalNamespaceNode(dctx documentContext, n *yaml.Node,
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -1292,6 +1348,8 @@ func (d *auxYamlDoc) unmarshalNamespaceNode(dctx documentContext, n *yaml.Node,
References: refs,
Scope: scope,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -1358,6 +1416,41 @@ func (d *auxYamlDoc) unmarshalPageMap(dctx documentContext, n *yaml.Node) (out e
return
}
// unmarshalExtendedPagesSeq unmarshals Pages when provided as a sequence node
func (d *auxYamlDoc) unmarshalExtendedPagesSeq(dctx documentContext, n *yaml.Node) (out envoyx.NodeSet, err error) {
var aux envoyx.NodeSet
err = y7s.EachSeq(n, func(n *yaml.Node) error {
aux, err = d.unmarshalPagesExtendedNode(dctx, n)
if err != nil {
return err
}
out = append(out, aux...)
return nil
})
return
}
// unmarshalExtendedPagesMap unmarshals Pages when provided as a mapping node
//
// When map encoded, the map key is used as a preset identifier.
// The identifier is passed to the node function as a meta node
func (d *auxYamlDoc) unmarshalExtendedPagesMap(dctx documentContext, n *yaml.Node) (out envoyx.NodeSet, err error) {
var aux envoyx.NodeSet
err = y7s.EachMap(n, func(k, n *yaml.Node) error {
aux, err = d.unmarshalPagesExtendedNode(dctx, n, k)
if err != nil {
return err
}
out = append(out, aux...)
return nil
})
return
}
// unmarshalPageNode is a cookie-cutter function to unmarshal
// the yaml node into the corresponding Corteza type & Node
func (d *auxYamlDoc) unmarshalPageNode(dctx documentContext, n *yaml.Node, meta ...*yaml.Node) (out envoyx.NodeSet, err error) {
@ -1389,6 +1482,7 @@ func (d *auxYamlDoc) unmarshalPageNode(dctx documentContext, n *yaml.Node, meta
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -1400,6 +1494,25 @@ func (d *auxYamlDoc) unmarshalPageNode(dctx documentContext, n *yaml.Node, meta
switch strings.ToLower(k.Value) {
case "blocks":
// Handle custom node decoder
//
// The decoder may update the passed resource with arbitrary values
// as well as provide additional references and identifiers for the node.
var (
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = d.unmarshalPageBlocksNode(r, n)
if err != nil {
return err
}
refs = envoyx.MergeRefs(refs, auxRefs)
ii = ii.Merge(auxIdents)
break
case "handle":
// Handle identifiers
err = y7s.DecodeScalar(n, "handle", &auxNodeValue)
@ -1446,7 +1559,14 @@ func (d *auxYamlDoc) unmarshalPageNode(dctx documentContext, n *yaml.Node, meta
break
case "selfid":
case "selfid", "parent":
// Handle field alias
//
// @todo consider adding an is empty check before overwriting
err = y7s.DecodeScalar(n, "selfID", &r.SelfID)
if err != nil {
return err
}
// Handle references
err = y7s.DecodeScalar(n, "selfID", &auxNodeValue)
if err != nil {
@ -1459,6 +1579,17 @@ func (d *auxYamlDoc) unmarshalPageNode(dctx documentContext, n *yaml.Node, meta
break
case "weight", "order":
// Handle field alias
//
// @todo consider adding an is empty check before overwriting
err = y7s.DecodeScalar(n, "weight", &r.Weight)
if err != nil {
return err
}
break
// Handle RBAC rules
case "allow":
auxOut, err = unmarshalAllowNode(n)
@ -1475,6 +1606,8 @@ func (d *auxYamlDoc) unmarshalPageNode(dctx documentContext, n *yaml.Node, meta
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -1525,6 +1658,19 @@ func (d *auxYamlDoc) unmarshalPageNode(dctx documentContext, n *yaml.Node, meta
switch strings.ToLower(k.Value) {
case "children", "pages":
if y7s.IsSeq(n) {
nestedNodes, err = d.unmarshalExtendedPagesSeq(dctx, n)
if err != nil {
return err
}
} else {
nestedNodes, err = d.unmarshalExtendedPagesMap(dctx, n)
if err != nil {
return err
}
}
break
}
// Iterate nested nodes and update their reference to the current resource
@ -1573,6 +1719,8 @@ func (d *auxYamlDoc) unmarshalPageNode(dctx documentContext, n *yaml.Node, meta
References: refs,
Scope: scope,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -1716,6 +1864,25 @@ func unmarshalLocaleNode(n *yaml.Node) (out envoyx.NodeSet, err error) {
})
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Envoy config unmarshal logic
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d *auxYamlDoc) decodeEnvoyConfig(n *yaml.Node) (out envoyx.NodeConfig) {
y7s.EachMap(n, func(k, v *yaml.Node) (err error) {
switch strings.ToLower(k.Value) {
case "skipif", "skip":
return y7s.DecodeScalar(v, "decode skip if", &out.SkipIf)
case "onexisting", "mergealg":
out.MergeAlg = envoyx.CastMergeAlg(v.Value)
}
return nil
})
return
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //

View File

@ -4,6 +4,7 @@ import (
"fmt"
"strings"
automationTypes "github.com/cortezaproject/corteza/server/automation/types"
"github.com/cortezaproject/corteza/server/compose/types"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/pkg/y7s"
@ -22,10 +23,16 @@ type (
}
datasourceMapping struct {
SourceIdent string `yaml:"source"`
KeyField string `yaml:"key"`
SourceIdent string `yaml:"source"`
KeyField []string `yaml:"key"`
References map[string]string
Scope map[string]string
// Defaultable indicates whether the mapping should keep values whose
// ident is not explicitly mapped.
//
// When true, such values are copied through under their original ident.
Defaultable bool `yaml:"defaultable"`
Mapping fieldMapping
}
)
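For orientation, an assumed YAML shape for such a mapping under a module's records/source key; the keys are inferred from the struct tags above and the decoder later in this file, and the values are made up.
const datasourceMappingExample = `
records:
  - source: leads
    key: [extID, region]
    defaultable: true
`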
@ -34,7 +41,7 @@ const (
ComposeRecordDatasourceAuxType = "corteza::compose:record-datasource"
)
func unmarshalChartConfigNode(r *types.Chart, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
func (d *auxYamlDoc) unmarshalChartConfigNode(r *types.Chart, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
err = y7s.EachMap(n, func(k, v *yaml.Node) error {
if k.Value != "reports" {
return nil
@ -49,7 +56,7 @@ func unmarshalChartConfigNode(r *types.Chart, n *yaml.Node) (refs map[string]env
err = y7s.EachSeq(v, func(c *yaml.Node) error {
i++
auxRefs, auxIdents, err = unmarshalChartConfigReportNode(r, c, i)
auxRefs, auxIdents, err = d.unmarshalChartConfigReportNode(r, c, i)
refs = envoyx.MergeRefs(refs, auxRefs)
idents = idents.Merge(auxIdents)
return err
@ -58,7 +65,7 @@ func unmarshalChartConfigNode(r *types.Chart, n *yaml.Node) (refs map[string]env
return err
}
} else {
refs, idents, err = unmarshalChartConfigReportNode(r, v, 0)
refs, idents, err = d.unmarshalChartConfigReportNode(r, v, 0)
return err
}
return nil
@ -67,7 +74,7 @@ func unmarshalChartConfigNode(r *types.Chart, n *yaml.Node) (refs map[string]env
return
}
func unmarshalChartConfigReportNode(r *types.Chart, n *yaml.Node, index int) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
func (d *auxYamlDoc) unmarshalChartConfigReportNode(r *types.Chart, n *yaml.Node, index int) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
err = y7s.EachMap(n, func(k, v *yaml.Node) error {
switch strings.ToLower(k.Value) {
case "module", "mod", "moduleid", "module_id":
@ -85,7 +92,153 @@ func unmarshalChartConfigReportNode(r *types.Chart, n *yaml.Node, index int) (re
return
}
func unmarshalModuleFieldOptionsNode(r *types.ModuleField, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
func (d *auxYamlDoc) unmarshalPageBlocksNode(r *types.Page, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
refs = map[string]envoyx.Ref{}
for index, b := range r.Blocks {
switch b.Kind {
case "RecordList":
refs = envoyx.MergeRefs(refs, getPageBlockRecordListRefs(b, index))
case "Automation":
refs = envoyx.MergeRefs(refs, getPageBlockAutomationRefs(b, index))
case "RecordOrganizer":
refs = envoyx.MergeRefs(refs, getPageBlockRecordOrganizerRefs(b, index))
case "Chart":
refs = envoyx.MergeRefs(refs, getPageBlockChartRefs(b, index))
case "Calendar":
refs = envoyx.MergeRefs(refs, getPageBlockCalendarRefs(b, index))
case "Metric":
refs = envoyx.MergeRefs(refs, getPageBlockMetricRefs(b, index))
case "Comment":
refs = envoyx.MergeRefs(refs, getPageBlockCommentRefs(b, index))
}
}
return
}
func getPageBlockRecordListRefs(b types.PageBlock, index int) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref)
id := optString(b.Options, "module", "moduleID")
if id == "" || id == "0" {
return
}
refs[fmt.Sprintf("Blocks.%d.Options.ModuleID", index)] = envoyx.Ref{
ResourceType: types.ModuleResourceType,
Identifiers: envoyx.MakeIdentifiers(id),
}
return
}
func getPageBlockChartRefs(b types.PageBlock, index int) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref)
id := optString(b.Options, "chart", "chartID")
if id == "" || id == "0" {
return
}
refs[fmt.Sprintf("Blocks.%d.Options.ChartID", index)] = envoyx.Ref{
ResourceType: types.ChartResourceType,
Identifiers: envoyx.MakeIdentifiers(id),
}
return
}
func getPageBlockCalendarRefs(b types.PageBlock, index int) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref)
ff, _ := b.Options["feeds"].([]interface{})
for j, f := range ff {
feed, _ := f.(map[string]interface{})
opt, _ := (feed["options"]).(map[string]interface{})
id := optString(opt, "module", "moduleID")
if id == "" || id == "0" {
return
}
refs[fmt.Sprintf("Blocks.%d.Options.feeds.%d.ModuleID", index, j)] = envoyx.Ref{
ResourceType: types.ModuleResourceType,
Identifiers: envoyx.MakeIdentifiers(id),
}
}
return
}
func getPageBlockMetricRefs(b types.PageBlock, index int) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref)
mm, _ := b.Options["metrics"].([]interface{})
for j, m := range mm {
mops, _ := m.(map[string]interface{})
id := optString(mops, "module", "moduleID")
if id == "" || id == "0" {
return
}
refs[fmt.Sprintf("Blocks.%d.Options.metrics.%d.ModuleID", index, j)] = envoyx.Ref{
ResourceType: types.ModuleResourceType,
Identifiers: envoyx.MakeIdentifiers(id),
}
}
return
}
func getPageBlockCommentRefs(b types.PageBlock, index int) (refs map[string]envoyx.Ref) {
// Same refs as a record list block
return getPageBlockRecordListRefs(b, index)
}
func getPageBlockRecordOrganizerRefs(b types.PageBlock, index int) (refs map[string]envoyx.Ref) {
// Same refs as a record list block
return getPageBlockRecordListRefs(b, index)
}
func getPageBlockAutomationRefs(b types.PageBlock, index int) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref)
bb, _ := b.Options["buttons"].([]interface{})
for _, b := range bb {
button, _ := b.(map[string]interface{})
id := optString(button, "workflow", "workflowID")
if id == "" || id == "0" {
return
}
refs[fmt.Sprintf("Blocks.%d.Options.WorkflowID", index)] = envoyx.Ref{
ResourceType: automationTypes.WorkflowResourceType,
Identifiers: envoyx.MakeIdentifiers(id),
}
}
return
}
func optString(opt map[string]interface{}, kk ...string) string {
for _, k := range kk {
if vr, has := opt[k]; has {
v, _ := vr.(string)
return v
}
}
return ""
}
func (d *auxYamlDoc) unmarshalModuleFieldOptionsNode(r *types.ModuleField, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
refs = make(map[string]envoyx.Ref)
err = y7s.EachMap(n, func(k, v *yaml.Node) error {
@ -109,6 +262,83 @@ func unmarshalModuleFieldOptionsNode(r *types.ModuleField, n *yaml.Node) (refs m
return
}
func (d *auxYamlDoc) unmarshalModuleFieldDefaultValueNode(r *types.ModuleField, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
var rvs = types.RecordValueSet{}
switch n.Kind {
case yaml.ScalarNode:
rvs = rvs.Set(&types.RecordValue{Value: n.Value})
case yaml.SequenceNode:
_ = y7s.EachSeq(n, func(v *yaml.Node) error {
rvs = rvs.Set(&types.RecordValue{Value: v.Value, Place: uint(len(rvs))})
return nil
})
}
r.DefaultValue = rvs
return
}
func (d *auxYamlDoc) unmarshalModuleFieldExpressionsNode(r *types.ModuleField, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
err = y7s.EachMap(n, func(k *yaml.Node, v *yaml.Node) error {
switch k.Value {
case "sanitizer":
var aux string
err = y7s.DecodeScalar(v, "sanitizer", &aux)
if err != nil {
return err
}
r.Expressions.Sanitizers = append(r.Expressions.Sanitizers, aux)
case "sanitizers":
return y7s.EachSeq(v, func(san *yaml.Node) error {
r.Expressions.Sanitizers = append(r.Expressions.Sanitizers, san.Value)
return nil
})
case "validator":
var aux types.ModuleFieldValidator
err = v.Decode(&aux)
if err != nil {
return err
}
r.Expressions.Validators = append(r.Expressions.Validators, aux)
case "validators":
return y7s.Each(v, func(k *yaml.Node, v *yaml.Node) error {
var aux types.ModuleFieldValidator
if y7s.IsKind(v, yaml.MappingNode) {
err = v.Decode(&aux)
if err != nil {
return err
}
} else {
aux.Test = k.Value
aux.Error = v.Value
}
r.Expressions.Validators = append(r.Expressions.Validators, aux)
return nil
})
case "formatter":
r.Expressions.Formatters = append(r.Expressions.Formatters, v.Value)
return nil
case "formatters":
return y7s.EachSeq(v, func(san *yaml.Node) error {
r.Expressions.Formatters = append(r.Expressions.Formatters, san.Value)
return nil
})
}
return nil
})
return
}
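An assumed example of the expression forms the decoder above accepts; both the singular and plural keys are handled, and the expressions themselves are made up.
const moduleFieldExpressionsExample = `
expressions:
  sanitizers:
    - trim(value)
  validators:
    value != "": "Value is required"
  formatters:
    - toUpper(value)
`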
func (d *auxYamlDoc) unmarshalYAML(k string, n *yaml.Node) (out envoyx.NodeSet, err error) {
return
}
@ -152,6 +382,10 @@ func (d *auxYamlDoc) postProcessNestedModuleNodes(nn envoyx.NodeSet) (out envoyx
return
}
func (d *auxYamlDoc) unmarshalPagesExtendedNode(dctx documentContext, n *yaml.Node, meta ...*yaml.Node) (out envoyx.NodeSet, err error) {
return d.unmarshalPageNode(dctx, n, meta...)
}
func (d *auxYamlDoc) unmarshalSourceExtendedNode(dctx documentContext, n *yaml.Node, meta ...*yaml.Node) (out envoyx.NodeSet, err error) {
var r datasourceMapping
@ -163,6 +397,35 @@ func (d *auxYamlDoc) unmarshalSourceExtendedNode(dctx documentContext, n *yaml.N
// and then unmarshal into the resource while omitting errors.
n.Decode(&r)
err = y7s.EachMap(n, func(k, n *yaml.Node) error {
var auxNodeValue any
_ = auxNodeValue
switch strings.ToLower(k.Value) {
case "origin", "from":
err = y7s.DecodeScalar(n, "origin", &r.SourceIdent)
if err != nil {
return err
}
case "key", "index", "pk":
if !y7s.IsKind(n, yaml.SequenceNode) {
r.KeyField = []string{n.Value}
} else {
r.KeyField = make([]string, 0, 3)
y7s.EachSeq(n, func(n *yaml.Node) error {
r.KeyField = append(r.KeyField, n.Value)
return nil
})
}
case "map":
return n.Decode(&r.Mapping)
}
return nil
})
// @todo for now we only support record datasources; extend when needed
auxN := &envoyx.Node{
Datasource: &RecordDatasource{

View File

@ -133,7 +133,10 @@ func (e YamlEncoder) encodeChart(ctx context.Context, p envoyx.EncodeParams, nod
res := node.Resource.(*types.Chart)
// Pre-compute some map values so we can omit error checking when encoding yaml nodes
auxConfig, err := e.encodeChartConfigC(ctx, p, tt, node, res, res.Config)
if err != nil {
return
}
auxCreatedAt, err := e.encodeTimestamp(p, res.CreatedAt)
if err != nil {
return
@ -153,11 +156,11 @@ func (e YamlEncoder) encodeChart(ctx context.Context, p envoyx.EncodeParams, nod
}
out, err = y7s.AddMap(out,
"config", res.Config,
"config", auxConfig,
"createdAt", auxCreatedAt,
"deletedAt", auxDeletedAt,
"handle", res.Handle,
"id", res.ID,
"chartID", res.ID,
"name", res.Name,
"namespaceID", auxNamespaceID,
"updatedAt", auxUpdatedAt,
@ -208,6 +211,10 @@ func (e YamlEncoder) encodeModule(ctx context.Context, p envoyx.EncodeParams, no
if err != nil {
return
}
auxFields, err := e.encodeModuleFieldsC(ctx, p, tt, node, res, res.Fields)
if err != nil {
return
}
auxNamespaceID, err := e.encodeRef(p, res.NamespaceID, "NamespaceID", node, tt)
if err != nil {
@ -219,12 +226,12 @@ func (e YamlEncoder) encodeModule(ctx context.Context, p envoyx.EncodeParams, no
}
out, err = y7s.AddMap(out,
"config", e.encodeModuleConfig(p, res.Config),
"config", res.Config,
"createdAt", auxCreatedAt,
"deletedAt", auxDeletedAt,
"fields", res.Fields,
"fields", auxFields,
"handle", res.Handle,
"id", res.ID,
"moduleID", res.ID,
"meta", res.Meta,
"name", res.Name,
"namespaceID", auxNamespaceID,
@ -294,6 +301,11 @@ func (e YamlEncoder) encodeModuleField(ctx context.Context, p envoyx.EncodeParam
return
}
auxOptions, err := e.encodeModuleFieldOptionsC(ctx, p, tt, node, res, res.Options)
if err != nil {
return
}
auxUpdatedAt, err := e.encodeTimestampNil(p, res.UpdatedAt)
if err != nil {
return
@ -311,7 +323,7 @@ func (e YamlEncoder) encodeModuleField(ctx context.Context, p envoyx.EncodeParam
"moduleID", auxModuleID,
"multi", res.Multi,
"name", res.Name,
"options", res.Options,
"options", auxOptions,
"place", res.Place,
"required", res.Required,
"updatedAt", auxUpdatedAt,
@ -447,6 +459,10 @@ func (e YamlEncoder) encodePage(ctx context.Context, p envoyx.EncodeParams, node
res := node.Resource.(*types.Page)
// Pre-compute some map values so we can omit error checking when encoding yaml nodes
auxBlocks, err := e.encodePageBlocksC(ctx, p, tt, node, res, res.Blocks)
if err != nil {
return
}
auxCreatedAt, err := e.encodeTimestamp(p, res.CreatedAt)
if err != nil {
@ -476,7 +492,7 @@ func (e YamlEncoder) encodePage(ctx context.Context, p envoyx.EncodeParams, node
}
out, err = y7s.AddMap(out,
"blocks", res.Blocks,
"blocks", auxBlocks,
"children", res.Children,
"config", res.Config,
"createdAt", auxCreatedAt,

View File

@ -1,13 +1,242 @@
package envoy
import (
"context"
"fmt"
"github.com/cortezaproject/corteza/server/compose/types"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/pkg/y7s"
)
func (e YamlEncoder) encodeModuleConfig(p envoyx.EncodeParams, cfg types.ModuleConfig) any {
func (e YamlEncoder) encodeChartConfigC(ctx context.Context, p envoyx.EncodeParams, tt envoyx.Traverser, n *envoyx.Node, chart *types.Chart, cfg types.ChartConfig) (_ any, err error) {
// @todo...
reports, _ := y7s.MakeSeq()
return nil
for i, r := range cfg.Reports {
modRef, ok := n.References[fmt.Sprintf("Config.Reports.%d.ModuleID", i)]
if !ok {
continue
}
mNode := tt.ParentForRef(n, modRef)
if mNode == nil {
err = fmt.Errorf("module for ref not found")
return
}
r, err := y7s.MakeMap(
"filter", r.Filter,
"module", mNode.Identifiers.FriendlyIdentifier(),
"metrics", r.Metrics,
"dimensions", r.Dimensions,
"yAxis", r.YAxis,
)
if err != nil {
return nil, err
}
reports, err = y7s.AddSeq(reports, r)
if err != nil {
return nil, err
}
}
return reports, nil
}
func (e YamlEncoder) encodeModuleFieldsC(ctx context.Context, p envoyx.EncodeParams, tt envoyx.Traverser, n *envoyx.Node, mod *types.Module, fields types.ModuleFieldSet) (_ any, err error) {
fn := tt.ChildrenForResourceType(n, types.ModuleFieldResourceType)
out, err := e.encodeModuleFields(ctx, p, fn, tt)
return out, err
}
func (e YamlEncoder) encodeModuleFieldOptionsC(ctx context.Context, p envoyx.EncodeParams, tt envoyx.Traverser, n *envoyx.Node, f *types.ModuleField, opt types.ModuleFieldOptions) (_ any, err error) {
if opt == nil {
opt = make(types.ModuleFieldOptions)
}
switch f.Kind {
case "Record":
mNode := tt.ParentForRef(n, n.References["Options.ModuleID"])
if mNode == nil {
err = fmt.Errorf("module for ref not found")
return
}
opt["module"] = mNode.Identifiers.FriendlyIdentifier()
delete(opt, "moduleID")
case "User":
aux := make([]string, 0, 2)
for i := range opt.Strings("roles") {
rNode := tt.ParentForRef(n, n.References[fmt.Sprintf("Options.RoleID.%d", i)])
if rNode == nil {
err = fmt.Errorf("role for ref not found")
return
}
aux = append(aux, rNode.Identifiers.FriendlyIdentifier())
}
opt["roles"] = aux
delete(opt, "role")
delete(opt, "roleID")
}
nopt, _ := y7s.MakeMap()
for k, v := range opt {
nopt, err = y7s.AddMap(nopt, k, v)
if err != nil {
return nil, err
}
}
return nopt, nil
}
func (e YamlEncoder) encodePageBlocksC(ctx context.Context, p envoyx.EncodeParams, tt envoyx.Traverser, n *envoyx.Node, pg *types.Page, bb types.PageBlocks) (_ any, err error) {
out, _ := y7s.MakeSeq()
var aux any
for i, b := range pg.Blocks {
aux, err = e.encodePageBlockC(ctx, p, tt, n, pg, i, b)
if err != nil {
return
}
out, err = y7s.AddSeq(out, aux)
if err != nil {
return
}
}
return out, nil
}
func (e YamlEncoder) encodePageBlockC(ctx context.Context, p envoyx.EncodeParams, tt envoyx.Traverser, n *envoyx.Node, pg *types.Page, index int, b types.PageBlock) (_ any, err error) {
switch b.Kind {
case "RecordList":
b = e.cleanupPageblockRecordList(b)
node := tt.ParentForRef(n, n.References[fmt.Sprintf("Blocks.%d.Options.ModuleID", index)])
if node == nil {
err = fmt.Errorf("module for ref not found")
return
}
b.Options["module"] = node.Identifiers.FriendlyIdentifier()
delete(b.Options, "moduleID")
break
case "RecordOrganizer":
node := tt.ParentForRef(n, n.References[fmt.Sprintf("Blocks.%d.Options.ModuleID", index)])
if node == nil {
err = fmt.Errorf("module for ref not found")
return
}
b.Options["module"] = node.Identifiers.FriendlyIdentifier()
delete(b.Options, "moduleID")
break
case "Chart":
node := tt.ParentForRef(n, n.References[fmt.Sprintf("Blocks.%d.Options.ChartID", index)])
if node == nil {
err = fmt.Errorf("chart for ref not found")
return
}
b.Options["chart"] = node.Identifiers.FriendlyIdentifier()
delete(b.Options, "chartID")
break
case "Calendar":
ff, _ := b.Options["feeds"].([]interface{})
for i, f := range ff {
feed, _ := f.(map[string]interface{})
fOpts, _ := (feed["options"]).(map[string]interface{})
node := tt.ParentForRef(n, n.References[fmt.Sprintf("Blocks.%d.Options.feeds.%d.ModuleID", index, i)])
if node == nil {
err = fmt.Errorf("module for ref not found")
return
}
fOpts["module"] = node.Identifiers.FriendlyIdentifier()
delete(fOpts, "moduleID")
}
break
case "Automation":
bb, _ := b.Options["buttons"].([]interface{})
for i, b := range bb {
button, _ := b.(map[string]interface{})
if _, has := button["workflowID"]; !has {
continue
}
node := tt.ParentForRef(n, n.References[fmt.Sprintf("Blocks.%d.Options.buttons.%d.WorkflowID", index, i)])
if node == nil {
err = fmt.Errorf("chart for ref not found")
return
}
button["workflow"] = node.Identifiers.FriendlyIdentifier()
delete(button, "workflowID")
i++
}
break
case "Metric":
mm, _ := b.Options["metrics"].([]interface{})
for i, m := range mm {
node := tt.ParentForRef(n, n.References[fmt.Sprintf("Blocks.%d.Options.metrics.%d.ModuleID", index, i)])
if node == nil {
err = fmt.Errorf("chart for ref not found")
return
}
mops, _ := m.(map[string]interface{})
mops["module"] = node.Identifiers.FriendlyIdentifier()
delete(mops, "moduleID")
}
break
case "Comment":
node := tt.ParentForRef(n, n.References[fmt.Sprintf("Blocks.%d.Options.ModuleID", index)])
if node == nil {
err = fmt.Errorf("module for ref not found")
return
}
b.Options["module"] = node.Identifiers.FriendlyIdentifier()
delete(b.Options, "moduleID")
break
}
return
}
func (e YamlEncoder) cleanupPageblockRecordList(b types.PageBlock) (_ types.PageBlock) {
rawFF, has := b.Options["fields"]
if !has {
return b
}
ff, ok := rawFF.([]interface{})
if !ok {
return b
}
retFF := make([]interface{}, 0, len(ff))
for _, rawF := range ff {
switch c := rawF.(type) {
case string:
retFF = append(retFF, map[string]interface{}{"name": c})
case map[string]interface{}, map[string]string:
retFF = append(retFF, c)
default:
retFF = append(retFF, rawF)
}
}
b.Options["fields"] = retFF
return b
}
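A small sketch of what the cleanup does to a RecordList block's fields option, assuming the same package; the option values are made up. Plain strings are lifted into {"name": ...} maps so the encoded YAML stays uniform.
func exampleCleanupRecordList() {
    b := types.PageBlock{Options: map[string]interface{}{
        "fields": []interface{}{"title", map[string]interface{}{"name": "status"}},
    }}

    b = YamlEncoder{}.cleanupPageblockRecordList(b)
    // b.Options["fields"] == []interface{}{
    //     map[string]interface{}{"name": "title"},
    //     map[string]interface{}{"name": "status"},
    // }
}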

View File

@ -13,7 +13,13 @@ module: {
model: {
ident: "compose_module"
attributes: {
id: schema.IdField
id: schema.IdField & {
envoy: {
yaml: {
identKeyEncode: "moduleID"
}
}
}
namespace_id: {
ident: "namespaceID",
goType: "uint64",
@ -42,17 +48,17 @@ module: {
dal: { type: "JSON", defaultEmptyObject: true }
omitSetter: true
omitGetter: true
envoy: {
yaml: {
customEncoder: true
}
}
}
fields: {
goType: "types.ModuleFieldSet",
store: false
omitSetter: true
omitGetter: true
envoy: {
yaml: {
customEncoder: true
}
}
}
created_at: schema.SortableTimestampNowField
updated_at: schema.SortableTimestampNilField
@ -94,7 +100,8 @@ module: {
extendedResourceDecoders: [{
ident: "source"
expIdent: "Source"
identKeys: ["source", "datasource"]
// @deprecated "records" is the key the old version used
identKeys: ["source", "datasource", "records"]
supportMappedInput: false
}]
}

View File

@ -42,6 +42,7 @@ moduleField: {
envoy: {
yaml: {
customDecoder: true
customEncoder: true
}
}
}
@ -78,12 +79,22 @@ moduleField: {
dal: { type: "JSON", defaultEmptyObject: true }
omitSetter: true
omitGetter: true
envoy: {
yaml: {
customDecoder: true
}
}
}
expressions: {
goType: "types.ModuleFieldExpr"
dal: { type: "JSON", defaultEmptyObject: true }
omitSetter: true
omitGetter: true
envoy: {
yaml: {
customDecoder: true
}
}
}
created_at: schema.SortableTimestampNowField
updated_at: schema.SortableTimestampNilField
@ -127,6 +138,7 @@ moduleField: {
store: {
handleField: ""
customFilterBuilder: true
extendedRefDecoder: true
}
}

View File

@ -28,6 +28,9 @@ page: {
store: {
filterRefField: "ParentID"
}
yaml: {
identKeyAlias: ["parent"]
}
}
}
module_id: {
@ -54,6 +57,12 @@ page: {
dal: { type: "JSON", defaultEmptyObject: true }
omitSetter: true
omitGetter: true
envoy: {
yaml: {
customDecoder: true
customEncoder: true
}
}
}
children: {
goType: "types.PageSet", store: false
@ -67,6 +76,11 @@ page: {
weight: {
goType: "int", sortable: true
dal: { type: "Number", default: 0, meta: { "rdbms:type": "integer" } }
envoy: {
yaml: {
identKeyAlias: ["order"]
}
}
}
description: {
goType: "string"
@ -113,9 +127,18 @@ page: {
supportMappedInput: true
mappedField: "Handle"
identKeyAlias: ["pages", "pg"]
extendedResourceDecoders: [{
ident: "pages"
expIdent: "Pages"
identKeys: ["children", "pages"]
supportMappedInput: true
mappedField: "Handle"
}]
}
store: {
extendedFilterBuilder: true
extendedRefDecoder: true
}
}

View File

@ -10,7 +10,7 @@ type (
}
Datasource interface {
Next(ctx context.Context, out map[string]string) (ident string, more bool, err error)
Next(ctx context.Context, out map[string]string) (ident []string, more bool, err error)
Reset(ctx context.Context) error
SetProvider(Provider) bool
}

View File

@ -3,6 +3,7 @@ package envoyx
import (
"context"
"fmt"
"strings"
)
type (
@ -213,3 +214,16 @@ func (p EncodeParams) validate() (err error) {
return
}
func CastMergeAlg(v string) (mergeAlg mergeAlg) {
switch strings.ToLower(v) {
case "replace", "mergeleft":
mergeAlg = OnConflictReplace
case "skip", "mergeright":
mergeAlg = OnConflictSkip
case "panic", "error":
mergeAlg = OnConflictPanic
}
return
}
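A short usage sketch, assuming it sits in package envoyx alongside the OnConflict* constants referenced above.
func exampleCastMergeAlg() {
    _ = CastMergeAlg("replace") // OnConflictReplace, alias "mergeleft"
    _ = CastMergeAlg("skip")    // OnConflictSkip, alias "mergeright"
    _ = CastMergeAlg("panic")   // OnConflictPanic, alias "error"
    // anything else falls through to the zero mergeAlg
}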

View File

@ -19,6 +19,11 @@ type (
// Placeholders are resources which were added to help resolve missing deps
Placeholder bool
Config NodeConfig
}
NodeConfig struct {
MergeAlg mergeAlg
SkipIf string
}
NodeSet []*Node

View File

@ -65,9 +65,21 @@ apigw_route: {
supportMappedInput: true
mappedField: "Endpoint"
identKeyAlias: ["endpoints"]
extendedResourceDecoders: [{
ident: "filters"
expIdent: "Filters"
identKeys: ["filters"]
supportMappedInput: false
}]
extendedResourceEncoders: [{
ident: "apigwFilter"
expIdent: "ApigwFilter"
identKey: "filters"
}]
}
store: {
handleField: "Endpoint"
extendedDecoder: true
}
}

View File

@ -74,7 +74,6 @@ application: {
identKeyAlias: ["apps"]
}
store: {
extendedRefDecoder: true
handleField: "Name"
}
}

View File

@ -49,6 +49,12 @@ auth_client: {
dal: { type: "JSON", defaultEmptyObject: true }
omitSetter: true
omitGetter: true
envoy: {
yaml: {
customDecoder: true
customEncoder: true
}
}
}
owned_by: schema.AttributeUserRef
created_at: schema.SortableTimestampNowField
@ -92,7 +98,9 @@ auth_client: {
mappedField: "Handle"
identKeyAlias: ["authclients"]
}
store: {}
store: {
extendedRefDecoder: true
}
}
rbac: {

View File

@ -63,7 +63,9 @@ dal_connection: {
mappedField: "Handle"
identKeyAlias: ["connection", "connections"]
}
store: {}
store: {
extendedRefDecoder: true
}
}
rbac: {

View File

@ -84,6 +84,12 @@ func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullSer
if err != nil {
return nil, err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return nil, fmt.Errorf("invalid reference %v", ref)
}
@ -223,6 +229,16 @@ func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullSer
}
out = append(out, aux...)
default:
aux, err = d.extendDecoder(ctx, s, dl, wf.rt, refNodes[i], wf.f)
if err != nil {
return
}
for _, a := range aux {
a.Identifiers = a.Identifiers.Merge(wf.f.Identifiers)
a.References = envoyx.MergeRefs(a.References, refRefs[i])
}
out = append(out, aux...)
}
}
@ -255,8 +271,6 @@ func (d StoreDecoder) decodeApplication(ctx context.Context, s store.Storer, dl
},
}
refs = envoyx.MergeRefs(refs, d.decodeApplicationRefs(r))
var scope envoyx.Scope
out = append(out, &envoyx.Node{
@ -349,6 +363,12 @@ func (d StoreDecoder) decodeApigwRoute(ctx context.Context, s store.Storer, dl d
})
}
aux, err := d.extendedApigwRouteDecoder(ctx, s, dl, f, out)
if err != nil {
return
}
out = append(out, aux...)
return
}
@ -494,6 +514,8 @@ func (d StoreDecoder) decodeAuthClient(ctx context.Context, s store.Storer, dl d
},
}
refs = envoyx.MergeRefs(refs, d.decodeAuthClientRefs(r))
var scope envoyx.Scope
scope = envoyx.Scope{
@ -919,6 +941,8 @@ func (d StoreDecoder) decodeDalConnection(ctx context.Context, s store.Storer, d
},
}
refs = envoyx.MergeRefs(refs, d.decodeDalConnectionRefs(r))
var scope envoyx.Scope
out = append(out, &envoyx.Node{

View File

@ -1,12 +1,73 @@
package envoy
import (
"context"
"fmt"
"github.com/cortezaproject/corteza/server/pkg/dal"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/store"
"github.com/cortezaproject/corteza/server/system/types"
)
func (d StoreDecoder) decodeApplicationRefs(c *types.Application) (refs map[string]envoyx.Ref) {
// @todo
func (d StoreDecoder) extendDecoder(ctx context.Context, s store.Storer, dl dal.FullService, rt string, nodes map[string]*envoyx.Node, f envoyx.ResourceFilter) (out envoyx.NodeSet, err error) {
return
}
func (d StoreDecoder) extendedApigwRouteDecoder(ctx context.Context, s store.Storer, dl dal.FullService, f types.ApigwRouteFilter, base envoyx.NodeSet) (out envoyx.NodeSet, err error) {
for _, b := range base {
route := b.Resource.(*types.ApigwRoute)
filters, err := d.decodeApigwFilter(ctx, s, dl, types.ApigwFilterFilter{
RouteID: route.ID,
})
if err != nil {
return nil, err
}
out = append(out, filters...)
}
return
}
func (d StoreDecoder) decodeAuthClientRefs(c *types.AuthClient) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref, 4)
if c.Security.ImpersonateUser > 0 {
refs["Security.ImpersonateUser"] = envoyx.Ref{
ResourceType: types.UserResourceType,
Identifiers: envoyx.MakeIdentifiers(c.Security.ImpersonateUser),
}
}
d.roleSliceToRefs(refs, "Security.PermittedRoles", c.Security.PermittedRoles)
d.roleSliceToRefs(refs, "Security.ProhibitedRoles", c.Security.ProhibitedRoles)
d.roleSliceToRefs(refs, "Security.ForcedRoles", c.Security.ForcedRoles)
return
}
func (d StoreDecoder) decodeDalConnectionRefs(c *types.DalConnection) (refs map[string]envoyx.Ref) {
if c.Config.Privacy.SensitivityLevelID == 0 {
return
}
refs = map[string]envoyx.Ref{
"Config.Privacy.SensitivityLevelID": {
ResourceType: types.DalSensitivityLevelResourceType,
Identifiers: envoyx.MakeIdentifiers(c.Config.Privacy.SensitivityLevelID),
},
}
return
}
func (d StoreDecoder) roleSliceToRefs(refs map[string]envoyx.Ref, k string, rr []string) {
for i, r := range rr {
refs[fmt.Sprintf("%s.%d.RoleID", k, i)] = envoyx.Ref{
ResourceType: types.RoleResourceType,
Identifiers: envoyx.MakeIdentifiers(r),
}
}
}

View File

@ -1,8 +1,23 @@
package envoy
import "github.com/cortezaproject/corteza/server/system/types"
import (
"context"
"time"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/store"
"github.com/cortezaproject/corteza/server/system/types"
)
func (e StoreEncoder) prepare(ctx context.Context, p envoyx.EncodeParams, s store.Storer, rt string, nn envoyx.NodeSet) (err error) {
return
}
func (e StoreEncoder) setApplicationDefaults(res *types.Application) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -11,6 +26,10 @@ func (e StoreEncoder) validateApplication(res *types.Application) (err error) {
}
func (e StoreEncoder) setApigwRouteDefaults(res *types.ApigwRoute) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -19,6 +38,10 @@ func (e StoreEncoder) validateApigwRoute(res *types.ApigwRoute) (err error) {
}
func (e StoreEncoder) setApigwFilterDefaults(res *types.ApigwFilter) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -27,6 +50,10 @@ func (e StoreEncoder) validateApigwFilter(res *types.ApigwFilter) (err error) {
}
func (e StoreEncoder) setAuthClientDefaults(res *types.AuthClient) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -35,6 +62,10 @@ func (e StoreEncoder) validateAuthClient(res *types.AuthClient) (err error) {
}
func (e StoreEncoder) setQueueDefaults(res *types.Queue) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -43,6 +74,10 @@ func (e StoreEncoder) validateQueue(res *types.Queue) (err error) {
}
func (e StoreEncoder) setReportDefaults(res *types.Report) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -51,6 +86,10 @@ func (e StoreEncoder) validateReport(res *types.Report) (err error) {
}
func (e StoreEncoder) setRoleDefaults(res *types.Role) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -59,6 +98,10 @@ func (e StoreEncoder) validateRole(res *types.Role) (err error) {
}
func (e StoreEncoder) setTemplateDefaults(res *types.Template) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -67,6 +110,10 @@ func (e StoreEncoder) validateTemplate(res *types.Template) (err error) {
}
func (e StoreEncoder) setUserDefaults(res *types.User) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -75,6 +122,10 @@ func (e StoreEncoder) validateUser(res *types.User) (err error) {
}
func (e StoreEncoder) setDalConnectionDefaults(res *types.DalConnection) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}
@ -83,6 +134,10 @@ func (e StoreEncoder) validateDalConnection(res *types.DalConnection) (err error
}
func (e StoreEncoder) setDalSensitivityLevelDefaults(res *types.DalSensitivityLevel) (err error) {
if res.CreatedAt.IsZero() {
res.CreatedAt = time.Now()
}
return
}

View File

@ -298,6 +298,7 @@ func (d *auxYamlDoc) unmarshalApplicationNode(dctx documentContext, n *yaml.Node
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -348,6 +349,8 @@ func (d *auxYamlDoc) unmarshalApplicationNode(dctx documentContext, n *yaml.Node
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -422,6 +425,8 @@ func (d *auxYamlDoc) unmarshalApplicationNode(dctx documentContext, n *yaml.Node
ResourceType: types.ApplicationResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -488,6 +493,22 @@ func (d *auxYamlDoc) unmarshalApigwRouteMap(dctx documentContext, n *yaml.Node)
return
}
// unmarshalExtendedFiltersSeq unmarshals Filters when provided as a sequence node
func (d *auxYamlDoc) unmarshalExtendedFiltersSeq(dctx documentContext, n *yaml.Node) (out envoyx.NodeSet, err error) {
var aux envoyx.NodeSet
err = y7s.EachSeq(n, func(n *yaml.Node) error {
aux, err = d.unmarshalFiltersExtendedNode(dctx, n)
if err != nil {
return err
}
out = append(out, aux...)
return nil
})
return
}
// unmarshalApigwRouteNode is a cookie-cutter function to unmarshal
// the yaml node into the corresponding Corteza type & Node
func (d *auxYamlDoc) unmarshalApigwRouteNode(dctx documentContext, n *yaml.Node, meta ...*yaml.Node) (out envoyx.NodeSet, err error) {
@ -519,6 +540,7 @@ func (d *auxYamlDoc) unmarshalApigwRouteNode(dctx documentContext, n *yaml.Node,
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -608,6 +630,8 @@ func (d *auxYamlDoc) unmarshalApigwRouteNode(dctx documentContext, n *yaml.Node,
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -636,6 +660,14 @@ func (d *auxYamlDoc) unmarshalApigwRouteNode(dctx documentContext, n *yaml.Node,
switch strings.ToLower(k.Value) {
case "filters":
if y7s.IsSeq(n) {
nestedNodes, err = d.unmarshalExtendedFiltersSeq(dctx, n)
if err != nil {
return err
}
}
break
}
// Iterate nested nodes and update their reference to the current resource
@ -682,6 +714,8 @@ func (d *auxYamlDoc) unmarshalApigwRouteNode(dctx documentContext, n *yaml.Node,
ResourceType: types.ApigwRouteResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -760,6 +794,7 @@ func (d *auxYamlDoc) unmarshalApigwFilterNode(dctx documentContext, n *yaml.Node
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
)
_ = auxOut
_ = refs
@ -832,6 +867,8 @@ func (d *auxYamlDoc) unmarshalApigwFilterNode(dctx documentContext, n *yaml.Node
break
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -906,6 +943,8 @@ func (d *auxYamlDoc) unmarshalApigwFilterNode(dctx documentContext, n *yaml.Node
ResourceType: types.ApigwFilterResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Put it all together...
@ -985,6 +1024,7 @@ func (d *auxYamlDoc) unmarshalAuthClientNode(dctx documentContext, n *yaml.Node,
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -1055,6 +1095,25 @@ func (d *auxYamlDoc) unmarshalAuthClientNode(dctx documentContext, n *yaml.Node,
break
case "security":
// Handle custom node decoder
//
// The decoder may update the passed resource with arbitrary values
// as well as provide additional references and identifiers for the node.
var (
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = d.unmarshalAuthClientSecurityNode(r, n)
if err != nil {
return err
}
refs = envoyx.MergeRefs(refs, auxRefs)
ii = ii.Merge(auxIdents)
break
case "updatedby":
// Handle references
err = y7s.DecodeScalar(n, "updatedBy", &auxNodeValue)
@ -1084,6 +1143,8 @@ func (d *auxYamlDoc) unmarshalAuthClientNode(dctx documentContext, n *yaml.Node,
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -1169,6 +1230,8 @@ func (d *auxYamlDoc) unmarshalAuthClientNode(dctx documentContext, n *yaml.Node,
References: refs,
Scope: scope,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -1266,6 +1329,7 @@ func (d *auxYamlDoc) unmarshalQueueNode(dctx documentContext, n *yaml.Node, meta
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -1352,6 +1416,8 @@ func (d *auxYamlDoc) unmarshalQueueNode(dctx documentContext, n *yaml.Node, meta
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -1437,6 +1503,8 @@ func (d *auxYamlDoc) unmarshalQueueNode(dctx documentContext, n *yaml.Node, meta
References: refs,
Scope: scope,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -1534,6 +1602,7 @@ func (d *auxYamlDoc) unmarshalReportNode(dctx documentContext, n *yaml.Node, met
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -1633,6 +1702,8 @@ func (d *auxYamlDoc) unmarshalReportNode(dctx documentContext, n *yaml.Node, met
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -1707,6 +1778,8 @@ func (d *auxYamlDoc) unmarshalReportNode(dctx documentContext, n *yaml.Node, met
ResourceType: types.ReportResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -1804,6 +1877,7 @@ func (d *auxYamlDoc) unmarshalRoleNode(dctx documentContext, n *yaml.Node, meta
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -1851,6 +1925,8 @@ func (d *auxYamlDoc) unmarshalRoleNode(dctx documentContext, n *yaml.Node, meta
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -1925,6 +2001,8 @@ func (d *auxYamlDoc) unmarshalRoleNode(dctx documentContext, n *yaml.Node, meta
ResourceType: types.RoleResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -2022,6 +2100,7 @@ func (d *auxYamlDoc) unmarshalTemplateNode(dctx documentContext, n *yaml.Node, m
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -2082,6 +2161,8 @@ func (d *auxYamlDoc) unmarshalTemplateNode(dctx documentContext, n *yaml.Node, m
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -2156,6 +2237,8 @@ func (d *auxYamlDoc) unmarshalTemplateNode(dctx documentContext, n *yaml.Node, m
ResourceType: types.TemplateResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -2253,6 +2336,7 @@ func (d *auxYamlDoc) unmarshalUserNode(dctx documentContext, n *yaml.Node, meta
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -2284,6 +2368,25 @@ func (d *auxYamlDoc) unmarshalUserNode(dctx documentContext, n *yaml.Node, meta
break
case "roles":
// Handle custom node decoder
//
// The decoder may update the passed resource with arbitrary values
// as well as provide additional references and identifiers for the node.
var (
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = d.unmarshalUserRolesNode(r, n)
if err != nil {
return err
}
refs = envoyx.MergeRefs(refs, auxRefs)
ii = ii.Merge(auxIdents)
break
// Handle RBAC rules
case "allow":
auxOut, err = unmarshalAllowNode(n)
@ -2300,6 +2403,8 @@ func (d *auxYamlDoc) unmarshalUserNode(dctx documentContext, n *yaml.Node, meta
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -2374,6 +2479,8 @@ func (d *auxYamlDoc) unmarshalUserNode(dctx documentContext, n *yaml.Node, meta
ResourceType: types.UserResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -2471,6 +2578,7 @@ func (d *auxYamlDoc) unmarshalDalConnectionNode(dctx documentContext, n *yaml.No
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
rbacNodes envoyx.NodeSet
)
_ = auxOut
@ -2557,6 +2665,8 @@ func (d *auxYamlDoc) unmarshalDalConnectionNode(dctx documentContext, n *yaml.No
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -2631,6 +2741,8 @@ func (d *auxYamlDoc) unmarshalDalConnectionNode(dctx documentContext, n *yaml.No
ResourceType: types.DalConnectionResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
@ -2728,6 +2840,7 @@ func (d *auxYamlDoc) unmarshalDalSensitivityLevelNode(dctx documentContext, n *y
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
envoyConfig envoyx.NodeConfig
)
_ = auxOut
_ = refs
@ -2797,6 +2910,8 @@ func (d *auxYamlDoc) unmarshalDalSensitivityLevelNode(dctx documentContext, n *y
break
case "(envoy)":
envoyConfig = d.decodeEnvoyConfig(n)
}
return nil
@ -2871,6 +2986,8 @@ func (d *auxYamlDoc) unmarshalDalSensitivityLevelNode(dctx documentContext, n *y
ResourceType: types.DalSensitivityLevelResourceType,
Identifiers: ii,
References: refs,
Config: envoyConfig,
}
// Put it all together...
@ -2996,6 +3113,25 @@ func unmarshalLocaleNode(n *yaml.Node) (out envoyx.NodeSet, err error) {
})
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Envoy config unmarshal logic
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d *auxYamlDoc) decodeEnvoyConfig(n *yaml.Node) (out envoyx.NodeConfig) {
y7s.EachMap(n, func(k, v *yaml.Node) (err error) {
switch strings.ToLower(k.Value) {
case "skipif", "skip":
return y7s.DecodeScalar(v, "decode skip if", &out.SkipIf)
case "onexisting", "mergealg":
out.MergeAlg = envoyx.CastMergeAlg(v.Value)
}
return nil
})
return
}
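// Illustrative only (not part of this change): with the helper above, a
// resource entry in a YAML source may carry an "(envoy)" block shaped like
//
//	(envoy):
//	  skipIf: <expression>      # stored into NodeConfig.SkipIf
//	  onExisting: <algorithm>   # passed through envoyx.CastMergeAlg into NodeConfig.MergeAlg
//
// "skip" and "mergeAlg" are accepted as alternative spellings of the two keys;
// the expression syntax and the accepted algorithm names are defined by envoyx
// and are not spelled out here.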
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //

View File

@ -1,10 +1,84 @@
package envoy
import (
"fmt"
"strings"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/pkg/y7s"
"github.com/cortezaproject/corteza/server/system/types"
"gopkg.in/yaml.v3"
)
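// unmarshalYAML acts as an extension point for document keys the generated
// decoder does not handle itself; for this component it currently yields no
// additional nodes.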
func (d *auxYamlDoc) unmarshalYAML(k string, n *yaml.Node) (out envoyx.NodeSet, err error) {
return
}
func (d *auxYamlDoc) unmarshalFiltersExtendedNode(dctx documentContext, n *yaml.Node, meta ...*yaml.Node) (out envoyx.NodeSet, err error) {
return d.unmarshalApigwFilterNode(dctx, n, meta...)
}
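// unmarshalUserRolesNode decodes the "roles" sequence of a user entry into
// Roles.N references to role resources, letting role membership be resolved
// by identifier during import.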
func (d *auxYamlDoc) unmarshalUserRolesNode(r *types.User, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
refs = make(map[string]envoyx.Ref, len(n.Content))
i := 0
err = y7s.EachSeq(n, func(n *yaml.Node) error {
refs[fmt.Sprintf("Roles.%d", i)] = envoyx.Ref{
ResourceType: types.RoleResourceType,
Identifiers: envoyx.MakeIdentifiers(n.Value),
}
i++
return nil
})
return
}
func (d *auxYamlDoc) unmarshalAuthClientSecurityNode(r *types.AuthClient, n *yaml.Node) (refs map[string]envoyx.Ref, idents envoyx.Identifiers, err error) {
refs = make(map[string]envoyx.Ref)
err = y7s.EachMap(n, func(k, v *yaml.Node) (err error) {
switch strings.ToLower(k.Value) {
case "impersonateuser":
var av string
err = y7s.DecodeScalar(v, "Impersonate user", &av)
refs["Security.ImpersonateUser"] = envoyx.Ref{
ResourceType: types.UserResourceType,
Identifiers: envoyx.MakeIdentifiers(av),
}
break
case "permittedroles":
refs = envoyx.MergeRefs(refs, roleSliceToRefs("Security.PermittedRoles", v))
break
case "prohibitedroles":
refs = envoyx.MergeRefs(refs, roleSliceToRefs("Security.ProhibitedRoles", v))
break
case "forcedroles":
refs = envoyx.MergeRefs(refs, roleSliceToRefs("Security.ForcedRoles", v))
break
}
return err
})
return
}
func roleSliceToRefs(k string, n *yaml.Node) (refs map[string]envoyx.Ref) {
refs = make(map[string]envoyx.Ref, len(n.Content))
i := 0
y7s.EachSeq(n, func(n *yaml.Node) error {
refs[fmt.Sprintf("%s.%d", k, i)] = envoyx.Ref{
ResourceType: types.RoleResourceType,
Identifiers: envoyx.MakeIdentifiers(n.Value),
}
i++
return nil
})
return
}
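// Illustrative only: the two helpers above turn an auth client's "security"
// block, e.g.
//
//	security:
//	  impersonateUser: <user identifier>
//	  permittedRoles: [ <role>, ... ]
//	  prohibitedRoles: [ <role>, ... ]
//	  forcedRoles: [ <role>, ... ]
//
// into references keyed Security.ImpersonateUser and Security.PermittedRoles.N
// (and likewise for the other two role lists), where each identifier may be
// anything envoyx.MakeIdentifiers accepts.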

View File

@ -316,6 +316,17 @@ func (e YamlEncoder) encodeApigwRoute(ctx context.Context, p envoyx.EncodeParams
var aux *yaml.Node
_ = aux
aux, err = e.encodeApigwFilters(ctx, p, tt.ChildrenForResourceType(node, types.ApigwFilterResourceType), tt)
if err != nil {
return
}
out, err = y7s.AddMap(out,
"filters", aux,
)
if err != nil {
return
}
return
}
@ -454,6 +465,11 @@ func (e YamlEncoder) encodeAuthClient(ctx context.Context, p envoyx.EncodeParams
return
}
auxSecurity, err := e.encodeAuthClientSecurityC(ctx, p, tt, node, res, res.Security)
if err != nil {
return
}
auxUpdatedAt, err := e.encodeTimestampNil(p, res.UpdatedAt)
if err != nil {
return
@ -481,7 +497,7 @@ func (e YamlEncoder) encodeAuthClient(ctx context.Context, p envoyx.EncodeParams
"redirectURI", res.RedirectURI,
"scope", res.Scope,
"secret", res.Secret,
"security", res.Security,
"security", auxSecurity,
"trusted", res.Trusted,
"updatedAt", auxUpdatedAt,
"updatedBy", auxUpdatedBy,
@ -853,6 +869,7 @@ func (e YamlEncoder) encodeUser(ctx context.Context, p envoyx.EncodeParams, node
"kind", res.Kind,
"meta", res.Meta,
"name", res.Name,
"roles", res.Roles,
"suspendedAt", auxSuspendedAt,
"updatedAt", auxUpdatedAt,
"username", res.Username,

View File

@ -0,0 +1,64 @@
package envoy
import (
"context"
"fmt"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/pkg/y7s"
"github.com/cortezaproject/corteza/server/system/types"
"gopkg.in/yaml.v3"
)
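// encodeAuthClientSecurityC builds the YAML "security" mapping for an auth
// client, resolving the impersonated user and the three role lists through
// their references so they are written out as identifiers.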
func (e YamlEncoder) encodeAuthClientSecurityC(ctx context.Context, p envoyx.EncodeParams, tt envoyx.Traverser, n *envoyx.Node, ac *types.AuthClient, sec *types.AuthClientSecurity) (_ any, err error) {
sqPermittedRoles, err := e.encodeRoleSlice(n, tt, "Security.PermittedRoles", sec.PermittedRoles)
if err != nil {
return
}
sqProhibitedRoles, err := e.encodeRoleSlice(n, tt, "Security.ProhibitedRoles", sec.ProhibitedRoles)
if err != nil {
return
}
sqForcedRoles, err := e.encodeRoleSlice(n, tt, "Security.ForcedRoles", sec.ForcedRoles)
if err != nil {
return
}
var impersonateUser string
if ref, ok := n.References["Security.ImpersonateUser"]; ok {
node := tt.ParentForRef(n, ref)
if node == nil {
err = fmt.Errorf("node not found @todo error")
return
}
impersonateUser = node.Identifiers.FriendlyIdentifier()
}
return y7s.MakeMap(
"impersonateUser", impersonateUser,
"permittedRoles", sqPermittedRoles,
"prohibitedRoles", sqProhibitedRoles,
"forcedRoles", sqForcedRoles,
)
}
func (e YamlEncoder) encodeRoleSlice(n *envoyx.Node, tt envoyx.Traverser, k string, rr []string) (out *yaml.Node, err error) {
sq, _ := y7s.MakeSeq()
for i := range rr {
node := tt.ParentForRef(n, n.References[fmt.Sprintf("%s.%d.RoleID", k, i)])
if node == nil {
err = fmt.Errorf("node not found @todo error")
return
}
sq, err = y7s.AddSeq(sq, node.Identifiers.FriendlyIdentifier())
if err != nil {
return
}
}
return sq, nil
}
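// encodeRoleSlice resolves each <k>.N.RoleID reference (the key shape the
// store decoder's roleSliceToRefs produces) to its parent role node and
// appends that node's friendly identifier to the emitted sequence.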

View File

@ -24,6 +24,17 @@ user: {
ignoreCase: true
dal: {}
}
roles: {
goType: "[]uint64",
store: false
omitSetter: true
omitGetter: true
envoy: {
yaml: {
customDecoder: true
}
}
}
name: {
sortable: true
dal: {}