
Add codegen templates for Envoy (un)marshal

Tomaž Jerman 2023-02-19 11:00:03 +01:00
parent acc256badf
commit 735cb155f5
7 changed files with 1701 additions and 0 deletions


@@ -0,0 +1,271 @@
package {{ .package }}
{{ template "gocode/header-gentext.tpl" }}
import (
"context"
"fmt"
"github.com/cortezaproject/corteza/server/pkg/dal"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/store"
{{- range .imports }}
"{{ . }}"
{{- end }}
)
type (
// StoreDecoder is responsible for fetching already stored Corteza resources
// which are then managed by envoy and imported via an encoder.
StoreDecoder struct{}
)
// Decode returns a set of envoy nodes based on the provided params
//
// StoreDecoder expects the DecodeParam of `storer` and `dal` which conform
// to the store.Storer and dal.FullService interfaces.
func (d StoreDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
var (
s store.Storer
dl dal.FullService
)
// @todo we can optionally not require them based on what we're doing
if auxS, ok := p.Params["storer"]; ok {
s = auxS.(store.Storer)
}
if auxDl, ok := p.Params["dal"]; ok {
dl = auxDl.(dal.FullService)
}
return d.decode(ctx, s, dl, p)
}
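// A minimal usage sketch (hypothetical caller; the filter contents, the
// `s`/`dl` services, and the exact param map type are assumptions):
//
//	nodes, err := StoreDecoder{}.Decode(ctx, envoyx.DecodeParams{
//		Type:   envoyx.DecodeTypeStore,
//		Params: map[string]any{"storer": s, "dal": dl},
//		Filter: map[string]envoyx.ResourceFilter{
//			types.NamespaceResourceType: {},
//		},
//	})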
func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullService, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Transform passed filters into an ordered structure
type (
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
wrappedFilters := make([]filterWrap, 0, len(p.Filter))
for rt, f := range p.Filter {
wrappedFilters = append(wrappedFilters, filterWrap{rt: rt, f: f})
}
// Get all requested scopes
scopedNodes := make(envoyx.NodeSet, len(p.Filter))
for i, a := range wrappedFilters {
if a.f.Scope.ResourceType == "" {
continue
}
// For now the scope can only point to a namespace, so this will do
var nn envoyx.NodeSet
nn, err = d.decodeNamespace(ctx, s, dl, d.identToNamespaceFilter(a.f.Scope.Identifiers))
if err != nil {
return
}
if len(nn) > 1 {
err = fmt.Errorf("ambiguous scope %v", a.f.Scope)
return
}
if len(nn) == 0 {
err = fmt.Errorf("invalid scope: resource not found %v", a.f)
return
}
scopedNodes[i] = nn[0]
}
// Get all requested references
//
// Keep an index for the Node and one for the reference to make our
// lives easier.
refNodes := make([]map[string]*envoyx.Node, len(p.Filter))
refRefs := make([]map[string]envoyx.Ref, len(p.Filter))
for i, a := range wrappedFilters {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return nil, err
}
if len(aux) == 0 {
return nil, fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return nil, fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
refNodes[i] = auxr
refRefs[i] = auxa
}
var aux envoyx.NodeSet
for i, wf := range wrappedFilters {
switch wf.rt {
{{- range .resources -}}
{{- if or .envoy.omit (not .envoy.use)}}{{continue}}{{ end -}}
case types.{{.expIdent}}ResourceType:
aux, err = d.decode{{.expIdent}}(ctx, s, dl, d.make{{.expIdent}}Filter(scopedNodes[i], refNodes[i], wf.f))
if err != nil {
return
}
for _, a := range aux {
a.Identifiers = a.Identifiers.Merge(wf.f.Identifiers)
a.References = envoyx.MergeRefs(a.References, refRefs[i])
}
out = append(out, aux...)
{{ end -}}
}
}
return
}
{{- range .resources }}
{{- if or .envoy.omit (not .envoy.use)}}
{{continue}}
{{ end -}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Functions for resource {{.ident}}
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d StoreDecoder) decode{{.expIdent}}(ctx context.Context, s store.Storer, dl dal.FullService, f types.{{.expIdent}}Filter) (out envoyx.NodeSet, err error) {
// @todo this might need to be improved.
// Currently, no resource is vast enough to pose a problem.
rr, _, err := store.Search{{.store.expIdentPlural}}(ctx, s, f)
if err != nil {
return
}
for _, r := range rr {
// Identifiers
ii := envoyx.MakeIdentifiers(
{{- range .model.attributes -}}
{{- if not .envoy.identifier -}}
{{continue}}
{{ end }}
r.{{.expIdent}},
{{- end }}
)
refs := map[string]envoyx.Ref{
{{- range .model.attributes -}}
{{- if eq .dal.type "Ref" }}
// Handle references
"{{ .expIdent }}": envoyx.Ref{
ResourceType: "{{ .dal.refModelResType }}",
Identifiers: envoyx.MakeIdentifiers(r.{{.expIdent}}),
},
{{- end }}
{{- end }}
}
{{ if .envoy.store.extendedRefDecoder }}
refs = envoyx.MergeRefs(refs, d.decode{{.expIdent}}Refs(r))
{{ end }}
var scope envoyx.Scope
{{if and .envoy.scoped .parents}}
scope = envoyx.Scope{
ResourceType: refs["{{(index .parents 0).refField}}"].ResourceType,
Identifiers: refs["{{(index .parents 0).refField}}"].Identifiers,
}
for k, ref := range refs {
ref.Scope = scope
refs[k] = ref
}
{{end}}
{{if and .envoy.scoped (not .parents)}}
scope = envoyx.Scope{
ResourceType: types.{{ .expIdent }}ResourceType,
Identifiers: ii,
}
{{end}}
out = append(out, &envoyx.Node{
Resource: r,
ResourceType: types.{{.expIdent}}ResourceType,
Identifiers: ii,
References: refs,
Scope: scope,
})
}
{{ if .envoy.store.extendedDecoder -}}
aux, err := d.extended{{.expIdent}}Decoder(ctx, s, dl, f, out)
if err != nil {
return
}
out = append(out, aux...)
{{- end }}
return
}
{{ if not .envoy.store.customFilterBuilder }}
func (d StoreDecoder) make{{.expIdent}}Filter(scope *envoyx.Node, refs map[string]*envoyx.Node, auxf envoyx.ResourceFilter) (out types.{{.expIdent}}Filter) {
out.Limit = auxf.Limit
ids, hh := auxf.Identifiers.Idents()
_ = ids
_ = hh
out.{{.expIdent}}ID = ids
{{ if .envoy.store.handleField }}
if len(hh) > 0 {
out.{{ .envoy.store.handleField }} = hh[0]
}
{{ end }}
// Refs
var (
ar *envoyx.Node
ok bool
)
_ = ar
_ = ok
{{ range .model.attributes }}
{{ if eq .dal.type "Ref" }}
ar, ok = refs["{{ .expIdent }}"]
if ok {
{{ if .envoy.store.filterRefField -}}
out.{{ .envoy.store.filterRefField }} = ar.Resource.GetID()
{{- else -}}
out.{{ .expIdent }} = ar.Resource.GetID()
{{- end }}
}
{{ end }}
{{ end }}
{{- if .envoy.store.extendedFilterBuilder }}
out = d.extend{{.expIdent}}Filter(scope, refs, auxf, out)
{{ end -}}
return
}
{{ else }}
// Resource should define a custom filter builder
{{ end }}
{{end}}


@@ -0,0 +1,331 @@
package {{ .package }}
{{ template "gocode/header-gentext.tpl" }}
import (
"context"
"fmt"
"strconv"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/pkg/id"
"github.com/cortezaproject/corteza/server/store"
{{- range .imports }}
"{{ . }}"
{{- end }}
)
type (
// StoreEncoder is responsible for encoding Corteza resources into the
// database via the Storer or the DAL interface
//
// @todo consider having a different encoder for the DAL resources
StoreEncoder struct{}
)
{{ $rootRes := .resources }}
// Prepare performs some initial processing on the resource before it can be encoded
//
// Preparation runs validation, default value initialization, matching with
// already existing instances, ...
//
// The prepare function receives a set of nodes grouped by the resource type.
// This enables some batching optimization and simplifications when it comes to
// matching with existing resources.
//
// Prepare does not receive any placeholder nodes which are used solely
// for dependency resolution.
func (e StoreEncoder) Prepare(ctx context.Context, p envoyx.EncodeParams, rt string, nn envoyx.NodeSet) (err error) {
s, err := e.grabStorer(p)
if err != nil {
return
}
switch rt {
{{- range .resources }}
{{- if or .envoy.omit (not .envoy.use)}}
{{continue}}
{{end -}}
case types.{{.expIdent}}ResourceType:
return e.prepare{{.expIdent}}(ctx, p, s, nn)
{{ end -}}
}
return
}
// Encode encodes the given Corteza resources into the primary store
//
// Encoding should not do any additional processing apart from matching with
// dependencies and runtime validation
//
// The Encode function is called for every resource type where the resource
// appears at the root of the dependency tree.
// All of the root-level resources for that resource type are passed into the function.
// The encoding function must traverse the branches to encode all of the dependencies.
//
// This is done to simplify how resources are encoded into YAML
// (and other documents) as well as to simplify batching.
//
// Encode does not receive any placeholder nodes which are used solely
// for dependency resolution.
func (e StoreEncoder) Encode(ctx context.Context, p envoyx.EncodeParams, rt string, nodes envoyx.NodeSet, tree envoyx.Traverser) (err error) {
s, err := e.grabStorer(p)
if err != nil {
return
}
switch rt {
{{- range .resources }}
{{- if or .envoy.omit (not .envoy.use) -}}
{{continue}}
{{end}}
case types.{{.expIdent}}ResourceType:
return e.encode{{.expIdent}}s(ctx, p, s, nodes, tree)
{{ end -}}
}
return
}
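// A minimal usage sketch (hypothetical caller; `s`, `rootNodes` and `tree` are
// assumed to be prepared elsewhere, and the resource type is illustrative):
//
//	e := StoreEncoder{}
//	p := envoyx.EncodeParams{Params: map[string]any{"storer": s}}
//	if err = e.Prepare(ctx, p, types.NamespaceResourceType, rootNodes); err != nil {
//		return
//	}
//	err = e.Encode(ctx, p, types.NamespaceResourceType, rootNodes, tree)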
{{- range .resources }}
{{- if or .envoy.omit (not .envoy.use)}}
{{continue}}
{{end}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Functions for resource {{.ident}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// prepare{{.expIdent}} prepares the resources of the given type for encoding
func (e StoreEncoder) prepare{{.expIdent}}(ctx context.Context, p envoyx.EncodeParams, s store.Storer, nn envoyx.NodeSet) (err error) {
// Grab an index of already existing resources of this type
// @note since these resources should be fairly low-volume and short-lived
// (and because we batch by resource type), fetching them all
// into memory shouldn't hurt too much.
// @todo do some benchmarks and potentially implement some smarter check such as
// a bloom filter or something similar.
// Initializing the index here (and using a hashmap) so it doesn't escape to the heap
existing := make(map[int]types.{{.expIdent}}, len(nn))
err = e.matchup{{.expIdent}}s(ctx, s, existing, nn)
if err != nil {
return
}
for i, n := range nn {
if n.Resource == nil {
panic("unexpected state: cannot call prepare{{.expIdent}} with nodes without a defined Resource")
}
res, ok := n.Resource.(*types.{{.expIdent}})
if !ok {
panic("unexpected resource type: node expecting type of {{.ident}}")
}
existing, hasExisting := existing[i]
if hasExisting {
// On existing, we don't need to re-do identifiers and references; simply
// changing up the internal resource is enough.
//
// In the future, we can pass down the tree and re-do the deps like that
switch p.Config.OnExisting {
case envoyx.OnConflictPanic:
err = fmt.Errorf("resource already exists")
return
case envoyx.OnConflictReplace:
// Replace; simple ID change should do the trick
res.ID = existing.ID
case envoyx.OnConflictSkip:
// Replace the node's resource with the fetched one
res = &existing
// @todo merging
}
} else {
// @todo this is actually a bottleneck: as per the sonyflake docs, it can
// generate at most 2**8 (256) IDs per 10ms in a single thread.
// How can we improve this?
res.ID = id.Next()
}
// We can skip validation/defaults when the resource is overwritten by
// the one already stored (the panic case errors out anyway) since it
// should already be ok.
if !hasExisting || p.Config.OnExisting != envoyx.OnConflictSkip {
err = e.set{{.expIdent}}Defaults(res)
if err != nil {
return err
}
err = e.validate{{.expIdent}}(res)
if err != nil {
return err
}
}
n.Resource = res
}
return
}
// encode{{.expIdent}}s encodes a set of resources into the database
func (e StoreEncoder) encode{{.expIdent}}s(ctx context.Context, p envoyx.EncodeParams, s store.Storer, nn envoyx.NodeSet, tree envoyx.Traverser) (err error) {
for _, n := range nn {
err = e.encode{{.expIdent}}(ctx, p, s, n, tree)
if err != nil {
return
}
}
return
}
// encode{{.expIdent}} encodes the resource into the database
func (e StoreEncoder) encode{{.expIdent}}(ctx context.Context, p envoyx.EncodeParams, s store.Storer, n *envoyx.Node, tree envoyx.Traverser) (err error) {
// Grab dependency references
var auxID uint64
for fieldLabel, ref := range n.References {
rn := tree.ParentForRef(n, ref)
if rn == nil {
err = fmt.Errorf("missing node for ref %v", ref)
return
}
auxID = rn.Resource.GetID()
if auxID == 0 {
err = fmt.Errorf("related resource doesn't provide an ID")
return
}
err = n.Resource.SetValue(fieldLabel, 0, auxID)
if err != nil {
return
}
}
// Flush to the DB
err = store.Upsert{{.store.expIdent}}(ctx, s, n.Resource.(*types.{{.expIdent}}))
if err != nil {
return
}
{{ $a := . }}
// Handle resources nested under it
//
// @todo how can we remove the OmitPlaceholderNodes call the same way we did for
// the root function calls?
{{/*
@note this setup will not duplicate encode calls since we only take the
most specific parent resource.
*/}}
for rt, nn := range envoyx.NodesByResourceType(tree.Children(n)...) {
nn = envoyx.OmitPlaceholderNodes(nn...)
switch rt {
{{- range $cmp := $rootRes }}
{{ if or ($cmp.envoy.omit) (not $cmp.envoy.use) }}
{{continue}}
{{ end }}
{{ if not $cmp.parents }}
{{continue}}
{{ end }}
{{ $p := index $cmp.parents (sub (len $cmp.parents) 1)}}
{{ if not (eq $p.handle $a.ident) }}
{{continue}}
{{ end }}
case types.{{$cmp.expIdent}}ResourceType:
err = e.encode{{$cmp.expIdent}}s(ctx, p, s, nn, tree)
if err != nil {
return
}
{{- end }}
}
}
return
}
// matchup{{.expIdent}}s returns an index which indicates what resources already exist
func (e StoreEncoder) matchup{{.expIdent}}s(ctx context.Context, s store.Storer, uu map[int]types.{{.expIdent}}, nn envoyx.NodeSet) (err error) {
// @todo might need to do it smarter than this.
// Most resources won't really be that vast so this should be acceptable for now.
aa, _, err := store.Search{{.store.expIdentPlural}}(ctx, s, types.{{.expIdent}}Filter{})
if err != nil {
return
}
idMap := make(map[uint64]*types.{{.expIdent}}, len(aa))
strMap := make(map[string]*types.{{.expIdent}}, len(aa))
for _, a := range aa {
{{ range .model.attributes }}
{{- if not .envoy.identifier -}}
{{continue}}
{{- end -}}
{{- if eq .goType "uint64" -}}
idMap[a.{{.expIdent}}] = a
{{- else -}}
strMap[a.{{.expIdent}}] = a
{{- end}}
{{ end }}
}
var aux *types.{{.expIdent}}
var ok bool
for i, n := range nn {
for _, idf := range n.Identifiers.Slice {
if id, err := strconv.ParseUint(idf, 10, 64); err == nil {
aux, ok = idMap[id]
if ok {
uu[i] = *aux
// When any identifier matches we can end it
break
}
}
aux, ok = strMap[idf]
if ok {
uu[i] = *aux
// When any identifier matches we can end it
break
}
}
}
return
}
{{end}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utility functions
// // // // // // // // // // // // // // // // // // // // // // // // //
func (e *StoreEncoder) grabStorer(p envoyx.EncodeParams) (s store.Storer, err error) {
auxs, ok := p.Params["storer"]
if !ok {
err = fmt.Errorf("storer not defined")
return
}
s, ok = auxs.(store.Storer)
if !ok {
err = fmt.Errorf("invalid storer provided")
return
}
return
}


@@ -0,0 +1,73 @@
package {{ .package }}
{{ template "gocode/header-gentext.tpl" }}
import (
"strings"
{{- range .imports }}
"{{ . }}"
{{- end }}
)
// SplitResourceIdentifier takes an identifier string and splits it into path
// identifiers as defined by the resource
func SplitResourceIdentifier(ref string) (out map[string]Ref) {
out = make(map[string]Ref, 3)
ref = strings.TrimRight(ref, "/")
pp := strings.Split(ref, "/")
rt := pp[0]
pp = pp[1:]
gRef := func(pp []string, i int) string {
if pp[i] == "*" {
return ""
}
return pp[i]
}
switch rt {
{{ range .components }}
{{ $rootCmp := . }}
{{range .resources}}
{{ $a := . }}
case "corteza::{{$rootCmp.ident}}:{{.ident}}":
{{$res := .}}
{{range $i, $p := .parents}}
if gRef(pp, {{$i}}) == "" {
return
}
{{- range $cmp := $rootCmp.resources }}
{{ if or ($cmp.envoy.omit) (not $cmp.envoy.use) }}
{{continue}}
{{ end }}
{{ if eq $p.handle $cmp.ident }}
out["{{$i}}"] = Ref{
ResourceType: "{{$cmp.fqrt}}",
Identifiers: MakeIdentifiers(gRef(pp, {{ $i }})),
}
{{break}}
{{ end }}
{{- end }}
{{end}}
if gRef(pp, {{len .parents}}) == "" {
return
}
out["{{len .parents}}"] = Ref{
ResourceType: "{{.fqrt}}",
Identifiers: MakeIdentifiers(gRef(pp, {{len .parents}})),
}
{{ end }}
{{ end }}
}
return
}
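// For illustration (the identifiers are made up), a compose module reference
//
//	SplitResourceIdentifier("corteza::compose:module/ns1/mod1")
//
// would return out["0"] referencing the namespace identified by "ns1" and
// out["1"] referencing the module identified by "mod1"; a "*" segment stops
// the splitting at that level.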


@@ -0,0 +1,32 @@
package {{ .package }}
{{ template "gocode/header-gentext.tpl" }}
import (
{{- range .imports }}
"{{ . }}"
{{- end }}
)
var (
// needyResources is a list of resources that require a parent resource
//
// This list is primarily used when figuring out what nodes the dep. graph
// should return when traversing.
needyResources = map[string]bool{
{{- range .components -}}
{{- range .resources -}}
{{- if not .parents }}
{{continue}}
{{- end }}
"{{ .fqrt }}": true,
{{- end }}
{{- end }}
}
// superNeedyResources is the second level of filtering in case the first
// pass removes everything
superNeedyResources = map[string]bool{
"corteza::compose:module-field": true,
}
)


@@ -0,0 +1,654 @@
package {{ .package }}
{{ template "gocode/header-gentext.tpl" }}
import (
"strings"
"context"
"io"
"fmt"
systemTypes "github.com/cortezaproject/corteza/server/system/types"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/pkg/rbac"
"github.com/cortezaproject/corteza/server/pkg/y7s"
"golang.org/x/text/language"
"gopkg.in/yaml.v3"
{{- range .imports }}
"{{ . }}"
{{- end }}
)
{{$cmpIdent := .componentIdent}}
type (
// YamlDecoder is responsible for decoding YAML documents into Corteza resources
// which are then managed by envoy and imported via an encoder.
YamlDecoder struct{}
documentContext struct {
references map[string]string
}
auxYamlDoc struct {
nodes envoyx.NodeSet
}
)
// Decode returns a set of envoy nodes based on the provided params
//
// YamlDecoder expects the DecodeParam of `stream` which conforms
// to the io.Reader interface.
func (d YamlDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Get the reader
r, err := d.getReader(ctx, p)
if err != nil {
return
}
// Offload decoding to the aux document
doc := &auxYamlDoc{}
err = yaml.NewDecoder(r).Decode(doc)
if err != nil {
return
}
return doc.nodes, nil
}
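// A minimal usage sketch (hypothetical caller; the file name is illustrative):
//
//	f, err := os.Open("namespace.yaml")
//	if err != nil {
//		return err
//	}
//	defer f.Close()
//	nodes, err := YamlDecoder{}.Decode(ctx, envoyx.DecodeParams{
//		Params: map[string]any{"stream": f},
//	})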
func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
// Get the document context from the root level
dctx, err := d.getDocumentContext(n)
if err != nil {
return
}
var aux envoyx.NodeSet
return y7s.EachMap(n, func(k, v *yaml.Node) error {
kv := strings.ToLower(k.Value)
switch kv {
{{- range .resources -}}
{{- if or .envoy.omit (not .envoy.use) -}}
{{continue}}
{{- end -}}
{{ $identKeys := .envoy.yaml.identKeys }}
case {{ range $i, $l := $identKeys -}}
"{{ $l }}"{{if not (eq $i (sub (len $identKeys) 1))}},{{end}}
{{- end}}:
{{- if .envoy.yaml.supportMappedInput }}
if y7s.IsMapping(v) {
aux, err = d.unmarshal{{.expIdent}}Map(dctx, v)
d.nodes = append(d.nodes, aux...)
return err
}
{{- end }}
if y7s.IsSeq(v) {
aux, err = d.unmarshal{{.expIdent}}Seq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
return err
{{ end }}
// Access control nodes
case "allow":
aux, err = unmarshalAllowNode(v)
d.nodes = append(d.nodes, aux...)
if err != nil {
return err
}
case "deny":
aux, err = unmarshalDenyNode(v)
d.nodes = append(d.nodes, aux...)
if err != nil {
return err
}
// Resource translation nodes
case "locale", "translation", "translations", "i18n":
aux, err = unmarshalLocaleNode(v)
d.nodes = append(d.nodes, aux...)
if err != nil {
return err
}
// Offload to custom handlers
default:
aux, err = d.unmarshalYAML(kv, v)
d.nodes = append(d.nodes, aux...)
if err != nil {
return err
}
}
return nil
})
}
{{ $rootRes := .resources }}
{{- range .resources }}
{{- if or .envoy.omit (not .envoy.use)}}
{{continue}}
{{ end -}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Functions for resource {{.ident}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// unmarshal{{.expIdent}}Seq unmarshals {{.expIdent}} when provided as a sequence node
func (d *auxYamlDoc) unmarshal{{.expIdent}}Seq(dctx documentContext, n *yaml.Node) (out envoyx.NodeSet, err error) {
var aux envoyx.NodeSet
err = y7s.EachSeq(n, func(n *yaml.Node) error {
aux, err = d.unmarshal{{ .expIdent }}Node(dctx, n)
if err != nil {
return err
}
out = append(out, aux...)
return nil
})
return
}
// unmarshal{{.expIdent}}Map unmarshals {{.expIdent}} when provided as a mapping node
//
// When map encoded, the map key is used as a preset identifier.
// The identifier is passed to the node function as a meta node.
{{- if not .envoy.yaml.supportMappedInput }}
// @note this resource does not support map encoding.
// Refer to the corresponding definition files to adjust if needed.
{{ else }}
func (d *auxYamlDoc) unmarshal{{ .expIdent }}Map(dctx documentContext, n *yaml.Node) (out envoyx.NodeSet, err error) {
var aux envoyx.NodeSet
err = y7s.EachMap(n, func(k, n *yaml.Node) error {
aux, err = d.unmarshal{{ .expIdent }}Node(dctx, n, k)
if err != nil {
return err
}
out = append(out, aux...)
return nil
})
return
}
{{ end }}
// unmarshal{{ .expIdent }}Node is a cookie-cutter function to unmarshal
// the yaml node into the corresponding Corteza type & Node
func (d *auxYamlDoc) unmarshal{{ .expIdent }}Node(dctx documentContext, n *yaml.Node, meta ...*yaml.Node) (out envoyx.NodeSet, err error) {
var r *types.{{ .expIdent }}
// @todo we're omitting errors because there will be a bunch due to invalid
// resource field types. This might be a bit unstable as other errors may
// also get ignored.
//
// A potential fix would be to first unmarshal into an any, check the errors,
// and then unmarshal into the resource while omitting errors.
n.Decode(&r)
// Identifiers are determined manually when iterating the yaml node.
// This is to help ensure there are no duplicates and that everything
// is accounted for, especially when working with aliases such as
// user_name instead of userName.
ii := envoyx.Identifiers{}
{{ if .envoy.yaml.supportMappedInput}}
// When a resource supports mapped input, the key is passed as meta and
// needs to be registered as an identifier (since it is one)
if len(meta) > 0 {
y7s.DecodeScalar(meta[0], "{{ .envoy.yaml.mappedField }}", &r.{{ .envoy.yaml.mappedField }})
ii = ii.Add(r.{{ .envoy.yaml.mappedField }})
}
{{ end }}
var (
refs = make(map[string]envoyx.Ref)
auxOut envoyx.NodeSet
nestedNodes envoyx.NodeSet
scope envoyx.Scope
{{- if .rbac }}
rbacNodes envoyx.NodeSet
{{- end }}
)
_ = auxOut
_ = refs
err = y7s.EachMap(n, func(k, n *yaml.Node) error {
var auxNodeValue any
_ = auxNodeValue
switch strings.ToLower(k.Value) {
{{ $resource := . }}
{{/*
Iterate over all model attributes and handle
- attribute aliases
- identifiers
- reference
- custom decoding logic
Generic field decoding is already handled at the top with the generic
node.Decode(...) function call.
*/}}
{{- range $attr := .model.attributes }}
{{/*
We can skip the attributes when...
- there aren't any ident key aliases
- not an identifier
- not a reference
- no custom logic
*/}}
{{
if and
(or (not $attr.envoy.yaml.identKeyAlias) (eq (len $attr.envoy.yaml.identKeyAlias) 0))
(not $attr.envoy.identifier)
(not (eq $attr.dal.type "Ref"))
(not $attr.envoy.yaml.customDecoder)
}}
{{continue}}
{{ end }}
{{ $identKeys := .envoy.yaml.identKeys }}
case {{ range $i, $l := $identKeys -}}
"{{ $l }}"{{if not (eq $i (sub (len $identKeys) 1))}},{{end}}
{{- end}}:
{{- if and $attr.envoy.yaml.identKeyAlias (gt (len $attr.envoy.yaml.identKeyAlias) 0) }}
// Handle field alias
//
// @todo consider adding an is empty check before overwriting
err = y7s.DecodeScalar(n, "{{ $attr.ident }}", &r.{{ $attr.expIdent }})
if err != nil {
return err
}
{{- end }}
{{- if $attr.envoy.identifier }}
// Handle identifiers
err = y7s.DecodeScalar(n, "{{ $attr.ident }}", &auxNodeValue)
if err != nil {
return err
}
ii = ii.Add(auxNodeValue)
{{- end }}
{{- if eq $attr.dal.type "Ref" }}
// Handle references
err = y7s.DecodeScalar(n, "{{ $attr.ident }}", &auxNodeValue)
if err != nil {
return err
}
refs["{{ $attr.expIdent }}"] = envoyx.Ref{
ResourceType: "{{ $attr.dal.refModelResType }}",
Identifiers: envoyx.MakeIdentifiers(auxNodeValue),
}
{{- end }}
{{ if $attr.envoy.yaml.customDecoder }}
// Handle custom node decoder
//
// The decoder may update the passed resource with arbitrary values
// as well as provide additional references and identifiers for the node.
var (
auxRefs map[string]envoyx.Ref
auxIdents envoyx.Identifiers
)
auxRefs, auxIdents, err = unmarshal{{ $resource.expIdent }}{{ $attr.expIdent }}Node(r, n)
if err != nil {
return err
}
refs = envoyx.MergeRefs(refs, auxRefs)
ii = ii.Merge(auxIdents)
{{ end }}
break
{{- end }}
{{ if .rbac }}
// Handle RBAC rules
case "allow":
auxOut, err = unmarshalAllowNode(n)
if err != nil {
return err
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
case "deny":
auxOut, err = unmarshalDenyNode(n)
if err != nil {
return err
}
rbacNodes = append(rbacNodes, auxOut...)
auxOut = nil
{{- end }}
}
return nil
})
if err != nil {
return
}
{{ if eq $cmpIdent "compose" }}
// Handle global namespace reference which can be provided as the doc. context
//
// @todo this is a temporary solution and should be extended when the document
// context needs to be extended.
// Limit this only to the compose resource since that is the only scenario
// the previous implementation supports.
if ref, ok := dctx.references["namespace"]; ok {
refs["NamespaceID"] = envoyx.Ref{
ResourceType: types.NamespaceResourceType,
Identifiers: envoyx.MakeIdentifiers(ref),
}
}
{{ end }}
{{- if and .envoy.scoped .parents}}
// Define the scope
//
// This resource is scoped to the first parent (generally the namespace)
// when talking about Compose resources (the only supported scenario at the moment).
scope = envoyx.Scope{
ResourceType: refs["{{(index .parents 0).refField}}"].ResourceType,
Identifiers: refs["{{(index .parents 0).refField}}"].Identifiers,
}
{{end}}
{{if and .envoy.scoped (not .parents)}}
// Define the scope
//
// This resource has no parent resources, so it is the scope root itself
// (generally the namespace -- the only currently supported scenario).
scope = envoyx.Scope{
ResourceType: types.{{ .expIdent }}ResourceType,
Identifiers: ii,
}
{{end}}
// Apply the scope to all of the references of the same type
for k, ref := range refs {
if ref.ResourceType != scope.ResourceType {
continue
}
ref.Scope = scope
refs[k] = ref
}
// Handle any resources that could be inserted under {{.ident}} such as a module inside a namespace
//
// This operation is done in the second pass of the document so we have
// the complete context of the current resource, such as the identifier,
// references, and scope.
err = y7s.EachMap(n, func(k, n *yaml.Node) error {
nestedNodes = nil
switch strings.ToLower(k.Value) {
{{/*
@note each resource can only nest resources from the same component.
Iterate resources of the current component and if this one appears as any
of the parent resources, attempt to unmarshal it.
@todo consider limiting the supported set to only the root parent
(like we do with store encoders) to limit strange cases and reduce
potential problems.
*/}}
{{ $a := . }}
{{- range $cmp := $rootRes }}
{{ if or ($cmp.envoy.omit) (not $cmp.envoy.use) }}
{{continue}}
{{ end }}
{{- range $p := $cmp.parents }}
{{ if not (eq $p.handle $a.ident) }}
{{continue}}
{{ end }}
{{ $identKeys := $cmp.envoy.yaml.identKeys }}
case {{ range $i, $l := $identKeys -}}
"{{ $l }}"{{if not (eq $i (sub (len $identKeys) 1))}},{{end}}
{{- end}}:
if y7s.IsSeq(n) {
nestedNodes, err = d.unmarshal{{$cmp.expIdent}}Seq(dctx, n)
if err != nil {
return err
}
} else {
nestedNodes, err = d.unmarshal{{$cmp.expIdent}}Map(dctx, n)
if err != nil {
return err
}
}
break
{{/* As long as one parent matches we're golden; avoid potential duplicates */}}
{{break}}
{{- end }}
{{- end -}}
}
// Iterate nested nodes and update their reference to the current resource
//
// Any reference to the parent resource from the child resource is overwritten
// to avoid potential user-error edge cases.
for _, a := range nestedNodes {
// @note all nested resources fall under the same component and the same scope.
// Simply assign the same scope to all -- if it shouldn't be scoped
// the parent won't have it (saving CPU ticks :)
a.Scope = scope
if a.References == nil {
a.References = make(map[string]envoyx.Ref)
}
a.References["{{.expIdent}}ID"] = envoyx.Ref{
ResourceType: types.{{ .expIdent }}ResourceType,
Identifiers: ii,
Scope: scope,
}
for f, ref := range refs {
a.References[f] = ref
}
}
auxOut = append(auxOut, nestedNodes...)
return nil
})
if err != nil {
return
}
a := &envoyx.Node{
Resource: r,
ResourceType: types.{{ .expIdent }}ResourceType,
Identifiers: ii,
References: refs,
{{if or .envoy.scoped}}
Scope: scope,
{{end}}
}
{{- if .rbac }}
// Update RBAC resource nodes with references regarding the resource
for _, rn := range rbacNodes {
// Since the rule belongs to the resource, it will have the same
// subset of references as the parent resource.
rn.References = envoyx.MergeRefs(rn.References, a.References)
// The RBAC rule's most specific identifier is the resource itself.
// Using this we can hardcode it to point to the location after the parent resource.
//
// @todo consider using a more descriptive identifier for the position
// such as `index-%d`.
rn.References["{{len .parents}}"] = envoyx.Ref{
ResourceType: a.ResourceType,
Identifiers: a.Identifiers,
Scope: scope,
}
}
{{ end }}
// Put it all together...
out = append(out, a)
out = append(out, auxOut...)
{{- if .rbac }}
out = append(out, rbacNodes...)
{{ end }}
return
}
{{ end }}
// // // // // // // // // // // // // // // // // // // // // // // // //
// RBAC unmarshal logic
// // // // // // // // // // // // // // // // // // // // // // // // //
func unmarshalAllowNode(n *yaml.Node) (out envoyx.NodeSet, err error) {
return unmarshalRBACNode(n, rbac.Allow)
}
func unmarshalDenyNode(n *yaml.Node) (out envoyx.NodeSet, err error) {
return unmarshalRBACNode(n, rbac.Deny)
}
func unmarshalRBACNode(n *yaml.Node, acc rbac.Access) (out envoyx.NodeSet, err error) {
if y7s.IsMapping(n.Content[1]) {
return unmarshalNestedRBACNode(n, acc)
}
return unmarshalFlatRBACNode(n, acc)
}
// unmarshalNestedRBACNode handles RBAC rules when they are nested inside a resource
//
// The edge-case exists since the node doesn't explicitly specify the resource
// it belongs to.
//
// Example:
//
// modules:
// module1:
// name: "module 1"
// fields: ...
// allow:
// role1:
// - read
// - delete
func unmarshalNestedRBACNode(n *yaml.Node, acc rbac.Access) (out envoyx.NodeSet, err error) {
// Handles role
return out, y7s.EachMap(n, func(role, perm *yaml.Node) error {
// Handles operation
return y7s.EachMap(perm, func(res, op *yaml.Node) error {
out = append(out, &envoyx.Node{
Resource: &rbac.Rule{
Resource: res.Value,
Operation: op.Value,
Access: acc,
},
ResourceType: rbac.RuleResourceType,
References: envoyx.MergeRefs(
map[string]envoyx.Ref{"RoleID": {
// Providing resource type as plain text to reduce cross component references
ResourceType: "corteza::system:role",
Identifiers: envoyx.MakeIdentifiers(role.Value),
}},
envoyx.SplitResourceIdentifier(res.Value),
),
})
return nil
})
})
}
// unmarshalFlatRBACNode handles RBAC rules when they are provided on the root level
//
// Example:
//
// allow:
// role1:
// corteza::system/:
// - users.search
// - users.create
func unmarshalFlatRBACNode(n *yaml.Node, acc rbac.Access) (out envoyx.NodeSet, err error) {
return out, y7s.EachMap(n, func(role, op *yaml.Node) error {
out = append(out, &envoyx.Node{
Resource: &rbac.Rule{
Operation: op.Value,
Access: acc,
},
ResourceType: rbac.RuleResourceType,
References: map[string]envoyx.Ref{
"RoleID": {
// Providing resource type as plain text to reduce cross component references
ResourceType: "corteza::system:role",
Identifiers: envoyx.MakeIdentifiers(role.Value),
},
},
})
return nil
})
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// i18n unmarshal logic
// // // // // // // // // // // // // // // // // // // // // // // // //
func unmarshalLocaleNode(n *yaml.Node) (out envoyx.NodeSet, err error) {
return out, y7s.EachMap(n, func(lang, loc *yaml.Node) error {
langTag := systemTypes.Lang{Tag: language.Make(lang.Value)}
return y7s.EachMap(loc, func(res, kv *yaml.Node) error {
return y7s.EachMap(kv, func(k, msg *yaml.Node) error {
out = append(out, &envoyx.Node{
Resource: &systemTypes.ResourceTranslation{
Lang: langTag,
K: k.Value,
Message: msg.Value,
},
// Providing resource type as plain text to reduce cross component references
ResourceType: "corteza::system:resource-translation",
References: envoyx.SplitResourceIdentifier(res.Value),
})
return nil
})
})
})
}
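// An illustrative example of the expected YAML shape (the identifiers and the
// translation key are made up):
//
//	locale:
//	  en:
//	    corteza::compose:module/ns1/mod1:
//	      name: "Module one"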
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d YamlDecoder) getReader(ctx context.Context, p envoyx.DecodeParams) (r io.Reader, err error) {
aux, ok := p.Params["stream"]
if ok {
r, ok = aux.(io.Reader)
if ok {
return
}
}
// @todo consider adding support for managing files from a location
err = fmt.Errorf("YAML decoder expects a stream conforming to io.Reader interface")
return
}
func (d *auxYamlDoc) getDocumentContext(n *yaml.Node) (dctx documentContext, err error) {
dctx = documentContext{
references: make(map[string]string),
}
err = y7s.EachMap(n, func(k, v *yaml.Node) error {
// @todo expand when needed. The previous implementation only supported
// namespaces on the root of the document.
if y7s.IsKind(v, yaml.ScalarNode) {
dctx.references[k.Value] = v.Value
}
return nil
})
return
}


@@ -0,0 +1,246 @@
package {{ .package }}
{{ template "gocode/header-gentext.tpl" }}
import (
"context"
"fmt"
"io"
"time"
"github.com/cortezaproject/corteza/server/pkg/envoyx"
"github.com/cortezaproject/corteza/server/pkg/y7s"
"gopkg.in/yaml.v3"
{{- range .imports }}
"{{ . }}"
{{- end }}
)
type (
// YamlEncoder is responsible for encoding Corteza resources into
// a YAML supported format
YamlEncoder struct{}
)
// Encode encodes the given Corteza resources into some YAML supported format
//
// Encoding should not do any additional processing apart from matching with
// dependencies and runtime validation
//
// Encode does not receive any placeholder nodes which are used solely
// for dependency resolution.
func (e YamlEncoder) Encode(ctx context.Context, p envoyx.EncodeParams, rt string, nodes envoyx.NodeSet, tt envoyx.Traverser) (err error) {
var (
out *yaml.Node
aux *yaml.Node
)
_ = aux
w, err := e.getWriter(p)
if err != nil {
return
}
switch rt {
{{- range .resources }}
{{- if or .envoy.omit (not .envoy.use)}}
{{continue}}
{{ end -}}
case types.{{.expIdent}}ResourceType:
aux, err = e.encode{{.expIdent}}s(ctx, p, nodes, tt)
if err != nil {
return
}
// Root level resources are always encoded as a map
out, err = y7s.AddMap(out, "{{.ident}}", aux)
if err != nil {
return
}
{{ end -}}
}
return yaml.NewEncoder(w).Encode(out)
}
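// A minimal usage sketch (hypothetical caller; `rootNodes` and `tree` are
// assumed to be prepared elsewhere, and the resource type is illustrative):
//
//	var buf bytes.Buffer
//	p := envoyx.EncodeParams{Params: map[string]any{"writer": &buf}}
//	err = YamlEncoder{}.Encode(ctx, p, types.NamespaceResourceType, rootNodes, tree)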
{{ $rootRes := .resources }}
{{- range .resources }}
{{- if or .envoy.omit (not .envoy.use)}}
{{continue}}
{{ end -}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Functions for resource {{.ident}}
// // // // // // // // // // // // // // // // // // // // // // // // //
func (e YamlEncoder) encode{{.expIdent}}s(ctx context.Context, p envoyx.EncodeParams, nodes envoyx.NodeSet, tt envoyx.Traverser) (out *yaml.Node, err error) {
var aux *yaml.Node
for _, n := range nodes {
aux, err = e.encode{{.expIdent}}(ctx, p, n, tt)
if err != nil {
return
}
out, err = y7s.AddSeq(out, aux)
if err != nil {
return
}
}
return
}
// encode{{.expIdent}} focuses on the specific resource invoked by the Encode method
func (e YamlEncoder) encode{{.expIdent}}(ctx context.Context, p envoyx.EncodeParams, node *envoyx.Node, tt envoyx.Traverser) (out *yaml.Node, err error) {
res := node.Resource.(*types.{{.expIdent}})
{{ $res := .expIdent }}
// Pre-compute some map values so we can omit error checking when encoding yaml nodes
{{ range .model.attributes -}}
{{- if eq .dal.type "Timestamp" -}}
{{- if .dal.nullable -}}
aux{{.expIdent}}, err := e.encodeTimestampNil(p, res.{{.expIdent}})
if err != nil {
return
}
{{- else -}}
aux{{.expIdent}}, err := e.encodeTimestamp(p, res.{{.expIdent}})
if err != nil {
return
}
{{- end -}}
{{- else if eq .dal.type "Ref" -}}
aux{{.expIdent}}, err := e.encodeRef(p, res.{{.expIdent}}, "{{.expIdent}}", node, tt)
if err != nil {
return
}
{{- end }}
{{end}}
out, err = y7s.AddMap(out,
{{ range .model.attributes -}}
{{- if .envoy.yaml.customEncoder -}}
"{{.ident}}", e.encode{{$res}}{{.expIdent}}(p, res.{{.expIdent}}),
{{- else if eq .dal.type "Timestamp" -}}
{{- if .dal.nullable -}}
"{{.ident}}", aux{{.expIdent}},
{{- else -}}
"{{.ident}}", aux{{.expIdent}},
{{- end -}}
{{- else if eq .dal.type "Ref" -}}
"{{.ident}}", aux{{.expIdent}},
{{- else -}}
"{{.ident}}", res.{{.expIdent}},
{{- end }}
{{end}}
)
if err != nil {
return
}
// Handle nested resources
var aux *yaml.Node
_ = aux
{{ $a := . }}
{{- range $cmp := $rootRes }}
{{ if or ($cmp.envoy.omit) (not $cmp.envoy.use) }}{{continue}}{{ end }}
{{if not $cmp.parents}}{{continue}}{{end}}
{{/*
Only handle resources where the current resource would appear last
in the list of parents.
Since parents are ordered by _importance_ this removes the danger of
multiple parents encoding the same resource.
*/}}
{{ $p := (index $cmp.parents (sub (len $cmp.parents) 1)) }}
{{ if (eq $p.handle $a.ident) }}
aux, err = e.encode{{$cmp.expIdent}}s(ctx, p, tt.ChildrenForResourceType(node, types.{{$cmp.expIdent}}ResourceType), tt)
if err != nil {
return
}
out, err = y7s.AddMap(out,
"{{$cmp.ident}}", aux,
)
if err != nil {
return
}
{{ end }}
{{- end }}
return
}
{{ end -}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Encoding utils
// // // // // // // // // // // // // // // // // // // // // // // // //
func (e YamlEncoder) encodeTimestamp(p envoyx.EncodeParams, t time.Time) (any, error) {
if t.IsZero() {
return nil, nil
}
tz := p.Config.PreferredTimezone
if tz != "" {
tzL, err := time.LoadLocation(tz)
if err != nil {
return nil, err
}
t = t.In(tzL)
}
ly := p.Config.PreferredTimeLayout
if ly == "" {
ly = time.RFC3339
}
return t.Format(ly), nil
}
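// For illustration (values chosen arbitrarily): with PreferredTimezone set to
// "Europe/Ljubljana" and PreferredTimeLayout left empty, the UTC timestamp
// 2023-02-19T10:00:03Z is encoded as "2023-02-19T11:00:03+01:00" (RFC3339 in
// the requested timezone).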
func (e YamlEncoder) encodeTimestampNil(p envoyx.EncodeParams, t *time.Time) (any, error) {
if t == nil {
return nil, nil
}
// @todo timestamp encoding format
return e.encodeTimestamp(p, *t)
}
func (e YamlEncoder) encodeRef(p envoyx.EncodeParams, id uint64, field string, node *envoyx.Node, tt envoyx.Traverser) (any, error) {
parent := tt.ParentForRef(node, node.References[field])
// @todo should we panic instead?
// for now, gracefully fall back to the ID
if parent == nil {
return id, nil
}
return parent.Identifiers.FriendlyIdentifier(), nil
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utility functions
// // // // // // // // // // // // // // // // // // // // // // // // //
func (e YamlEncoder) getWriter(p envoyx.EncodeParams) (out io.Writer, err error) {
aux, ok := p.Params["writer"]
if ok {
out, ok = aux.(io.Writer)
if ok {
return
}
}
// @todo consider adding support for managing files from a location
err = fmt.Errorf("YAML encoder expects a writer conforming to io.Writer interface")
return
}


@@ -0,0 +1,94 @@
package codegen
import (
"github.com/cortezaproject/corteza/server/app"
"github.com/cortezaproject/corteza/server/codegen/schema"
)
[...schema.#codegen] &
[
for cmp in app.corteza.components {
template: "gocode/envoy/yaml_decode.go.tpl"
output: "\(cmp.ident)/envoy/yaml_decode.gen.go"
payload: {
package: "envoy"
imports: [
"github.com/cortezaproject/corteza/server/\(cmp.ident)/types"
]
componentIdent: cmp.ident
resources: [ for res in cmp.resources { res }]
}
},
for cmp in app.corteza.components {
template: "gocode/envoy/store_decode.go.tpl"
output: "\(cmp.ident)/envoy/store_decode.gen.go"
payload: {
package: "envoy"
imports: [
"github.com/cortezaproject/corteza/server/\(cmp.ident)/types"
]
componentIdent: cmp.ident
resources: [ for res in cmp.resources { res }]
}
},
for cmp in app.corteza.components {
template: "gocode/envoy/store_encode.go.tpl"
output: "\(cmp.ident)/envoy/store_encode.gen.go"
payload: {
package: "envoy"
imports: [
"github.com/cortezaproject/corteza/server/\(cmp.ident)/types"
]
componentIdent: cmp.ident
resources: [ for res in cmp.resources { res }]
}
},
for cmp in app.corteza.components {
template: "gocode/envoy/yaml_encode.go.tpl"
output: "\(cmp.ident)/envoy/yaml_encode.gen.go"
payload: {
package: "envoy"
imports: [
"github.com/cortezaproject/corteza/server/\(cmp.ident)/types"
]
componentIdent: cmp.ident
resources: [ for res in cmp.resources { res }]
}
},
{
template: "gocode/envoy/util_rbac.go.tpl"
output: "pkg/envoyx/util_rbac.gen.go"
payload: {
package: "envoyx"
components: [for cmp in app.corteza.components {
ident: cmp.ident,
resources: cmp.resources
}]
}
},
{
template: "gocode/envoy/utils.go.tpl"
output: "pkg/envoyx/utils.gen.go"
payload: {
package: "envoyx"
components: [for cmp in app.corteza.components {
ident: cmp.ident,
resources: cmp.resources
}]
}
},
]