Tomaž Jerman 2023-03-17 09:20:36 +01:00
parent 2f015a26f6
commit 525df34579
18 changed files with 752 additions and 475 deletions

View File

@ -19,9 +19,19 @@ import (
)
type (
// StoreDecoder is responsible for fetching already stored Corteza resources
// which are then managed by envoy and imported via an encoder.
// StoreDecoder is responsible for generating Envoy nodes from already stored
// resources which can then be managed by Envoy and imported via an encoder.
StoreDecoder struct{}
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
const (
paramsKeyStorer = "storer"
paramsKeyDAL = "dal"
)
// Decode returns a set of envoy nodes based on the provided params
@ -29,96 +39,36 @@ type (
// StoreDecoder expects the DecodeParam of `storer` and `dal` which conform
// to the store.Storer and dal.FullService interfaces.
func (d StoreDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
var (
s store.Storer
dl dal.FullService
)
// @todo we can optionally not require them based on what we're doing
if auxS, ok := p.Params["storer"]; ok {
s = auxS.(store.Storer)
s, err := d.getStorer(p)
if err != nil {
return
}
if auxDl, ok := p.Params["dal"]; ok {
dl = auxDl.(dal.FullService)
dl, err := d.getDal(p)
if err != nil {
return
}
return d.decode(ctx, s, dl, p)
}
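For orientation, a minimal caller-side sketch (editor's illustration, not part of the commit): the storer and DAL are handed over through DecodeParams.Params under the new paramsKeyStorer/paramsKeyDAL keys and are now validated by getStorer/getDal rather than asserted inline. The Params map type, the resource-type string, and the wrapper function name are assumptions.

func decodeStoredAutomation(ctx context.Context, s store.Storer, dl dal.FullService) (envoyx.NodeSet, error) {
	d := StoreDecoder{}

	return d.Decode(ctx, envoyx.DecodeParams{
		Type: envoyx.DecodeTypeStore,
		// only resource types prefixed with "corteza::automation" survive prepFilters
		Filter: map[string]envoyx.ResourceFilter{
			"corteza::automation:workflow": {},
		},
		// a missing or mistyped param now yields a descriptive error instead of a panic
		Params: map[string]any{
			paramsKeyStorer: s,
			paramsKeyDAL:    dl,
		},
	})
}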
func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullService, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Transform passed filters into an ordered structure
type (
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
wrappedFilters := make([]filterWrap, 0, len(p.Filter))
for rt, f := range p.Filter {
// Handle resources that don't belong to this decoder
if !strings.HasPrefix(rt, "corteza::automation") {
continue
}
// Preprocessing and basic filtering (to omit what this decoder can't handle)
wrappedFilters := d.prepFilters(p.Filter)
wrappedFilters = append(wrappedFilters, filterWrap{rt: rt, f: f})
}
// Get all requested scopes
scopedNodes := make(envoyx.NodeSet, len(p.Filter))
// @note skipping scope logic since it's currently only supported within
// Compose resources.
// Get all requested references
//
// Keep an index for the Node and one for the reference to make our
// lives easier.
refNodes := make([]map[string]*envoyx.Node, len(p.Filter))
refRefs := make([]map[string]envoyx.Ref, len(p.Filter))
err = func() (err error) {
for i, a := range wrappedFilters {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
refNodes[i] = auxr
refRefs[i] = auxa
}
return
}()
// Get all scoped nodes
scopedNodes, err := d.getScopeNodes(ctx, s, dl, wrappedFilters)
if err != nil {
err = errors.Wrap(err, "failed to decode node references")
return
}
// Get all reference nodes
refNodes, refRefs, err := d.getReferenceNodes(ctx, s, dl, wrappedFilters)
if err != nil {
return
}
// Process filters to get the envoy nodes
err = func() (err error) {
var aux envoyx.NodeSet
for i, wf := range wrappedFilters {
@ -309,3 +259,110 @@ func (d StoreDecoder) decodeTrigger(ctx context.Context, s store.Storer, dl dal.
}
// Resource should define a custom filter builder
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d StoreDecoder) getStorer(p envoyx.DecodeParams) (s store.Storer, err error) {
aux, ok := p.Params[paramsKeyStorer]
if ok {
s, ok = aux.(store.Storer)
if ok {
return
}
}
err = errors.Errorf("store decoder expects a storer conforming to store.Storer interface")
return
}
func (d StoreDecoder) getDal(p envoyx.DecodeParams) (dl dal.FullService, err error) {
aux, ok := p.Params[paramsKeyDAL]
if ok {
dl, ok = aux.(dal.FullService)
if ok {
return
}
}
err = errors.Errorf("store decoder expects a DAL conforming to dal.FullService interface")
return
}
func (d StoreDecoder) prepFilters(ff map[string]envoyx.ResourceFilter) (out []filterWrap) {
out = make([]filterWrap, 0, len(ff))
for rt, f := range ff {
// Handle resources that don't belong to this decoder
if !strings.HasPrefix(rt, "corteza::automation") {
continue
}
out = append(out, filterWrap{rt: rt, f: f})
}
return
}
func (d StoreDecoder) getScopeNodes(ctx context.Context, s store.Storer, dl dal.FullService, ff []filterWrap) (scopes envoyx.NodeSet, err error) {
// Get all requested scopes
scopes = make(envoyx.NodeSet, len(ff))
// @note skipping scope logic since it's currently only supported within
// Compose resources.
return
}
// getReferenceNodes returns all of the nodes referenced by the nodes defined by the filters
//
// The nodes are provided as a slice (the same order as the filters) and as a map for easier lookups.
func (d StoreDecoder) getReferenceNodes(ctx context.Context, s store.Storer, dl dal.FullService, ff []filterWrap) (nodes []map[string]*envoyx.Node, refs []map[string]envoyx.Ref, err error) {
nodes = make([]map[string]*envoyx.Node, len(ff))
refs = make([]map[string]envoyx.Ref, len(ff))
err = func() (err error) {
for i, a := range ff {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
nodes[i] = auxr
refs[i] = auxa
}
return
}()
if err != nil {
err = errors.Wrap(err, "failed to decode node references")
return
}
return
}
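To make the reference-resolution step above concrete, a heavily hedged sketch of a filter carrying a Refs entry; the fields of envoyx.Ref and the resource-type strings are inferred from their use in this diff, not confirmed elsewhere.

// hypothetical input for getReferenceNodes (editor's illustration)
var filters = map[string]envoyx.ResourceFilter{
	"corteza::automation:trigger": {
		Refs: map[string]envoyx.Ref{
			// resolve the owning workflow before the triggers themselves are decoded
			"WorkflowID": {
				ResourceType: "corteza::automation:workflow",
				Identifiers:  envoyx.Identifiers{}, // workflow identifiers; construction elided
			},
		},
	},
}

// per filter, getReferenceNodes returns a field→*envoyx.Node map and a parallel
// field→envoyx.Ref map, the latter built via node.ToRef() so the refs can be
// attached to the decoded nodes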

View File

@ -27,11 +27,6 @@ type (
StoreEncoder struct{}
)
const (
paramsKeyStorer = "storer"
paramsKeyDAL = "dal"
)
// Prepare performs some initial processing on the resource before it can be encoded
//
// Preparation runs validation, default value initialization, matching with

View File

@ -26,11 +26,28 @@ import (
type (
// YamlDecoder is responsible for decoding YAML documents into Corteza resources
// which are then managed by envoy and imported via an encoder.
YamlDecoder struct{}
YamlDecoder struct{}
// documentContext provides a bit of metadata to the decoder such as
// root-level reference definitions (such as the namespace)
documentContext struct {
references map[string]string
// references holds any references defined on the root of the document
//
// Example of defining a namespace reference:
// namespace: test_import_namespace
// modules:
// - name: Test Import Module
// handle: test_import_module
references map[string]string
// parentIdent holds the identifier of the parent resource (if nested).
// The parent ident can be handy if the resource requires the info before the
// original handling is finished (such as record datasource nodes).
parentIdent envoyx.Identifiers
}
// auxYamlDoc is a helper struct that registers custom YAML decoders
// to assist the package
auxYamlDoc struct {
nodes envoyx.NodeSet
}
@ -40,12 +57,14 @@ const (
paramsKeyStream = "stream"
)
// CanFile returns true if the provided file can be decoded with this decoder
func (d YamlDecoder) CanFile(f *os.File) (ok bool) {
// @todo improve/expand
return d.canExt(f.Name())
// @todo improve this check; for now a simple extension check should do the trick
return d.CanExt(f.Name())
}
func (d YamlDecoder) canExt(name string) (ok bool) {
// CanExt returns true if the provided file extension can be decoded with this decoder
func (d YamlDecoder) CanExt(name string) (ok bool) {
var (
pt = strings.Split(name, ".")
ext = strings.TrimSpace(pt[len(pt)-1])
@ -58,7 +77,6 @@ func (d YamlDecoder) canExt(name string) (ok bool) {
// YamlDecoder expects the DecodeParam of `stream` which conforms
// to the io.Reader interface.
func (d YamlDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Get the reader
r, err := d.getReader(ctx, p)
if err != nil {
return
@ -87,43 +105,39 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
kv := strings.ToLower(k.Value)
switch kv {
// Decode all resources under the automation component
case "workflow", "workflows":
if y7s.IsMapping(v) {
aux, err = d.unmarshalWorkflowMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalWorkflowSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal workflow")
err = errors.Wrap(err, "failed to unmarshal node: workflow")
}
return err
case "trigger":
// @note trigger doesn't support mapped inputs. This can be
// changed in the .cue definition under the
// .envoy.yaml.supportMappedInput field.
if y7s.IsSeq(v) {
aux, err = d.unmarshalTriggerSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal trigger")
err = errors.Wrap(err, "failed to unmarshal node: trigger")
}
return err
// Offload to custom handlers
default:
aux, err = d.unmarshalYAML(kv, v)
d.nodes = append(d.nodes, aux...)
if err != nil {
err = errors.Wrap(err, "failed to unmarshal node")
}
}
return nil
})
}
// // // // // // // // // // // // // // // // // // // // // // // // //
} // // // // // // // // // // // // // // // // // // // // // // // // //
// Functions for resource workflow
// // // // // // // // // // // // // // // // // // // // // // // // //
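A short usage sketch for the YAML side (editor's illustration, not part of the commit): the decoder is fed an io.Reader under the "stream" param (paramsKeyStream), and top-level keys such as workflows/triggers are routed by UnmarshalYAML above. The field names inside the YAML document and the Params map type are assumptions.

func decodeAutomationYAML(ctx context.Context) (envoyx.NodeSet, error) {
	src := strings.NewReader(`
workflows:
  - handle: test_workflow
triggers:
  - eventType: onManual
`)

	d := YamlDecoder{}
	return d.Decode(ctx, envoyx.DecodeParams{
		Params: map[string]any{
			paramsKeyStream: src, // anything conforming to io.Reader
		},
	})
}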

View File

@ -5,10 +5,6 @@ import (
"gopkg.in/yaml.v3"
)
func (d *auxYamlDoc) unmarshalYAML(k string, n *yaml.Node) (out envoyx.NodeSet, err error) {
return
}
func (d *auxYamlDoc) unmarshalTriggersExtendedNode(dctx documentContext, n *yaml.Node, meta ...*yaml.Node) (out envoyx.NodeSet, err error) {
return d.unmarshalTriggerNode(dctx, n, meta...)
}

View File

@ -89,9 +89,7 @@ func (e YamlEncoder) Encode(ctx context.Context, p envoyx.EncodeParams, rt strin
}
return yaml.NewEncoder(w).Encode(out)
}
// // // // // // // // // // // // // // // // // // // // // // // // //
} // // // // // // // // // // // // // // // // // // // // // // // // //
// Functions for resource workflow
// // // // // // // // // // // // // // // // // // // // // // // // //

View File

@ -18,9 +18,20 @@ import (
)
type (
// StoreDecoder is responsible for fetching already stored Corteza resources
// which are then managed by envoy and imported via an encoder.
// StoreDecoder is responsible for generating Envoy nodes from already stored
// resources which can then be managed by Envoy and imported via an encoder.
StoreDecoder struct{}
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
const (
paramsKeyStorer = "storer"
paramsKeyDAL = "dal"
)
// Decode returns a set of envoy nodes based on the provided params
@ -28,127 +39,36 @@ type (
// StoreDecoder expects the DecodeParam of `storer` and `dal` which conform
// to the store.Storer and dal.FullService interfaces.
func (d StoreDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
var (
s store.Storer
dl dal.FullService
)
// @todo we can optionally not require them based on what we're doing
if auxS, ok := p.Params["storer"]; ok {
s = auxS.(store.Storer)
s, err := d.getStorer(p)
if err != nil {
return
}
if auxDl, ok := p.Params["dal"]; ok {
dl = auxDl.(dal.FullService)
dl, err := d.getDal(p)
if err != nil {
return
}
return d.decode(ctx, s, dl, p)
}
func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullService, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Transform passed filters into an ordered structure
type (
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
wrappedFilters := make([]filterWrap, 0, len(p.Filter))
for rt, f := range p.Filter {
// Handle resources that don't belong to this decoder
if !strings.HasPrefix(rt, "corteza::{{.componentIdent}}") {
continue
}
wrappedFilters = append(wrappedFilters, filterWrap{rt: rt, f: f})
}
// Get all requested scopes
scopedNodes := make(envoyx.NodeSet, len(p.Filter))
{{ if eq .componentIdent "compose" }}
err = func() (err error) {
for i, a := range wrappedFilters {
if a.f.Scope.ResourceType == "" {
continue
}
// For now the scope can only point to namespace so this will do
var nn envoyx.NodeSet
nn, err = d.decodeNamespace(ctx, s, dl, d.makeNamespaceFilter(nil, nil, envoyx.ResourceFilter{Identifiers: a.f.Scope.Identifiers}))
if err != nil {
return
}
if len(nn) > 1 {
err = fmt.Errorf("ambiguous scope %v: matches multiple resources", a.f.Scope)
return
}
if len(nn) == 0 {
err = fmt.Errorf("invalid scope %v: resource not found", a.f)
return
}
scopedNodes[i] = nn[0]
}
return
}()
// Preprocessing and basic filtering (to omit what this decoder can't handle)
wrappedFilters := d.prepFilters(p.Filter)
// Get all scoped nodes
scopedNodes, err := d.getScopeNodes(ctx, s, dl, wrappedFilters)
if err != nil {
err = errors.Wrap(err, "failed to decode node scopes")
return
}
{{ else }}
// @note skipping scope logic since it's currently only supported within
// Compose resources.
{{ end }}
// Get all requested references
//
// Keep an index for the Node and one for the reference to make our
// lives easier.
refNodes := make([]map[string]*envoyx.Node, len(p.Filter))
refRefs := make([]map[string]envoyx.Ref, len(p.Filter))
err = func() (err error) {
for i, a := range wrappedFilters {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
refNodes[i] = auxr
refRefs[i] = auxa
}
return
}()
// Get all reference nodes
refNodes, refRefs, err := d.getReferenceNodes(ctx, s, dl, wrappedFilters)
if err != nil {
err = errors.Wrap(err, "failed to decode node references")
return
}
// Process filters to get the envoy nodes
err = func() (err error) {
var aux envoyx.NodeSet
for i, wf := range wrappedFilters {
@ -321,3 +241,140 @@ func (d StoreDecoder) make{{.expIdent}}Filter(scope *envoyx.Node, refs map[strin
// Resource should define a custom filter builder
{{ end }}
{{end}}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d StoreDecoder) getStorer(p envoyx.DecodeParams) (s store.Storer, err error) {
aux, ok := p.Params[paramsKeyStorer]
if ok {
s, ok = aux.(store.Storer)
if ok {
return
}
}
err = errors.Errorf("store decoder expects a storer conforming to store.Storer interface")
return
}
func (d StoreDecoder) getDal(p envoyx.DecodeParams) (dl dal.FullService, err error) {
aux, ok := p.Params[paramsKeyDAL]
if ok {
dl, ok = aux.(dal.FullService)
if ok {
return
}
}
err = errors.Errorf("store decoder expects a DAL conforming to dal.FullService interface")
return
}
func (d StoreDecoder) prepFilters(ff map[string]envoyx.ResourceFilter) (out []filterWrap) {
out = make([]filterWrap, 0, len(ff))
for rt, f := range ff {
// Handle resources that don't belong to this decoder
if !strings.HasPrefix(rt, "corteza::{{.componentIdent}}") {
continue
}
out = append(out, filterWrap{rt: rt, f: f})
}
return
}
func (d StoreDecoder) getScopeNodes(ctx context.Context, s store.Storer, dl dal.FullService, ff []filterWrap) (scopes envoyx.NodeSet, err error) {
// Get all requested scopes
scopes = make(envoyx.NodeSet, len(ff))
{{ if eq .componentIdent "compose" }}
err = func() (err error) {
for i, fw := range ff {
if fw.f.Scope.ResourceType == "" {
continue
}
// For now the scope can only point to namespace so this will do
var nn envoyx.NodeSet
nn, err = d.decodeNamespace(ctx, s, dl, d.makeNamespaceFilter(nil, nil, envoyx.ResourceFilter{Identifiers: fw.f.Scope.Identifiers}))
if err != nil {
return
}
if len(nn) > 1 {
err = fmt.Errorf("ambiguous scope %v: matches multiple resources", fw.f.Scope)
return
}
if len(nn) == 0 {
err = fmt.Errorf("invalid scope %v: resource not found", fw.f)
return
}
scopes[i] = nn[0]
}
return
}()
if err != nil {
err = errors.Wrap(err, "failed to decode node scopes")
return
}
{{ else }}
// @note skipping scope logic since it's currently only supported within
// Compose resources.
{{ end }}
return
}
// getReferenceNodes returns all of the nodes referenced by the nodes defined by the filters
//
// The nodes are provided as a slice (the same order as the filters) and as a map for easier lookups.
func (d StoreDecoder) getReferenceNodes(ctx context.Context, s store.Storer, dl dal.FullService, ff []filterWrap) (nodes []map[string]*envoyx.Node, refs []map[string]envoyx.Ref, err error) {
nodes = make([]map[string]*envoyx.Node, len(ff))
refs = make([]map[string]envoyx.Ref, len(ff))
err = func() (err error) {
for i, a := range ff {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
nodes[i] = auxr
refs[i] = auxa
}
return
}()
if err != nil {
err = errors.Wrap(err, "failed to decode node references")
return
}
return
}

View File

@ -25,14 +25,8 @@ type (
StoreEncoder struct{}
)
const (
paramsKeyStorer = "storer"
paramsKeyDAL = "dal"
)
{{ $rootRes := .resources }}
// Prepare performs some initial processing on the resource before it can be encoded
//
// Preparation runs validation, default value initialization, matching with

View File

@ -23,15 +23,33 @@ import (
)
{{$cmpIdent := .componentIdent}}
{{$rootRes := .resources}}
type (
// YamlDecoder is responsible for decoding YAML documents into Corteza resources
// which are then managed by envoy and imported via an encoder.
YamlDecoder struct{}
// documentContext provides a bit of metadata to the decoder such as
// root-level reference definitions (such as the namespace)
documentContext struct {
// references holds any references defined on the root of the document
//
// Example of defining a namespace reference:
// namespace: test_import_namespace
// modules:
// - name: Test Import Module
// handle: test_import_module
references map[string]string
// parentIdent holds the identifier of the parent resource (if nested).
// The parent ident can be handy if the resource requires the info before the
// original handling is finished (such as record datasource nodes).
parentIdent envoyx.Identifiers
}
// auxYamlDoc is a helper struct that registers custom YAML decoders
// to assist the package
auxYamlDoc struct {
nodes envoyx.NodeSet
}
@ -41,12 +59,14 @@ const (
paramsKeyStream = "stream"
)
// CanFile returns true if the provided file can be decoded with this decoder
func (d YamlDecoder) CanFile(f *os.File) (ok bool) {
// @todo improve/expand
return d.canExt(f.Name())
// @todo improve this check; for now a simple extension check should do the trick
return d.CanExt(f.Name())
}
func (d YamlDecoder) canExt(name string) (ok bool) {
// CanExt returns true if the provided file extension can be decoded with this decoder
func (d YamlDecoder) CanExt(name string) (ok bool) {
var (
pt = strings.Split(name, ".")
ext = strings.TrimSpace(pt[len(pt)-1])
@ -59,7 +79,6 @@ func (d YamlDecoder) canExt(name string) (ok bool) {
// YamlDecoder expects the DecodeParam of `stream` which conforms
// to the io.Reader interface.
func (d YamlDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Get the reader
r, err := d.getReader(ctx, p)
if err != nil {
return
@ -88,6 +107,7 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
kv := strings.ToLower(k.Value)
switch kv {
// Decode all resources under the {{$cmpIdent}} component
{{- range .resources -}}
{{- if .envoy.omit -}}
{{continue}}
@ -102,19 +122,24 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshal{{.expIdent}}Map(dctx, v)
d.nodes = append(d.nodes, aux...)
}
{{ else }}
// @note {{ .ident }} doesn't support mapped inputs. This can be
// changed in the .cue definition under the
// .envoy.yaml.supportMappedInput field.
{{- end }}
if y7s.IsSeq(v) {
aux, err = d.unmarshal{{.expIdent}}Seq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal {{.ident}}")
err = errors.Wrap(err, "failed to unmarshal node: {{.ident}}")
}
return err
{{ end }}
{{ if eq .componentIdent "system" }}
// Access control nodes
{{/* @todo this should be improved and offloaded to the cue instead of template sys */}}
{{- if eq .componentIdent "system" }}
// Decode access control nodes
case "allow":
aux, err = unmarshalAllowNode(v)
d.nodes = append(d.nodes, aux...)
@ -138,6 +163,7 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
}
{{ end }}
{{ if .envoy.yaml.defaultDecoder }}
// Offload to custom handlers
default:
aux, err = d.unmarshalYAML(kv, v)
@ -145,12 +171,12 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
if err != nil {
err = errors.Wrap(err, "failed to unmarshal node")
}
{{ end }}
}
return nil
})
}
{{ $rootRes := .resources }}
{{- range .resources }}
{{- if .envoy.omit}}
{{continue}}

View File

@ -16,6 +16,8 @@ import (
{{- end }}
)
{{ $rootRes := .resources }}
type (
// YamlEncoder is responsible for encoding Corteza resources into
// a YAML supported format
@ -84,8 +86,6 @@ func (e YamlEncoder) Encode(ctx context.Context, p envoyx.EncodeParams, rt strin
return yaml.NewEncoder(w).Encode(out)
}
{{ $rootRes := .resources }}
{{- range .resources }}
{{- if .envoy.omit}}
{{continue}}
@ -293,4 +293,4 @@ func (e YamlEncoder) getWriter(p envoyx.EncodeParams) (out io.Writer, err error)
// @todo consider adding support for managing files from a location
err = errors.Errorf("YAML encoder expects a writer conforming to io.Writer interface")
return
}
}

View File

@ -188,6 +188,8 @@ import (
supportMappedInput: bool | *true
mappedField: string | *""
}] | *[]
// enable or disable offloading unhandled nodes onto a custom default decoder
defaultDecoder: bool | *false
extendedResourceEncoders: [...{
ident: string

View File

@ -19,9 +19,19 @@ import (
)
type (
// StoreDecoder is responsible for fetching already stored Corteza resources
// which are then managed by envoy and imported via an encoder.
// StoreDecoder is responsible for generating Envoy nodes from already stored
// resources which can then be managed by Envoy and imported via an encoder.
StoreDecoder struct{}
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
const (
paramsKeyStorer = "storer"
paramsKeyDAL = "dal"
)
// Decode returns a set of envoy nodes based on the provided params
@ -29,123 +39,36 @@ type (
// StoreDecoder expects the DecodeParam of `storer` and `dal` which conform
// to the store.Storer and dal.FullService interfaces.
func (d StoreDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
var (
s store.Storer
dl dal.FullService
)
// @todo we can optionally not require them based on what we're doing
if auxS, ok := p.Params["storer"]; ok {
s = auxS.(store.Storer)
s, err := d.getStorer(p)
if err != nil {
return
}
if auxDl, ok := p.Params["dal"]; ok {
dl = auxDl.(dal.FullService)
dl, err := d.getDal(p)
if err != nil {
return
}
return d.decode(ctx, s, dl, p)
}
func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullService, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Transform passed filters into an ordered structure
type (
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
wrappedFilters := make([]filterWrap, 0, len(p.Filter))
for rt, f := range p.Filter {
// Handle resources that don't belong to this decoder
if !strings.HasPrefix(rt, "corteza::compose") {
continue
}
// Preprocessing and basic filtering (to omit what this decoder can't handle)
wrappedFilters := d.prepFilters(p.Filter)
wrappedFilters = append(wrappedFilters, filterWrap{rt: rt, f: f})
}
// Get all requested scopes
scopedNodes := make(envoyx.NodeSet, len(p.Filter))
err = func() (err error) {
for i, a := range wrappedFilters {
if a.f.Scope.ResourceType == "" {
continue
}
// For now the scope can only point to namespace so this will do
var nn envoyx.NodeSet
nn, err = d.decodeNamespace(ctx, s, dl, d.makeNamespaceFilter(nil, nil, envoyx.ResourceFilter{Identifiers: a.f.Scope.Identifiers}))
if err != nil {
return
}
if len(nn) > 1 {
err = fmt.Errorf("ambiguous scope %v: matches multiple resources", a.f.Scope)
return
}
if len(nn) == 0 {
err = fmt.Errorf("invalid scope %v: resource not found", a.f)
return
}
scopedNodes[i] = nn[0]
}
return
}()
// Get all scoped nodes
scopedNodes, err := d.getScopeNodes(ctx, s, dl, wrappedFilters)
if err != nil {
err = errors.Wrap(err, "failed to decode node scopes")
return
}
// Get all requested references
//
// Keep an index for the Node and one for the reference to make our
// lives easier.
refNodes := make([]map[string]*envoyx.Node, len(p.Filter))
refRefs := make([]map[string]envoyx.Ref, len(p.Filter))
err = func() (err error) {
for i, a := range wrappedFilters {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
refNodes[i] = auxr
refRefs[i] = auxa
}
return
}()
// Get all reference nodes
refNodes, refRefs, err := d.getReferenceNodes(ctx, s, dl, wrappedFilters)
if err != nil {
err = errors.Wrap(err, "failed to decode node references")
return
}
// Process filters to get the envoy nodes
err = func() (err error) {
var aux envoyx.NodeSet
for i, wf := range wrappedFilters {
@ -631,3 +554,137 @@ func (d StoreDecoder) makePageFilter(scope *envoyx.Node, refs map[string]*envoyx
out = d.extendPageFilter(scope, refs, auxf, out)
return
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d StoreDecoder) getStorer(p envoyx.DecodeParams) (s store.Storer, err error) {
aux, ok := p.Params[paramsKeyStorer]
if ok {
s, ok = aux.(store.Storer)
if ok {
return
}
}
err = errors.Errorf("store decoder expects a storer conforming to store.Storer interface")
return
}
func (d StoreDecoder) getDal(p envoyx.DecodeParams) (dl dal.FullService, err error) {
aux, ok := p.Params[paramsKeyDAL]
if ok {
dl, ok = aux.(dal.FullService)
if ok {
return
}
}
err = errors.Errorf("store decoder expects a DAL conforming to dal.FullService interface")
return
}
func (d StoreDecoder) prepFilters(ff map[string]envoyx.ResourceFilter) (out []filterWrap) {
out = make([]filterWrap, 0, len(ff))
for rt, f := range ff {
// Handle resources that don't belong to this decoder
if !strings.HasPrefix(rt, "corteza::compose") {
continue
}
out = append(out, filterWrap{rt: rt, f: f})
}
return
}
func (d StoreDecoder) getScopeNodes(ctx context.Context, s store.Storer, dl dal.FullService, ff []filterWrap) (scopes envoyx.NodeSet, err error) {
// Get all requested scopes
scopes = make(envoyx.NodeSet, len(ff))
err = func() (err error) {
for i, fw := range ff {
if fw.f.Scope.ResourceType == "" {
continue
}
// For now the scope can only point to namespace so this will do
var nn envoyx.NodeSet
nn, err = d.decodeNamespace(ctx, s, dl, d.makeNamespaceFilter(nil, nil, envoyx.ResourceFilter{Identifiers: fw.f.Scope.Identifiers}))
if err != nil {
return
}
if len(nn) > 1 {
err = fmt.Errorf("ambiguous scope %v: matches multiple resources", fw.f.Scope)
return
}
if len(nn) == 0 {
err = fmt.Errorf("invalid scope %v: resource not found", fw.f)
return
}
scopes[i] = nn[0]
}
return
}()
if err != nil {
err = errors.Wrap(err, "failed to decode node scopes")
return
}
return
}
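For the Compose-only scope handling above, a hedged sketch of a filter whose Scope pins decoding to a single namespace; getScopeNodes resolves it via decodeNamespace and errors on zero or multiple matches. The MakeIdentifiers constructor and the resource-type strings are assumptions.

// editor's illustration; identifier construction is assumed
func moduleFilterForNamespace(nsHandle string) map[string]envoyx.ResourceFilter {
	var f envoyx.ResourceFilter
	f.Scope.ResourceType = "corteza::compose:namespace"
	f.Scope.Identifiers = envoyx.MakeIdentifiers(nsHandle) // assumed helper

	return map[string]envoyx.ResourceFilter{
		"corteza::compose:module": f,
	}
}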
// getReferenceNodes returns all of the nodes referenced by the nodes defined by the filters
//
// The nodes are provided as a slice (the same order as the filters) and as a map for easier lookups.
func (d StoreDecoder) getReferenceNodes(ctx context.Context, s store.Storer, dl dal.FullService, ff []filterWrap) (nodes []map[string]*envoyx.Node, refs []map[string]envoyx.Ref, err error) {
nodes = make([]map[string]*envoyx.Node, len(ff))
refs = make([]map[string]envoyx.Ref, len(ff))
err = func() (err error) {
for i, a := range ff {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
nodes[i] = auxr
refs[i] = auxa
}
return
}()
if err != nil {
err = errors.Wrap(err, "failed to decode node references")
return
}
return
}

View File

@ -27,11 +27,6 @@ type (
StoreEncoder struct{}
)
const (
paramsKeyStorer = "storer"
paramsKeyDAL = "dal"
)
// Prepare performs some initial processing on the resource before it can be encoded
//
// Preparation runs validation, default value initialization, matching with

View File

@ -26,11 +26,28 @@ import (
type (
// YamlDecoder is responsible for decoding YAML documents into Corteza resources
// which are then managed by envoy and imported via an encoder.
YamlDecoder struct{}
YamlDecoder struct{}
// documentContext provides a bit of metadata to the decoder such as
// root-level reference definitions (such as the namespace)
documentContext struct {
references map[string]string
// references holds any references defined on the root of the document
//
// Example of defining a namespace reference:
// namespace: test_import_namespace
// modules:
// - name: Test Import Module
// handle: test_import_module
references map[string]string
// parentIdent holds the identifier of the parent resource (if nested).
// The parent ident can be handy if the resource requires the info before the
// original handling is finished (such as record datasource nodes).
parentIdent envoyx.Identifiers
}
// auxYamlDoc is a helper struct that registers custom YAML decoders
// to assist the package
auxYamlDoc struct {
nodes envoyx.NodeSet
}
@ -40,12 +57,14 @@ const (
paramsKeyStream = "stream"
)
// CanFile returns true if the provided file can be decoded with this decoder
func (d YamlDecoder) CanFile(f *os.File) (ok bool) {
// @todo improve/expand
return d.canExt(f.Name())
// @todo improve this check; for now a simple extension check should do the trick
return d.CanExt(f.Name())
}
func (d YamlDecoder) canExt(name string) (ok bool) {
// CanExt returns true if the provided file extension can be decoded with this decoder
func (d YamlDecoder) CanExt(name string) (ok bool) {
var (
pt = strings.Split(name, ".")
ext = strings.TrimSpace(pt[len(pt)-1])
@ -58,7 +77,6 @@ func (d YamlDecoder) canExt(name string) (ok bool) {
// YamlDecoder expects the DecodeParam of `stream` which conforms
// to the io.Reader interface.
func (d YamlDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Get the reader
r, err := d.getReader(ctx, p)
if err != nil {
return
@ -87,17 +105,19 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
kv := strings.ToLower(k.Value)
switch kv {
// Decode all resources under the compose component
case "chart", "charts", "chrt":
if y7s.IsMapping(v) {
aux, err = d.unmarshalChartMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalChartSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal chart")
err = errors.Wrap(err, "failed to unmarshal node: chart")
}
return err
@ -106,12 +126,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalModuleMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalModuleSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal module")
err = errors.Wrap(err, "failed to unmarshal node: module")
}
return err
@ -120,12 +141,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalModuleFieldMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalModuleFieldSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal moduleField")
err = errors.Wrap(err, "failed to unmarshal node: moduleField")
}
return err
@ -134,12 +156,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalNamespaceMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalNamespaceSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal namespace")
err = errors.Wrap(err, "failed to unmarshal node: namespace")
}
return err
@ -148,22 +171,16 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalPageMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalPageSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal page")
err = errors.Wrap(err, "failed to unmarshal node: page")
}
return err
// Offload to custom handlers
default:
aux, err = d.unmarshalYAML(kv, v)
d.nodes = append(d.nodes, aux...)
if err != nil {
err = errors.Wrap(err, "failed to unmarshal node")
}
}
return nil
})

View File

@ -339,10 +339,6 @@ func (d *auxYamlDoc) unmarshalModuleFieldExpressionsNode(r *types.ModuleField, n
return
}
func (d *auxYamlDoc) unmarshalYAML(k string, n *yaml.Node) (out envoyx.NodeSet, err error) {
return
}
func (d *auxYamlDoc) postProcessNestedModuleNodes(nn envoyx.NodeSet) (out envoyx.NodeSet, err error) {
// Get all references from all module fields
refs := make(map[string]envoyx.Ref)

View File

@ -19,9 +19,19 @@ import (
)
type (
// StoreDecoder is responsible for fetching already stored Corteza resources
// which are then managed by envoy and imported via an encoder.
// StoreDecoder is responsible for generating Envoy nodes from already stored
// resources which can then be managed by Envoy and imported via an encoder.
StoreDecoder struct{}
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
const (
paramsKeyStorer = "storer"
paramsKeyDAL = "dal"
)
// Decode returns a set of envoy nodes based on the provided params
@ -29,96 +39,36 @@ type (
// StoreDecoder expects the DecodeParam of `storer` and `dal` which conform
// to the store.Storer and dal.FullService interfaces.
func (d StoreDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
var (
s store.Storer
dl dal.FullService
)
// @todo we can optionally not require them based on what we're doing
if auxS, ok := p.Params["storer"]; ok {
s = auxS.(store.Storer)
s, err := d.getStorer(p)
if err != nil {
return
}
if auxDl, ok := p.Params["dal"]; ok {
dl = auxDl.(dal.FullService)
dl, err := d.getDal(p)
if err != nil {
return
}
return d.decode(ctx, s, dl, p)
}
func (d StoreDecoder) decode(ctx context.Context, s store.Storer, dl dal.FullService, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Transform passed filters into an ordered structure
type (
filterWrap struct {
rt string
f envoyx.ResourceFilter
}
)
wrappedFilters := make([]filterWrap, 0, len(p.Filter))
for rt, f := range p.Filter {
// Handle resources that don't belong to this decoder
if !strings.HasPrefix(rt, "corteza::system") {
continue
}
// Preprocessing and basic filtering (to omit what this decoder can't handle)
wrappedFilters := d.prepFilters(p.Filter)
wrappedFilters = append(wrappedFilters, filterWrap{rt: rt, f: f})
}
// Get all requested scopes
scopedNodes := make(envoyx.NodeSet, len(p.Filter))
// @note skipping scope logic since it's currently only supported within
// Compose resources.
// Get all requested references
//
// Keep an index for the Node and one for the reference to make our
// lives easier.
refNodes := make([]map[string]*envoyx.Node, len(p.Filter))
refRefs := make([]map[string]envoyx.Ref, len(p.Filter))
err = func() (err error) {
for i, a := range wrappedFilters {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
refNodes[i] = auxr
refRefs[i] = auxa
}
return
}()
// Get all scoped nodes
scopedNodes, err := d.getScopeNodes(ctx, s, dl, wrappedFilters)
if err != nil {
err = errors.Wrap(err, "failed to decode node references")
return
}
// Get all reference nodes
refNodes, refRefs, err := d.getReferenceNodes(ctx, s, dl, wrappedFilters)
if err != nil {
return
}
// Process filters to get the envoy nodes
err = func() (err error) {
var aux envoyx.NodeSet
for i, wf := range wrappedFilters {
@ -1108,3 +1058,110 @@ func (d StoreDecoder) makeDalSensitivityLevelFilter(scope *envoyx.Node, refs map
return
}
// // // // // // // // // // // // // // // // // // // // // // // // //
// Utilities
// // // // // // // // // // // // // // // // // // // // // // // // //
func (d StoreDecoder) getStorer(p envoyx.DecodeParams) (s store.Storer, err error) {
aux, ok := p.Params[paramsKeyStorer]
if ok {
s, ok = aux.(store.Storer)
if ok {
return
}
}
err = errors.Errorf("store decoder expects a storer conforming to store.Storer interface")
return
}
func (d StoreDecoder) getDal(p envoyx.DecodeParams) (dl dal.FullService, err error) {
aux, ok := p.Params[paramsKeyDAL]
if ok {
dl, ok = aux.(dal.FullService)
if ok {
return
}
}
err = errors.Errorf("store decoder expects a DAL conforming to dal.FullService interface")
return
}
func (d StoreDecoder) prepFilters(ff map[string]envoyx.ResourceFilter) (out []filterWrap) {
out = make([]filterWrap, 0, len(ff))
for rt, f := range ff {
// Handle resources that don't belong to this decoder
if !strings.HasPrefix(rt, "corteza::system") {
continue
}
out = append(out, filterWrap{rt: rt, f: f})
}
return
}
func (d StoreDecoder) getScopeNodes(ctx context.Context, s store.Storer, dl dal.FullService, ff []filterWrap) (scopes envoyx.NodeSet, err error) {
// Get all requested scopes
scopes = make(envoyx.NodeSet, len(ff))
// @note skipping scope logic since it's currently only supported within
// Compose resources.
return
}
// getReferenceNodes returns all of the nodes referenced by the nodes defined by the filters
//
// The nodes are provided as a slice (the same order as the filters) and as a map for easier lookups.
func (d StoreDecoder) getReferenceNodes(ctx context.Context, s store.Storer, dl dal.FullService, ff []filterWrap) (nodes []map[string]*envoyx.Node, refs []map[string]envoyx.Ref, err error) {
nodes = make([]map[string]*envoyx.Node, len(ff))
refs = make([]map[string]envoyx.Ref, len(ff))
err = func() (err error) {
for i, a := range ff {
if len(a.f.Refs) == 0 {
continue
}
auxr := make(map[string]*envoyx.Node, len(a.f.Refs))
auxa := make(map[string]envoyx.Ref)
for field, ref := range a.f.Refs {
f := ref.ResourceFilter()
aux, err := d.decode(ctx, s, dl, envoyx.DecodeParams{
Type: envoyx.DecodeTypeStore,
Filter: f,
})
if err != nil {
return err
}
// @todo consider changing this.
// Currently it's required because the .decode may return some
// nested nodes as well.
// Consider a flag or a new function.
aux = envoyx.NodesForResourceType(ref.ResourceType, aux...)
if len(aux) == 0 {
return fmt.Errorf("invalid reference %v", ref)
}
if len(aux) > 1 {
return fmt.Errorf("ambiguous reference: too many resources returned %v", a.f)
}
auxr[field] = aux[0]
auxa[field] = aux[0].ToRef()
}
nodes[i] = auxr
refs[i] = auxa
}
return
}()
if err != nil {
err = errors.Wrap(err, "failed to decode node references")
return
}
return
}

View File

@ -27,11 +27,6 @@ type (
StoreEncoder struct{}
)
const (
paramsKeyStorer = "storer"
paramsKeyDAL = "dal"
)
// Prepare performs some initial processing on the resource before it can be encoded
//
// Preparation runs validation, default value initialization, matching with

View File

@ -26,11 +26,28 @@ import (
type (
// YamlDecoder is responsible for decoding YAML documents into Corteza resources
// which are then managed by envoy and imported via an encoder.
YamlDecoder struct{}
YamlDecoder struct{}
// documentContext provides a bit of metadata to the decoder such as
// root-level reference definitions (such as the namespace)
documentContext struct {
references map[string]string
// references holds any references defined on the root of the document
//
// Example of defining a namespace reference:
// namespace: test_import_namespace
// modules:
// - name: Test Import Module
// handle: test_import_module
references map[string]string
// parentIdent holds the identifier of the parent resource (if nested).
// The parent ident can be handy if the resource requires the info before the
// original handling is finished (such as record datasource nodes).
parentIdent envoyx.Identifiers
}
// auxYamlDoc is a helper struct that registers custom YAML decoders
// to assist the package
auxYamlDoc struct {
nodes envoyx.NodeSet
}
@ -40,12 +57,14 @@ const (
paramsKeyStream = "stream"
)
// CanFile returns true if the provided file can be decoded with this decoder
func (d YamlDecoder) CanFile(f *os.File) (ok bool) {
// @todo improve/expand
return d.canExt(f.Name())
// @todo improve this check; for now a simple extension check should do the trick
return d.CanExt(f.Name())
}
func (d YamlDecoder) canExt(name string) (ok bool) {
// CanExt returns true if the provided file extension can be decoded with this decoder
func (d YamlDecoder) CanExt(name string) (ok bool) {
var (
pt = strings.Split(name, ".")
ext = strings.TrimSpace(pt[len(pt)-1])
@ -58,7 +77,6 @@ func (d YamlDecoder) canExt(name string) (ok bool) {
// YamlDecoder expects the DecodeParam of `stream` which conforms
// to the io.Reader interface.
func (d YamlDecoder) Decode(ctx context.Context, p envoyx.DecodeParams) (out envoyx.NodeSet, err error) {
// Get the reader
r, err := d.getReader(ctx, p)
if err != nil {
return
@ -87,17 +105,19 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
kv := strings.ToLower(k.Value)
switch kv {
// Decode all resources under the system component
case "application", "apps":
if y7s.IsMapping(v) {
aux, err = d.unmarshalApplicationMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalApplicationSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal application")
err = errors.Wrap(err, "failed to unmarshal node: application")
}
return err
@ -106,22 +126,26 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalApigwRouteMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalApigwRouteSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal apigwRoute")
err = errors.Wrap(err, "failed to unmarshal node: apigwRoute")
}
return err
case "apigwfilter":
// @note apigwFilter doesn't support mapped inputs. This can be
// changed in the .cue definition under the
// .envoy.yaml.supportMappedInput field.
if y7s.IsSeq(v) {
aux, err = d.unmarshalApigwFilterSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal apigwFilter")
err = errors.Wrap(err, "failed to unmarshal node: apigwFilter")
}
return err
@ -130,12 +154,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalAuthClientMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalAuthClientSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal authClient")
err = errors.Wrap(err, "failed to unmarshal node: authClient")
}
return err
@ -144,12 +169,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalQueueMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalQueueSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal queue")
err = errors.Wrap(err, "failed to unmarshal node: queue")
}
return err
@ -158,12 +184,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalReportMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalReportSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal report")
err = errors.Wrap(err, "failed to unmarshal node: report")
}
return err
@ -172,12 +199,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalRoleMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalRoleSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal role")
err = errors.Wrap(err, "failed to unmarshal node: role")
}
return err
@ -186,12 +214,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalTemplateMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalTemplateSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal template")
err = errors.Wrap(err, "failed to unmarshal node: template")
}
return err
@ -200,12 +229,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalUserMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalUserSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal user")
err = errors.Wrap(err, "failed to unmarshal node: user")
}
return err
@ -214,12 +244,13 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalDalConnectionMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalDalConnectionSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal dalConnection")
err = errors.Wrap(err, "failed to unmarshal node: dalConnection")
}
return err
@ -228,16 +259,17 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
aux, err = d.unmarshalDalSensitivityLevelMap(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if y7s.IsSeq(v) {
aux, err = d.unmarshalDalSensitivityLevelSeq(dctx, v)
d.nodes = append(d.nodes, aux...)
}
if err != nil {
err = errors.Wrap(err, "failed to unmarshal dalSensitivityLevel")
err = errors.Wrap(err, "failed to unmarshal node: dalSensitivityLevel")
}
return err
// Access control nodes
// Decode access control nodes
case "allow":
aux, err = unmarshalAllowNode(v)
d.nodes = append(d.nodes, aux...)
@ -260,13 +292,6 @@ func (d *auxYamlDoc) UnmarshalYAML(n *yaml.Node) (err error) {
err = errors.Wrap(err, "failed to unmarshal node: locale")
}
// Offload to custom handlers
default:
aux, err = d.unmarshalYAML(kv, v)
d.nodes = append(d.nodes, aux...)
if err != nil {
err = errors.Wrap(err, "failed to unmarshal node")
}
}
return nil
})

View File

@ -10,10 +10,6 @@ import (
"gopkg.in/yaml.v3"
)
func (d *auxYamlDoc) unmarshalYAML(k string, n *yaml.Node) (out envoyx.NodeSet, err error) {
return
}
func (d *auxYamlDoc) unmarshalFiltersExtendedNode(dctx documentContext, n *yaml.Node, meta ...*yaml.Node) (out envoyx.NodeSet, err error) {
return d.unmarshalApigwFilterNode(dctx, n, meta...)
}