diff --git a/compose/types/module.go b/compose/types/module.go index 5fbce15ec..8b66a8d03 100644 --- a/compose/types/module.go +++ b/compose/types/module.go @@ -1,27 +1,28 @@ package types import ( + "time" + "github.com/cortezaproject/corteza-server/pkg/filter" "github.com/cortezaproject/corteza-server/pkg/rbac" "github.com/jmoiron/sqlx/types" - "time" ) type ( Module struct { - ID uint64 `json:"moduleID,string"` + ID uint64 `json:"moduleID,string" yaml:"-"` Handle string `json:"handle"` Name string `json:"name"` - Meta types.JSONText `json:"meta"` - Fields ModuleFieldSet `json:"fields"` + Meta types.JSONText `json:"meta" yaml:",omitempty"` + Fields ModuleFieldSet `json:"fields" yaml:"-"` Labels map[string]string `json:"labels,omitempty"` NamespaceID uint64 `json:"namespaceID,string"` - CreatedAt time.Time `json:"createdAt,omitempty"` - UpdatedAt *time.Time `json:"updatedAt,omitempty"` - DeletedAt *time.Time `json:"deletedAt,omitempty"` + CreatedAt time.Time `json:"createdAt,omitempty" yaml:",omitempty"` + UpdatedAt *time.Time `json:"updatedAt,omitempty" yaml:",omitempty"` + DeletedAt *time.Time `json:"deletedAt,omitempty" yaml:",omitempty"` } ModuleFilter struct { diff --git a/compose/types/record.go b/compose/types/record.go index a4a28b376..b50d1b04d 100644 --- a/compose/types/record.go +++ b/compose/types/record.go @@ -3,10 +3,11 @@ package types import ( "encoding/json" "fmt" - "github.com/cortezaproject/corteza-server/pkg/filter" "strconv" "time" + "github.com/cortezaproject/corteza-server/pkg/filter" + "github.com/cortezaproject/corteza-server/pkg/rbac" ) @@ -35,7 +36,7 @@ type ( ID uint64 `json:"recordID,string"` ModuleID uint64 `json:"moduleID,string"` - Values RecordValueSet `json:"values,omitempty"` + Values RecordValueSet `json:"values,omitempty" yaml:"-"` Labels map[string]string `json:"labels,omitempty"` diff --git a/pkg/envoy/decoder/csv.go b/pkg/envoy/decoder/csv.go new file mode 100644 index 000000000..2ae561451 --- /dev/null +++ 
b/pkg/envoy/decoder/csv.go @@ -0,0 +1,113 @@ +package decoder + +import ( + "context" + "encoding/csv" + "errors" + "io" + "regexp" + "strings" + + "github.com/cortezaproject/corteza-server/pkg/envoy/types" +) + +type ( + CsvDecoder struct{} +) + +var ( + ErrorNoCsvHeader = errors.New("csv decoder: no header") + ErrorCsvHeaderMalformed = errors.New("csv decoder: header malformed") + + // This strict regexp for field names will do for now. + // Later we can add support for matching over field labels as well. + headerRegexp = regexp.MustCompile("^[A-Za-z][0-9A-Za-z_]*[A-Za-z0-9]$") +) + +func NewCsvDecoder() *CsvDecoder { + return &CsvDecoder{} +} + +// A quick header field validator +// +// @note should we complicate it any further? +func (c *CsvDecoder) validateHeader(header []string) error { + for _, h := range header { + if !headerRegexp.MatchString(h) { + return ErrorCsvHeaderMalformed + } + } + + return nil +} + +func (c *CsvDecoder) Decode(ctx context.Context, r io.Reader, filename string) ([]types.Node, error) { + n := &types.ComposeRecordNode{} + + // Determine base module for dependency resolution + // strip the .csv extension to derive the module handle + // + // @todo tweak this a bit + modRes := strings.TrimSuffix(filename, ".csv") + mod := &types.ComposeModule{} + mod.Handle = modRes + mod.Name = modRes + n.Mod = mod + + // Prepare reader + // + // For optimization we reuse allocated memory; keep this in mind! + cr := csv.NewReader(r) + cr.ReuseRecord = true + + // Get header + hh, err := cr.Read() + if err == io.EOF { + return nil, ErrorNoCsvHeader + } else if err != nil { + return nil, err + } + + header := make([]string, 0, len(hh)) + for _, h := range hh { + header = append(header, h) + } + + err = c.validateHeader(header) + if err != nil { + return nil, err + } + + // Iterator function for providing records to be imported. + // This doesn't do any validation; that should be handled by other layers. 
+ n.Walk = func(f func(*types.ComposeRecord) error) error { + for { + record, err := cr.Read() + if err == io.EOF { + return nil + } + if err != nil { + return err + } + + rvs := make(types.ComposeRecordValueSet, 0) + for i, h := range header { + v := &types.ComposeRecordValue{} + v.Name = h + v.Value = record[i] + + rvs = append(rvs, v) + } + + rec := &types.ComposeRecord{} + rec.Values = rvs + + err = f(rec) + if err != nil { + return err + } + } + } + + return []types.Node{n}, nil +} diff --git a/pkg/envoy/decoder/yaml.go b/pkg/envoy/decoder/yaml.go new file mode 100644 index 000000000..e9a5d7f02 --- /dev/null +++ b/pkg/envoy/decoder/yaml.go @@ -0,0 +1,167 @@ +package decoder + +import ( + "context" + "errors" + "io" + "strings" + + "github.com/cortezaproject/corteza-server/pkg/envoy/types" + "gopkg.in/yaml.v3" +) + +type ( + // YamlDecoder is a wrapper struct for yaml related methods + YamlDecoder struct{} + + // Document defines the supported yaml structure + Document struct { + Namespace string + Namespaces types.ComposeNamespaceSet + Modules types.ComposeModuleSet + Records map[string]types.ComposeRecordSet + } +) + +var ( + ErrorCannotResolveNamespace = errors.New("yaml decoder: cannot resolve namespace") +) + +func NewYamlDecoder() *YamlDecoder { + return &YamlDecoder{} +} + +func (y *YamlDecoder) unmarshalDocument(r io.Reader) (*Document, error) { + var c *Document + + buf := new(strings.Builder) + _, err := io.Copy(buf, r) + if err != nil { + return nil, err + } + + err = yaml.Unmarshal([]byte(buf.String()), &c) + if err != nil { + return nil, err + } + + return c, nil +} + +// convert converts the decoded document into a set of envoy nodes +func (y *YamlDecoder) convert(c *Document) ([]types.Node, error) { + nn := make([]types.Node, 0, 100) + + // In case of namespaces... + if c.Namespaces != nil { + nodes, err := y.convertNamespaces(c.Namespaces) + if err != nil { + return nil, err + } + nn = append(nn, nodes...) 
+ } + + ns := &types.ComposeNamespace{} + if c.Namespace != "" { + // In case of a namespace to provide dependencies + ns.Slug = c.Namespace + ns.Name = c.Namespace + } else if len(nn) > 0 { + // Try to fall back to a namespace node + ns = ((nn[0]).(*types.ComposeNamespaceNode)).Ns + } else { + // No good; we can't link with a namespace. + // @note This should be checked when converting Compose resources only. + // Some resources don't belong to a namespace. + return nil, ErrorCannotResolveNamespace + } + + // In case of modules... + if c.Modules != nil { + nodes, err := y.convertModules(c.Modules, ns) + if err != nil { + return nil, err + } + nn = append(nn, nodes...) + } + + if c.Records != nil { + for modRef, rr := range c.Records { + // We can define a basic module representation as it will be updated later + // during validation/runtime + mod := &types.ComposeModule{} + mod.Handle = modRef + mod.Name = modRef + + nodes, err := y.convertRecords(rr, mod) + if err != nil { + return nil, err + } + nn = append(nn, nodes...) + } + } + + return nn, nil +} + +func (y *YamlDecoder) convertNamespaces(nss types.ComposeNamespaceSet) ([]types.Node, error) { + nn := make([]types.Node, 0, 2) + + for _, ns := range nss { + nn = append(nn, &types.ComposeNamespaceNode{Ns: ns}) + + // Nested modules + if ns.Modules != nil { + mm, err := y.convertModules(ns.Modules, ns) + if err != nil { + return nil, err + } + nn = append(nn, mm...) + } + + // @todo nested RBAC + } + + return nn, nil +} + +func (y *YamlDecoder) convertModules(mm types.ComposeModuleSet, ns *types.ComposeNamespace) ([]types.Node, error) { + nn := make([]types.Node, 0) + + for _, m := range mm { + nn = append(nn, &types.ComposeModuleNode{ + Mod: m, + Ns: ns, + }) + + // @todo nested resources; should there be any? + } + + return nn, nil +} + +func (y *YamlDecoder) convertRecords(rr types.ComposeRecordSet, m *types.ComposeModule) ([]types.Node, error) { + // Iterator function for providing records to be imported. 
+ // This doesn't do any validation; that should be handled by other layers. + walk := func(fn func(*types.ComposeRecord) error) error { + for _, r := range rr { + err := fn(r) + if err != nil { + return err + } + } + + return nil + } + + return []types.Node{&types.ComposeRecordNode{Mod: m, Walk: walk}}, nil +} + +func (y *YamlDecoder) Decode(ctx context.Context, r io.Reader) ([]types.Node, error) { + d, err := y.unmarshalDocument(r) + if err != nil { + return nil, err + } + + return y.convert(d) +} diff --git a/pkg/envoy/util/yaml.go b/pkg/envoy/util/yaml.go new file mode 100644 index 000000000..96371a2c0 --- /dev/null +++ b/pkg/envoy/util/yaml.go @@ -0,0 +1,39 @@ +package util + +import ( + "fmt" + + "gopkg.in/yaml.v3" +) + +func yamlNodeErr(n *yaml.Node, format string, aa ...interface{}) error { + format += " (%d:%d)" + aa = append(aa, n.Line, n.Column) + return fmt.Errorf(format, aa...) +} + +// YamlIterator helps iterate over mapping and sequence nodes fairly trivially +func YamlIterator(n *yaml.Node, fn func(*yaml.Node, *yaml.Node) error) error { + if n.Kind == yaml.MappingNode { + for i := 0; i < len(n.Content); i += 2 { + if err := fn(n.Content[i], n.Content[i+1]); err != nil { + return err + } + } + + return nil + } + + if n.Kind == yaml.SequenceNode { + var placeholder *yaml.Node + for i := 0; i < len(n.Content); i++ { + if err := fn(placeholder, n.Content[i]); err != nil { + return err + } + } + + return nil + } + + return yamlNodeErr(n, "expecting mapping or sequence node") +}