diff --git a/cue/cuex/compiler.go b/cue/cuex/compiler.go index 6b5e4b0..27e83b0 100644 --- a/cue/cuex/compiler.go +++ b/cue/cuex/compiler.go @@ -32,6 +32,7 @@ import ( "k8s.io/klog/v2" "github.com/kubevela/pkg/cue/cuex/providers/base64" + cueext "github.com/kubevela/pkg/cue/cuex/providers/cue" "github.com/kubevela/pkg/cue/cuex/providers/http" "github.com/kubevela/pkg/cue/cuex/providers/kube" cuexruntime "github.com/kubevela/pkg/cue/cuex/runtime" @@ -217,6 +218,7 @@ func NewCompilerWithDefaultInternalPackages() *Compiler { base64.Package, http.Package, kube.Package, + cueext.Package, ) } diff --git a/cue/cuex/default_complier_test.go b/cue/cuex/default_complier_test.go new file mode 100644 index 0000000..c47e099 --- /dev/null +++ b/cue/cuex/default_complier_test.go @@ -0,0 +1,58 @@ +package cuex_test + +import ( + "context" + + "cuelang.org/go/cue" + . "github.com/onsi/ginkgo/v2" + . "github.com/onsi/gomega" + + "github.com/kubevela/pkg/cue/cuex" + "github.com/kubevela/pkg/cue/cuex/model/sets" + "github.com/kubevela/pkg/util/stringtools" +) + +var _ = Describe("Test Default Compiler", func() { + ctx := context.Background() + compiler := cuex.NewCompilerWithDefaultInternalPackages() + + It("test vela/cue internal packages", func() { + val, err := compiler.CompileString(ctx, ` + import ( + "vela/cue" + ) + secret: { + apiVersion: "v1" + kind: "Secret" + metadata: { + name: "ip" + namespace: "default" + } + } + patch: cue.#StrategyUnify & { + $params: { + value: secret + patch: { + stringData: ip: "127.0.0.1" + } + } + } +`) + Expect(err).Should(BeNil()) + ret := val.LookupPath(cue.ParsePath("patch.$returns")) + retStr, err := sets.ToString(ret) + Expect(err).Should(BeNil()) + + Expect(stringtools.TrimLeadingIndent(retStr)).Should(BeEquivalentTo(stringtools.TrimLeadingIndent(` + apiVersion: "v1" + kind: "Secret" + stringData: { + ip: "127.0.0.1" + } + metadata: { + name: "ip" + namespace: "default" + } +`))) + }) +}) diff --git a/cue/cuex/model/sets/operation.go 
b/cue/cuex/model/sets/operation.go
new file mode 100644
index 0000000..73ad057
--- /dev/null
+++ b/cue/cuex/model/sets/operation.go
@@ -0,0 +1,429 @@
+/*
+Copyright 2022 The KubeVela Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package sets
+
+import (
+	"fmt"
+	"strings"
+
+	"cuelang.org/go/cue"
+	"cuelang.org/go/cue/ast"
+	"cuelang.org/go/cue/cuecontext"
+	"cuelang.org/go/cue/parser"
+	jsonpatch "github.com/evanphx/json-patch"
+	"github.com/pkg/errors"
+)
+
+const (
+	// TagPatchKey specifies the primary key used to match items of a list
+	TagPatchKey = "patchKey"
+	// TagPatchStrategy specifies the strategy of the strategic merge patch
+	TagPatchStrategy = "patchStrategy"
+
+	// StrategyRetainKeys indicates that the strategic merge patch should use the retainKeys strategy
+	StrategyRetainKeys = "retainKeys"
+	// StrategyReplace indicates that the strategic merge patch replaces the list instead of merging it
+	StrategyReplace = "replace"
+	// StrategyJSONPatch indicates that the patch follows RFC 6902 (JSON Patch)
+	StrategyJSONPatch = "jsonPatch"
+	// StrategyJSONMergePatch indicates that the patch follows RFC 7396 (JSON Merge Patch)
+	StrategyJSONMergePatch = "jsonMergePatch"
+)
+
+var (
+	notFoundErr = errors.Errorf("not found")
+)
+
+// UnifyParams are the parameters for a strategic unify operation
+type UnifyParams struct {
+	PatchStrategy string
+}
+
+// UnifyOption mutates UnifyParams to configure a unify operation
+type UnifyOption interface {
+	ApplyToOption(params *UnifyParams)
+}
+
+// UnifyByJSONPatch unifies by JSON 
patch following RFC 6902 +type UnifyByJSONPatch struct{} + +// ApplyToOption apply to option +func (op UnifyByJSONPatch) ApplyToOption(params *UnifyParams) { + params.PatchStrategy = StrategyJSONPatch +} + +// UnifyByJSONMergePatch unify by json patch following RFC 7396 +type UnifyByJSONMergePatch struct{} + +// ApplyToOption apply to option +func (op UnifyByJSONMergePatch) ApplyToOption(params *UnifyParams) { + params.PatchStrategy = StrategyJSONMergePatch +} + +func newUnifyParams(options ...UnifyOption) *UnifyParams { + params := &UnifyParams{} + for _, op := range options { + op.ApplyToOption(params) + } + return params +} + +// CreateUnifyOptionsForPatcher create unify options for patcher +func CreateUnifyOptionsForPatcher(patcher cue.Value) (options []UnifyOption) { + if IsJSONPatch(patcher) { + options = append(options, UnifyByJSONPatch{}) + } else if IsJSONMergePatch(patcher) { + options = append(options, UnifyByJSONMergePatch{}) + } + return +} + +type interceptor func(baseNode ast.Node, patchNode ast.Node) error + +func listMergeProcess(field *ast.Field, key string, baseList, patchList *ast.ListLit) { + kmaps := map[string]ast.Expr{} + nElts := []ast.Expr{} + keys := strings.Split(key, ",") + for _, key := range keys { + foundPatch := false + for i, elt := range patchList.Elts { + if _, ok := elt.(*ast.Ellipsis); ok { + continue + } + nodev, err := lookUp(elt, strings.Split(key, ".")...) + if err != nil { + continue + } + foundPatch = true + blit, ok := nodev.(*ast.BasicLit) + if !ok { + return + } + kmaps[fmt.Sprintf(key, blit.Value)] = patchList.Elts[i] + } + if !foundPatch { + if len(patchList.Elts) == 0 { + continue + } + return + } + + hasStrategyRetainKeys := isStrategyRetainKeys(field) + + for i, elt := range baseList.Elts { + if _, ok := elt.(*ast.Ellipsis); ok { + continue + } + + nodev, err := lookUp(elt, strings.Split(key, ".")...) 
+ if err != nil { + continue + } + blit, ok := nodev.(*ast.BasicLit) + if !ok { + return + } + + k := fmt.Sprintf(key, blit.Value) + if v, ok := kmaps[k]; ok { + if hasStrategyRetainKeys { + baseList.Elts[i] = ast.NewStruct() + } + nElts = append(nElts, v) + delete(kmaps, k) + } else { + nElts = append(nElts, ast.NewStruct()) + } + + } + } + for _, elt := range patchList.Elts { + for _, v := range kmaps { + if elt == v { + nElts = append(nElts, v) + break + } + } + } + + nElts = append(nElts, &ast.Ellipsis{}) + patchList.Elts = nElts +} + +func strategyPatchHandle() interceptor { + return func(baseNode ast.Node, patchNode ast.Node) error { + walker := newWalker(func(node ast.Node, ctx walkCtx) { + field, ok := node.(*ast.Field) + if !ok { + return + } + + value := peelCloseExpr(field.Value) + + switch val := value.(type) { + case *ast.ListLit: + key := ctx.Tags()[TagPatchKey] + patchStrategy := "" + tags := findCommentTag(field.Comments()) + for tk, tv := range tags { + if tk == TagPatchKey { + key = tv + } + if tk == TagPatchStrategy { + patchStrategy = tv + } + } + + paths := append(ctx.Pos(), LabelStr(field.Label)) + baseSubNode, err := lookUp(baseNode, paths...) + if err != nil { + if errors.Is(err, notFoundErr) { + return + } + baseSubNode = ast.NewList() + } + baselist, ok := baseSubNode.(*ast.ListLit) + if !ok { + return + } + if patchStrategy == StrategyReplace { + baselist.Elts = val.Elts + } else if key != "" { + listMergeProcess(field, key, baselist, val) + } + + default: + if !isStrategyRetainKeys(field) { + return + } + + srcNode, _ := lookUp(baseNode, ctx.Pos()...) 
+ if srcNode != nil { + switch v := srcNode.(type) { + case *ast.StructLit: + for _, elt := range v.Elts { + if fe, ok := elt.(*ast.Field); ok && + LabelStr(fe.Label) == LabelStr(field.Label) { + fe.Value = field.Value + } + } + case *ast.File: // For the top level element + for _, decl := range v.Decls { + if fe, ok := decl.(*ast.Field); ok && + LabelStr(fe.Label) == LabelStr(field.Label) { + fe.Value = field.Value + } + } + } + } + } + }) + walker.walk(patchNode) + return nil + } +} + +func isStrategyRetainKeys(node *ast.Field) bool { + tags := findCommentTag(node.Comments()) + for tk, tv := range tags { + if tk == TagPatchStrategy && tv == StrategyRetainKeys { + return true + } + } + return false +} + +// IsJSONMergePatch check if patcher is json merge patch +func IsJSONMergePatch(patcher cue.Value) bool { + tags := findCommentTag(patcher.Doc()) + return tags[TagPatchStrategy] == StrategyJSONMergePatch +} + +// IsJSONPatch check if patcher is json patch +func IsJSONPatch(patcher cue.Value) bool { + tags := findCommentTag(patcher.Doc()) + return tags[TagPatchStrategy] == StrategyJSONPatch +} + +// StrategyUnify unify the objects by the strategy +func StrategyUnify(base, patch cue.Value, options ...UnifyOption) (ret cue.Value, err error) { + params := newUnifyParams(options...) + var patchOpts []interceptor + if params.PatchStrategy == StrategyJSONMergePatch || params.PatchStrategy == StrategyJSONPatch { + _, err := OpenBaiscLit(base) + if err != nil { + return base, err + } + } else { + patchOpts = []interceptor{strategyPatchHandle()} + } + return strategyUnify(base, patch, params, patchOpts...) 
+} + +// nolint:staticcheck +func strategyUnify(base cue.Value, patch cue.Value, params *UnifyParams, patchOpts ...interceptor) (val cue.Value, err error) { + if params.PatchStrategy == StrategyJSONMergePatch { + return jsonMergePatch(base, patch) + } else if params.PatchStrategy == StrategyJSONPatch { + return jsonPatch(base, patch.LookupPath(cue.ParsePath("operations"))) + } + openBase, err := OpenListLit(base) + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to open list it for merge") + } + patchFile, err := ToFile(patch.Syntax(cue.Docs(true), cue.ResolveReferences(true))) + if err != nil { + return cue.Value{}, err + } + for _, option := range patchOpts { + if err := option(openBase, patchFile); err != nil { + return cue.Value{}, errors.WithMessage(err, "process patchOption") + } + } + + baseCtx := base.Context() + baseInst := baseCtx.BuildFile(openBase) + patchInst := baseCtx.BuildFile(patchFile) + + ret := baseInst.Unify(patchInst) + + _, err = toString(ret, removeTmpVar) + if err != nil { + return ret, errors.WithMessage(err, " format result toString") + } + + if err := ret.Err(); err != nil { + return ret, errors.WithMessage(err, "result check err") + } + + if err := ret.Validate(cue.All()); err != nil { + return ret, errors.WithMessage(err, "result validate") + } + + return ret, nil +} + +func findCommentTag(commentGroup []*ast.CommentGroup) map[string]string { + marker := "+" + kval := map[string]string{} + for _, group := range commentGroup { + for _, lineT := range group.List { + line := lineT.Text + line = strings.TrimPrefix(line, "//") + line = strings.TrimSpace(line) + if len(line) == 0 { + continue + } + if !strings.HasPrefix(line, marker) { + continue + } + kv := strings.SplitN(line[len(marker):], "=", 2) + if len(kv) == 2 { + val := strings.TrimSpace(kv[1]) + if len(strings.Fields(val)) > 1 { + continue + } + kval[strings.TrimSpace(kv[0])] = val + } + } + } + return kval +} + +func jsonMergePatch(base cue.Value, patch cue.Value) 
(cue.Value, error) { + ctx := cuecontext.New() + baseJSON, err := base.MarshalJSON() + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to marshal base value") + } + patchJSON, err := patch.MarshalJSON() + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to marshal patch value") + } + merged, err := jsonpatch.MergePatch(baseJSON, patchJSON) + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to merge base value and patch value by JsonMergePatch") + } + output, err := openJSON(string(merged)) + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to parse open basic lit for merged result") + } + return ctx.BuildFile(output), nil +} + +func jsonPatch(base cue.Value, patch cue.Value) (cue.Value, error) { + ctx := cuecontext.New() + baseJSON, err := base.MarshalJSON() + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to marshal base value") + } + patchJSON, err := patch.MarshalJSON() + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to marshal patch value") + } + decodedPatch, err := jsonpatch.DecodePatch(patchJSON) + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to decode patch") + } + + merged, err := decodedPatch.Apply(baseJSON) + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to apply json patch") + } + output, err := openJSON(string(merged)) + if err != nil { + return cue.Value{}, errors.Wrapf(err, "failed to parse open basic lit for merged result") + } + return ctx.BuildFile(output), nil +} + +func isEllipsis(elt ast.Node) bool { + _, ok := elt.(*ast.Ellipsis) + return ok +} + +func openJSON(data string) (*ast.File, error) { + f, err := parser.ParseFile("-", data, parser.ParseComments) + if err != nil { + return nil, err + } + ast.Walk(f, func(node ast.Node) bool { + field, ok := node.(*ast.Field) + if ok { + v := field.Value + switch lit := v.(type) { + case *ast.StructLit: + if len(lit.Elts) == 0 || !isEllipsis(lit.Elts[len(lit.Elts)-1]) { + 
lit.Elts = append(lit.Elts, &ast.Ellipsis{}) + } + case *ast.ListLit: + if len(lit.Elts) == 0 || !isEllipsis(lit.Elts[len(lit.Elts)-1]) { + lit.Elts = append(lit.Elts, &ast.Ellipsis{}) + } + } + } + return true + }, nil) + if len(f.Decls) > 0 { + if emb, ok := f.Decls[0].(*ast.EmbedDecl); ok { + if s, _ok := emb.Expr.(*ast.StructLit); _ok { + f.Decls = s.Elts + } + } + } + return f, nil +} diff --git a/cue/cuex/model/sets/operation_test.go b/cue/cuex/model/sets/operation_test.go new file mode 100644 index 0000000..8ff7b29 --- /dev/null +++ b/cue/cuex/model/sets/operation_test.go @@ -0,0 +1,758 @@ +/* +Copyright 2022 The KubeVela Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package sets + +import ( + "fmt" + "testing" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/cuecontext" + "cuelang.org/go/cue/parser" + "github.com/stretchr/testify/require" +) + +func TestPatch(t *testing.T) { + + testCase := []struct { + base string + patch string + result string + expectedErr string + }{ + { + base: `containers: [{name: "x1"},{name: "x2"},...]`, + patch: `containers: [{name: "x1"},{name: "x2",image: "pause:0.1"}]`, + result: `containers: [{ + name: "x1" +}, { + name: "x2" + image: "pause:0.1" +}] +`, + }, + + { + base: `containers: [{name: "x1"},{name: "x2"},...]`, + patch: `containers: [{name: "x2"},{name: "x1"}]`, + result: `containers: [{ + name: _|_ // containers.0.name: conflicting values "x2" and "x1" +}, { + name: _|_ // containers.1.name: conflicting values "x1" and "x2" +}] +`, + expectedErr: `conflicting values "x2" and "x1"`, + }, + + { + base: `containers: [{name: _|_},{name: "x2"},...]`, + patch: `containers: [{name: _|_},{name: "x2"}]`, + result: `containers: [{ + name: _|_ // explicit error (_|_ literal) in source (and 1 more errors) +}, { + name: "x2" +}] +`, + expectedErr: "explicit error (_|_ literal) in source", + }, + + { + base: `containers: [{name: "x1"},{name: "x2"},...]`, + patch: ` +// +patchKey=name +containers: [{name: "x2"},{name: "x1"}]`, + result: `// +patchKey=name +containers: [{ + name: "x1" +}, { + name: "x2" +}, ...] +`, + }, + + { + // lose close here + base: `containers: [close({namex: "x1"}),...]`, + patch: ` +// +patchKey=name +containers: [{name: "x2"},{name: "x1"}]`, + result: `// +patchKey=name +containers: [{ + namex: "x1" + name: "x2" +}, { + name: "x1" +}, ...] +`, + }, + + { + base: `containers: [{name: "x1"},{name: "x2"},...]`, + patch: ` +// +patchKey=name +containers: [{name: "x4"},{name: "x3"},{name: "x1"}]`, + result: `// +patchKey=name +containers: [{ + name: "x1" +}, { + name: "x2" +}, { + name: "x4" +}, { + name: "x3" +}, ...] 
+`, + }, + + { + base: `containers: [{name: "x1"},{name: "x2"},...]`, + patch: ` +// +patchKey=name +containers: [{noname: "x3"},...]`, + result: `// +patchKey=name +containers: [{ + name: "x1" + noname: "x3" +}, { + name: "x2" +}, ...] +`, + }, + { + base: `containers: [{name: "x1"},{name: "x2"},...]`, + patch: `// +patchKey=name +containers: [{noname: "x3"},{name: "x1"}]`, + result: `// +patchKey=name +containers: [{ + name: "x1" +}, { + name: "x2" +}, ...] +`, + }, + { + base: `containers: [{name: "x1"},{name: "x2", envs:[ {name: "OPS",value: string},...]},...]`, + patch: ` +// +patchKey=name +containers: [{name: "x2", envs: [{name: "OPS", value: "OAM"}]}]`, + result: `// +patchKey=name +containers: [{ + name: "x1" +}, { + name: "x2" + envs: [{ + name: "OPS" + value: "OAM" + }, ...] +}, ...] +`, + }, + { + base: `containers: [close({name: "x1"}),close({name: "x2", envs:[{name: "OPS",value: string},...]}),...]`, + patch: ` +// +patchKey=name +containers: [{name: "x2", envs: [close({name: "OPS", value: "OAM"})]}]`, + // TODO: fix losing close struct in cue + result: `// +patchKey=name +containers: [{ + name: "x1" +}, { + name: "x2" + envs: [{ + name: "OPS" + value: "OAM" + }, ...] +}, ...] +`, + }, + + { + base: `containers: [{name: "x1"},{name: "x2", envs:[ {name: "OPS",value: string},...]},...]`, + patch: ` +// +patchKey=name +containers: [{name: "x2", envs: [{name: "USER", value: "DEV"},{name: "OPS", value: "OAM"}]}]`, + result: `// +patchKey=name +containers: [{ + name: "x1" +}, { + name: "x2" + envs: [{ + name: "OPS" + value: "OAM" + }, { + name: "USER" + value: "DEV" + }, ...] +}, ...] 
+`, + }, + + { + base: `containers: [{name: "x1"},{name: "x2", envs:[ {key: "OPS",value: string},...]},...]`, + patch: ` +// +patchKey=name +containers: [{name: "x2", +// +patchKey=key +envs: [{key: "USER", value: "DEV"},{key: "OPS", value: "OAM"}]}]`, + result: `// +patchKey=name +containers: [{ + name: "x1" +}, { + name: "x2" + // +patchKey=key + envs: [{ + key: "OPS" + value: "OAM" + }, { + key: "USER" + value: "DEV" + }, ...] +}, ...] +`, + }, + { + base: `envFrom: [{ + secretRef: { + name: "nginx-rds" + }},...]`, + patch: ` +// +patchKey=secretRef.name +envFrom: [{ + secretRef: { + name: "nginx-redis" + }},...] +`, + result: `// +patchKey=secretRef.name +envFrom: [{ + secretRef: { + name: "nginx-rds" + } +}, { + secretRef: { + name: "nginx-redis" + } +}, ...] +`}, + { + base: ` + containers: [{ + name: "c1" + },{ + name: "c2" + envFrom: [{ + secretRef: { + name: "nginx-rds" + }},...] + },...]`, + patch: ` + // +patchKey=name + containers: [{ + name: "c2" + // +patchKey=secretRef.name + envFrom: [{ + secretRef: { + name: "nginx-redis" + }},...] + }]`, + result: `// +patchKey=name +containers: [{ + name: "c1" +}, { + name: "c2" + // +patchKey=secretRef.name + envFrom: [{ + secretRef: { + name: "nginx-rds" + } + }, { + secretRef: { + name: "nginx-redis" + } + }, ...] +}, ...] +`}, + + { + base: ` + containers: [{ + volumeMounts: [{name: "k1", path: "p1"},{name: "k1", path: "p2"},...] + },...] + volumes: [{name: "x1",value: "v1"},{name: "x2",value: "v2"},...] +`, + + patch: ` + // +patchKey=name + volumes: [{name: "x1",value: "v1"},{name: "x3",value: "x2"}] + + containers: [{ + volumeMounts: [{name: "k1", path: "p1"},{name: "k1", path: "p2"},{ name:"k2", path: "p3"}] + },...]`, + result: `containers: [{ + volumeMounts: [{ + name: "k1" + path: "p1" + }, { + name: "k1" + path: "p2" + }, { + name: "k2" + path: "p3" + }] +}, ...] + +// +patchKey=name +volumes: [{ + name: "x1" + value: "v1" +}, { + name: "x2" + value: "v2" +}, { + name: "x3" + value: "x2" +}, ...] 
+`}, + + { + base: ` +containers: [{ + name: "c1" +},{ + name: "c2" + envFrom: [{ + secretRef: { + name: "nginx-rds" + }, + }, { + configMapRef: { + name: "nginx-rds" + }, + },...] +},...]`, + patch: ` +// +patchKey=name +containers: [{ + name: "c2" + // +patchKey=secretRef.name,configMapRef.name + envFrom: [{ + secretRef: { + name: "nginx-redis" + }, + }, { + configMapRef: { + name: "nginx-redis" + }, + },...] +}]`, + result: `// +patchKey=name +containers: [{ + name: "c1" +}, { + name: "c2" + // +patchKey=secretRef.name,configMapRef.name + envFrom: [{ + secretRef: { + name: "nginx-rds" + } + }, { + configMapRef: { + name: "nginx-rds" + } + }, { + secretRef: { + name: "nginx-redis" + } + }, { + configMapRef: { + name: "nginx-redis" + } + }, ...] +}, ...] +`}, + { + base: `containers: [{name: "x1"}]`, + patch: ` +containers: [{ + // +patchKey=name + env: [{ + name: "k" + value: "v" + }] +}, ...]`, + result: `containers: [{ + name: "x1" + // +patchKey=name + env: [{ + name: "k" + value: "v" + }] +}, ...] 
+`, + }, + } + + for i, tcase := range testCase { + t.Run(fmt.Sprintf("case-%d", i), func(t *testing.T) { + r := require.New(t) + ctx := cuecontext.New() + base := ctx.CompileString(tcase.base) + patch := ctx.CompileString(tcase.patch) + v, err := StrategyUnify(base, patch) + if tcase.expectedErr != "" { + r.Error(err) + r.Contains(err.Error(), tcase.expectedErr) + return + } + r.NoError(err) + s, err := toString(v) + r.NoError(err) + r.Equal(s, tcase.result, fmt.Sprintf("testPatch for case(no:%d) %s", i, v)) + }) + } +} + +func TestStrategyPatch(t *testing.T) { + testCase := []struct { + base string + patch string + options []UnifyOption + result string + }{ + { + base: ` +spec: { + strategy: { + type: "rollingUpdate" + rollingUpdate: maxSurge: "30%" + } +} +`, + patch: ` +spec: { + // +patchStrategy=retainKeys + strategy: type: "recreate" +} +`, + result: `spec: { + strategy: { + // +patchStrategy=retainKeys + type: "recreate" + rollingUpdate: { + maxSurge: "30%" + } + } +} +`}, + + { + base: ` +spec: { + strategy: close({ + type: "rollingUpdate" + rollingUpdate: maxSurge: "30%" + }) +} +`, + patch: ` +spec: { + // +patchStrategy=retainKeys + strategy: type: "recreate" +} +`, + result: `spec: { + strategy: { + // +patchStrategy=retainKeys + type: "recreate" + rollingUpdate: { + maxSurge: "30%" + } + } +} +`}, + + { + base: ` +volumes: [{ + name: "test-volume" + cinder: { + volumeID: "" + fsType: "ext4" + } +}] +`, + patch: ` +// +patchStrategy=retainKeys +// +patchKey=name +volumes: [ +{ + name: "test-volume" + configMap: name: "conf-name" +}] +`, + result: `// +patchStrategy=retainKeys +// +patchKey=name +volumes: [{ + name: "test-volume" + configMap: { + name: "conf-name" + } +}, ...] 
+`}, + + { + base: ` +volumes: [{ + name: "empty-volume" + emptyDir: {} +}, +{ + name: "test-volume" + cinder: { + volumeID: "" + fsType: "ext4" + } +}] +`, + patch: ` +// +patchStrategy=retainKeys +// +patchKey=name +volumes: [ +{ + name: "test-volume" + configMap: name: "conf-name" +}] +`, + result: `// +patchStrategy=retainKeys +// +patchKey=name +volumes: [{ + name: "empty-volume" + emptyDir: {} +}, { + name: "test-volume" + configMap: { + name: "conf-name" + } +}, ...] +`}, + + { + base: ` +containers: [{ + name: "c1" + image: "image1" +}, +{ + name: "c2" + envs:[{name: "e1",value: "v1"}] +}] +`, + patch: ` +// +patchKey=name +containers: [{ + name: "c2" + // +patchStrategy=retainKeys + envs:[{name: "e1",value: "v2"},...] +}] +`, + result: `// +patchKey=name +containers: [{ + name: "c1" + image: "image1" +}, { + name: "c2" + // +patchStrategy=retainKeys + envs: [{ + name: "e1" + value: "v2" + }, ...] +}, ...] +`}, + + { + base: ` +spec: containers: [{ + name: "c1" + image: "image1" +}, +{ + name: "c2" + envs:[{name: "e1",value: "v1"}] +}] +`, + patch: ` +// +patchKey=name +// +patchStrategy=retainKeys +spec: { + containers: [{ + name: "c2" + envs:[{name: "e1",value: "v2"}] +}]} +`, + result: `spec: { + // +patchKey=name + // +patchStrategy=retainKeys + containers: [{ + name: "c2" + envs: [{ + name: "e1" + value: "v2" + }, ...] + }, ...] +} +`}, { + base: ` +kind: "Old" +metadata: { + name: "Old" + labels: keep: "true" +} +`, + patch: `// +patchStrategy=retainKeys +kind: "New" +metadata: { + // +patchStrategy=retainKeys + name: "New" +} +`, + result: ` // +patchStrategy=retainKeys +kind: "New" +metadata: { + // +patchStrategy=retainKeys + name: "New" + labels: { + keep: "true" + } +} +`}, { + base: ` +spec: containers: [{ + name: "c1" + image: "image1" +}, +{ + name: "c2" + envs:[{name: "e1",value: "v1"}] +}] +`, + patch: ` +spec: containers: [{ + name: "c3" + image: "image3" +}] +`, + result: `spec: { + containers: [{ + image: "image3" + name: "c3" + }, ...] 
+} +`, + options: []UnifyOption{UnifyByJSONMergePatch{}}, + }, + { + base: ` +spec: containers: [{ + name: "c1" + image: "image1" +}] +`, + patch: ` +operations: [{ + {op: "add", path: "/spec/containers/0", value: {name: "c4", image: "image4"}} +}] +`, + result: `spec: { + containers: [{ + name: "c4" + image: "image4" + }, { + name: "c1" + image: "image1" + }, ...] +} +`, + options: []UnifyOption{UnifyByJSONPatch{}}, + }, + { + base: ` +spec: containers: [{ + name: "c1" + envs:[{name: "e1",value: "v1"}] +}] +`, + patch: ` +// +patchKey=name +spec: { + containers: [{ + name: "c1" + // +patchStrategy=replace + envs:[{name: "e1",value: "v2"}] +}]} +`, + result: `spec: { + // +patchKey=name + containers: [{ + name: "c1" + // +patchStrategy=replace + envs: [{ + name: "e1" + value: "v2" + }] + }, ...] +} +`}, + } + + for i, tcase := range testCase { + r := require.New(t) + ctx := cuecontext.New() + base := ctx.CompileString(tcase.base) + patch := ctx.CompileString(tcase.patch) + v, err := StrategyUnify(base, patch, tcase.options...) 
+ r.NoError(err) + s, err := toString(v) + r.NoError(err) + r.Equal(s, tcase.result, fmt.Sprintf("testPatch for case(no:%d) %s", i, s)) + } +} + +func TestParseCommentTags(t *testing.T) { + temp := ` +// +patchKey=name +// +testKey1=testValue1 + // +testKey2=testValue2 +// +testKey3 =testValue3 +// +testKey4 = testValue4 +// invalid=x +// +invalid=x y +// +invalid +x: null +` + + r := require.New(t) + file, err := parser.ParseFile("-", temp, parser.ParseComments) + r.NoError(err) + v := cuecontext.New().BuildFile(file) + ms := findCommentTag(v.LookupPath(cue.ParsePath("x")).Doc()) + r.Equal(ms, map[string]string{ + "patchKey": "name", + "testKey1": "testValue1", + "testKey2": "testValue2", + "testKey3": "testValue3", + "testKey4": "testValue4", + }) +} diff --git a/cue/cuex/model/sets/utils.go b/cue/cuex/model/sets/utils.go new file mode 100644 index 0000000..9fc7560 --- /dev/null +++ b/cue/cuex/model/sets/utils.go @@ -0,0 +1,430 @@ +/* +Copyright 2022 The KubeVela Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+*/ + +package sets + +import ( + "bytes" + "fmt" + "path/filepath" + "strconv" + "strings" + + "cuelang.org/go/cue" + "cuelang.org/go/cue/ast" + "cuelang.org/go/cue/format" + "cuelang.org/go/cue/literal" + "cuelang.org/go/cue/token" + "github.com/pkg/errors" +) + +func lookUp(node ast.Node, paths ...string) (ast.Node, error) { + if len(paths) == 0 { + return peelCloseExpr(node), nil + } + key := paths[0] + switch x := node.(type) { + case *ast.File: + for _, decl := range x.Decls { + nnode := lookField(decl, key) + if nnode != nil { + return lookUp(nnode, paths[1:]...) + } + } + case *ast.ListLit: + for index, elt := range x.Elts { + if strconv.Itoa(index) == key { + return lookUp(elt, paths[1:]...) + } + } + case *ast.StructLit: + for _, elt := range x.Elts { + nnode := lookField(elt, key) + if nnode != nil { + return lookUp(nnode, paths[1:]...) + } + } + case *ast.CallExpr: + if it, ok := x.Fun.(*ast.Ident); ok && it.Name == "close" && len(x.Args) == 1 { + return lookUp(x.Args[0], paths...) + } + for index, arg := range x.Args { + if strconv.Itoa(index) == key { + return lookUp(arg, paths[1:]...) + } + } + } + return nil, notFoundErr +} + +// LookUpAll look up all the nodes by paths +func LookUpAll(node ast.Node, paths ...string) []ast.Node { + if len(paths) == 0 { + return []ast.Node{node} + } + key := paths[0] + var nodes []ast.Node + switch x := node.(type) { + case *ast.File: + for _, decl := range x.Decls { + nnode := lookField(decl, key) + if nnode != nil { + nodes = append(nodes, LookUpAll(nnode, paths[1:]...)...) + } + } + + case *ast.StructLit: + for _, elt := range x.Elts { + nnode := lookField(elt, key) + if nnode != nil { + nodes = append(nodes, LookUpAll(nnode, paths[1:]...)...) + } + } + case *ast.ListLit: + for index, elt := range x.Elts { + if strconv.Itoa(index) == key { + return LookUpAll(elt, paths[1:]...) + } + } + } + return nodes +} + +// PreprocessBuiltinFunc preprocess builtin function in cue file. 
+func PreprocessBuiltinFunc(root ast.Node, name string, process func(values []ast.Node) (ast.Expr, error)) error { + var gerr error + ast.Walk(root, func(node ast.Node) bool { + switch v := node.(type) { + case *ast.EmbedDecl: + if fname, args := extractFuncName(v.Expr); fname == name && len(args) > 0 { + expr, err := doBuiltinFunc(root, args[0], process) + if err != nil { + gerr = err + return false + } + v.Expr = expr + } + case *ast.Field: + if fname, args := extractFuncName(v.Value); fname == name && len(args) > 0 { + expr, err := doBuiltinFunc(root, args[0], process) + if err != nil { + gerr = err + return false + } + v.Value = expr + } + } + return true + }, nil) + return gerr +} + +func doBuiltinFunc(root ast.Node, pathSel ast.Expr, do func(values []ast.Node) (ast.Expr, error)) (ast.Expr, error) { + paths := getPaths(pathSel) + if len(paths) == 0 { + return nil, errors.New("path resolve error") + } + values := LookUpAll(root, paths...) + return do(values) +} + +func extractFuncName(expr ast.Expr) (string, []ast.Expr) { + if call, ok := expr.(*ast.CallExpr); ok && len(call.Args) > 0 { + if ident, ok := call.Fun.(*ast.Ident); ok { + return ident.Name, call.Args + } + } + return "", nil +} + +func getPaths(node ast.Expr) []string { + switch v := node.(type) { + case *ast.SelectorExpr: + var sel string + if l, ok := v.Sel.(*ast.Ident); ok { + sel = l.Name + } else { + sel = fmt.Sprint(v.Sel) + } + return append(getPaths(v.X), sel) + case *ast.Ident: + return []string{v.Name} + case *ast.BasicLit: + s, err := literal.Unquote(v.Value) + if err != nil { + return nil + } + return []string{s} + case *ast.IndexExpr: + return append(getPaths(v.X), getPaths(v.Index)...) 
+ } + return nil +} + +func peelCloseExpr(node ast.Node) ast.Node { + x, ok := node.(*ast.CallExpr) + if !ok { + return node + } + if it, ok := x.Fun.(*ast.Ident); ok && it.Name == "close" && len(x.Args) == 1 { + return x.Args[0] + } + return node +} + +func lookField(node ast.Node, key string) ast.Node { + if field, ok := node.(*ast.Field); ok { + // Note: the trim here has side effect: "\(v)" will be trimmed to \(v), only used for comparing fields + if strings.Trim(LabelStr(field.Label), `"`) == strings.Trim(key, `"`) { + return field.Value + } + } + return nil +} + +// LabelStr get the string label +func LabelStr(label ast.Label) string { + switch v := label.(type) { + case *ast.Ident: + return v.Name + case *ast.BasicLit: + return v.Value + } + return "" +} + +// nolint:staticcheck +func toString(v cue.Value, opts ...func(node ast.Node) ast.Node) (string, error) { + syopts := []cue.Option{cue.All(), cue.ResolveReferences(true), cue.DisallowCycles(true), cue.Docs(true), cue.Attributes(true)} + + var w bytes.Buffer + useSep := false + format := func(name string, n ast.Node) error { + if name != "" { + fmt.Fprintf(&w, "// %s\n", filepath.Base(name)) + } else if useSep { + fmt.Fprintf(&w, "// ---") + } + useSep = true + + f, err := toFile(n) + if err != nil { + return err + } + var node ast.Node = f + for _, opt := range opts { + node = opt(node) + } + b, err := format.Node(node) + if err != nil { + return err + } + _, err = w.Write(b) + return err + } + + if err := format("", v.Syntax(syopts...)); err != nil { + return "", err + } + instStr := w.String() + return instStr, nil +} + +// ToString convert cue.Value to string +func ToString(v cue.Value, opts ...func(node ast.Node) ast.Node) (string, error) { + return toString(v, opts...) 
+} + +// ToFile convert ast.Node to ast.File +func ToFile(n ast.Node) (*ast.File, error) { + return toFile(n) +} + +func toFile(n ast.Node) (*ast.File, error) { + switch x := n.(type) { + case nil: + return nil, nil + case *ast.StructLit: + decls := []ast.Decl{} + for _, elt := range x.Elts { + if _, ok := elt.(*ast.Ellipsis); ok { + continue + } + decls = append(decls, elt) + } + return &ast.File{Decls: decls}, nil + case ast.Expr: + ast.SetRelPos(x, token.NoSpace) + return &ast.File{Decls: []ast.Decl{&ast.EmbedDecl{Expr: x}}}, nil + case *ast.File: + return x, nil + default: + return nil, errors.Errorf("Unsupported node type %T", x) + } +} + +// OptBytesToString convert cue bytes to string. +func OptBytesToString(node ast.Node) ast.Node { + ast.Walk(node, nil, func(node ast.Node) { + basic, ok := node.(*ast.BasicLit) + if ok { + if basic.Kind == token.STRING { + s := basic.Value + if strings.HasPrefix(s, "'") { + info, nStart, _, err := literal.ParseQuotes(s, s) + if err != nil { + return + } + if !info.IsDouble() { + s = s[nStart:] + s, err := info.Unquote(s) + if err == nil { + basic.Value = fmt.Sprintf(`"%s"`, s) + } + } + } + } + } + }) + return node +} + +// OpenBaiscLit make that the basicLit can be modified. +// nolint:staticcheck +func OpenBaiscLit(val cue.Value) (*ast.File, error) { + f, err := ToFile(val.Syntax(cue.Docs(true), cue.ResolveReferences(true))) + if err != nil { + return nil, err + } + openBaiscLit(f) + return f, err +} + +// OpenListLit make that the listLit can be modified. 
+// nolint:staticcheck +func OpenListLit(val cue.Value) (*ast.File, error) { + f, err := ToFile(val.Syntax(cue.Docs(true), cue.ResolveReferences(true))) + if err != nil { + return nil, err + } + ast.Walk(f, func(node ast.Node) bool { + field, ok := node.(*ast.Field) + if ok { + v := field.Value + switch lit := v.(type) { + case *ast.ListLit: + if len(lit.Elts) > 0 { + if _, ok := lit.Elts[len(lit.Elts)-1].(*ast.Ellipsis); ok { + break + } + } + newList := lit.Elts + newList = append(newList, &ast.Ellipsis{}) + field.Value = ast.NewList(newList...) + } + } + return true + }, nil) + return f, nil +} + +func openBaiscLit(root ast.Node) { + ast.Walk(root, func(node ast.Node) bool { + field, ok := node.(*ast.Field) + if ok { + v := field.Value + switch lit := v.(type) { + case *ast.BasicLit: + field.Value = ast.NewBinExpr(token.OR, &ast.UnaryExpr{X: lit, Op: token.MUL}, ast.NewIdent("_")) + case *ast.ListLit: + field.Value = ast.NewBinExpr(token.OR, &ast.UnaryExpr{X: lit, Op: token.MUL}, ast.NewList(&ast.Ellipsis{})) + } + } + return true + }, nil) +} + +// ListOpen enable the cue list can add elements. 
+func ListOpen(expr ast.Node) ast.Node { + listOpen(expr) + return expr +} + +func listOpen(expr ast.Node) { + switch v := expr.(type) { + case *ast.File: + for _, decl := range v.Decls { + listOpen(decl) + } + case *ast.Field: + listOpen(v.Value) + case *ast.StructLit: + for _, elt := range v.Elts { + listOpen(elt) + } + case *ast.BinaryExpr: + listOpen(v.X) + listOpen(v.Y) + case *ast.EmbedDecl: + listOpen(v.Expr) + case *ast.Comprehension: + listOpen(v.Value) + case *ast.ListLit: + for _, elt := range v.Elts { + listOpen(elt) + } + if len(v.Elts) > 0 { + if _, ok := v.Elts[len(v.Elts)-1].(*ast.Ellipsis); !ok { + v.Elts = append(v.Elts, &ast.Ellipsis{}) + } + } + } +} + +func removeTmpVar(expr ast.Node) ast.Node { + switch v := expr.(type) { + case *ast.File: + for _, decl := range v.Decls { + removeTmpVar(decl) + } + case *ast.Field: + removeTmpVar(v.Value) + case *ast.StructLit: + var elts []ast.Decl + for _, elt := range v.Elts { + if field, isField := elt.(*ast.Field); isField { + if ident, isIdent := field.Label.(*ast.Ident); isIdent && strings.HasPrefix(ident.Name, "_") { + continue + } + } + removeTmpVar(elt) + elts = append(elts, elt) + } + v.Elts = elts + case *ast.BinaryExpr: + removeTmpVar(v.X) + removeTmpVar(v.Y) + case *ast.EmbedDecl: + removeTmpVar(v.Expr) + case *ast.Comprehension: + removeTmpVar(v.Value) + case *ast.ListLit: + for _, elt := range v.Elts { + removeTmpVar(elt) + } + } + return expr +} diff --git a/cue/cuex/model/sets/utils_test.go b/cue/cuex/model/sets/utils_test.go new file mode 100644 index 0000000..7c6ccd5 --- /dev/null +++ b/cue/cuex/model/sets/utils_test.go @@ -0,0 +1,290 @@ +/* +Copyright 2022 The KubeVela Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package sets

import (
	"testing"

	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/cuecontext"
	"cuelang.org/go/cue/format"
	"cuelang.org/go/cue/literal"
	"cuelang.org/go/cue/parser"
	"github.com/pkg/errors"
	"github.com/stretchr/testify/require"
)

// TestToString checks that ToString renders compiled CUE back to canonical
// source: imports are evaluated, comprehensions resolved, formatting applied.
func TestToString(t *testing.T) {
	testCases := []struct {
		s        string
		expected string
	}{
		{
			s: `
foo: int
lacy: string
`,
			expected: `foo: int
lacy: string
`},
		{
			// Imported builtin calls are evaluated before rendering.
			s: ` import "strconv"
foo: strconv.Atoi("100")
lacy: string
`,
			expected: `foo: 100
lacy: string
`},
		{
			// A trivially-true comprehension is resolved into plain fields.
			s: `
if true {
	foo: int
}
lacy: string
`,
			expected: `lacy: string
foo: int
`},
		{
			// An undecidable comprehension is kept, reformatted.
			s: `
foo: int
if foo>5{
lacy: "=5"
}
`,
			expected: `foo: int
if foo > 5 {
	lacy: "=5"
}
`},
	}
	for _, tcase := range testCases {
		r := require.New(t)
		inst := cuecontext.New().CompileString(tcase.s)
		str, err := ToString(inst)
		r.NoError(err)
		r.Equal(str, tcase.expected)
	}
}

// TestOptBytesToString checks the OptBytesToString AST option: single-quoted
// (bytes) literals are rewritten to double-quoted strings; everything else is
// passed through untouched.
func TestOptBytesToString(t *testing.T) {
	testCases := []struct {
		s        string
		expected string
	}{
		{
			s: `
import "encoding/base64"
foo: int
lacy: base64.Decode(null,base64.Encode(null,"abc"))
`,
			expected: `foo: int
lacy: "abc"
`},
		{
			// Plain bytes literal becomes a string literal.
			s: `
foo: int
lacy: 'xxx==vv-'
`,
			expected: `foo: int
lacy: "xxx==vv-"
`},
		{
			// Already a double-quoted string: unchanged.
			s: `
foo: int
lacy: "123456"
`,
			expected: `foo: int
lacy: "123456"
`},
		{
			// Multi-line hash-quoted string: unchanged apart from formatting.
			s: `
foo: int
lacy: #"""
abc
123
"""#
`,
			expected: `foo: int
lacy: """
	abc
	123
	"""
`},
	}

	ctx := cuecontext.New()
	for _, tcase := range testCases {
		r := require.New(t)
		file, err := parser.ParseFile("-", tcase.s)
		r.NoError(err)
		inst := ctx.BuildFile(file)
		str, err := ToString(inst.Value(), OptBytesToString)
		r.NoError(err)
		r.Equal(str, tcase.expected)
	}
}

// TestPreprocessBuiltinFunc exercises script(...) rewriting: the argument is a
// path whose string value is parsed as a CUE expression and substituted for
// the call, including references to hidden (_-prefixed) fields.
func TestPreprocessBuiltinFunc(t *testing.T) {

	// doScript resolves the nodes found at the call's path argument: the first
	// basic literal is unquoted and parsed as the replacement expression.
	doScript := func(values []ast.Node) (ast.Expr, error) {
		for _, v := range values {
			lit, ok := v.(*ast.BasicLit)
			if ok {
				src, _ := literal.Unquote(lit.Value)
				expr, err := parser.ParseExpr("-", src)
				if err != nil {
					return nil, errors.Errorf("script value(%s) format err", src)
				}
				return expr, nil
			}
		}
		return nil, errors.New("script parameter")
	}

	testCases := []struct {
		src        string
		expectJson string
	}{
		{
			// Field-value call with an identifier path.
			src: `
a: "a"
b: "b"
c: script(a)
`,
			expectJson: `{"a":"a","b":"b","c":"a"}`,
		},
		{
			// Selector path into a sibling struct.
			src: `
parameter: {
	continue: "true"
}

wait: {
	continue: script(parameter.continue)
}

`,
			expectJson: `{"parameter":{"continue":"true"},"wait":{"continue":true}}`,
		},
		{
			// The replacement expression references a hidden field.
			src: `
parameter: {
	continue: "_status"
}

wait: {
	_status: true
	continue: script(parameter.continue)
}

`,
			expectJson: `{"parameter":{"continue":"_status"},"wait":{"continue":true}}`,
		},
		{
			// Index-expression path inside a comprehension.
			src: `
parameter: {
	continue: "_status"
}

wait: {
	_status: true
	if parameter.continue!=_|_{
		continue: script(parameter["continue"])
	}
}

`,
			expectJson: `{"parameter":{"continue":"_status"},"wait":{"continue":true}}`,
		},
		{
			// Embedded call: the struct value is spliced into the enclosing struct.
			src: `
parameter: {
	continue: "_status"
}

wait: {
	_status: {
		x: "abc"
	}
	script(parameter["continue"])
}
`,
			expectJson: `{"parameter":{"continue":"_status"},"wait":{"x":"abc"}}`,
		},
	}

	ctx := cuecontext.New()
	for _, tCase := range testCases {
		r := require.New(t)
		f, err := parser.ParseFile("-", tCase.src)
		r.NoError(err)
		err = PreprocessBuiltinFunc(f, "script", doScript)
		r.NoError(err)
		inst := ctx.BuildFile(f)
		bt, _ := inst.Value().MarshalJSON()
		r.Equal(string(bt), tCase.expectJson)
	}
}

// TestOpenBasicLit checks that scalar and list values are rewritten to
// default disjunctions (*v | _ / *v | [...]) while types, top and bottom are
// left untouched.
func TestOpenBasicLit(t *testing.T) {
	r := require.New(t)
	f, err := OpenBaiscLit(cuecontext.New().CompileString(`
a: 10
a1: int
b: "foo"
b1: string
c: true
c1: bool
arr: [1,2]
top: _
bottom: _|_
`))
	r.NoError(err)
	val := cuecontext.New().BuildFile(f)
	s, err := toString(val)
	r.NoError(err)
	r.Equal(s, `a: *10 | _
a1: int
b: *"foo" | _
b1: string
c: *true | _
c1: bool
arr: *[1, 2] | [...]
top: _
bottom: _|_ // explicit error (_|_ literal) in source
`)
}

// TestListOpen checks that ListOpen appends an ellipsis only to non-empty
// closed lists; already-open and empty lists are unchanged.
func TestListOpen(t *testing.T) {
	r := require.New(t)
	f, err := parser.ParseFile("-", `
x: ["a","b"]
y: [...string]
z: []
`)
	r.NoError(err)
	ListOpen(f)

	bt, err := format.Node(f)
	r.NoError(err)
	s := string(bt)
	r.Equal(s, `x: ["a", "b", ...]
y: [...string]
z: []
`)

}
diff --git a/cue/cuex/model/sets/walk.go b/cue/cuex/model/sets/walk.go
new file mode 100644
index 0000000..be21439
--- /dev/null
+++ b/cue/cuex/model/sets/walk.go
@@ -0,0 +1,162 @@
/*
Copyright 2022 The KubeVela Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package sets

import (
	"strconv"
	"strings"

	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/token"
)

// nodewalker walks a CUE AST depth-first while maintaining the current field
// path (pos) and the comment tags (e.g. +patchKey=name) inherited from the
// fields on that path.
type nodewalker struct {
	pos     []string          // current path: one segment per field label / list index
	tags    map[string]string // tags in effect at the current position
	process walkProcess       // callback invoked for every visited node (before descent)
}

// walkCtx exposes the walker state to the process callback.
type walkCtx interface {
	Pos() []string
	Tags() map[string]string
}

// walkProcess is called for each visited node together with its walk context.
type walkProcess func(node ast.Node, ctx walkCtx)

// newWalker returns a nodewalker with empty path/tag state.
func newWalker(process walkProcess) *nodewalker {
	return &nodewalker{
		pos:     []string{},
		process: process,
		tags:    map[string]string{},
	}
}

// walk visits node (calling process first) and then descends into its
// children. For fields it pushes the label onto the path and merges the
// field's comment tags into a copy of the tag map, restoring both afterwards.
func (nwk *nodewalker) walk(node ast.Node) {
	if nwk.process != nil {
		nwk.process(node, nwk)
	}
	switch n := node.(type) {

	case *ast.Field:
		label := LabelStr(n.Label)
		// Definitions (#foo) and labels we cannot stringify are not descended into.
		if label == "" || strings.HasPrefix(label, "#") {
			return
		}
		if n.Value != nil {
			// Save path and tags; the tag map is copied so sibling fields do
			// not see tags added below this field.
			origin := nwk.pos
			oriTags := nwk.tags
			nwk.tags = map[string]string{}
			for k, v := range oriTags {
				nwk.tags[k] = v
			}
			nwk.pos = append(nwk.pos, LabelStr(n.Label))
			tags := findCommentTag(n.Comments())
			for tk, tv := range tags {
				nwk.tags[tk] = tv
			}

			nwk.walk(n.Value)
			nwk.tags = oriTags
			nwk.pos = origin
		}

	case *ast.StructLit:
		nwk.walkDeclList(n.Elts)

	case *ast.ListLit:
		nwk.walkExprList(n.Elts)

	case *ast.BinaryExpr:
		nwk.walk(n.X)
		nwk.walk(n.Y)

	case *ast.UnaryExpr:
		nwk.walk(n.X)

	case *ast.EmbedDecl:
		nwk.walk(n.Expr)

	case *ast.Comprehension:
		nwk.walk(n.Value)

	// Files
	case *ast.File:
		nwk.walkDeclList(n.Decls)

	case *ast.SliceExpr:
		// Only slices of list literals are walked; other operands are skipped.
		if list, ok := n.X.(*ast.ListLit); ok {
			nwk.walkExprSlice(list.Elts, n.Low, n.High)
		}

	case *ast.CallExpr:
		// close func need to be ignored
		if it, ok := n.Fun.(*ast.Ident); ok && it.Name == "close" && len(n.Args) == 1 {
			nwk.walk(n.Args[0])
		} else {
			nwk.walkExprList(n.Args)
		}

	default:

	}

}

// walkExprList visits each list element with its index pushed as the path
// segment.
func (nwk *nodewalker) walkExprList(list []ast.Expr) {
	for i, x := range list {
		origin := nwk.pos
		nwk.pos = append(nwk.pos, strconv.Itoa(i))
		nwk.walk(x)
		nwk.pos = origin
	}
}

// walkExprSlice visits list[low:high]; indices are rebased so the first
// visited element gets path segment "0". Bounds that are not integer literals
// default to the full range.
func (nwk *nodewalker) walkExprSlice(list []ast.Expr, low ast.Expr, high ast.Expr) {
	var (
		lowIndex  = 0
		highIndex = len(list)
	)
	if v, ok := low.(*ast.BasicLit); ok && v.Kind == token.INT {
		lowIndex, _ = strconv.Atoi(v.Value)
	}
	if v, ok := high.(*ast.BasicLit); ok && v.Kind == token.INT {
		highIndex, _ = strconv.Atoi(v.Value)
	}
	for i, x := range list {
		if i < lowIndex || i >= highIndex {
			continue
		}
		origin := nwk.pos
		nwk.pos = append(nwk.pos, strconv.Itoa(i-lowIndex))
		nwk.walk(x)
		nwk.pos = origin
	}
}

// walkDeclList visits each declaration in order, without touching the path.
func (nwk *nodewalker) walkDeclList(list []ast.Decl) {
	for _, x := range list {
		nwk.walk(x)
	}
}

// Pos returns the current field path.
func (nwk *nodewalker) Pos() []string {
	return nwk.pos
}

// Tags returns the tags in effect at the current position.
func (nwk *nodewalker) Tags() map[string]string {
	return nwk.tags
}
diff --git a/cue/cuex/model/sets/walk_test.go b/cue/cuex/model/sets/walk_test.go
new file mode 100644
index 0000000..32b2d2b
--- /dev/null
+++ b/cue/cuex/model/sets/walk_test.go
@@ -0,0 +1,147 @@
/*
Copyright 2022 The KubeVela Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package sets

import (
	"testing"

	"cuelang.org/go/cue/ast"
	"cuelang.org/go/cue/cuecontext"
	"cuelang.org/go/cue/parser"
	"github.com/stretchr/testify/require"
)

// TestWalk cross-checks the walker against lookUp: for every expression node
// the walker reports (excluding call expressions), looking up the walker's
// path in the same file must yield that exact node.
func TestWalk(t *testing.T) {

	testCases := []string{
		`x: "124"`,

		`{ x: y: string }`,

		`x: {y: 124}`,

		`kind: "Deployment"
	metadata: name: "test"
	spec: replicas: 12`,

		`sidecar: {
		name: "agent"
		image: "test.com/agent:0.1"
	}
	containers: [{
		name: "main"
		image: "webserver:0.2"
	},sidecar]
	`,

		` x: 12
	if x==12 {
		y: "test string"
	}
	`,

		` item1: {
		x: 12
		if x==12 {
			y: "test string"
		}
	}
	output: [item1]
	`,
		`import "strings"

	#User: {
		tags_str: string
		tags_map: {
			for k, v in strings.Split(tags_str, " ") {
				"\(v)": string
			}
			"{a}": string
		}
	}

	user: {
		#User
		tags_str: "b {c}"
	}
	`,
		`import "strings"

	b: string
	user: {
		tags_str: strings.Compare(b,"c")
	}
	`,
		`a: [1, 2, 3]`,
	}

	for _, src := range testCases {
		r := require.New(t)
		// Normalize through compile + toString so positions are canonical.
		inst := cuecontext.New().CompileString(src)
		nsrc, err := toString(inst.Value())
		r.NoError(err)
		f, err := parser.ParseFile("-", nsrc)
		r.NoError(err)

		newWalker(func(node ast.Node, ctx walkCtx) {
			if len(ctx.Pos()) == 0 {
				return
			}

			if _, ok := node.(ast.Expr); !ok {
				return
			}
			// Call expressions are rewritten by the walker's callers; skip.
			if _, ok := node.(*ast.CallExpr); ok {
				return
			}

			n, err := lookUp(f, ctx.Pos()...)
			r.NoError(err)

			r.Equal(n, node, nsrc)
		}).walk(f)
	}

}

// TestRemoveTmpVar checks that removeTmpVar strips "_"-prefixed fields at
// every nesting level, including inside list elements.
func TestRemoveTmpVar(t *testing.T) {
	src := `spec: {
	_tmp: "x"
	list: [{
		_tmp: "x"
		retain: "y"
	}, {
		_tmp: "x"
		retain: "z"
	}]
	retain: "y"
}
`
	r := require.New(t)
	v := cuecontext.New().CompileString(src)
	s, err := toString(v, removeTmpVar)
	r.NoError(err)
	r.Equal(`spec: {
	list: [{
		retain: "y"
	}, {
		retain: "z"
	}]
	retain: "y"
}
`, s)
}
diff --git a/cue/cuex/providers/cue/cue.cue b/cue/cuex/providers/cue/cue.cue
new file mode 100644
index 0000000..490a657
--- /dev/null
+++ b/cue/cuex/providers/cue/cue.cue
@@ -0,0 +1,15 @@
package cue

#StrategyUnify: {
	#do:       "strategyUnify"
	#provider: "cue"

	// +usage=The params of this action
	$params: {
		value: {...}
		patch: {...}
	}

	// +usage=The result of this action
	$returns: {...}
}
diff --git a/cue/cuex/providers/cue/cue.go b/cue/cuex/providers/cue/cue.go
new file mode 100644
index 0000000..6fcd31f
--- /dev/null
+++ b/cue/cuex/providers/cue/cue.go
@@ -0,0 +1,49 @@
/*
Copyright 2023 The KubeVela Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package cue

import (
	"context"
	_ "embed"

	"cuelang.org/go/cue"

	"github.com/kubevela/pkg/cue/cuex/model/sets"
	"github.com/kubevela/pkg/cue/cuex/providers"
	cuexruntime "github.com/kubevela/pkg/cue/cuex/runtime"
	"github.com/kubevela/pkg/util/runtime"
)

// ProviderName .
+const ProviderName = "cue" + +//go:embed cue.cue +var template string + +// StrategyUnify unifies values by using a strategic patching approach. +func StrategyUnify(_ context.Context, in cue.Value) (cue.Value, error) { + params := in.LookupPath(cue.ParsePath(providers.ParamsKey)) + base := params.LookupPath(cue.ParsePath("value")) + patcher := params.LookupPath(cue.ParsePath("patch")) + res, err := sets.StrategyUnify(base, patcher) + return in.FillPath(cue.ParsePath(providers.ReturnsKey), res), err +} + +// Package . +var Package = runtime.Must(cuexruntime.NewInternalPackage(ProviderName, template, map[string]cuexruntime.ProviderFn{ + "strategyUnify": cuexruntime.NativeProviderFn(StrategyUnify), +})) diff --git a/cue/cuex/providers/cue/cue_test.go b/cue/cuex/providers/cue/cue_test.go new file mode 100644 index 0000000..bf7c39e --- /dev/null +++ b/cue/cuex/providers/cue/cue_test.go @@ -0,0 +1,124 @@ +/* +Copyright 2023 The KubeVela Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
*/

package cue_test

import (
	"context"
	"fmt"
	"testing"

	"cuelang.org/go/cue"
	"cuelang.org/go/cue/cuecontext"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/kubevela/pkg/cue/cuex/model/sets"
	"github.com/kubevela/pkg/cue/cuex/providers"
	cueprovider "github.com/kubevela/pkg/cue/cuex/providers/cue"
	"github.com/kubevela/pkg/util/stringtools"
)

// TestStrategyUnify drives the provider end-to-end: a $params struct is
// compiled, StrategyUnify is called, and $returns is rendered and compared
// (indentation-normalized). Conflicting patches must both surface an error
// and leave the bottom values visible in $returns.
func TestStrategyUnify(t *testing.T) {
	paramsTemplate := "{$params: {value: {%s}, patch: {%s}}}"
	testcases := map[string]struct {
		value  string
		patch  string
		expect string
		hasErr bool
	}{
		"test unify with normal patch": {
			value: `containers: [{name: "x1"},{name: "x2"},...]`,
			patch: `containers: [{name: "x1"},{name: "x2",image: "pause:0.1"}]`,
			expect: `
	containers: [{
		name: "x1"
	}, {
		name:  "x2"
		image: "pause:0.1"
	}]
`,
			hasErr: false,
		},
		"test unify with +patchKey tag": {
			value: `containers: [{name: "x1"},{name: "x2"},...]`,
			patch: `
	// +patchKey=name
	containers: [{name: "x2", image: "nginx:latest"}]
`,
			expect: `
	// +patchKey=name
	containers: [{
		name: "x1"
	}, {
		name:  "x2"
		image: "nginx:latest"
	}, ...]
`,
			hasErr: false,
		},
		"test unify with +patchStrategy=retainKeys tag": {
			value: `containers: [{name: "x1"},{name: "x2", image: "redis:latest"}]`,
			patch: `
	// +patchKey=name
	containers: [{
		name: "x2"
		// +patchStrategy=retainKeys
		image: "nginx:latest"
	}]
`,
			expect: `
	// +patchKey=name
	containers: [{
		name: "x1"
	}, {
		name: "x2"
		// +patchStrategy=retainKeys
		image: "nginx:latest"
	}, ...]
`,
			hasErr: false,
		},
		"test unify with conflicting error": {
			value: `containers: [{name: "x1"},{name: "x2"},...]`,
			patch: `containers: [{name: "x2"},{name: "x1"}]`,
			// Conflicts yield bottom values in $returns plus a non-nil error.
			expect: `
	containers: [{
		name: _|_ // $returns.containers.0.name: conflicting values "x2" and "x1"
	}, {
		name: _|_ // $returns.containers.1.name: conflicting values "x1" and "x2"
	}]
`,
			hasErr: true,
		},
	}

	ctx := context.Background()
	cueCtx := cuecontext.New()
	for name, testcase := range testcases {
		t.Run(name, func(t *testing.T) {
			value := cueCtx.CompileString(fmt.Sprintf(paramsTemplate, testcase.value, testcase.patch))
			val, err := cueprovider.StrategyUnify(ctx, value)
			if testcase.hasErr {
				require.Error(t, err)
			}
			ret := val.LookupPath(cue.ParsePath(providers.ReturnsKey))
			retStr, err := sets.ToString(ret)
			require.NoError(t, err)
			assert.Equal(t, stringtools.TrimLeadingIndent(testcase.expect), stringtools.TrimLeadingIndent(retStr))
		})
	}
}