diff --git a/.github/workflows/pull-request.yaml b/.github/workflows/pull-request.yaml index ae6752d99d2..c9fff69e1ed 100644 --- a/.github/workflows/pull-request.yaml +++ b/.github/workflows/pull-request.yaml @@ -311,11 +311,12 @@ jobs: - name: Ensure proper formatting run: opa fmt --list --fail build/policy - - name: Run policy checks on changed files + - name: Run file policy checks on changed files run: | curl --silent --fail --header 'Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}' -o files.json \ https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files - opa eval --bundle build/policy/ --format values --input files.json \ + + opa eval -d build/policy/files.rego -d build/policy/helpers.rego --format values --input files.json \ --fail-defined 'data.files.deny[message]' env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -323,3 +324,17 @@ jobs: - name: Show input on failure run: opa eval --input files.json --format pretty input if: ${{ failure() }} + + - name: Setup Hugo + uses: peaceiris/actions-hugo@v2 + with: + # keep this version in sync with the version in netlify.toml + hugo-version: '0.113.0' + extended: true + + - name: Build docs site and test integrations data + run: | + cd docs + make dev-generate hugo-production-build + cd - + opa eval 'data.integrations.deny[message]' -i docs/website/public/index.json -d build/policy/integrations.rego --format=values --fail-defined diff --git a/.go-version b/.go-version index 8909929f6e7..d2ab029d32c 100644 --- a/.go-version +++ b/.go-version @@ -1 +1 @@ -1.20.7 +1.21 diff --git a/ast/check.go b/ast/check.go index 388ff67aa62..b4075c8b782 100644 --- a/ast/check.go +++ b/ast/check.go @@ -268,7 +268,7 @@ func (tc *typeChecker) checkRule(env *TypeEnv, as *AnnotationSet, rule *Rule) { } if tpe != nil { - env.tree.Insert(path, tpe) + env.tree.Insert(path, tpe, env) } } diff --git a/ast/check_test.go b/ast/check_test.go index ef214d93920..ae8e27fcbd2 100644 --- a/ast/check_test.go +++ b/ast/check_test.go @@ -355,8 +355,8 @@ func TestCheckInferenceRules(t *testing.T) { {`overlap`, `p.q.r = false { true }`}, {`overlap`, `p.q.r = "false" { true }`}, {`overlap`, `p.q[42] = 1337 { true }`}, - {`overlap`, `p.q.a = input.a { true }`}, - {`overlap`, `p.q[56] = input.a { true }`}, + {`overlap`, `p.q2.a = input.a { true }`}, + {`overlap`, `p.q2[56] = input.a { true }`}, } tests := []struct { @@ -525,33 +525,28 @@ func TestCheckInferenceRules(t *testing.T) { { note: "ref-rules single value, full ref to known leaf (any type)", rules: ruleset2, - ref: "data.overlap.p.q.a", + ref: "data.overlap.p.q2.a", expected: types.A, }, { note: "ref-rules single value, full ref to known leaf (same key type as dynamic, any type)", rules: ruleset2, - ref: "data.overlap.p.q[56]", + ref: "data.overlap.p.q2[56]", expected: types.A, }, { note: "ref-rules single value, full ref to dynamic leaf", rules: ruleset2, ref: "data.overlap.p.q[1]", - expected: types.S, + expected: types.Any{types.B, types.N, types.S}, // key type cannot be tied to specific dynamic value type, so we get all of them }, { note: "ref-rules single value, prefix ref to partial object root", rules: ruleset2, ref: "data.overlap.p.q", expected: types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty(json.Number("42"), types.N), - types.NewStaticProperty(json.Number("56"), types.A), - types.NewStaticProperty("a", types.A), - types.NewStaticProperty("r", types.Or(types.B, types.S)), - }, - types.NewDynamicProperty(types.N, types.S), + nil, + 
types.NewDynamicProperty(types.Any{types.N, types.S}, types.Any{types.B, types.N, types.S}), ), }, } diff --git a/ast/compile.go b/ast/compile.go index 27c07ea15f7..967f4ea8c2c 100644 --- a/ast/compile.go +++ b/ast/compile.go @@ -698,15 +698,15 @@ func (c *Compiler) GetRulesDynamicWithOpts(ref Ref, opts RulesOptions) []*Rule { // The head of the ref is always grounded. In case another part of the // ref is also grounded, we can lookup the exact child. If it's not found // we can immediately return... - if child := node.Child(ref[i].Value); child == nil { - return - } else if len(child.Values) > 0 { - // If there are any rules at this position, it's what the ref would - // refer to. We can just append those and stop here. - insertRules(set, child.Values) - } else { - // Otherwise, we continue using the child node. + if child := node.Child(ref[i].Value); child != nil { + if len(child.Values) > 0 { + // Add any rules at this position + insertRules(set, child.Values) + } + // There might still be "sub-rules" contributing key-value "overrides" for e.g. partial object rules, continue walking walk(child, i+1) + } else { + return } default: diff --git a/ast/compile_test.go b/ast/compile_test.go index 88844cf2cce..ca7752e5107 100644 --- a/ast/compile_test.go +++ b/ast/compile_test.go @@ -7771,6 +7771,11 @@ r2 = 2`, r3 = 3`, "hidden": `package system.hidden r4 = 4`, + "mod4": `package b.c +r5[x] = 5 { x := "foo" } +r5.bar = 6 { input.x } +r5.baz = 7 { input.y } +`, }) compileStages(compiler, nil) @@ -7780,6 +7785,9 @@ r4 = 4`, rule2 := compiler.Modules["mod2"].Rules[1] rule3 := compiler.Modules["mod3"].Rules[0] rule4 := compiler.Modules["hidden"].Rules[0] + rule5 := compiler.Modules["mod4"].Rules[0] + rule5b := compiler.Modules["mod4"].Rules[1] + rule5c := compiler.Modules["mod4"].Rules[2] tests := []struct { input string @@ -7791,12 +7799,16 @@ r4 = 4`, {input: "data.a.b[x].d", expected: []*Rule{rule1, rule3}}, {input: "data.a.b.c", expected: []*Rule{rule1, rule2d, rule2}}, {input: "data.a.b.d"}, - {input: "data", expected: []*Rule{rule1, rule2d, rule2, rule3, rule4}}, - {input: "data[x]", expected: []*Rule{rule1, rule2d, rule2, rule3, rule4}}, + {input: "data", expected: []*Rule{rule1, rule2d, rule2, rule3, rule4, rule5, rule5b, rule5c}}, + {input: "data[x]", expected: []*Rule{rule1, rule2d, rule2, rule3, rule4, rule5, rule5b, rule5c}}, {input: "data[data.complex_computation].b[y]", expected: []*Rule{rule1, rule2d, rule2, rule3}}, {input: "data[x][y].c.e", expected: []*Rule{rule2d, rule2}}, {input: "data[x][y].r3", expected: []*Rule{rule3}}, - {input: "data[x][y]", expected: []*Rule{rule1, rule2d, rule2, rule3}, excludeHidden: true}, // old behaviour of GetRulesDynamic + {input: "data[x][y]", expected: []*Rule{rule1, rule2d, rule2, rule3, rule5, rule5b, rule5c}, excludeHidden: true}, // old behaviour of GetRulesDynamic + {input: "data.b.c", expected: []*Rule{rule5, rule5b, rule5c}}, + {input: "data.b.c.r5", expected: []*Rule{rule5, rule5b, rule5c}}, + {input: "data.b.c.r5.bar", expected: []*Rule{rule5, rule5b}}, // rule5 might still define a value for the "bar" key + {input: "data.b.c.r5.baz", expected: []*Rule{rule5, rule5c}}, } for _, tc := range tests { diff --git a/ast/env.go b/ast/env.go index d5a0706614d..21c56392b1f 100644 --- a/ast/env.go +++ b/ast/env.go @@ -6,6 +6,7 @@ package ast import ( "fmt" + "strings" "github.com/open-policy-agent/opa/types" "github.com/open-policy-agent/opa/util" @@ -171,6 +172,11 @@ func (env *TypeEnv) getRefRec(node *typeTreeNode, ref, tail Ref) types.Type { } 
if node.Leaf() { + if node.children.Len() > 0 { + if child := node.Child(tail[0].Value); child != nil { + return env.getRefRec(child, ref, tail[1:]) + } + } return selectRef(node.Value(), tail) } @@ -305,9 +311,9 @@ func (n *typeTreeNode) Put(path Ref, tpe types.Type) { } // Insert inserts tpe at path in the tree, but also merges the value into any types.Object present along that path. -// If an types.Object is inserted, any leafs already present further down the tree are merged into the inserted object. +// If a types.Object is inserted, any leafs already present further down the tree are merged into the inserted object. // path must be ground. -func (n *typeTreeNode) Insert(path Ref, tpe types.Type) { +func (n *typeTreeNode) Insert(path Ref, tpe types.Type, env *TypeEnv) { curr := n for i, term := range path { c, ok := curr.children.Get(term.Value) @@ -324,7 +330,7 @@ func (n *typeTreeNode) Insert(path Ref, tpe types.Type) { // If child has an object value, merge the new value into it. if o, ok := child.value.(*types.Object); ok { var err error - child.value, err = insertIntoObject(o, path[i+1:], tpe) + child.value, err = insertIntoObject(o, path[i+1:], tpe, env) if err != nil { panic(fmt.Errorf("unreachable, insertIntoObject: %w", err)) } @@ -335,14 +341,14 @@ func (n *typeTreeNode) Insert(path Ref, tpe types.Type) { curr = child } - curr.value = tpe + curr.value = mergeTypes(curr.value, tpe) if _, ok := tpe.(*types.Object); ok && curr.children.Len() > 0 { // merge all leafs into the inserted object leafs := curr.Leafs() for p, t := range leafs { var err error - curr.value, err = insertIntoObject(curr.value.(*types.Object), *p, t) + curr.value, err = insertIntoObject(curr.value.(*types.Object), *p, t, env) if err != nil { panic(fmt.Errorf("unreachable, insertIntoObject: %w", err)) } @@ -350,47 +356,116 @@ func (n *typeTreeNode) Insert(path Ref, tpe types.Type) { } } -func insertIntoObject(o *types.Object, path Ref, tpe types.Type) (*types.Object, error) { - if len(path) == 0 { - return o, nil +// mergeTypes merges the types of 'a' and 'b'. If both are sets, their 'of' types are joined with an types.Or. +// If both are objects, the key and value types of their dynamic properties are joined with types.Or:s. +// If 'a' and 'b' are both objects, and at least one of them have static properties, they are joined +// with an types.Or, instead of being merged. +// If 'a' is an Any containing an Object, and 'b' is an Object (or vice versa); AND both objects have no +// static properties, they are merged. +// If 'a' and 'b' are different types, they are joined with an types.Or. 
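+// For example, following the rules above: merging two objects that have only dynamic
+// properties, one with number keys and boolean values and the other with string keys and
+// string values, yields an object whose dynamic keys are (number|string) and whose values
+// are (boolean|string); merging a set of strings with a set of numbers yields a set of
+// (number|string); and merging a string with a number yields the union type (number|string).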
+func mergeTypes(a, b types.Type) types.Type { + if a == nil { + return b } - key, err := JSON(path[0].Value) - if err != nil { - return nil, fmt.Errorf("invalid path term %v: %w", path[0], err) + if b == nil { + return a } - if len(path) == 1 { - for _, prop := range o.StaticProperties() { - if util.Compare(prop.Key, key) == 0 { - prop.Value = types.Or(prop.Value, tpe) - return o, nil + switch a := a.(type) { + case *types.Object: + if bObj, ok := b.(*types.Object); ok && len(a.StaticProperties()) == 0 && len(bObj.StaticProperties()) == 0 { + if len(a.StaticProperties()) > 0 || len(bObj.StaticProperties()) > 0 { + return types.Or(a, bObj) } - } - staticProps := append(o.StaticProperties(), types.NewStaticProperty(key, tpe)) - return types.NewObject(staticProps, o.DynamicProperties()), nil - } - for _, prop := range o.StaticProperties() { - if util.Compare(prop.Key, key) == 0 { - if propO := prop.Value.(*types.Object); propO != nil { - prop.Value, err = insertIntoObject(propO, path[1:], tpe) - if err != nil { - return nil, err + aDynProps := a.DynamicProperties() + bDynProps := bObj.DynamicProperties() + return types.NewObject(nil, types.NewDynamicProperty( + types.Or(aDynProps.Key, bDynProps.Key), + types.Or(aDynProps.Value, bDynProps.Value))) + } else if bAny, ok := b.(types.Any); ok && len(a.StaticProperties()) == 0 { + // If a is an object type with no static components ... + for _, t := range bAny { + if tObj, ok := t.(*types.Object); ok && len(tObj.StaticProperties()) == 0 { + // ... and b is a types.Any containing an object with no static components, we merge them. + aDynProps := a.DynamicProperties() + tDynProps := tObj.DynamicProperties() + tDynProps.Key = types.Or(tDynProps.Key, aDynProps.Key) + tDynProps.Value = types.Or(tDynProps.Value, aDynProps.Value) + return bAny } - } else { - return nil, fmt.Errorf("cannot insert into non-object type %v", prop.Value) } - return o, nil } + case *types.Set: + if bSet, ok := b.(*types.Set); ok { + return types.NewSet(types.Or(a.Of(), bSet.Of())) + } + case types.Any: + if _, ok := b.(types.Any); !ok { + return mergeTypes(b, a) + } + } + + return types.Or(a, b) +} + +func (n *typeTreeNode) String() string { + b := strings.Builder{} + + if k := n.key; k != nil { + b.WriteString(k.String()) + } else { + b.WriteString("-") + } + + if v := n.value; v != nil { + b.WriteString(": ") + b.WriteString(v.String()) + } + + n.children.Iter(func(_, v util.T) bool { + if child, ok := v.(*typeTreeNode); ok { + b.WriteString("\n\t+ ") + s := child.String() + s = strings.ReplaceAll(s, "\n", "\n\t") + b.WriteString(s) + } + return false + }) + + return b.String() +} + +func insertIntoObject(o *types.Object, path Ref, tpe types.Type, env *TypeEnv) (*types.Object, error) { + if len(path) == 0 { + return o, nil } - child, err := insertIntoObject(types.NewObject(nil, nil), path[1:], tpe) + key := env.Get(path[0].Value) + + if len(path) == 1 { + var dynamicProps *types.DynamicProperty + if dp := o.DynamicProperties(); dp != nil { + dynamicProps = types.NewDynamicProperty(types.Or(o.DynamicProperties().Key, key), types.Or(o.DynamicProperties().Value, tpe)) + } else { + dynamicProps = types.NewDynamicProperty(key, tpe) + } + return types.NewObject(o.StaticProperties(), dynamicProps), nil + } + + child, err := insertIntoObject(types.NewObject(nil, nil), path[1:], tpe, env) if err != nil { return nil, err } - staticProps := append(o.StaticProperties(), types.NewStaticProperty(key, child)) - return types.NewObject(staticProps, o.DynamicProperties()), nil + + var 
dynamicProps *types.DynamicProperty + if dp := o.DynamicProperties(); dp != nil { + dynamicProps = types.NewDynamicProperty(types.Or(o.DynamicProperties().Key, key), types.Or(o.DynamicProperties().Value, child)) + } else { + dynamicProps = types.NewDynamicProperty(key, child) + } + return types.NewObject(o.StaticProperties(), dynamicProps), nil } func (n *typeTreeNode) Leafs() map[*Ref]types.Type { diff --git a/ast/env_test.go b/ast/env_test.go index 1328b93de7a..0e3ac1de0fd 100644 --- a/ast/env_test.go +++ b/ast/env_test.go @@ -5,7 +5,6 @@ package ast import ( - "encoding/json" "testing" "github.com/open-policy-agent/opa/types" @@ -25,8 +24,8 @@ func TestInsertIntoObject(t *testing.T) { path: Ref{NewTerm(String("a"))}, tpe: types.S, expected: types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("a", types.S)}, - nil), + nil, + types.NewDynamicProperty(types.S, types.S)), }, { note: "empty path", @@ -49,9 +48,8 @@ func TestInsertIntoObject(t *testing.T) { expected: types.NewObject( []*types.StaticProperty{ types.NewStaticProperty("a", types.S), - types.NewStaticProperty("b", types.S), }, - nil), + types.NewDynamicProperty(types.S, types.S)), }, { note: "number key", @@ -63,53 +61,63 @@ func TestInsertIntoObject(t *testing.T) { expected: types.NewObject( []*types.StaticProperty{ types.NewStaticProperty("a", types.S), - types.NewStaticProperty(json.Number("2"), types.S), }, - nil), + types.NewDynamicProperty(types.N, types.S)), }, { - note: "same path, same type", + note: "other type value inserted", obj: types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("a", types.S)}, - nil), + nil, + types.NewDynamicProperty(types.S, types.S)), path: Ref{NewTerm(String("a"))}, - tpe: types.S, + tpe: types.B, expected: types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("a", types.S)}, - nil), + nil, + types.NewDynamicProperty(types.S, types.Any{types.B, types.S})), }, { - note: "same path, different type", + note: "any type value inserted", obj: types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("a", types.S)}, - nil), + nil, + types.NewDynamicProperty(types.S, types.S)), path: Ref{NewTerm(String("a"))}, - tpe: types.B, + tpe: types.A, expected: types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("a", types.Or(types.S, types.B))}, - nil), + nil, + types.NewDynamicProperty(types.S, types.A)), }, { - note: "same path, any type inserted", + note: "other type key inserted", obj: types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("a", types.S)}, - nil), - path: Ref{NewTerm(String("a"))}, - tpe: types.A, + nil, + types.NewDynamicProperty(types.S, types.S)), + path: Ref{NewTerm(Number("42"))}, + tpe: types.S, expected: types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("a", types.A)}, - nil), + nil, + types.NewDynamicProperty(types.Any{types.N, types.S}, types.S)), }, { - note: "same path, any type present", + note: "other type key and value inserted", obj: types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("a", types.A)}, - nil), + nil, + types.NewDynamicProperty(types.S, types.S)), + path: Ref{NewTerm(Number("42"))}, + tpe: types.B, + expected: types.NewObject( + nil, + types.NewDynamicProperty(types.Any{types.N, types.S}, types.Any{types.B, types.S})), + }, + { + note: "any type value present, string inserted", + obj: types.NewObject( + nil, + types.NewDynamicProperty(types.S, types.A)), path: Ref{NewTerm(String("a"))}, tpe: types.S, expected: types.NewObject( - 
[]*types.StaticProperty{types.NewStaticProperty("a", types.A)}, - nil), + nil, + types.NewDynamicProperty(types.S, types.A)), }, { note: "long path", @@ -121,97 +129,82 @@ func TestInsertIntoObject(t *testing.T) { expected: types.NewObject( []*types.StaticProperty{ types.NewStaticProperty("a", types.S), - types.NewStaticProperty("b", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("c", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("d", types.S), - }, nil)), - }, nil)), }, - nil), + types.NewDynamicProperty(types.S, // b + types.NewObject(nil, types.NewDynamicProperty(types.S, // c + types.NewObject(nil, types.NewDynamicProperty(types.S, types.S)))))), // d }, { - note: "long path, full match", + note: "long path, dynamic overlap with different key type", obj: types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty("a", types.S), - types.NewStaticProperty("b", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("c", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("d", types.S), - }, nil)), - }, nil)), - }, - nil), + nil, + types.NewDynamicProperty(types.N, types.S)), path: Ref{NewTerm(String("b")), NewTerm(String("c")), NewTerm(String("d"))}, tpe: types.S, expected: types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty("a", types.S), - types.NewStaticProperty("b", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("c", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("d", types.S), - }, nil)), - }, nil)), - }, - nil), + nil, + types.NewDynamicProperty(types.Any{types.N, types.S}, // b + types.Any{types.S, + types.NewObject(nil, types.NewDynamicProperty(types.S, // c + types.NewObject(nil, types.NewDynamicProperty(types.S, types.S))))})), // d }, { - note: "long path, full match, different type", + note: "long path, dynamic overlap with object", obj: types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty("a", types.S), - types.NewStaticProperty("b", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("c", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("d", types.S), - }, nil)), - }, nil)), - }, - nil), + nil, + types.NewDynamicProperty(types.S, + types.NewObject(nil, types.NewDynamicProperty(types.S, types.N)))), path: Ref{NewTerm(String("b")), NewTerm(String("c")), NewTerm(String("d"))}, - tpe: types.B, + tpe: types.S, expected: types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty("a", types.S), - types.NewStaticProperty("b", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("c", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("d", types.Or(types.S, types.B)), - }, nil)), - }, nil)), - }, - nil), + nil, + types.NewDynamicProperty(types.S, // b + types.Any{ + types.NewObject(nil, types.NewDynamicProperty(types.S, types.N)), + types.NewObject(nil, types.NewDynamicProperty(types.S, // c + types.NewObject(nil, types.NewDynamicProperty(types.S, types.S)))), // d + })), }, { - note: "long path, partial match", + note: "long path, dynamic overlap with object (2)", obj: types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty("a", types.S), - types.NewStaticProperty("b", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("c", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("d", types.S), - }, nil)), - }, nil)), - }, - nil), - path: Ref{NewTerm(String("b")), NewTerm(String("x")), NewTerm(String("d"))}, + nil, + 
types.NewDynamicProperty(types.S, + types.NewObject(nil, types.NewDynamicProperty(types.S, + types.NewObject(nil, types.NewDynamicProperty(types.S, types.N)))))), + path: Ref{NewTerm(String("b")), NewTerm(String("c")), NewTerm(String("d"))}, tpe: types.S, expected: types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty("a", types.S), - types.NewStaticProperty("b", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("c", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("d", types.S), - }, nil)), - types.NewStaticProperty("x", types.NewObject([]*types.StaticProperty{ - types.NewStaticProperty("d", types.S), - }, nil)), - }, nil)), - }, - nil), + nil, + types.NewDynamicProperty(types.S, + types.Any{ // Objects aren't merged, as that would become very complicated if they contain static components + types.NewObject(nil, types.NewDynamicProperty(types.S, + types.NewObject(nil, types.NewDynamicProperty(types.S, types.N)))), + types.NewObject(nil, types.NewDynamicProperty(types.S, + types.NewObject(nil, types.NewDynamicProperty(types.S, types.S)))), + })), + }, + { + note: "long path, dynamic overlap with different value type", + obj: types.NewObject( + nil, + types.NewDynamicProperty(types.S, types.S)), + path: Ref{NewTerm(String("b")), NewTerm(String("c")), NewTerm(String("d"))}, + tpe: types.S, + expected: types.NewObject( + nil, + types.NewDynamicProperty(types.S, // b + types.Any{types.S, + types.NewObject(nil, types.NewDynamicProperty(types.S, // c + types.NewObject(nil, types.NewDynamicProperty(types.S, types.S))))})), // d }, } + env := TypeEnv{} + for _, tc := range tests { t.Run(tc.note, func(t *testing.T) { - result, err := insertIntoObject(tc.obj, tc.path, tc.tpe) + result, err := insertIntoObject(tc.obj, tc.path, tc.tpe, &env) if err != nil { t.Fatalf("Unexpected error: %v", err) } @@ -222,7 +215,295 @@ func TestInsertIntoObject(t *testing.T) { } } +type pathAndType struct { + path Ref + tpe types.Type +} + +func TestTypeTreeNode_Insert(t *testing.T) { + cases := []struct { + note string + insertions []pathAndType + expected []pathAndType + }{ + { + note: "only primitives", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b.c"), + tpe: types.N, + }, + { + path: MustParseRef("data.a.b.c2"), + tpe: types.S, + }, + { + path: MustParseRef("data.a.b[42]"), + tpe: types.B, + }, + }, + expected: []pathAndType{ + { + path: MustParseRef("data.a.b.c"), + tpe: types.N, + }, + { + path: MustParseRef("data.a.b.c2"), + tpe: types.S, + }, + { + path: MustParseRef("data.a.b[42]"), + tpe: types.B, + }, + { + path: MustParseRef("data.a.b"), + tpe: nil, + }, + }, + }, + { + note: "primitive leafs inserted into object", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty(types.N, types.B)), + }, + { + path: MustParseRef("data.a.b.c"), + tpe: types.S, + }, + { + path: MustParseRef("data.a.b[true]"), + tpe: types.S, + }, + }, + expected: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty( + types.Any{types.B, types.N, types.S}, types.Any{types.B, types.S})), + }, + }, + }, + { + note: "primitive leafs first, then object", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b.c"), + tpe: types.S, + }, + { + path: MustParseRef("data.a.b[true]"), + tpe: types.S, + }, + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty(types.N, types.B)), + }, + }, + expected: []pathAndType{ + { + 
path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty( + types.Any{types.B, types.N, types.S}, + types.Any{types.B, types.S}, + )), + }, + }, + }, + { + note: "object beside object", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty(types.N, types.B)), + }, + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty(types.S, types.S)), + }, + }, + expected: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty( + types.Any{types.N, types.S}, + types.Any{types.B, types.S}, + )), + }, + }, + }, + { + note: "object beside object with static types", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("foo", types.N)}, + types.NewDynamicProperty(types.N, types.B)), + }, + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("bar", types.S)}, + types.NewDynamicProperty(types.S, types.S)), + }, + }, + expected: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.Any{ + types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("foo", types.N)}, + types.NewDynamicProperty(types.N, types.B)), + types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("bar", types.S)}, + types.NewDynamicProperty(types.S, types.S)), + }, + }, + }, + }, + { + note: "object beside object with static types (2)", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + nil, + types.NewDynamicProperty(types.N, types.B)), + }, + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("bar", types.S)}, + types.NewDynamicProperty(types.S, types.S)), + }, + }, + expected: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.Any{ + types.NewObject( + nil, + types.NewDynamicProperty(types.N, types.B)), + types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("bar", types.S)}, + types.NewDynamicProperty(types.S, types.S)), + }, + }, + }, + }, + { + note: "object beside object with static types (3)", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("foo", types.N)}, + types.NewDynamicProperty(types.N, types.B)), + }, + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + nil, + types.NewDynamicProperty(types.S, types.S)), + }, + }, + expected: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.Any{ + types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("foo", types.N)}, + types.NewDynamicProperty(types.N, types.B)), + types.NewObject( + nil, + types.NewDynamicProperty(types.S, types.S)), + }, + }, + }, + }, + { + note: "object beside object with static types (4)", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + []*types.StaticProperty{types.NewStaticProperty("foo", types.N)}, + types.NewDynamicProperty(types.N, types.B)), + }, + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + nil, + types.NewDynamicProperty(types.S, types.S)), + }, + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject( + nil, + types.NewDynamicProperty(types.N, types.B)), + }, + }, + expected: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.Any{ + types.NewObject( + 
[]*types.StaticProperty{types.NewStaticProperty("foo", types.N)}, + types.NewDynamicProperty(types.N, types.B)), + types.NewObject( + nil, + types.NewDynamicProperty(types.Any{types.N, types.S}, types.Any{types.B, types.S})), + }, + }, + }, + }, + { + note: "object into object", + insertions: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty(types.N, types.B)), + }, + { + path: MustParseRef("data.a.b.c"), + tpe: types.NewObject(nil, types.NewDynamicProperty(types.B, types.N)), + }, + }, + expected: []pathAndType{ + { + path: MustParseRef("data.a.b"), + tpe: types.NewObject(nil, types.NewDynamicProperty( + types.Any{types.N, types.S}, + types.Any{types.B, types.NewObject(nil, types.NewDynamicProperty(types.B, types.N))}, + )), + }, + }, + }, + } + + for _, tc := range cases { + t.Run(tc.note, func(t *testing.T) { + root := newTypeTree() + env := TypeEnv{tree: root} + + for _, insertion := range tc.insertions { + root.Insert(insertion.path, insertion.tpe, &env) + } + + for _, expected := range tc.expected { + actual := root.Get(expected.path) + if types.Compare(actual, expected.tpe) != 0 { + t.Fatalf("Expected %v but got %v", expected.tpe, actual) + } + } + }) + } +} + func TestTypeTreeInsert(t *testing.T) { + env := TypeEnv{} n := newTypeTree() abcRef := Ref{NewTerm(String("a")), NewTerm(String("b")), NewTerm(String("c"))} @@ -242,17 +523,14 @@ func TestTypeTreeInsert(t *testing.T) { // existing "child" leafs should be added to new intermediate object leaf abRef := Ref{NewTerm(String("a")), NewTerm(String("b"))} - n.Insert(abRef, types.NewObject(nil, &types.DynamicProperty{Key: types.N, Value: types.S})) + n.Insert(abRef, types.NewObject(nil, &types.DynamicProperty{Key: types.N, Value: types.S}), &env) actual = n.Get(abRef) expected := types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty("c", types.B), - types.NewStaticProperty("d", types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("e", types.N)}, - nil)), - }, - &types.DynamicProperty{Key: types.N, Value: types.S}, + nil, + types.NewDynamicProperty( + types.Any{types.N, types.S}, + types.Any{types.B, types.S, types.NewObject(nil, types.NewDynamicProperty(types.S, types.N))}), ) if types.Compare(actual, expected) != 0 { t.Fatalf("Expected %v but got %v", expected, actual) @@ -260,8 +538,8 @@ func TestTypeTreeInsert(t *testing.T) { // new "child" leafs should be added to new intermediate object leaf - abfRef := Ref{NewTerm(String("a")), NewTerm(String("b")), NewTerm(String("f"))} - n.Insert(abfRef, types.S) + abfRef := Ref{NewTerm(String("a")), NewTerm(String("b")), NewTerm(Boolean(true))} + n.Insert(abfRef, types.S, &env) actual = n.Get(abfRef) if types.Compare(actual, types.S) != 0 { @@ -270,14 +548,10 @@ func TestTypeTreeInsert(t *testing.T) { actual = n.Get(abRef) expected = types.NewObject( - []*types.StaticProperty{ - types.NewStaticProperty("c", types.B), - types.NewStaticProperty("f", types.S), - types.NewStaticProperty("d", types.NewObject( - []*types.StaticProperty{types.NewStaticProperty("e", types.N)}, - nil)), - }, - &types.DynamicProperty{Key: types.N, Value: types.S}, + nil, + types.NewDynamicProperty( + types.Any{types.B, types.N, types.S}, + types.Any{types.B, types.S, types.NewObject(nil, types.NewDynamicProperty(types.S, types.N))}), ) if types.Compare(actual, expected) != 0 { t.Fatalf("Expected %v but got %v", expected, actual) diff --git a/build/policy/files.rego b/build/policy/files.rego index ed14cf70086..5a4bdb6126c 100644 --- 
a/build/policy/files.rego +++ b/build/policy/files.rego @@ -2,7 +2,7 @@ # https://api.github.com/repos/open-policy-agent/opa/pulls/${PR_ID}/files # # Note that the "filename" here refers to the full path of the file, like -# docs/website/data/integrations.yaml - since that's how it's named in the +# docs/foo/bar.yaml - since that's how it's named in the # input we'll use the same convention here. package files @@ -17,8 +17,6 @@ import data.helpers.extension filenames := {f.filename | some f in input} -logo_exts := {"png", "svg"} - changes[filename] := attributes if { some change in input filename := change.filename @@ -45,69 +43,6 @@ get_file_in_pr(filename) := dump_response_on_error(http.send({ "raise_error": false, })).raw_body -deny contains "Logo must be placed in docs/website/static/img/logos/integrations" if { - "docs/website/data/integrations.yaml" in filenames - - some filename in filenames - extension(filename) in logo_exts - changes[filename].status == "added" - directory(filename) != "docs/website/static/img/logos/integrations" -} - -deny contains "Logo must be a .png or .svg file" if { - "docs/website/data/integrations.yaml" in filenames - - some filename in filenames - changes[filename].status == "added" - directory(filename) == "docs/website/static/img/logos/integrations" - not extension(filename) in logo_exts -} - -deny contains "Logo name must match integration" if { - "docs/website/data/integrations.yaml" in filenames - - some filename in filenames - ext := extension(filename) - ext in logo_exts - changes[filename].status == "added" - logo_name := trim_suffix(basename(filename), concat("", [".", ext])) - - integrations := {integration | some integration, _ in yaml.unmarshal(integrations_file).integrations} - not logo_name in integrations -} - -deny contains sprintf("Integration '%v' missing required attribute '%v'", [name, attr]) if { - "docs/website/data/integrations.yaml" in filenames - - file := yaml.unmarshal(integrations_file) - required := {"title", "description"} - - some name, item in file.integrations - some attr in (required - {key | some key, _ in item}) -} - -deny contains sprintf("Integration '%v' references unknown software '%v' (i.e. not in 'software' object)", [name, software]) if { - "docs/website/data/integrations.yaml" in filenames - - file := yaml.unmarshal(integrations_file) - software_list := object.keys(file.software) - - some name, item in file.integrations - some software in item.software - not software in software_list -} - -deny contains sprintf("Integration '%v' references unknown organization '%v' (i.e. 
not in 'organizations' object)", [name, organization]) if { - "docs/website/data/integrations.yaml" in filenames - - file := yaml.unmarshal(integrations_file) - organizations_list := object.keys(file.organizations) - - some name, item in file.integrations - some organization in item.inventors - not organization in organizations_list -} - deny contains sprintf("%s is an invalid YAML file: %s", [filename, content]) if { some filename, content in yaml_file_contents changes[filename].status in {"added", "modified"} @@ -120,8 +55,6 @@ deny contains sprintf("%s is an invalid JSON file: %s", [filename, content]) if not json.is_valid(content) } -integrations_file := get_file_in_pr("docs/website/data/integrations.yaml") - yaml_file_contents[filename] := get_file_in_pr(filename) if { some filename in filenames extension(filename) in {"yml", "yaml"} diff --git a/build/policy/files_test.rego b/build/policy/files_test.rego index b6e432f7f5b..03e9f1e2d37 100644 --- a/build/policy/files_test.rego +++ b/build/policy/files_test.rego @@ -4,183 +4,6 @@ import future.keywords.in import data.files.deny -test_deny_logo_if_added_in_wrong_directory { - expected := "Logo must be placed in docs/website/static/img/logos/integrations" - expected in deny with input as [ - { - "filename": "docs/website/data/integrations.yaml", - "status": "modified", - }, - { - "filename": "docs/website/static/img/logos/example.png", - "status": "added", - }, - ] -} - -test_allow_logo_if_added_in_correct_directory { - integrations := yaml.marshal({"integrations": {"example": { - "title": "My test integration", - "description": "Testing", - }}}) - - count(deny) == 0 with data.files.integrations_file as integrations with input as [ - { - "filename": "docs/website/data/integrations.yaml", - "status": "modified", - }, - { - "filename": "docs/website/static/img/logos/integrations/example.png", - "status": "added", - }, - ] -} - -test_deny_logo_if_not_png_file { - expected := "Logo must be a .png or .svg file" - expected in deny with input as [ - { - "filename": "docs/website/data/integrations.yaml", - "status": "modified", - }, - { - "filename": "docs/website/static/img/logos/integrations/example.jpg", - "status": "added", - }, - ] -} - -test_deny_logo_if_no_matching_integration { - integrations := yaml.marshal({"integrations": {"my-integration": { - "title": "My test integration", - "description": "Testing", - }}}) - - files := [ - { - "filename": "docs/website/data/integrations.yaml", - "status": "modified", - }, - { - "filename": "docs/website/static/img/logos/integrations/example.png", - "status": "added", - }, - ] - - expected := "Logo name must match integration" - - expected in deny with data.files.integrations_file as integrations with input as files -} - -test_allow_logo_if_no_matching_integration { - integrations := yaml.marshal({"integrations": {"my-integration": { - "title": "My test integration", - "description": "Testing", - }}}) - - files := [ - { - "filename": "docs/website/data/integrations.yaml", - "status": "modified", - }, - { - "filename": "docs/website/static/img/logos/integrations/my-integration.png", - "status": "added", - }, - ] - - count(deny) == 0 with data.files.integrations_file as integrations with input as files -} - -test_deny_integration_if_missing_required_attribute { - expected := "Integration 'my-integration' missing required attribute 'description'" - files := [{"filename": "docs/website/data/integrations.yaml"}] - integrations := yaml.marshal({"integrations": {"my-integration": { - "title": "My test 
integration", - "inventors": ["acmecorp"], - }}}) - - expected in deny with data.files.integrations_file as integrations with input as files -} - -test_deny_integration_allowed_with_required_attributes { - files := [{"filename": "docs/website/data/integrations.yaml"}] - integrations := yaml.marshal({ - "integrations": {"my-integration": { - "title": "My test integration", - "description": "This is a test integration", - "inventors": ["acmecorp"], - }}, - "organizations": {"acmecorp": {"name": "AcmeCorp", "link": "https://acmecorp.example.org"}}, - }) - - count(deny) == 0 with data.files.integrations_file as integrations with input as files -} - -test_deny_unlisted_software { - files := [{"filename": "docs/website/data/integrations.yaml"}] - integrations := yaml.marshal({ - "integrations": {"my-integration": { - "title": "My test integration", - "description": "This is a test integration", - "software": ["bitcoin-miner"], - }}, - "software": {"kubernetes": {"name": "Kubernetes"}}, - }) - - expected := "Integration 'my-integration' references unknown software 'bitcoin-miner' (i.e. not in 'software' object)" - - expected in deny with data.files.integrations_file as integrations with input as files -} - -test_allow_listed_software { - files := [{"filename": "docs/website/data/integrations.yaml"}] - integrations := yaml.marshal({ - "integrations": {"my-integration": { - "title": "My test integration", - "description": "This is a test integration", - "software": ["kubernetes"], - }}, - "software": {"kubernetes": {"name": "Kubernetes"}}, - }) - - count(deny) == 0 with data.files.integrations_file as integrations with input as files -} - -test_deny_unlisted_organization { - files := [{"filename": "docs/website/data/integrations.yaml"}] - integrations := yaml.marshal({ - "integrations": {"my-integration": { - "title": "My test integration", - "description": "This is a test integration", - "software": ["kubernetes"], - "inventors": ["acmecorp"], - }}, - "software": {"kubernetes": {"name": "Kubernetes"}}, - "organizations": {"foobar": {"name": "FooBar", "link": "https://foobar.example.org"}}, - }) - - expected := "Integration 'my-integration' references unknown organization 'acmecorp' (i.e. 
not in 'organizations' object)" - - expected in deny with data.files.integrations_file as integrations with input as files -} - -test_allow_listed_organization { - files := [{"filename": "docs/website/data/integrations.yaml"}] - integrations := yaml.marshal({ - "integrations": {"my-integration": { - "title": "My test integration", - "description": "This is a test integration", - "software": ["kubernetes"], - "inventors": ["acmecorp"], - }}, - "software": {"kubernetes": {"name": "Kubernetes"}}, - "organizations": {"acmecorp": {"name": "AcmeCorp", "link": "https://acmecorp.example.org"}}, - }) - - count(deny) == 0 with data.files.integrations_file as integrations with input as files -} - test_deny_invalid_yaml_file { expected := "invalid.yaml is an invalid YAML file: {null{}}" expected in deny with data.files.yaml_file_contents as {"invalid.yaml": "{null{}}"} diff --git a/build/policy/integrations.rego b/build/policy/integrations.rego new file mode 100644 index 00000000000..3adc20fec94 --- /dev/null +++ b/build/policy/integrations.rego @@ -0,0 +1,189 @@ +package integrations + +import future.keywords.contains +import future.keywords.if +import future.keywords.in + +allowed_image_extensions := ["png", "svg"] + +# check that all integrations have an image +deny contains result if { + some id, integration in input.integrations + + # some integrations are allowed to have a missing image as no suitable image is available + not integration.allow_missing_image == true + + some _, ext in allowed_image_extensions + + possible_filenames := {e | + some i + ext := allowed_image_extensions[i] + + e := sprintf("%s.%s", [id, ext]) + } + + possible_filenames - {i | i := input.images[_]} == possible_filenames + + result := { + "key": "integration_image", + "message": sprintf("integration %s missing image in 'static/img/logos/integrations' with extension of: %v", [id, concat(",", allowed_image_extensions)]), + } +} + +# check that all images have an integration +deny contains result if { + some _, image in input.images + + id := split(image, ".")[0] + + not id in object.keys(input.integrations) + + result := { + "key": "image_integration", + "message": sprintf("image %s is not used by any integration page", [image]), + } +} + +# check that all integrations have the required fields +deny contains result if { + some id, integration in input.integrations + + missing_fields := {"title", "layout"} - object.keys(integration) + + count(missing_fields) > 0 + + result := { + "key": "fields", + "message": sprintf("integration %s missing required fields: %v", [id, concat(", ", sort(missing_fields))]), + } +} + +# check that all integrations have content +deny contains result if { + some id, integration in input.integrations + + content := trim_space(object.get(integration, "content", "")) + + content == "" + + result := { + "key": "content", + "message": sprintf("integration %s has no content", [id]), + } +} + +# check that all integrations have a layout set to integration-single +deny contains result if { + some id, integration in input.integrations + + layout := object.get(integration, "layout", "") + + layout != "integration-single" + + result := { + "key": "layout", + "message": sprintf("integration %s does not have layout set to: integration-single", [id]), + } +} + +# check that all integrations reference an existing organization +deny contains result if { + some id, integration in input.integrations + + inventors := object.get(integration, "inventors", []) + + some _, inventor in inventors + + not inventor in 
object.keys(input.organizations) + + result := { + "key": "inventors", + "message": sprintf("integration %s references organization %s which does not exist", [id, inventor]), + } +} + +# check that all integrations reference existing software +deny contains result if { + some id, integration in input.integrations + + softwares := object.get(integration, "software", []) + + some _, software in softwares + + not software in object.keys(input.softwares) + + result := { + "key": "software", + "message": sprintf("integration %s references software %s which does not exist", [id, software]), + } +} + +# check that softwares have required fields +deny contains result if { + some id, software in input.softwares + + missing_fields := {"title", "layout", "link"} - object.keys(software) + + count(missing_fields) > 0 + + result := { + "key": "fields", + "message": sprintf("software %s missing required fields: %v", [id, concat(", ", sort(missing_fields))]), + } +} + +# check that organizations have required fields +deny contains result if { + some id, organization in input.organizations + + missing_fields := {"title", "layout", "link"} - object.keys(organization) + + count(missing_fields) > 0 + + result := { + "key": "fields", + "message": sprintf("organization %s missing required fields: %v", [id, concat(", ", sort(missing_fields))]), + } +} + +# check that each organization has at least one integration +deny contains result if { + some id, organization in input.organizations + + inventor_integrations := {i | + some i, integration in input.integrations + id in integration.inventors + } + speaker_integrations := {i | + some i, integration in input.integrations + some _, video in integration.videos + + some _, speaker in video.speakers + + speaker.organization == id + } + + count(inventor_integrations) + count(speaker_integrations) == 0 + + result := { + "key": "orphaned_org", + "message": sprintf("organization %s has no integrations", [id]), + } +} + +# check that each software has at least one integration +deny contains result if { + some id, software in input.softwares + + integrations := {i | + some i, integration in input.integrations + id in integration.software + } + + count(integrations) == 0 + + result := { + "key": "orphaned_software", + "message": sprintf("software %s has no integrations", [id]), + } +} diff --git a/build/policy/integrations_test.rego b/build/policy/integrations_test.rego new file mode 100644 index 00000000000..dc97bf5e33a --- /dev/null +++ b/build/policy/integrations_test.rego @@ -0,0 +1,372 @@ +package integrations_test + +import future.keywords.in + +messages_for_key(key, output) = messages { + messages := {m | + some e + output[e] + + key in e + + m := e.message + } +} + +print_if(true, _, _, _) = true + +print_if(false, key, false, output) := false { + print("Exp:", {}) + print("Got: ", messages_for_key(key, output)) +} + +print_if(false, key, expected, output) := false { + is_string(expected) + print("Exp:", expected) + print("Got:", messages_for_key(key, output)) +} + +test_integration_has_required_fields_missing { + output := data.integrations.deny with input as {"integrations": {"regal": {}}} + + key := "fields" + message := "integration regal missing required fields: layout, title" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_integration_has_required_fields_present { + output := data.integrations.deny with input as {"integrations": {"regal": {"title": "Regal", "layout": "integration"}}} + 
+ key := "fields" + message := "integration regal missing required fields: layout, title" + + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_integration_has_layout_missing { + output := data.integrations.deny with input as {"integrations": {"regal": {}}} + + key := "layout" + message := "integration regal does not have layout set to: integration-single" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_integration_has_layout_missing { + output := data.integrations.deny with input as {"integrations": {"regal": {"layout": "wrong"}}} + + key := "layout" + message := "integration regal does not have layout set to: integration-single" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_integration_has_layout_present { + output := data.integrations.deny with input as {"integrations": {"regal": {"layout": "integration-single"}}} + + key := "layout" + + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_integration_has_content_missing { + output := data.integrations.deny with input as {"integrations": {"regal": {}}} + + key := "content" + message := "integration regal has no content" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_integration_has_content_blank { + output := data.integrations.deny with input as {"integrations": {"regal": {"content": "\t\t\n "}}} + + key := "content" + message := "integration regal has no content" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_integration_has_content_present { + output := data.integrations.deny with input as {"integrations": {"regal": {"content": "foobar"}}} + + key := "content" + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_every_integration_has_image_missing { + output := data.integrations.deny with input as { + "images": ["reegal.png"], + "integrations": {"regal": {}}, + } + + key := "integration_image" + message := "integration regal missing image in 'static/img/logos/integrations' with extension of: png,svg" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_every_integration_has_image_present { + output := data.integrations.deny with input as { + "images": ["regal.png"], + "integrations": {"regal": {}}, + } + + key := "integration_image" + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_every_integration_has_image_missing_but_permitted { + output := data.integrations.deny with input as { + "images": ["reegal.png"], + "integrations": {"regal": {"allow_missing_image": true}}, + } + + key := "integration_image" + + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_every_image_has_integration_missing { + output := data.integrations.deny with input as { + "images": ["regal.png"], + "integrations": {"foobar": {}}, + } + + key := "image_integration" + message := "image regal.png is not used by any integration page" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + 
+test_every_image_has_integration_present { + output := data.integrations.deny with input as { + "images": ["regal.png"], + "integrations": {"regal": {}}, + } + + key := "image_integration" + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_integration_organizations_missing { + output := data.integrations.deny with input as { + "organizations": {"stira": {}}, + "integrations": {"regal": {"inventors": ["styra"]}}, + } + + key := "inventors" + message := "integration regal references organization styra which does not exist" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_integration_organizations_present { + output := data.integrations.deny with input as { + "organizations": {"styra": {}}, + "integrations": {"regal": {"inventors": ["styra"]}}, + } + + key := "inventors" + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_integration_softwares_missing { + output := data.integrations.deny with input as { + "softwares": {"mars": {}}, + "integrations": {"regal": {"software": ["terraform"]}}, + } + + key := "software" + message := "integration regal references software terraform which does not exist" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_integration_softwares_present { + output := data.integrations.deny with input as { + "softwares": {"terraform": {}}, + "integrations": {"regal": {"software": ["terraform"]}}, + } + + key := "software" + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_software_has_required_fields_missing { + output := data.integrations.deny with input as {"softwares": {"terraform": {}}} + + key := "fields" + message := "software terraform missing required fields: layout, link, title" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_software_has_required_fields_present { + output := data.integrations.deny with input as {"softwares": {"terraform": {"layout": "software-single", "link": "https://www.terraform.io/", "title": "Terraform"}}} + + key := "fields" + + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_organization_has_required_labels { + output := data.integrations.deny with input as {"organizations": {"styra": {}}} + + key := "fields" + message := "organization styra missing required fields: layout, link, title" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_organization_has_required_fields_present { + output := data.integrations.deny with input as {"organizations": {"styra": {"layout": "organization-single", "link": "https://styra.com/", "title": "Styra"}}} + + key := "fields" + + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_organization_has_one_or_more_integrations_none { + output := data.integrations.deny with input as {"organizations": {"foobar": {}}, "integrations": {}} + + key := "orphaned_org" + message := "organization foobar has no integrations" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_organization_has_one_or_more_integrations_one { + output := 
data.integrations.deny with input as {"organizations": {"foobaz": {}}, "integrations": {"foobar": {"inventors": ["foobaz"]}}} + + key := "orphaned_org" + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_organization_has_one_or_more_integrations_speaker { + output := data.integrations.deny with input as {"organizations": {"foobaz": {}}, "integrations": {"foobar": {"videos": [{"speakers": [{"organization": "foobaz"}]}]}}} + + key := "orphaned_org" + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} + +test_software_has_one_or_more_integrations_none { + output := data.integrations.deny with input as {"softwares": {"foobar": {}}, "integrations": {}} + + key := "orphaned_software" + message := "software foobar has no integrations" + + got := messages_for_key(key, output) + + result := message in got + + print_if(result, key, message, output) +} + +test_software_has_one_or_more_integrations_one { + output := data.integrations.deny with input as {"softwares": {"foobaz": {}}, "integrations": {"foobar": {"software": ["foobaz"]}}} + + key := "orphaned_software" + got := messages_for_key(key, output) + + result := got == set() + + print_if(result, key, false, output) +} diff --git a/cmd/exec.go b/cmd/exec.go index 8cb43204ea6..2197af50c03 100644 --- a/cmd/exec.go +++ b/cmd/exec.go @@ -68,8 +68,9 @@ e.g., opa exec --decision /foo/bar/baz ...`, addConfigOverrides(cmd.Flags(), ¶ms.ConfigOverrides) addConfigOverrideFiles(cmd.Flags(), ¶ms.ConfigOverrideFiles) cmd.Flags().StringVarP(¶ms.Decision, "decision", "", "", "set decision to evaluate") - cmd.Flags().BoolVarP(¶ms.FailDefined, "fail-defined", "", false, "exits with non-zero exit code on defined/non-empty result and errors") - cmd.Flags().BoolVarP(¶ms.Fail, "fail", "", false, "exits with non-zero exit code on undefined/empty result and errors") + cmd.Flags().BoolVarP(¶ms.FailDefined, "fail-defined", "", false, "exits with non-zero exit code on defined result and errors") + cmd.Flags().BoolVarP(¶ms.Fail, "fail", "", false, "exits with non-zero exit code on undefined result and errors") + cmd.Flags().BoolVarP(¶ms.FailNonEmpty, "fail-non-empty", "", false, "exits with non-zero exit code on non-empty result and errors") cmd.Flags().VarP(params.LogLevel, "log-level", "l", "set log level") cmd.Flags().Var(params.LogFormat, "log-format", "set log format") cmd.Flags().StringVar(¶ms.LogTimestampFormat, "log-timestamp-format", "", "set log timestamp format (OPA_LOG_TIMESTAMP_FORMAT environment variable)") diff --git a/cmd/exec_test.go b/cmd/exec_test.go index dc158d225ab..e6858bde61f 100644 --- a/cmd/exec_test.go +++ b/cmd/exec_test.go @@ -161,16 +161,54 @@ func TestInvalidConfig(t *testing.T) { } } +func TestInvalidConfigAllThree(t *testing.T) { + var buf bytes.Buffer + params := exec.NewParams(&buf) + params.Fail = true + params.FailDefined = true + params.FailNonEmpty = true + + err := exec.Exec(context.TODO(), nil, params) + if err == nil || err.Error() != "specify --fail or --fail-defined but not both" { + t.Fatalf("Expected error '%s' but got '%s'", "specify --fail or --fail-defined but not both", err.Error()) + } +} + +func TestInvalidConfigNonEmptyAndFail(t *testing.T) { + var buf bytes.Buffer + params := exec.NewParams(&buf) + params.FailNonEmpty = true + params.Fail = true + + err := exec.Exec(context.TODO(), nil, params) + if err == nil || err.Error() != "specify --fail-non-empty or --fail but not both" { + t.Fatalf("Expected 
diff --git a/cmd/exec.go b/cmd/exec.go
index 8cb43204ea6..2197af50c03 100644
--- a/cmd/exec.go
+++ b/cmd/exec.go
@@ -68,8 +68,9 @@ e.g., opa exec --decision /foo/bar/baz ...`,
 	addConfigOverrides(cmd.Flags(), &params.ConfigOverrides)
 	addConfigOverrideFiles(cmd.Flags(), &params.ConfigOverrideFiles)
 	cmd.Flags().StringVarP(&params.Decision, "decision", "", "", "set decision to evaluate")
-	cmd.Flags().BoolVarP(&params.FailDefined, "fail-defined", "", false, "exits with non-zero exit code on defined/non-empty result and errors")
-	cmd.Flags().BoolVarP(&params.Fail, "fail", "", false, "exits with non-zero exit code on undefined/empty result and errors")
+	cmd.Flags().BoolVarP(&params.FailDefined, "fail-defined", "", false, "exits with non-zero exit code on defined result and errors")
+	cmd.Flags().BoolVarP(&params.Fail, "fail", "", false, "exits with non-zero exit code on undefined result and errors")
+	cmd.Flags().BoolVarP(&params.FailNonEmpty, "fail-non-empty", "", false, "exits with non-zero exit code on non-empty result and errors")
 	cmd.Flags().VarP(params.LogLevel, "log-level", "l", "set log level")
 	cmd.Flags().Var(params.LogFormat, "log-format", "set log format")
 	cmd.Flags().StringVar(&params.LogTimestampFormat, "log-timestamp-format", "", "set log timestamp format (OPA_LOG_TIMESTAMP_FORMAT environment variable)")
diff --git a/cmd/exec_test.go b/cmd/exec_test.go
index dc158d225ab..e6858bde61f 100644
--- a/cmd/exec_test.go
+++ b/cmd/exec_test.go
@@ -161,16 +161,54 @@ func TestInvalidConfig(t *testing.T) {
 	}
 }
+func TestInvalidConfigAllThree(t *testing.T) {
+	var buf bytes.Buffer
+	params := exec.NewParams(&buf)
+	params.Fail = true
+	params.FailDefined = true
+	params.FailNonEmpty = true
+
+	err := exec.Exec(context.TODO(), nil, params)
+	if err == nil || err.Error() != "specify --fail or --fail-defined but not both" {
+		t.Fatalf("Expected error '%s' but got '%s'", "specify --fail or --fail-defined but not both", err.Error())
+	}
+}
+
+func TestInvalidConfigNonEmptyAndFail(t *testing.T) {
+	var buf bytes.Buffer
+	params := exec.NewParams(&buf)
+	params.FailNonEmpty = true
+	params.Fail = true
+
+	err := exec.Exec(context.TODO(), nil, params)
+	if err == nil || err.Error() != "specify --fail-non-empty or --fail but not both" {
+		t.Fatalf("Expected error '%s' but got '%s'", "specify --fail-non-empty or --fail but not both", err.Error())
+	}
+}
+
+func TestInvalidConfigNonEmptyAndFailDefined(t *testing.T) {
+	var buf bytes.Buffer
+	params := exec.NewParams(&buf)
+	params.FailNonEmpty = true
+	params.FailDefined = true
+
+	err := exec.Exec(context.TODO(), nil, params)
+	if err == nil || err.Error() != "specify --fail-non-empty or --fail-defined but not both" {
+		t.Fatalf("Expected error '%s' but got '%s'", "specify --fail-non-empty or --fail-defined but not both", err.Error())
+	}
+}
+
 func TestFailFlagCases(t *testing.T) {
 	var tests = []struct {
-		description string
-		files       map[string]string
-		decision    string
-		expectError bool
-		expected    interface{}
-		fail        bool
-		failDefined bool
+		description  string
+		files        map[string]string
+		decision     string
+		expectError  bool
+		expected     interface{}
+		fail         bool
+		failDefined  bool
+		failNonEmpty bool
 	}{
 		{
 			description: "--fail-defined with undefined result",
@@ -187,6 +225,7 @@ func TestFailFlagCases(t *testing.T) {
 			test_fun
 		}`,
 			},
+			expectError: false,
 			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
 				"path": "/files/test.json",
 				"error": {
@@ -287,6 +326,7 @@ func TestFailFlagCases(t *testing.T) {
 			main["hello"]`,
 			},
+			expectError: false,
 			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
 				"path": "/files/test.json",
 				"result": ["hello"]
@@ -294,7 +334,7 @@ func TestFailFlagCases(t *testing.T) {
 			fail: true,
 		},
 		{
-			description: "--fail-defined with true boolean result",
+			description: "--fail with true boolean result",
 			files: map[string]string{
 				"files/test.json": `{"foo": 7}`,
 				"bundle/x.rego": `package fail.defined.flag
@@ -308,7 +348,8 @@ func TestFailFlagCases(t *testing.T) {
 			some_function
 		}`,
 			},
-			decision: "fail/defined/flag/fail_test",
+			decision:    "fail/defined/flag/fail_test",
+			expectError: false,
 			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
 				"path": "/files/test.json",
 				"result": true
@@ -316,7 +357,7 @@ func TestFailFlagCases(t *testing.T) {
 			fail: true,
 		},
 		{
-			description: "--fail-defined with false boolean result",
+			description: "--fail with false boolean result",
 			files: map[string]string{
 				"files/test.json": `{"foo": 7}`,
 				"bundle/x.rego": `package fail.defined.flag
@@ -326,13 +367,135 @@ func TestFailFlagCases(t *testing.T) {
 			false
 		}`,
 			},
-			decision: "fail/defined/flag/fail_test",
+			decision:    "fail/defined/flag/fail_test",
+			expectError: false,
 			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
 				"path": "/files/test.json",
 				"result": false
 			}]}`)),
 			fail: true,
 		},
+		{
+			description: "--fail-non-empty with undefined result",
+			files: map[string]string{
+				"files/test.json": `{"foo": 7}`,
+				"bundle/x.rego": `package system
+
+		test_fun := x {
+			x = false
+			x
+		}
+
+		undefined_test {
+			test_fun
+		}`,
+			},
+			expectError: false,
+			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
+				"path": "/files/test.json",
+				"error": {
+					"code": "opa_undefined_error",
+					"message": "/system/main decision was undefined"
+				}
+			}]}`)),
+			failNonEmpty: true,
+		},
+		{
+			description: "--fail-non-empty with populated result",
+			files: map[string]string{
+				"files/test.json": `{"foo": 7}`,
+				"bundle/x.rego": `package system
+
+		main["hello"]`,
+			},
+			decision:    "",
+			expectError: true,
+			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
+				"path": "/files/test.json",
+				"result": ["hello"]
+			}]}`)),
+			failNonEmpty: true,
+		},
+		{
+			description: "--fail-non-empty with true boolean result",
+			files: map[string]string{
+				"files/test.json": `{"foo": 7}`,
+				"bundle/x.rego": `package fail.non.empty.flag
+
+		some_function {
+			input.foo == 7
+		}
+
+		default fail_test := false
+		fail_test {
+			some_function
+		}`,
+			},
+			decision:    "fail/non/empty/flag/fail_test",
+			expectError: true,
+			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
+				"path": "/files/test.json",
+				"result": true
+			}]}`)),
+			failNonEmpty: true,
+		},
+		{
+			description: "--fail-non-empty with false boolean result",
+			files: map[string]string{
+				"files/test.json": `{"foo": 7}`,
+				"bundle/x.rego": `package fail.non.empty.flag
+
+		default fail_test := false
+		fail_test {
+			false
+		}`,
+			},
+			decision:    "fail/non/empty/flag/fail_test",
+			expectError: true,
+			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
+				"path": "/files/test.json",
+				"result": false
+			}]}`)),
+			failNonEmpty: true,
+		},
+		{
+			description: "--fail-non-empty with an empty array",
+			files: map[string]string{
+				"files/test.json": `{"foo": 7}`,
+				"bundle/x.rego": `package fail.non.empty.flag
+
+		default fail_test := ["something", "hello"]
+		fail_test := [] if {
+			input.foo == 7
+		}`,
+			},
+			decision:    "fail/non/empty/flag/fail_test",
+			expectError: false,
+			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
+				"path": "/files/test.json",
+				"result": []
+			}]}`)),
+			failNonEmpty: true,
+		},
+		{
+			description: "--fail-non-empty for an empty set coming from a partial rule",
+			files: map[string]string{
+				"files/test.json": `{"foo": 7}`,
+				"bundle/x.rego": `package fail.non.empty.flag
+
+		fail_test[message] {
+			false
+			message := "not gonna happen"
+		}`,
+			},
+			decision:    "fail/non/empty/flag/fail_test",
+			expectError: false,
+			expected: util.MustUnmarshalJSON([]byte(`{"result": [{
+				"path": "/files/test.json",
+				"result": []
+			}]}`)),
+			failNonEmpty: true,
+		},
 	}
 
 	for _, tt := range tests {
@@ -348,6 +511,7 @@ func TestFailFlagCases(t *testing.T) {
 		}
 		params.FailDefined = tt.failDefined
 		params.Fail = tt.fail
+		params.FailNonEmpty = tt.failNonEmpty
 
 		err := runExec(params)
 		if err != nil && !tt.expectError {
diff --git a/cmd/internal/exec/exec.go b/cmd/internal/exec/exec.go
index e7f9f10c93f..a8490fdc1e3 100644
--- a/cmd/internal/exec/exec.go
+++ b/cmd/internal/exec/exec.go
@@ -27,8 +27,9 @@ type Params struct {
 	LogTimestampFormat string   // log timestamp format for plugins
 	BundlePaths        []string // explicit paths of bundles to inject into the configuration
 	Decision           string   // decision to evaluate (overrides default decision set by configuration)
-	Fail               bool     // exits with non-zero exit code on undefined/empty result and errors
-	FailDefined        bool     // exits with non-zero exit code on defined/non-empty result and errors
+	Fail               bool     // exits with non-zero exit code on undefined policy decision or empty policy decision result or other errors
+	FailDefined        bool     // exits with non-zero exit code on 'not undefined' policy decision or 'not empty' policy decision result or other errors
+	FailNonEmpty       bool     // exits with non-zero exit code on non-empty set (array) results
 }
 
 func NewParams(w io.Writer) *Params {
@@ -44,6 +45,12 @@ func (p *Params) validateParams() error {
 	if p.Fail && p.FailDefined {
 		return errors.New("specify --fail or --fail-defined but not both")
 	}
+	if p.FailNonEmpty && p.Fail {
+		return errors.New("specify --fail-non-empty or --fail but not both")
+	}
+	if p.FailNonEmpty && p.FailDefined {
+		return errors.New("specify --fail-non-empty or --fail-defined but not both")
+	}
 
 	return nil
 }
@@ -79,7 +86,7 @@ func Exec(ctx context.Context, opa *sdk.OPA, params *Params) error {
 			if err2 := r.Report(result{Path: item.Path, Error: err}); err2 != nil {
 				return err2
 			}
-			if params.FailDefined || params.Fail {
+			if params.FailDefined || params.Fail || params.FailNonEmpty {
 				errorCount++
 			}
 			continue
@@ -96,7 +103,7 @@ func Exec(ctx context.Context, opa *sdk.OPA, params *Params) error {
 			if err2 := r.Report(result{Path: item.Path, Error: err}); err2 != nil {
 				return err2
 			}
-			if (params.FailDefined && !sdk.IsUndefinedErr(err)) || (params.Fail && sdk.IsUndefinedErr(err)) {
+			if (params.FailDefined && !sdk.IsUndefinedErr(err)) || (params.Fail && sdk.IsUndefinedErr(err)) || (params.FailNonEmpty && !sdk.IsUndefinedErr(err)) {
 				errorCount++
 			}
 			continue
@@ -109,17 +116,27 @@ func Exec(ctx context.Context, opa *sdk.OPA, params *Params) error {
 		if (params.FailDefined && rs.Result != nil) || (params.Fail && rs.Result == nil) {
 			failCount++
 		}
-	}
+		if params.FailNonEmpty && rs.Result != nil {
+			// Check if rs.Result is an array and has one or more members
+			resultArray, isArray := rs.Result.([]interface{})
+			if (!isArray) || (isArray && (len(resultArray) > 0)) {
+				failCount++
+			}
+		}
+	}
 
 	if err := r.Close(); err != nil {
 		return err
 	}
 
-	if (params.Fail || params.FailDefined) && (failCount > 0 || errorCount > 0) {
+	if (params.Fail || params.FailDefined || params.FailNonEmpty) && (failCount > 0 || errorCount > 0) {
 		if params.Fail {
 			return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail is set", failCount, errorCount)
 		}
-		return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail-defined is set", failCount, errorCount)
+		if params.FailDefined {
+			return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail-defined is set", failCount, errorCount)
+		}
+		return fmt.Errorf("there were %d failures and %d errors counted in the results list, and --fail-non-empty is set", failCount, errorCount)
 	}
 
 	return nil
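The difference between the existing flags and the new `--fail-non-empty` flag comes down to the shape of the decision result. A partial (set-generating) rule whose body never succeeds still yields a defined result, namely the empty set, which `opa exec` reports as `[]`: `--fail-defined` counts that as a failure, while `--fail-non-empty` does not. A minimal policy sketch (not part of this change; the rule and message are illustrative) mirroring the "empty set coming from a partial rule" test case above:

```rego
package fail.non.empty.flag

# Partial set rule: when the body never succeeds, the decision is the empty
# set, which `opa exec` reports as [] (defined, but empty).
deny[message] {
	input.foo > 10
	message := sprintf("foo is too large: %v", [input.foo])
}
```

Evaluating `fail/non/empty/flag/deny` against `{"foo": 7}` returns `[]`, so `--fail-non-empty` exits zero where `--fail-defined` would not; against `{"foo": 42}` both flags produce a non-zero exit.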
diff --git a/cmd/run_test.go b/cmd/run_test.go
index a0ad554e969..bc83a9862e5 100644
--- a/cmd/run_test.go
+++ b/cmd/run_test.go
@@ -50,7 +50,7 @@ func TestRunServerBase(t *testing.T) {
 
 func TestRunServerWithDiagnosticAddr(t *testing.T) {
 	params := newTestRunParams()
-	params.rt.DiagnosticAddrs = &[]string{":0"}
+	params.rt.DiagnosticAddrs = &[]string{"localhost:0"}
 
 	ctx, cancel := context.WithCancel(context.Background())
 
 	rt, err := initRuntime(ctx, params, nil, false)
@@ -194,7 +194,7 @@ func TestInitRuntimeAddrSetByUser(t *testing.T) {
 	}
 
 	params := newTestRunParams()
-	params.rt.Addrs = &[]string{":0"}
+	params.rt.Addrs = &[]string{"localhost:0"}
 
 	ctx, cancel := context.WithCancel(context.Background())
 
 	rt, err := initRuntime(ctx, params, []string{}, cmd.Flags().Changed("addr"))
@@ -214,7 +214,7 @@ func TestInitRuntimeAddrSetByUser(t *testing.T) {
 func newTestRunParams() runCmdParams {
 	params := newRunParams()
 	params.rt.GracefulShutdownPeriod = 1
-	params.rt.Addrs = &[]string{":0"}
+	params.rt.Addrs = &[]string{"localhost:0"}
 	params.rt.DiagnosticAddrs = &[]string{}
 	params.serverMode = true
 	return params
diff --git a/cmd/test_test.go b/cmd/test_test.go
index f1dfe31e395..fa90e271bfd 100644
--- a/cmd/test_test.go
+++ b/cmd/test_test.go
@@ -219,6 +219,51 @@ func failTrace(t *testing.T) []*topdown.Event {
 	return *tracer
 }
 
+// Assert that ignore flag is correctly used when the bundle flag is not activated
+func TestIgnoreFlag(t *testing.T) {
+	files := map[string]string{
+		"/test.rego":   "package test\n p := input.foo == 42\ntest_p {\n p with input.foo as 42\n}",
+		"/broken.rego": "package foo\n bar {",
+	}
+
+	var exitCode int
+	test.WithTempFS(files, func(root string) {
+		testParams := newTestCommandParams()
+		testParams.count = 1
+		testParams.errOutput = io.Discard
+		testParams.bundleMode = false
+		testParams.ignore = []string{"broken.rego"}
+
+		exitCode, _ = opaTest([]string{root}, testParams)
+	})
+
+	if exitCode > 0 {
+		t.Fatalf("unexpected exit code: %d", exitCode)
+	}
+}
+
+// Assert that ignore flag is correctly used when the bundle flag is activated
+func TestIgnoreFlagWithBundleFlag(t *testing.T) {
+	files := map[string]string{
+		"/test.rego":   "package test\n p := input.foo == 42\ntest_p {\n p with input.foo as 42\n}",
+		"/broken.rego": "package foo\n bar {",
+	}
+
+	var exitCode int
+	test.WithTempFS(files, func(root string) {
+		testParams := newTestCommandParams()
+		testParams.count = 1
+		testParams.errOutput = io.Discard
+		testParams.bundleMode = true
+		testParams.ignore = []string{"broken.rego"}
+		exitCode, _ = opaTest([]string{root}, testParams)
+	})
+
+	if exitCode > 0 {
+		t.Fatalf("unexpected exit code: %d", exitCode)
+	}
+}
+
 func testSchemasAnnotation(rego string) (int, error) {
 
 	files := map[string]string{
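For orientation before the documentation changes below: the `data.integrations.deny` rules exercised by the Rego tests at the top of this diff are defined outside this excerpt, but a required-fields check producing the "missing required fields" messages asserted there could plausibly be written along these lines (the rule body, the `required_fields` set, and the entry shape are assumptions, not the actual policy):

```rego
package integrations

import future.keywords.in

# Assumed required front matter for software pages; the tests above expect
# messages like "software terraform missing required fields: layout, link, title".
required_fields := {"layout", "link", "title"}

deny[{"key": "fields", "message": message}] {
	some name, software in input.softwares
	missing := sort(required_fields - object.keys(software))
	count(missing) > 0
	message := sprintf("software %s missing required fields: %s", [name, concat(", ", missing)])
}
```

The front matter described in the README changes that follow is what such checks (and their organization counterparts) would validate.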
diff --git a/docs/README.md b/docs/README.md
index a192915ce97..762758daa43 100644
--- a/docs/README.md
+++ b/docs/README.md
@@ -14,8 +14,8 @@ the website.
 directory. This content is versioned for each release and should have all images
 and code snippets alongside the markdown content files.
 
-[website/data/integrations.yaml](./website/data/integrations.yaml) - Source for the
-integrations index. See [Integration Index](#integration-index) below for more details.
+[content/integrations/](./content/integrations) - Source for the
+OPA Ecosystem page. See [OPA Ecosystem](#opa-ecosystem) below for more details.
 
 ## Website Components
 
@@ -40,9 +40,19 @@ and are build into a `_redirects` file when the Hugo build happens via
 
 ## Site updates
 
 The OPA site is automatically published using [Netlify](https://netlify.com). Whenever
-changes in this directory are pushed to `master`, the site will be re-built and
+changes in this directory are pushed to `main`, the site will be re-built and
 re-deployed.
 
+**Note:** The site is built for many versions of the docs, which introduces some
+complexities to be aware of when making changes to the site's layout:
+
+* Updates to the [site's templates or styles/](./website/) are applied to all versions
+  (edge, latest and all versions) when merged to `main`.
+* Site [data](./website/data) is treated in the same way, so updates to data files also
+  apply to all versions as soon as they are merged.
+* Site [content/](./content/), when merged to `main`, is only shown on `edge` until the
+  next release.
+
 ## How to Edit and Test
 
 ### Preview Markdown `content` (*.md)
@@ -287,73 +297,86 @@ another group's module when evaluating (e.g. so that they can be imported).
 > If a query isn't specified for the output's group, when other modules are included the
 default becomes `data` instead of `data.`.
 
-# Integration Index
+# OPA Ecosystem
 
-The integration index makes it easy to find either a specific integration with OPA
-or to browse the integrations with OPA within a particular category. And it pulls
-information about that integration (e.g. blogs, videos, tutorials, code) into a
-single place while allowing integration authors to maintain the code wherever they like.
+The [OPA Ecosystem](https://www.openpolicyagent.org/docs/latest/ecosystem/)
+makes it easy to find either a specific integration with OPA
+or to browse the integrations with OPA within a particular category. It pulls
+information about different integrations (e.g. blogs, videos, tutorials, code) into a
+single place while allowing integration authors to update the docs content as needed.
 
 ## Schema
 
-The schema of integrations.yaml has the following highlevel entries, each of which is self-explanatory.
-- integrations
-- organizations
-- software
-
-Each entry is an object where keys are unique identifiers for each subentry.
-Organizations and Software are self-explanatory by inspection. The schema for integrations is as follows.
-
-- title: string
-- description: string
-- software: array of strings
-- labels: collection of key/value pairs.
-- tutorials: array of links
-- code: array of links
-- inventors: array of either
-  - string (organization name)
-  - object with fields
-    - name: string
-    - organization: string
-- videos: array of either
-  - link
-  - object with fields
-    - title: string
-    - speakers: array of name/organization objects
-    - venue: string
-    - link: string
-- blogs: array of links
-
-The UI for this is currently hosted at [https://openpolicyagent.org/docs/latest/ecosystem/](https://openpolicyagent.org/docs/latest/ecosystem/)
-
-The future plan is to use the following labels to generate categories of integrations.
-
-- layer: which layer of the stack does this belong to
-- category: which kind of component within that layer is this
-- type: what kind of integration this is. Either `enforcement` or `poweredbyopa`. `enforcement` is the default
-  if `type` is missing. `poweredbyopa` is intended to be integrations built using OPA that are not tied to a
-  particular layer of the stack. This distinction is the most ambiguous and may change.
-
-As of now the labels are only displayed for each entry.
+Source information for the OPA Ecosystem is stored in the following places:
+
+- [content/integrations/](./content/integrations) - each file creates a page in the OPA Ecosystem for a particular integration.
+- [content/organizations/](./content/organizations) - each file is a page for organizations and companies associated with integrations.
+- [content/softwares/](./content/softwares) - each file is for software categories related to integrations.
+
+Integrations should have a file in `content/integrations/` with the following schema:
+
+```md
+---
+layout: integration-single # required to be set and to this value
+title:
+software:
+-
+-
+inventors:
+-
+-
+tutorials: # optional, links to tutorials for the integration
+- https://example.com/tutorial
+code: # optional, links to code for the integration
+- https://github.com/...
+blogs: # optional, links to blog posts for the integration
+- https://example.com/blog/1
+videos: # optional, links to videos for the integration
+- title: