Skip to content

Commit

Permalink
Merge branch 'main' into 861-finding-reason
Browse files Browse the repository at this point in the history
  • Loading branch information
brandtkeller authored Dec 20, 2024
2 parents d23ea8b + be2a063 commit b24fc58
Show file tree
Hide file tree
Showing 18 changed files with 483 additions and 33 deletions.
3 changes: 3 additions & 0 deletions docs/cli-commands/lula_validate.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ To run validations and automatically confirm execution
lula dev validate -f ./oscal-component.yaml --confirm-execution
To run validations non-interactively (no execution)
lula dev validate -f ./oscal-component.yaml --non-interactive
To run validations and their tests, generating a test-results file
lula dev validate -f ./oscal-component.yaml --run-tests
```

Expand All @@ -40,6 +42,7 @@ To run validations non-interactively (no execution)
-f, --input-file string the path to the target OSCAL component definition
--non-interactive run the command non-interactively
-o, --output-file string the path to write assessment results. Creates a new file or appends to existing files
--run-tests run tests specified in the validation, writes to test-results-<timestamp>.yaml in output directory
--save-resources saves the resources to 'resources' directory at assessment-results level
-s, --set strings set a value in the template data
-t, --target string the specific control implementations or framework to validate against
Expand Down
45 changes: 42 additions & 3 deletions docs/reference/testing.md
Original file line number Diff line number Diff line change
Expand Up @@ -128,17 +128,56 @@ Which will delete the existing labels map and then add an empty map, such that t

## Executing Tests

Tests can be executed by specifying the `--run-tests` flag when running `lula dev validate`. E.g.,
Tests can be executed by specifying the `--run-tests` flag when running both `lula validate` and `lula dev validate`; however, the output of each differs slightly.

### lula validate
When running `lula validate ... --run-tests`, a test results summary will be printed to the console, while the test results yaml file will be written to the same directory as the output data (by default the directory of the `component-definition` source file, or the directory specified by the `--output-file` flag).

E.g., Running validate on a component-definition with two validations, one with tests and one without:
```sh
lula validate -f ./component.yaml --run-tests
```

Will print the test results summary to the console as:
```sh
Test Results: 1 passing, 0 failing, 1 missing
```

And will print a test results yaml file to the same directory as the output data:
```yaml
61ec8808-f0f4-4b35-9a5b-4d7516053534:
name: test-validation
test-results: []
82099492-0601-4287-a2d1-cc94c49dca9b:
name: test-validation-with-tests
test-results:
- test-name: change-image-name
pass: true
result: not-satisfied
- test-name: no-containers
pass: true
result: not-satisfied
```
> Note that `61ec8808-f0f4-4b35-9a5b-4d7516053534` is the UUID of the validation without tests, and `82099492-0601-4287-a2d1-cc94c49dca9b` is the UUID of the validation with tests.

### lula dev validate
When executing `lula dev validate ... --run-tests`, the test results data will be written directly to console.

E.g., Running dev validate on a Lula validation with two tests:
```sh
lula dev validate -f ./validation.yaml --run-tests
```

This will execute the tests and print the test results to the console as:
```sh
✔ Pass: change-image-name
• Result: not-satisfied
✔ Pass: no-containers
• Result: not-satisfied
```

To aid in debugging, the `--print-test-resources` flag can be used to print the resources used for each test to the validation directory; the filenames will be `<test-name>.json`. E.g.,

```sh
lula dev validate -f ./validation.yaml --run-tests --print-test-resources
```

4 changes: 4 additions & 0 deletions src/cmd/dev/validate.go
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,10 @@ func DevValidateCommand() *cobra.Command {
if err != nil {
return fmt.Errorf("error running tests")
}
if testReport == nil {
message.Debug("No tests defined for validation")
return nil
}
// Print the test report using messages
testReport.PrintReport()

Expand Down
11 changes: 9 additions & 2 deletions src/cmd/validate/validate.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,13 +7,14 @@ import (
"path/filepath"

oscalTypes "github.com/defenseunicorns/go-oscal/src/types/oscal-1-1-3"
"github.com/spf13/cobra"

"github.com/defenseunicorns/lula/src/cmd/common"
"github.com/defenseunicorns/lula/src/pkg/common/composition"
"github.com/defenseunicorns/lula/src/pkg/common/oscal"
"github.com/defenseunicorns/lula/src/pkg/common/validation"
"github.com/defenseunicorns/lula/src/pkg/message"
"github.com/defenseunicorns/lula/src/types"
"github.com/spf13/cobra"
)

var validateHelp = `
Expand All @@ -27,6 +28,8 @@ To run validations and automatically confirm execution
lula dev validate -f ./oscal-component.yaml --confirm-execution
To run validations non-interactively (no execution)
lula dev validate -f ./oscal-component.yaml --non-interactive
To run validations and their tests, generating a test-results file
lula dev validate -f ./oscal-component.yaml --run-tests
`

var (
Expand All @@ -48,6 +51,7 @@ func ValidateCommand() *cobra.Command {
confirmExecution bool
runNonInteractively bool
saveResources bool
runTests bool
)

cmd := &cobra.Command{
Expand Down Expand Up @@ -81,8 +85,10 @@ func ValidateCommand() *cobra.Command {
// Set up the validator
validator, err := validation.New(
validation.WithComposition(composer, inputFile),
validation.WithResourcesDir(saveResources, filepath.Dir(outputFile)),
validation.WithOutputDir(filepath.Dir(outputFile)),
validation.WithSaveResources(saveResources),
validation.WithAllowExecution(confirmExecution, runNonInteractively),
validation.WithTests(runTests),
)
if err != nil {
return fmt.Errorf("error creating new validator: %v", err)
Expand Down Expand Up @@ -122,6 +128,7 @@ func ValidateCommand() *cobra.Command {
cmd.Flags().BoolVar(&confirmExecution, "confirm-execution", false, "confirm execution scripts run as part of the validation")
cmd.Flags().BoolVar(&runNonInteractively, "non-interactive", false, "run the command non-interactively")
cmd.Flags().BoolVar(&saveResources, "save-resources", false, "saves the resources to 'resources' directory at assessment-results level")
cmd.Flags().BoolVar(&runTests, "run-tests", false, "run tests specified in the validation, writes to test-results-<timestamp>.yaml in output directory")
cmd.Flags().StringSliceVarP(&setOpts, "set", "s", []string{}, "set a value in the template data")

return cmd
Expand Down
12 changes: 12 additions & 0 deletions src/pkg/common/validation-store/testdata/data.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
{
"containers": [
{
"name": "test-container1",
"image": "nginx"
},
{
"name": "test-container2",
"image": "nginx"
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
metadata:
name: test-validation-with-tests
uuid: 82099492-0601-4287-a2d1-cc94c49dca9b
domain:
type: file
file-spec:
filepaths:
- name: 'data'
path: 'data.json'
provider:
type: opa
opa-spec:
rego: |
package validate
import rego.v1
default validate = false
validate if {
every container in input.data.containers {
container.image == "nginx"
}
}
tests:
- name: change-image-name
expected-result: not-satisfied
changes:
- path: data.containers.[name=test-container1].image
type: update
value: other
- name: no-containers
expected-result: not-satisfied
changes:
- path: data.containers
type: delete
23 changes: 23 additions & 0 deletions src/pkg/common/validation-store/testdata/validation.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
metadata:
name: test-validation
uuid: 61ec8808-f0f4-4b35-9a5b-4d7516053534
domain:
type: file
file-spec:
filepaths:
- name: 'data'
path: 'data.json'
provider:
type: opa
opa-spec:
rego: |
package validate
import rego.v1
default validate = false
validate if {
every container in input.data.containers {
container.image == "nginx"
}
}
39 changes: 37 additions & 2 deletions src/pkg/common/validation-store/validation-store.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import (
"github.com/defenseunicorns/go-oscal/src/pkg/files"
"github.com/defenseunicorns/go-oscal/src/pkg/uuid"
oscalTypes "github.com/defenseunicorns/go-oscal/src/types/oscal-1-1-3"

"github.com/defenseunicorns/lula/src/pkg/common"
"github.com/defenseunicorns/lula/src/pkg/common/oscal"
"github.com/defenseunicorns/lula/src/pkg/message"
Expand Down Expand Up @@ -107,7 +108,7 @@ func (v *ValidationStore) DryRun() (executable bool, msg string) {
}

// RunValidations runs the validations in the store
func (v *ValidationStore) RunValidations(ctx context.Context, confirmExecution, saveResources bool, resourcesDir string) []oscalTypes.Observation {
func (v *ValidationStore) RunValidations(ctx context.Context, confirmExecution, saveResources bool, outputsDir string) []oscalTypes.Observation {
observations := make([]oscalTypes.Observation, 0, len(v.validationMap))

for k, val := range v.validationMap {
Expand Down Expand Up @@ -149,7 +150,7 @@ func (v *ValidationStore) RunValidations(ctx context.Context, confirmExecution,
resourceUuid := uuid.NewUUID()
// Create a remote resource file -> create directory 'resources' in the assessment-results directory -> create file with UUID as name
filename := fmt.Sprintf("%s.json", resourceUuid)
resourceFile := filepath.Join(resourcesDir, "resources", filename)
resourceFile := filepath.Join(outputsDir, "resources", filename)
err := os.MkdirAll(filepath.Dir(resourceFile), os.ModePerm) // #nosec G301
if err != nil {
message.Debugf("Error creating directory for remote resource: %v", err)
Expand Down Expand Up @@ -204,3 +205,37 @@ func (v *ValidationStore) GetRelatedObservation(id string) (oscalTypes.RelatedOb
ObservationUuid: observation.UUID,
}, pass
}

// RunTests executes any tests defined on the validations in the validation store
func (v *ValidationStore) RunTests(ctx context.Context) map[string]types.LulaValidationTestReport {
testReportMap := make(map[string]types.LulaValidationTestReport)

for uuid, validation := range v.validationMap {
// TODO: should test results be saved, e.g., if printResources is true?
testReport, err := validation.RunTests(ctx, false)
if err != nil {
testReportMap[uuid] = types.LulaValidationTestReport{
Name: validation.Name,
TestResults: []*types.LulaValidationTestResult{
{
TestName: "Error running validation",
Pass: false,
Result: err.Error(),
},
},
}
continue
}

// If no tests are defined, return an empty report for the validation
if testReport == nil {
testReportMap[uuid] = types.LulaValidationTestReport{
Name: validation.Name,
}
continue
}

testReportMap[uuid] = *testReport
}
return testReportMap
}
36 changes: 36 additions & 0 deletions src/pkg/common/validation-store/validation-store_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import (

"github.com/defenseunicorns/go-oscal/src/pkg/uuid"
oscalTypes "github.com/defenseunicorns/go-oscal/src/types/oscal-1-1-3"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/defenseunicorns/lula/src/pkg/common"
Expand Down Expand Up @@ -242,3 +243,38 @@ func TestGetRelatedObservation(t *testing.T) {
})
}
}

func TestRunTests(t *testing.T) {
message.NoProgress = true
ctx := context.Background()
v := validationstore.NewValidationStore()

validation := generateValidation(t, "./testdata/validation.yaml")
validationWithTests := generateValidation(t, "./testdata/validation-with-tests.yaml")

idValidation, err := v.AddValidation(&validation)
require.NoError(t, err)
idValidationWithTests, err := v.AddValidation(&validationWithTests)
require.NoError(t, err)

// Run validations to populate domain resources for tests
v.RunValidations(ctx, true, false, "")

// Run tests
testReport := v.RunTests(ctx)

// Validate the content of the test report
reportValidation, ok := testReport[idValidation]
require.True(t, ok)

// no tests defined, should be empty
assert.Equal(t, 0, len(reportValidation.TestResults))

reportValidationWithTests, ok := testReport[idValidationWithTests]
require.True(t, ok)

// 2 tests defined, should have 2 results, both passing
assert.Equal(t, 2, len(reportValidationWithTests.TestResults))
assert.True(t, reportValidationWithTests.TestResults[0].Pass)
assert.True(t, reportValidationWithTests.TestResults[1].Pass)
}
20 changes: 16 additions & 4 deletions src/pkg/common/validation/options.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,11 +38,23 @@ func WithAllowExecution(confirmExecution, runNonInteractively bool) Option {
}
}

func WithResourcesDir(saveResources bool, rootDir string) Option {
func WithOutputDir(rootDir string) Option {
return func(v *Validator) error {
if saveResources {
v.resourcesDir = rootDir
}
v.outputsDir = rootDir
return nil
}
}

func WithSaveResources(saveResources bool) Option {
return func(v *Validator) error {
v.saveResources = saveResources
return nil
}
}

func WithTests(runTests bool) Option {
return func(v *Validator) error {
v.runTests = runTests
return nil
}
}
Loading

0 comments on commit b24fc58

Please sign in to comment.