perf(misconf): replace with post-analyzers (#4090)

Signed-off-by: Simar <simar@linux.com>
Co-authored-by: Simar <simar@linux.com>
Teppei Fukuda
2023-04-23 19:22:46 +03:00
committed by GitHub
parent 76662d5dd7
commit bd0c60364a
87 changed files with 1168 additions and 2170 deletions

1
go.mod
View File

@@ -21,7 +21,6 @@ require (
github.com/aquasecurity/go-pep440-version v0.0.0-20210121094942-22b2f8951d46
github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492
github.com/aquasecurity/loading v0.0.5
github.com/aquasecurity/memoryfs v1.4.4
github.com/aquasecurity/table v1.8.0
github.com/aquasecurity/testdocker v0.0.0-20230111101738-e741bda259da
github.com/aquasecurity/tml v0.6.1

2
go.sum
View File

@@ -333,8 +333,6 @@ github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492 h1:rcEG5HI
github.com/aquasecurity/go-version v0.0.0-20210121072130-637058cfe492/go.mod h1:9Beu8XsUNNfzml7WBf3QmyPToP1wm1Gj/Vc5UJKqTzU=
github.com/aquasecurity/loading v0.0.5 h1:2iq02sPSSMU+ULFPmk0v0lXnK/eZ2e0dRAj/Dl5TvuM=
github.com/aquasecurity/loading v0.0.5/go.mod h1:NSHeeq1JTDTFuXAe87q4yQ2DX57pXiaQMqq8Zm9HCJA=
github.com/aquasecurity/memoryfs v1.4.4 h1:HdkShi6jjKZLAgQ+6/CXXDB/zwH2hAMp2oklo9w5t7A=
github.com/aquasecurity/memoryfs v1.4.4/go.mod h1:kLxvGxhdyG0zmlFUJB6VAkLn4WRPOycLW/UYO6dspao=
github.com/aquasecurity/table v1.8.0 h1:9ntpSwrUfjrM6/YviArlx/ZBGd6ix8W+MtojQcM7tv0=
github.com/aquasecurity/table v1.8.0/go.mod h1:eqOmvjjB7AhXFgFqpJUEE/ietg7RrMSJZXyTN8E/wZw=
github.com/aquasecurity/testdocker v0.0.0-20230111101738-e741bda259da h1:pj/adfN0Wbzc0H8YkI1nX5K92wOU5/1/1TRuuc0y5Nw=

View File

@@ -0,0 +1,11 @@
package testutil
import (
"runtime"
"github.com/samber/lo"
)
var ErrNotExist string = lo.Ternary(runtime.GOOS == "windows",
"The system cannot find the file specified.",
"no such file or directory")

View File

@@ -16,13 +16,13 @@ import (
tcache "github.com/aquasecurity/trivy/pkg/cache"
"github.com/aquasecurity/trivy/pkg/commands/operation"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
"github.com/aquasecurity/trivy/pkg/fanal/cache"
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/flag"
"github.com/aquasecurity/trivy/pkg/javadb"
"github.com/aquasecurity/trivy/pkg/log"
"github.com/aquasecurity/trivy/pkg/misconf"
"github.com/aquasecurity/trivy/pkg/module"
"github.com/aquasecurity/trivy/pkg/report"
pkgReport "github.com/aquasecurity/trivy/pkg/report"
@@ -562,7 +562,7 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi
}
// ScannerOption is filled only when config scanning is enabled.
var configScannerOptions config.ScannerOption
var configScannerOptions misconf.ScannerOption
if opts.Scanners.Enabled(types.MisconfigScanner) || opts.ImageConfigScanners.Enabled(types.MisconfigScanner) {
log.Logger.Info("Misconfiguration scanning is enabled")
@@ -577,7 +577,7 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi
log.Logger.Debug("Policies successfully loaded from disk")
disableEmbedded = true
}
configScannerOptions = config.ScannerOption{
configScannerOptions = misconf.ScannerOption{
Trace: opts.Trace,
Namespaces: append(opts.PolicyNamespaces, defaultPolicyNamespaces...),
PolicyPaths: append(opts.PolicyPaths, downloadedPolicyPaths...),

View File

@@ -20,6 +20,7 @@ import (
"github.com/aquasecurity/trivy/pkg/fanal/log"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/mapfs"
"github.com/aquasecurity/trivy/pkg/misconf"
"github.com/aquasecurity/trivy/pkg/syncx"
)
@@ -45,6 +46,7 @@ type AnalyzerOptions struct {
Slow bool
FilePatterns []string
DisabledAnalyzers []Type
MisconfScannerOption misconf.ScannerOption
SecretScannerOption SecretScannerOption
LicenseScannerOption LicenseScannerOption
}
@@ -153,13 +155,11 @@ type AnalysisResult struct {
Repository *types.Repository
PackageInfos []types.PackageInfo
Applications []types.Application
Misconfigurations []types.Misconfiguration
Secrets []types.Secret
Licenses []types.LicenseFile
SystemInstalledFiles []string // A list of files installed by OS package manager
// Files holds necessary file contents for the respective post-handler
Files map[types.HandlerType][]types.File
// Digests contains SHA-256 digests of unpackaged files
// used to search for SBOM attestation.
Digests map[string]string
@@ -174,14 +174,13 @@ type AnalysisResult struct {
func NewAnalysisResult() *AnalysisResult {
result := new(AnalysisResult)
result.Files = map[types.HandlerType][]types.File{}
return result
}
func (r *AnalysisResult) isEmpty() bool {
return lo.IsEmpty(r.OS) && r.Repository == nil && len(r.PackageInfos) == 0 && len(r.Applications) == 0 &&
len(r.Secrets) == 0 && len(r.Licenses) == 0 && len(r.SystemInstalledFiles) == 0 &&
r.BuildInfo == nil && len(r.Files) == 0 && len(r.Digests) == 0 && len(r.CustomResources) == 0
len(r.Misconfigurations) == 0 && len(r.Secrets) == 0 && len(r.Licenses) == 0 && len(r.SystemInstalledFiles) == 0 &&
r.BuildInfo == nil && len(r.Digests) == 0 && len(r.CustomResources) == 0
}
func (r *AnalysisResult) Sort() {
@@ -213,11 +212,10 @@ func (r *AnalysisResult) Sort() {
return r.CustomResources[i].FilePath < r.CustomResources[j].FilePath
})
for _, files := range r.Files {
sort.Slice(files, func(i, j int) bool {
return files[i].Path < files[j].Path
})
}
// Misconfigurations
sort.Slice(r.Misconfigurations, func(i, j int) bool {
return r.Misconfigurations[i].FilePath < r.Misconfigurations[j].FilePath
})
// Secrets
sort.Slice(r.Secrets, func(i, j int) bool {
@@ -274,14 +272,7 @@ func (r *AnalysisResult) Merge(new *AnalysisResult) {
r.Digests = lo.Assign(r.Digests, new.Digests)
}
for t, files := range new.Files {
if v, ok := r.Files[t]; ok {
r.Files[t] = append(v, files...)
} else {
r.Files[t] = files
}
}
r.Misconfigurations = append(r.Misconfigurations, new.Misconfigurations...)
r.Secrets = append(r.Secrets, new.Secrets...)
r.Licenses = append(r.Licenses, new.Licenses...)
r.SystemInstalledFiles = append(r.SystemInstalledFiles, new.SystemInstalledFiles...)
@@ -400,6 +391,9 @@ func (ag AnalyzerGroup) AnalyzerVersions() Versions {
}
}
// AnalyzeFile determines which files are required by the analyzers based on the file name and attributes,
// and passes only those files to the analyzer for analysis.
// This function may be called concurrently and must be thread-safe.
func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, limit *semaphore.Weighted, result *AnalysisResult,
dir, filePath string, info os.FileInfo, opener Opener, disabled []Type, opts AnalysisOptions) error {
if info.IsDir() {
@@ -454,19 +448,24 @@ func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, lim
return nil
}
// RequiredPostAnalyzers returns a list of analyzer types that require the given file.
func (ag AnalyzerGroup) RequiredPostAnalyzers(filePath string, info os.FileInfo) []Type {
if info.IsDir() {
return nil
}
var postAnalyzerTypes []Type
for _, a := range ag.postAnalyzers {
if a.Required(filePath, info) {
if ag.filePatternMatch(a.Type(), filePath) || a.Required(filePath, info) {
postAnalyzerTypes = append(postAnalyzerTypes, a.Type())
}
}
return postAnalyzerTypes
}
// PostAnalyze passes a virtual filesystem containing only the required files
// to the respective post-analyzer.
// The obtained results are merged into the "result".
// This function may be called concurrently and must be thread-safe.
func (ag AnalyzerGroup) PostAnalyze(ctx context.Context, files *syncx.Map[Type, *mapfs.FS], result *AnalysisResult, opts AnalysisOptions) error {
for _, a := range ag.postAnalyzers {
fsys, ok := files.Load(a.Type())
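
Taken together, RegisterPostAnalyzer, RequiredPostAnalyzers and PostAnalyze imply the following rough contract for a post-analyzer. This is a sketch reconstructed from the calls above, not the verbatim upstream definition; the real types may carry additional fields and methods:

package analyzer

import (
	"context"
	"io/fs"
	"os"
)

// Rough shape of a post-analyzer as used in this diff.
type PostAnalyzer interface {
	PostAnalyze(ctx context.Context, input PostAnalysisInput) (*AnalysisResult, error)
	Required(filePath string, info os.FileInfo) bool
	Type() Type
	Version() int
}

// PostAnalysisInput bundles a virtual filesystem holding only the files that
// RequiredPostAnalyzers selected for this analyzer type.
type PostAnalysisInput struct {
	FS fs.FS
}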

View File

@@ -1,9 +1,10 @@
package all
import (
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/azurearm"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/cloudformation"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/dockerfile"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/helm"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/json"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/k8s"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/terraform"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/yaml"
)

View File

@@ -0,0 +1,38 @@
package azurearm
import (
"os"
"path/filepath"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/misconf"
)
const (
version = 1
analyzerType = analyzer.TypeAzureARM
)
func init() {
analyzer.RegisterPostAnalyzer(analyzerType, newAzureARMConfigAnalyzer)
}
// azureARMConfigAnalyzer is an analyzer for detecting misconfigurations in Azure ARM templates.
// It embeds config.Analyzer so it can implement analyzer.PostAnalyzer.
type azureARMConfigAnalyzer struct {
*config.Analyzer
}
func newAzureARMConfigAnalyzer(opts analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) {
a, err := config.NewAnalyzer(analyzerType, version, misconf.NewAzureARMScanner, opts)
if err != nil {
return nil, err
}
return &azureARMConfigAnalyzer{Analyzer: a}, nil
}
// Required overrides config.Analyzer.Required() and checks if the given file is JSON.
func (a *azureARMConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
return filepath.Ext(filePath) == ".json"
}

View File

@@ -0,0 +1,35 @@
package azurearm
import (
"testing"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func Test_azureARMConfigAnalyzer_Required(t *testing.T) {
tests := []struct {
name string
filePath string
want bool
}{
{
name: "json",
filePath: "test.json",
want: true,
},
{
name: "yaml",
filePath: "test.yaml",
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a, err := newAzureARMConfigAnalyzer(analyzer.AnalyzerOptions{})
require.NoError(t, err)
assert.Equal(t, tt.want, a.Required(tt.filePath, nil))
})
}
}

View File

@@ -0,0 +1,30 @@
package cloudformation
import (
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/misconf"
)
const (
analyzerType = analyzer.TypeCloudFormation
version = 1
)
func init() {
analyzer.RegisterPostAnalyzer(analyzerType, newCloudFormationConfigAnalyzer)
}
// cloudFormationConfigAnalyzer is an analyzer for detecting misconfigurations in CloudFormation files.
// It embeds config.Analyzer so it can implement analyzer.PostAnalyzer.
type cloudFormationConfigAnalyzer struct {
*config.Analyzer
}
func newCloudFormationConfigAnalyzer(opts analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) {
a, err := config.NewAnalyzer(analyzerType, version, misconf.NewCloudFormationScanner, opts)
if err != nil {
return nil, err
}
return &cloudFormationConfigAnalyzer{Analyzer: a}, nil
}

View File

@@ -1,27 +1,65 @@
package config
import (
"sort"
"context"
"os"
"path/filepath"
"golang.org/x/xerrors"
"k8s.io/utils/strings/slices"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/misconf"
)
type ScannerOption struct {
Trace bool
RegoOnly bool
Namespaces []string
PolicyPaths []string
DataPaths []string
DisableEmbeddedPolicies bool
var (
_ analyzer.PostAnalyzer = (*Analyzer)(nil)
HelmValues []string
HelmValueFiles []string
HelmFileValues []string
HelmStringValues []string
TerraformTFVars []string
K8sVersion string
requiredExts = []string{".json", ".yaml", ".yml"}
)
// Analyzer represents an analyzer for config files,
// which is embedded into each config analyzer such as Kubernetes.
type Analyzer struct {
typ analyzer.Type
version int
scanner *misconf.Scanner
}
func (o *ScannerOption) Sort() {
sort.Strings(o.Namespaces)
sort.Strings(o.PolicyPaths)
sort.Strings(o.DataPaths)
type NewScanner func([]string, misconf.ScannerOption) (*misconf.Scanner, error)
func NewAnalyzer(t analyzer.Type, version int, newScanner NewScanner, opts analyzer.AnalyzerOptions) (*Analyzer, error) {
s, err := newScanner(opts.FilePatterns, opts.MisconfScannerOption)
if err != nil {
return nil, xerrors.Errorf("%s scanner init error: %w", t, err)
}
return &Analyzer{
typ: t,
version: version,
scanner: s,
}, nil
}
// PostAnalyze performs configuration analysis on the input filesystem and detects misconfigurations.
func (a *Analyzer) PostAnalyze(ctx context.Context, input analyzer.PostAnalysisInput) (*analyzer.AnalysisResult, error) {
misconfs, err := a.scanner.Scan(ctx, input.FS)
if err != nil {
return nil, xerrors.Errorf("%s scan error: %w", a.typ, err)
}
return &analyzer.AnalysisResult{Misconfigurations: misconfs}, nil
}
// Required checks if the given file path has one of the required file extensions.
func (a *Analyzer) Required(filePath string, _ os.FileInfo) bool {
return slices.Contains(requiredExts, filepath.Ext(filePath))
}
// Type returns the analyzer type of the current Analyzer instance.
func (a *Analyzer) Type() analyzer.Type {
return a.typ
}
// Version returns the version of the current Analyzer instance.
func (a *Analyzer) Version() int {
return a.version
}
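
Each concrete analyzer passes one of the misconf.New*Scanner constructors as the NewScanner argument above. A minimal sketch of driving such a scanner directly, using only the constructor and Scan signature visible in this diff (paths and option values are illustrative):

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/aquasecurity/trivy/pkg/misconf"
)

func main() {
	// NewDockerfileScanner matches config.NewScanner:
	// func([]string, misconf.ScannerOption) (*misconf.Scanner, error)
	s, err := misconf.NewDockerfileScanner(nil, misconf.ScannerOption{
		Namespaces:              []string{"user"},
		PolicyPaths:             []string{"testdata/rego"},
		DisableEmbeddedPolicies: true,
	})
	if err != nil {
		log.Fatal(err)
	}

	// Scan walks the given fs.FS and returns the detected misconfigurations.
	misconfs, err := s.Scan(context.Background(), os.DirFS("testdata/src"))
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range misconfs {
		fmt.Println(m.FileType, m.FilePath)
	}
}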

View File

@@ -1,59 +1,107 @@
package config_test
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/defsec/pkg/detection"
"github.com/aquasecurity/trivy/internal/testutil"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/misconf"
)
func TestScannerOption_Sort(t *testing.T) {
func TestAnalyzer_PostAnalyze(t *testing.T) {
type fields struct {
Namespaces []string
PolicyPaths []string
DataPaths []string
typ analyzer.Type
newScanner config.NewScanner
opts analyzer.AnalyzerOptions
}
tests := []struct {
name string
fields fields
want config.ScannerOption
name string
fields fields
dir string
want *analyzer.AnalysisResult
wantErr string
}{
{
name: "happy path",
name: "dockerfile",
fields: fields{
Namespaces: []string{"main", "custom", "default"},
PolicyPaths: []string{"policy"},
DataPaths: []string{"data/b", "data/c", "data/a"},
typ: analyzer.TypeDockerfile,
newScanner: misconf.NewDockerfileScanner,
opts: analyzer.AnalyzerOptions{
MisconfScannerOption: misconf.ScannerOption{
Namespaces: []string{"user"},
PolicyPaths: []string{"testdata/rego"},
DisableEmbeddedPolicies: true,
},
},
},
want: config.ScannerOption{
Namespaces: []string{"custom", "default", "main"},
PolicyPaths: []string{"policy"},
DataPaths: []string{"data/a", "data/b", "data/c"},
dir: "testdata/src",
want: &analyzer.AnalysisResult{
Misconfigurations: []types.Misconfiguration{
{
FileType: string(detection.FileTypeDockerfile),
FilePath: "Dockerfile",
Successes: types.MisconfResults{
types.MisconfResult{
Namespace: "user.something",
Query: "data.user.something.deny",
PolicyMetadata: types.PolicyMetadata{
ID: "TEST001",
AVDID: "AVD-TEST-0001",
Type: "Dockerfile Security Check",
Title: "Test policy",
Description: "This is a test policy.",
Severity: "LOW",
RecommendedActions: "Have a cup of tea.",
References: []string{"https://trivy.dev/"},
},
CauseMetadata: types.CauseMetadata{
Provider: "Generic",
Service: "general",
},
},
},
},
},
},
},
{
name: "missing some fields",
name: "non-existent dir",
fields: fields{
Namespaces: []string{"main"},
PolicyPaths: nil,
DataPaths: nil,
},
want: config.ScannerOption{
Namespaces: []string{"main"},
typ: analyzer.TypeDockerfile,
newScanner: misconf.NewDockerfileScanner,
opts: analyzer.AnalyzerOptions{
MisconfScannerOption: misconf.ScannerOption{
Namespaces: []string{"user"},
PolicyPaths: []string{"testdata/rego"},
DisableEmbeddedPolicies: true,
},
},
},
dir: "testdata/non-existent",
wantErr: testutil.ErrNotExist,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
o := config.ScannerOption{
Namespaces: tt.fields.Namespaces,
PolicyPaths: tt.fields.PolicyPaths,
DataPaths: tt.fields.DataPaths,
}
o.Sort()
a, err := config.NewAnalyzer(tt.fields.typ, 0, tt.fields.newScanner, tt.fields.opts)
require.NoError(t, err)
assert.Equal(t, tt.want, o)
got, err := a.PostAnalyze(context.Background(), analyzer.PostAnalysisInput{
FS: os.DirFS(tt.dir),
})
if tt.wantErr != "" {
assert.ErrorContains(t, err, tt.wantErr)
return
}
assert.Equal(t, tt.want, got)
})
}
}

View File

@@ -1,51 +1,44 @@
package dockerfile
import (
"context"
"io"
"os"
"path/filepath"
"strings"
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/misconf"
)
func init() {
analyzer.RegisterAnalyzer(&dockerConfigAnalyzer{})
}
const version = 1
const (
version = 1
analyzerType = analyzer.TypeDockerfile
)
var requiredFiles = []string{"Dockerfile", "Containerfile"}
type dockerConfigAnalyzer struct{}
func init() {
analyzer.RegisterPostAnalyzer(analyzerType, newDockerfileConfigAnalyzer)
}
func (s dockerConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
b, err := io.ReadAll(input.Content)
// dockerConfigAnalyzer is an analyzer for detecting misconfigurations in Dockerfiles.
// It embeds config.Analyzer so it can implement analyzer.PostAnalyzer.
type dockerConfigAnalyzer struct {
*config.Analyzer
}
func newDockerfileConfigAnalyzer(opts analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) {
a, err := config.NewAnalyzer(analyzerType, version, misconf.NewDockerfileScanner, opts)
if err != nil {
return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
return nil, err
}
return &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
// It will be passed to misconfig post handler
types.MisconfPostHandler: {
{
Type: types.Dockerfile,
Path: input.FilePath,
Content: b,
},
},
},
}, nil
return &dockerConfigAnalyzer{Analyzer: a}, nil
}
// Required does a case-insensitive check on filePath and returns true if
// filePath equals, starts with, or has the extension of one of requiredFiles.
func (s dockerConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
// It overrides config.Analyzer.Required().
func (a *dockerConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
base := filepath.Base(filePath)
ext := filepath.Ext(base)
for _, file := range requiredFiles {
@@ -59,11 +52,3 @@ func (s dockerConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
return false
}
func (s dockerConfigAnalyzer) Type() analyzer.Type {
return analyzer.TypeDockerfile
}
func (s dockerConfigAnalyzer) Version() int {
return version
}
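
The deleted Type and Version methods are no longer needed because Go promotes methods from the embedded *config.Analyzer onto dockerConfigAnalyzer. A tiny self-contained illustration of that promotion, with hypothetical names:

package main

import "fmt"

type base struct{ version int }

func (b *base) Version() int { return b.version }

// wrapper embeds *base, so Version is promoted and wrapper gets it for free.
type wrapper struct{ *base }

func main() {
	w := wrapper{base: &base{version: 1}}
	fmt.Println(w.Version()) // prints 1
}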

View File

@@ -1,62 +1,11 @@
package dockerfile
import (
"context"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func Test_dockerConfigAnalyzer_Analyze(t *testing.T) {
tests := []struct {
name string
inputFile string
want *analyzer.AnalysisResult
wantErr string
}{
{
name: "happy path",
inputFile: "testdata/Dockerfile.deployment",
want: &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
types.MisconfPostHandler: {
{
Type: types.Dockerfile,
Path: "testdata/Dockerfile.deployment",
Content: []byte(`FROM scratch`),
},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := strings.NewReader("FROM scratch")
a := dockerConfigAnalyzer{}
ctx := context.Background()
got, err := a.Analyze(ctx, analyzer.AnalysisInput{
FilePath: tt.inputFile,
Content: r,
})
if tt.wantErr != "" {
require.NotNil(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
return
}
assert.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}
}
func Test_dockerConfigAnalyzer_Required(t *testing.T) {
tests := []struct {
name string
@@ -122,10 +71,3 @@ func Test_dockerConfigAnalyzer_Required(t *testing.T) {
})
}
}
func Test_dockerConfigAnalyzer_Type(t *testing.T) {
s := dockerConfigAnalyzer{}
want := analyzer.TypeDockerfile
got := s.Type()
assert.Equal(t, want, got)
}

View File

@@ -1,3 +0,0 @@
FROM foo
COPY . /
RUN echo hello

View File

@@ -1,6 +0,0 @@
FROM foo AS build
COPY . /
RUN echo hello
FROM scratch
COPY --from=build /bar /bar

View File

@@ -1,76 +1,49 @@
package helm
import (
"archive/tar"
"bytes"
"compress/gzip"
"context"
"errors"
"io"
"os"
"path/filepath"
"strings"
"golang.org/x/xerrors"
dio "github.com/aquasecurity/go-dep-parser/pkg/io"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/misconf"
)
const (
analyzerType = analyzer.TypeHelm
version = 1
maxTarSize = 209_715_200 // 200MB
)
var acceptedExts = []string{".tpl", ".json", ".yml", ".yaml", ".tar", ".tgz", ".tar.gz"}
func init() {
analyzer.RegisterAnalyzer(&helmConfigAnalyzer{})
analyzer.RegisterPostAnalyzer(analyzerType, newHelmConfigAnalyzer)
}
const version = 1
// helmConfigAnalyzer is an analyzer for detecting misconfigurations in Helm charts.
// It embeds config.Analyzer so it can implement analyzer.PostAnalyzer.
type helmConfigAnalyzer struct {
*config.Analyzer
}
const maxTarSize = 209_715_200 // 200MB
type helmConfigAnalyzer struct{}
func (a helmConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
isAnArchive := false
if isArchive(input.FilePath) {
isAnArchive = true
if !isHelmChart(input.FilePath, input.Content) {
return nil, nil
}
// reset the content
_, err := input.Content.Seek(0, 0)
if err != nil {
return nil, err
}
}
b, err := io.ReadAll(input.Content)
func newHelmConfigAnalyzer(opts analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) {
a, err := config.NewAnalyzer(analyzerType, version, misconf.NewHelmScanner, opts)
if err != nil {
return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
return nil, err
}
if !isAnArchive {
// if it's not an archive we need to remove the carriage returns
b = bytes.ReplaceAll(b, []byte("\r"), []byte(""))
}
return &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
// it will be passed to misconfig post handler
types.MisconfPostHandler: {
{
Type: types.Helm,
Path: input.FilePath,
Content: b,
},
},
},
}, nil
return &helmConfigAnalyzer{Analyzer: a}, nil
}
func (a helmConfigAnalyzer) Required(filePath string, info os.FileInfo) bool {
// Required overrides config.Analyzer.Required() and checks if the given file is a Helm chart.
func (*helmConfigAnalyzer) Required(filePath string, info os.FileInfo) bool {
if info.Size() > maxTarSize {
// tarball is too big to be a Helm chart - move on
return false
}
for _, acceptable := range []string{".tpl", ".json", ".yml", ".yaml", ".tar", ".tgz", ".tar.gz"} {
for _, acceptable := range acceptedExts {
if strings.HasSuffix(strings.ToLower(filePath), acceptable) {
return true
}
@@ -85,54 +58,3 @@ func (a helmConfigAnalyzer) Required(filePath string, info os.FileInfo) bool {
return false
}
func (helmConfigAnalyzer) Type() analyzer.Type {
return analyzer.TypeHelm
}
func (helmConfigAnalyzer) Version() int {
return version
}
func isHelmChart(path string, file dio.ReadSeekerAt) bool {
var err error
var fr io.Reader = file
if isGzip(path) {
if fr, err = gzip.NewReader(file); err != nil {
return false
}
}
tr := tar.NewReader(fr)
for {
header, err := tr.Next()
if err != nil {
if errors.Is(err, io.EOF) {
break
}
return false
}
if header.Typeflag == tar.TypeReg && strings.HasSuffix(header.Name, "Chart.yaml") {
return true
}
}
return false
}
func isArchive(path string) bool {
if strings.HasSuffix(path, ".tar") || isGzip(path) {
return true
}
return false
}
func isGzip(path string) bool {
if strings.HasSuffix(path, ".tgz") ||
strings.HasSuffix(path, ".tar.gz") {
return true
}
return false
}

View File

@@ -1,361 +1,13 @@
package helm
import (
"context"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func Test_helmConfigAnalyzer_Analyze(t *testing.T) {
type args struct {
namespaces []string
policyPaths []string
}
tests := []struct {
name string
args args
inputFile string
want *analyzer.AnalysisResult
wantErr string
}{
{
name: "Chart.yaml",
args: args{
namespaces: []string{"main"},
policyPaths: []string{"../testdata/kubernetes.rego"},
},
inputFile: filepath.Join("testdata", "Chart.yaml"),
want: &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
types.MisconfPostHandler: {
{
Type: "helm",
Path: filepath.Join("testdata", "Chart.yaml"),
Content: []byte(`apiVersion: v2
name: testchart
description: A Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"
`),
},
},
},
},
},
{
name: "values.yaml",
args: args{
namespaces: []string{"main"},
policyPaths: []string{"../testdata/kubernetes.rego"},
},
inputFile: filepath.Join("testdata", "values.yaml"),
want: &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
types.MisconfPostHandler: {
{
Type: "helm",
Path: filepath.Join("testdata", "values.yaml"),
Content: []byte(`# Default values for testchart.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
repository: nginx
pullPolicy: IfNotPresent
# Overrides the image tag whose default is the chart appVersion.
tag: ""
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
# Specifies whether a service account should be created
create: true
# Annotations to add to the service account
annotations: {}
# The name of the service account to use.
# If not set and create is true, a name is generated using the fullname template
name: ""
podAnnotations: {}
podSecurityContext:
{}
# fsGroup: 2000
securityContext:
{}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
service:
type: ClusterIP
port: 80
ingress:
enabled: false
className: ""
annotations:
{}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
hosts:
- host: chart-example.local
paths:
- path: /
pathType: ImplementationSpecific
tls: []
# - secretName: chart-example-tls
# hosts:
# - chart-example.local
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
# targetMemoryUtilizationPercentage: 80
nodeSelector: {}
tolerations: []
affinity: {}
`),
},
},
},
},
},
{
name: "testchart.tgz",
args: args{
namespaces: []string{"main"},
policyPaths: []string{"../testdata/kubernetes.rego"},
},
inputFile: filepath.Join("testdata", "testchart.tgz"),
want: &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
types.MisconfPostHandler: {
{
Type: "helm",
Path: filepath.Join("testdata", "testchart.tgz"),
Content: []uint8{
0x1f, 0x8b, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0xed, 0x58, 0x5b, 0x6f, 0xdb, 0x36,
0x14, 0xce, 0xb3, 0x7e, 0x5, 0xd7, 0x3c, 0xa4, 0x2d, 0x1a, 0xd9, 0x4a, 0xec, 0xa4, 0xf0,
0x5b, 0x90, 0xec, 0x12, 0x2c, 0x69, 0x83, 0xa6, 0xed, 0x30, 0xc, 0xc3, 0x40, 0x4b, 0xb4,
0xcd, 0x85, 0x22, 0x55, 0x92, 0x72, 0xe2, 0xe, 0xdb, 0x6f, 0xdf, 0x77, 0x48, 0xc9, 0x76,
0x9c, 0xa6, 0xed, 0x43, 0x9b, 0x62, 0x98, 0xce, 0x83, 0x6d, 0x51, 0x87, 0xe7, 0x7e,
0xb5, 0x17, 0xce, 0xe7, 0x33, 0x6e, 0x7d, 0x6f, 0xeb, 0xab, 0x41, 0x1f, 0x70, 0x38,
0x1c, 0xd2, 0x77, 0x76, 0x38, 0xec, 0xc7, 0xef, 0xfd, 0xf0, 0xdd, 0xc2, 0x56, 0x36,
0xd8, 0x1b, 0x64, 0x7b, 0xd9, 0x60, 0x98, 0x1, 0x2f, 0xcb, 0xe, 0x7, 0x87, 0x5b, 0x6c,
0xf8, 0xf5, 0x44, 0x5a, 0x41, 0xed, 0x3c, 0xb7, 0x8c, 0x6d, 0x99, 0x6b, 0xa1, 0x3f,
0x8e, 0x27, 0xac, 0x7b, 0x8, 0x81, 0x1e, 0x16, 0xfc, 0xd2, 0xff, 0xc7, 0xf4, 0x99, 0x2e,
0x78, 0xa9, 0xbe, 0x34, 0xf, 0x72, 0xf0, 0xc1, 0x60, 0x70, 0x9f, 0xff, 0xf7, 0xb2, 0xc3,
0x2c, 0xf8, 0xbf, 0x3f, 0x18, 0xec, 0xed, 0xf, 0xf6, 0xe0, 0xff, 0xfd, 0x83, 0xc3, 0xe1,
0x16, 0xeb, 0x7f, 0x69, 0x41, 0x3e, 0x4, 0xff, 0x73, 0xff, 0xf3, 0x4a, 0xbe, 0x85, 0x62,
0xd2, 0xe8, 0x11, 0x9b, 0xef, 0x25, 0x9a, 0x97, 0x62, 0xc4, 0x96, 0x41, 0x91, 0x14,
0xc2, 0xe5, 0x56, 0x56, 0x3e, 0xbc, 0x3f, 0x62, 0x3f, 0x9, 0x55, 0xb2, 0xf0, 0x86, 0x4d,
0x8c, 0x65, 0x3f, 0xd7, 0x63, 0x61, 0xb5, 0x0, 0x7a, 0x92, 0x6c, 0xe3, 0x75, 0x7c, 0x93,
0x73, 0xcd, 0xc6, 0x82, 0x9, 0xe9, 0x67, 0xc2, 0x32, 0x3c, 0xec, 0xf0, 0xaa, 0x52, 0x32,
0xe7, 0x44, 0x65, 0x87, 0xe1, 0x1e, 0x67, 0x3b, 0x4a, 0x8e, 0x2d, 0xb7, 0x8b, 0x9d,
0x78, 0x27, 0x4d, 0xb6, 0x89, 0xc0, 0xa, 0x2d, 0x1e, 0x3b, 0xc6, 0xad, 0x0, 0x76, 0x6e,
0x94, 0x12, 0x79, 0x38, 0x37, 0x13, 0x48, 0x57, 0x56, 0x8a, 0x83, 0x29, 0xf3, 0x33,
0xbe, 0x64, 0x57, 0xf1, 0xfc, 0x8a, 0x4f, 0x45, 0xc1, 0xa4, 0xf6, 0x86, 0xcd, 0xa3,
0x4e, 0x78, 0xe4, 0x36, 0x9f, 0xc9, 0x39, 0x24, 0xdc, 0x66, 0x38, 0x7, 0x62, 0x21, 0x2a,
0x65, 0x16, 0xa2, 0x88, 0x3c, 0xcf, 0xa2, 0x1c, 0x2d, 0xbf, 0xca, 0x9a, 0xb9, 0x2c, 0x4,
0x83, 0xb7, 0x27, 0xb5, 0x62, 0xb5, 0x97, 0x4a, 0x7a, 0x9, 0x56, 0x90, 0x7a, 0x52, 0xeb,
0x20, 0x83, 0xb, 0xba, 0x43, 0xb9, 0x46, 0xdf, 0x42, 0xcc, 0x85, 0x32, 0x95, 0xb0, 0x29,
0x7b, 0x3d, 0x13, 0x8b, 0x1d, 0x88, 0x2c, 0x75, 0xae, 0xea, 0x82, 0xb8, 0x13, 0x5f,
0x4e, 0x3c, 0x85, 0x2e, 0x84, 0xce, 0x17, 0xa4, 0x0, 0xbf, 0xab, 0x27, 0x44, 0x93, 0xfa,
0x4f, 0xe8, 0x8, 0xba, 0xc6, 0x89, 0x35, 0xc6, 0x5c, 0x17, 0x6b, 0x9c, 0x83, 0x72, 0xc4,
0xda, 0x12, 0x3d, 0x2b, 0xf5, 0x14, 0xf4, 0x2b, 0x59, 0x9, 0x25, 0xb5, 0x48, 0x37, 0xb5,
0x29, 0xc, 0xd3, 0x86, 0x4, 0x9c, 0xe0, 0x2d, 0x28, 0x2d, 0xd6, 0x6c, 0x47, 0x74, 0xc9,
0x41, 0x2, 0xca, 0x8, 0x32, 0x22, 0x61, 0xde, 0x32, 0x8f, 0x5f, 0x54, 0x88, 0x85, 0x35,
0x61, 0xc9, 0xc9, 0xaf, 0x67, 0x12, 0x52, 0xb8, 0x35, 0xf5, 0x1b, 0x53, 0xa7, 0xf1,
0x55, 0xf3, 0xc4, 0x74, 0x5d, 0x22, 0x36, 0x98, 0x9b, 0x99, 0x5a, 0x15, 0x44, 0x17,
0x26, 0xb1, 0xa2, 0x14, 0xda, 0xc3, 0x2a, 0x82, 0xe7, 0x33, 0xe6, 0x65, 0x29, 0xd8,
0xc2, 0xd4, 0xac, 0xe4, 0x57, 0x81, 0x96, 0x9e, 0xb6, 0x5e, 0x5a, 0x11, 0x27, 0x29,
0x25, 0xd9, 0xa7, 0x95, 0xfb, 0x59, 0x63, 0x5b, 0x68, 0x1e, 0xd0, 0x20, 0xde, 0x52, 0x2,
0x5c, 0x6e, 0x62, 0x39, 0x6, 0x8e, 0xb8, 0xa9, 0x60, 0x51, 0xf0, 0x3, 0xc9, 0x9, 0x42,
0xc8, 0x5c, 0xb3, 0x4b, 0x51, 0x72, 0xed, 0x65, 0xde, 0x22, 0x12, 0x99, 0xc7, 0x33,
0xef, 0x2b, 0x37, 0xea, 0xf5, 0x9c, 0x28, 0x41, 0x2a, 0x35, 0x76, 0xda, 0x7b, 0x92,
0xcc, 0xdb, 0xac, 0xe8, 0xa7, 0x59, 0xda, 0xdf, 0x54, 0x7d, 0x43, 0x4d, 0x8a, 0xca,
0x28, 0xcc, 0xd2, 0xb1, 0x63, 0x41, 0xb4, 0x97, 0xd6, 0xfc, 0xb8, 0x75, 0x40, 0xfd,
0xf3, 0xec, 0xd3, 0x5a, 0x67, 0x8d, 0x53, 0x7a, 0x5b, 0x69, 0x72, 0xe3, 0x9a, 0xe2,
0xa0, 0x7c, 0xbf, 0xea, 0x31, 0x60, 0x5b, 0x31, 0x10, 0xa, 0x2a, 0x46, 0xe0, 0x4a, 0xbf,
0x4d, 0xad, 0xa0, 0x43, 0xed, 0xe8, 0x26, 0xe8, 0x9e, 0x7a, 0x7a, 0xb4, 0x22, 0x37,
0x65, 0x49, 0xd1, 0x18, 0xec, 0x8c, 0xc4, 0x81, 0xc3, 0xd8, 0x35, 0x92, 0x9f, 0xbd,
0xab, 0xd, 0x3c, 0x96, 0x26, 0x20, 0xb0, 0xac, 0x31, 0x8f, 0xb2, 0x34, 0x3b, 0x48, 0xfb,
0x8f, 0x92, 0x6f, 0x5d, 0xf8, 0x3a, 0x8, 0xb0, 0xea, 0xff, 0xe9, 0xc, 0xc5, 0x5d, 0x4e,
0x35, 0xca, 0xc1, 0x17, 0xe6, 0xf1, 0x89, 0xfe, 0xdf, 0x1f, 0xee, 0xf, 0x37, 0xfa, 0xff,
0x20, 0xeb, 0xf, 0xba, 0xfe, 0xff, 0x10, 0xb0, 0xcd, 0x2e, 0xb8, 0xf7, 0xe8, 0xe2, 0xb1,
0x7, 0x5, 0xf7, 0xb3, 0xeb, 0x99, 0x40, 0x1, 0xab, 0xa5, 0xa, 0x65, 0xb6, 0xe9, 0xac,
0x2e, 0x6d, 0x6b, 0xa0, 0xab, 0xab, 0xca, 0x50, 0x7f, 0x71, 0x8, 0x19, 0xc5, 0xa6, 0xca,
0x8c, 0x51, 0xa4, 0x10, 0x46, 0xc0, 0x7e, 0x86, 0x82, 0x80, 0x42, 0x8d, 0x8e, 0x8b,
0x7b, 0xa8, 0x1, 0xab, 0x73, 0x94, 0x72, 0x10, 0xd0, 0x62, 0x1a, 0x2b, 0xc9, 0xe3, 0xa,
0xf5, 0x46, 0xde, 0xa0, 0x6a, 0x84, 0x5a, 0xf1, 0xdd, 0x93, 0x94, 0xbd, 0xd4, 0xa, 0xfd,
0x51, 0x87, 0x9b, 0x24, 0x12, 0x43, 0x4f, 0x65, 0xa1, 0xb1, 0x25, 0xe9, 0xc9, 0xe5,
0x1f, 0x97, 0x1e, 0xb2, 0x81, 0xc4, 0x31, 0xea, 0xd, 0x8, 0xbc, 0x3d, 0xbe, 0x64, 0x85,
0xb4, 0x2e, 0x49, 0xa7, 0xd2, 0xf7, 0xc2, 0x67, 0x14, 0x3f, 0x49, 0xc7, 0xef, 0x6d,
0x2f, 0x7c, 0xb6, 0x7, 0xb3, 0x69, 0x8f, 0x3e, 0xda, 0x47, 0x37, 0xd7, 0xbd, 0x15, 0xa1,
0x31, 0xf4, 0xab, 0x2b, 0x36, 0x91, 0xa, 0xfd, 0xe7, 0x69, 0xea, 0xae, 0x2b, 0x7c, 0x8e,
0xf9, 0x15, 0x3e, 0x7d, 0x49, 0xbf, 0xd, 0xe8, 0x24, 0x4f, 0xff, 0xa1, 0xf6, 0xc2, 0xad,
0x34, 0xb5, 0x63, 0xa7, 0x27, 0xdf, 0x83, 0x2f, 0x86, 0x5, 0x6a, 0xd9, 0x49, 0x8a, 0x89,
0x81, 0xf7, 0x22, 0x3a, 0x8e, 0x92, 0x74, 0xee, 0x72, 0x53, 0x88, 0xde, 0x7f, 0xa1,
0xc6, 0xad, 0xf2, 0x7f, 0xce, 0x55, 0xd, 0x27, 0x7f, 0x85, 0x5, 0xe0, 0x13, 0xf9, 0xbf,
0x3f, 0xdc, 0x3f, 0xb8, 0x93, 0xff, 0x83, 0xac, 0xcb, 0xff, 0x87, 0x80, 0x6d, 0x76,
0x22, 0x26, 0xbc, 0x56, 0x98, 0xe3, 0x82, 0xff, 0xe3, 0x6c, 0xdb, 0x6, 0x45, 0xba, 0x36,
0xf6, 0x70, 0xf6, 0xeb, 0xd1, 0xf9, 0xd9, 0x2e, 0xde, 0x97, 0x94, 0x9e, 0x45, 0x48,
0x18, 0x42, 0x38, 0x11, 0xb9, 0xa2, 0xc9, 0x63, 0x8e, 0xe4, 0xe0, 0x63, 0x15, 0x87,
0x94, 0x30, 0x91, 0x3b, 0xd7, 0xce, 0xe3, 0x98, 0x63, 0xec, 0x6a, 0x88, 0x4b, 0x93,
0xc4, 0x8a, 0x30, 0x54, 0x1c, 0x9b, 0x5a, 0xfb, 0x11, 0xcb, 0x92, 0x44, 0x96, 0xa8,
0x31, 0xa3, 0x84, 0xa1, 0x7e, 0x54, 0xc6, 0x49, 0xe4, 0xfa, 0x62, 0xc4, 0xf4, 0x54,
0xea, 0x1b, 0x9c, 0x55, 0xb5, 0x52, 0x17, 0x6, 0x17, 0x70, 0x76, 0x3a, 0x79, 0x61, 0xfc,
0x85, 0x15, 0xe, 0xb3, 0x12, 0x5e, 0x6d, 0xb3, 0x97, 0x18, 0x56, 0x2c, 0x52, 0x30, 0x4e,
0x66, 0x81, 0xe, 0xf3, 0x7c, 0x8a, 0x2a, 0x46, 0x93, 0x74, 0xd1, 0xa8, 0x77, 0x6b, 0x66,
0x5d, 0x4d, 0x24, 0x29, 0x48, 0x0, 0x19, 0x83, 0xc9, 0xa3, 0x46, 0x86, 0xb, 0xf0, 0xba,
0x14, 0x18, 0xc6, 0xbc, 0x1b, 0xb1, 0xdf, 0x7e, 0xf, 0x2b, 0x51, 0xcb, 0x22, 0xa0, 0x61,
0x31, 0x50, 0x77, 0xe, 0x13, 0x84, 0xc7, 0x5c, 0xe6, 0xe2, 0x28, 0xcf, 0x83, 0x4a, 0x41,
0xb2, 0x4b, 0x8c, 0x61, 0x72, 0x42, 0x53, 0x3c, 0x2a, 0x6a, 0xdc, 0x86, 0x58, 0x83,
0xc7, 0x78, 0x44, 0x5c, 0x9b, 0x90, 0xc1, 0x12, 0xc6, 0x29, 0x70, 0x33, 0xfe, 0xc2,
0x1e, 0x66, 0x6b, 0x11, 0x8, 0x1d, 0xd1, 0x74, 0xce, 0xe3, 0xa, 0x0, 0x6b, 0xf2, 0xa2,
0x68, 0x7, 0xc1, 0xd, 0x72, 0xc0, 0xe6, 0x2b, 0xdc, 0x11, 0xfb, 0xeb, 0xef, 0x70, 0x1f,
0x63, 0x1e, 0x23, 0x91, 0xdb, 0x41, 0x75, 0x53, 0x88, 0x38, 0xb8, 0xa5, 0x1, 0xf7, 0x74,
0x12, 0x66, 0x48, 0x27, 0xe2, 0xf4, 0x1d, 0x65, 0x9, 0xe6, 0x83, 0x34, 0x28, 0xe3, 0x91,
0x10, 0x9e, 0xa7, 0x42, 0xb, 0x4b, 0x12, 0xc7, 0x91, 0x30, 0x10, 0x6e, 0x6d, 0xb3, 0xf4,
0x35, 0x48, 0xc6, 0x9d, 0x92, 0x6c, 0x54, 0x99, 0xe2, 0x68, 0x43, 0x3a, 0x3a, 0x83,
0xb5, 0x6b, 0x2b, 0xfd, 0xe2, 0xd8, 0x60, 0xfa, 0xbd, 0x9, 0xb6, 0x6b, 0xe4, 0x9e, 0xb8,
0x1f, 0xad, 0xa9, 0xab, 0x11, 0xdb, 0x43, 0x9d, 0x20, 0x1b, 0xdf, 0x87, 0x98, 0xf3,
0x8a, 0x8f, 0x9b, 0x95, 0x29, 0xda, 0x9e, 0xb1, 0xc2, 0x9a, 0xaa, 0xfd, 0xbd, 0xcb,
0x8e, 0xce, 0xce, 0xc2, 0x6f, 0xa8, 0x53, 0x50, 0xa3, 0x79, 0x65, 0x8c, 0xff, 0x81,
0x4a, 0xfe, 0xc2, 0x41, 0xd6, 0x35, 0x5b, 0xdb, 0x5a, 0x1f, 0xb9, 0x17, 0x46, 0x13,
0xc2, 0xe6, 0xf1, 0x1b, 0x18, 0xe, 0xd1, 0xda, 0x8, 0x13, 0x6c, 0x48, 0x1c, 0xe2, 0xa6,
0x74, 0xac, 0x50, 0x4b, 0x84, 0x3d, 0xbd, 0xa0, 0x80, 0x45, 0x7f, 0x1c, 0xb1, 0xe7,
0x40, 0x83, 0x65, 0x10, 0xab, 0x41, 0x28, 0xa1, 0x29, 0x3d, 0x8a, 0x11, 0x9b, 0x70,
0xe5, 0x88, 0x2a, 0xf2, 0xc6, 0xb9, 0x17, 0xad, 0x79, 0x6e, 0xbb, 0xe, 0x8f, 0x8d, 0x76,
0xc4, 0xfd, 0x6a, 0xb9, 0x69, 0xa7, 0xd2, 0xf4, 0x1a, 0x9a, 0x69, 0xb8, 0xbf, 0xca,
0x92, 0xbb, 0x88, 0x5e, 0xb9, 0x5d, 0x9e, 0x7, 0xfa, 0xa4, 0x8, 0xf1, 0x40, 0x52, 0xf8,
0x86, 0xfa, 0x6e, 0x78, 0x18, 0xc5, 0x9c, 0xd8, 0x15, 0x37, 0x1c, 0x2e, 0x13, 0xa9,
0x32, 0x39, 0x57, 0xe1, 0x3d, 0xb, 0x2d, 0xbc, 0x41, 0x8e, 0x17, 0xe8, 0x60, 0xc4, 0x7a,
0xcb, 0x93, 0x88, 0xf2, 0x3a, 0xe8, 0x7f, 0x4a, 0xd7, 0x69, 0x83, 0x9, 0x1a, 0x34, 0xb1,
0x9f, 0x93, 0x79, 0x54, 0xcc, 0xa4, 0xe0, 0x8a, 0x5d, 0x44, 0x16, 0x25, 0x57, 0x54,
0xfb, 0x16, 0xeb, 0x5d, 0x20, 0x36, 0xfe, 0x5a, 0x89, 0xb9, 0xdd, 0xb2, 0xfe, 0x90,
0x94, 0x28, 0x23, 0xe, 0x95, 0x25, 0x8f, 0x4e, 0x6f, 0x62, 0xe1, 0x17, 0x5a, 0xdf, 0x6b,
0xae, 0x30, 0x4c, 0x2c, 0x97, 0x93, 0x10, 0xd2, 0x88, 0x72, 0x17, 0xa4, 0x5a, 0x2c,
0xab, 0xc2, 0xf2, 0x7e, 0xdc, 0x87, 0xd, 0x53, 0x82, 0x63, 0x74, 0xf1, 0x54, 0xf3, 0xb8,
0xb, 0x7f, 0x3d, 0x68, 0x97, 0x53, 0xcf, 0x8f, 0x61, 0x36, 0x33, 0x94, 0x36, 0xed, 0xfe,
0x4f, 0x3d, 0xa1, 0xd9, 0xec, 0xe0, 0x51, 0x13, 0x77, 0x38, 0xee, 0x40, 0x8d, 0xd6,
0xb5, 0x3c, 0x7e, 0xd3, 0xa0, 0x84, 0xe0, 0xc1, 0x60, 0x83, 0x0, 0x98, 0x4b, 0x6b, 0x34,
0xd9, 0xc8, 0xc5, 0xb1, 0x7, 0x11, 0xeb, 0x55, 0x13, 0x60, 0xad, 0x28, 0xcf, 0x30, 0x60,
0x61, 0xff, 0x3, 0xfb, 0x73, 0xa9, 0x25, 0x39, 0x34, 0xa5, 0xa4, 0xa4, 0x55, 0x10, 0xb,
0xfd, 0x35, 0xd7, 0xb7, 0x34, 0x59, 0xbb, 0x56, 0xeb, 0xa8, 0x6d, 0x5c, 0xe1, 0xe2,
0xda, 0x47, 0xff, 0x10, 0x10, 0x75, 0x1a, 0xa3, 0x80, 0xc2, 0x8b, 0x3f, 0x11, 0xa4,
0xf4, 0xbe, 0x24, 0x6, 0x5a, 0xe0, 0xa2, 0xe3, 0x76, 0x11, 0xc6, 0x33, 0xd0, 0x2a, 0x4d,
0xd0, 0x1e, 0xc5, 0xa8, 0xb6, 0xb0, 0xdf, 0xd8, 0xf2, 0x60, 0x9b, 0x9, 0xe2, 0x9a, 0xed,
0xac, 0x8c, 0xbd, 0x93, 0x36, 0x44, 0x4b, 0xb9, 0xf2, 0x52, 0x5e, 0xd5, 0x21, 0x37,
0xca, 0xe6, 0xb9, 0x4, 0x35, 0x2a, 0xe2, 0xd9, 0xde, 0xf3, 0x73, 0xd9, 0xa8, 0xf8, 0xe,
0x2d, 0xe6, 0x73, 0x6f, 0x24, 0xbc, 0xf6, 0xc6, 0xc1, 0xcd, 0xd0, 0xe1, 0x83, 0xe9,
0x53, 0x4a, 0xfd, 0x2a, 0xb6, 0x11, 0x47, 0x2d, 0x4, 0x7, 0xfc, 0x66, 0xed, 0x0, 0x49,
0x4a, 0xb5, 0xdd, 0x4e, 0x85, 0x3f, 0xbe, 0x78, 0xf3, 0x86, 0xfe, 0x51, 0x79, 0x1f,
0x42, 0xf3, 0x42, 0x40, 0x9, 0x84, 0x29, 0xfa, 0xd, 0xa5, 0x28, 0xf1, 0x8e, 0x68, 0xe7,
0x81, 0xff, 0xfd, 0x98, 0x89, 0xc6, 0x58, 0x77, 0x29, 0x68, 0x4b, 0x36, 0x36, 0x96,
0x2f, 0x6f, 0x14, 0x15, 0xc1, 0x58, 0xcf, 0x10, 0xe3, 0x9, 0x9f, 0x4c, 0xe0, 0x35, 0xbf,
0x8, 0xaf, 0xbf, 0x75, 0x77, 0xef, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a,
0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xd8, 0xda, 0xfa, 0x17, 0xe2,
0x8a, 0xf9, 0x39, 0x0, 0x28, 0x0, 0x0,
},
},
},
},
},
},
{
name: "nope.tgz",
args: args{
namespaces: []string{"main"},
policyPaths: []string{"../testdata/kubernetes.rego"},
},
inputFile: filepath.Join("testdata", "nope.tgz"),
want: nil,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
f, err := os.Open(tt.inputFile)
require.NoError(t, err)
defer func() {
_ = f.Close()
}()
info, err := os.Stat(tt.inputFile)
require.NoError(t, err)
a := helmConfigAnalyzer{}
ctx := context.Background()
got, err := a.Analyze(ctx, analyzer.AnalysisInput{
FilePath: tt.inputFile,
Info: info,
Content: f,
})
if tt.wantErr != "" {
require.NotNil(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
return
}
assert.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}
}
func Test_helmConfigAnalyzer_Required(t *testing.T) {
tests := []struct {
name string
@@ -364,66 +16,60 @@ func Test_helmConfigAnalyzer_Required(t *testing.T) {
}{
{
name: "yaml",
filePath: "testdata/testchart/Chart.yaml",
filePath: "Chart.yaml",
want: true,
},
{
name: "yaml - shorthand",
filePath: "testdata/testchart/templates/deployment.yml",
filePath: "templates/deployment.yml",
want: true,
},
{
name: "tpl",
filePath: "testdata/testchart/templates/_helpers.tpl",
filePath: "templates/_helpers.tpl",
want: true,
},
{
name: "json",
filePath: "testdata/testchart/values.yaml",
filePath: "values.json",
want: true,
},
{
name: "NOTES.txt",
filePath: "testdata/testchart/templates/NOTES.txt",
filePath: "templates/NOTES.txt",
want: false,
},
{
name: ".helmignore",
filePath: "testdata/testchart/.helmignore",
filePath: ".helmignore",
want: true,
},
{
name: "testchart.tgz",
filePath: filepath.Join("testdata", "testchart.tgz"),
filePath: "testchart.tgz",
want: true,
},
{
name: "testchart.tar.gz",
filePath: filepath.Join("testdata", "testchart.tar.gz"),
filePath: "testchart.tar.gz",
want: true,
},
{
name: "nope.tgz",
filePath: filepath.Join("testdata", "nope.tgz"),
want: true, // its a tarball after all
filePath: "nope.tgz",
want: true, // it's a tarball after all
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := helmConfigAnalyzer{}
info, _ := os.Stat(tt.filePath)
// Create a dummy file info
info, err := os.Stat("./helm_test.go")
require.NoError(t, err)
got := s.Required(tt.filePath, info)
assert.Equal(t, tt.want, got)
})
}
}
func Test_helmConfigAnalyzer_Type(t *testing.T) {
s := helmConfigAnalyzer{}
want := analyzer.TypeHelm
got := s.Type()
assert.Equal(t, want, got)
}

View File

@@ -1,24 +0,0 @@
apiVersion: v2
name: testchart
description: A Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"

Binary file not shown.

View File

@@ -1,23 +0,0 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -1,24 +0,0 @@
apiVersion: v2
name: testchart
description: A Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"

View File

@@ -1,22 +0,0 @@
1. Get the application URL by running these commands:
{{- if .Values.ingress.enabled }}
{{- range $host := .Values.ingress.hosts }}
{{- range .paths }}
http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
{{- end }}
{{- end }}
{{- else if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "testchart.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "testchart.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "testchart.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "testchart.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

View File

@@ -1,62 +0,0 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "testchart.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "testchart.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "testchart.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "testchart.labels" -}}
helm.sh/chart: {{ include "testchart.chart" . }}
{{ include "testchart.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "testchart.selectorLabels" -}}
app.kubernetes.io/name: {{ include "testchart.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "testchart.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "testchart.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View File

@@ -1,61 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "testchart.fullname" . }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
spec:
{{- if not .Values.autoscaling.enabled }}
replicas: {{ .Values.replicaCount }}
{{- end }}
selector:
matchLabels:
{{- include "testchart.selectorLabels" . | nindent 6 }}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "testchart.selectorLabels" . | nindent 8 }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "testchart.serviceAccountName" . }}
securityContext:
{{- toYaml .Values.podSecurityContext | nindent 8 }}
containers:
- name: {{ .Chart.Name }}
securityContext:
{{- toYaml .Values.securityContext | nindent 12 }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
ports:
- name: http
containerPort: 80
protocol: TCP
livenessProbe:
httpGet:
path: /
port: http
readinessProbe:
httpGet:
path: /
port: http
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}

View File

@@ -1,28 +0,0 @@
{{- if .Values.autoscaling.enabled }}
apiVersion: autoscaling/v2beta1
kind: HorizontalPodAutoscaler
metadata:
name: {{ include "testchart.fullname" . }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
spec:
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: {{ include "testchart.fullname" . }}
minReplicas: {{ .Values.autoscaling.minReplicas }}
maxReplicas: {{ .Values.autoscaling.maxReplicas }}
metrics:
{{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
- type: Resource
resource:
name: cpu
targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
{{- end }}
{{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
- type: Resource
resource:
name: memory
targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
{{- end }}
{{- end }}

View File

@@ -1,61 +0,0 @@
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "testchart.fullname" . -}}
{{- $svcPort := .Values.service.port -}}
{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
{{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }}
{{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}}
{{- end }}
{{- end }}
{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1beta1
{{- else -}}
apiVersion: extensions/v1beta1
{{- end }}
kind: Ingress
metadata:
name: {{ $fullName }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
{{- with .Values.ingress.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
{{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
ingressClassName: {{ .Values.ingress.className }}
{{- end }}
{{- if .Values.ingress.tls }}
tls:
{{- range .Values.ingress.tls }}
- hosts:
{{- range .hosts }}
- {{ . | quote }}
{{- end }}
secretName: {{ .secretName }}
{{- end }}
{{- end }}
rules:
{{- range .Values.ingress.hosts }}
- host: {{ .host | quote }}
http:
paths:
{{- range .paths }}
- path: {{ .path }}
{{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }}
pathType: {{ .pathType }}
{{- end }}
backend:
{{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }}
service:
name: {{ $fullName }}
port:
number: {{ $svcPort }}
{{- else }}
serviceName: {{ $fullName }}
servicePort: {{ $svcPort }}
{{- end }}
{{- end }}
{{- end }}
{{- end }}

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Service
metadata:
name: {{ include "testchart.fullname" . }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.service.port }}
targetPort: http
protocol: TCP
name: http
selector:
{{- include "testchart.selectorLabels" . | nindent 4 }}

View File

@@ -1,12 +0,0 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "testchart.serviceAccountName" . }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
{{- end }}

View File

@@ -1,15 +0,0 @@
apiVersion: v1
kind: Pod
metadata:
name: "{{ include "testchart.fullname" . }}-test-connection"
labels:
{{- include "testchart.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: ['{{ include "testchart.fullname" . }}:{{ .Values.service.port }}']
restartPolicy: Never

View File

@@ -1,86 +0,0 @@
# Default values for testchart.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
repository: nginx
pullPolicy: IfNotPresent
# Overrides the image tag whose default is the chart appVersion.
tag: ""
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
# Specifies whether a service account should be created
create: true
# Annotations to add to the service account
annotations: {}
# The name of the service account to use.
# If not set and create is true, a name is generated using the fullname template
name: ""
podAnnotations: {}
podSecurityContext:
{}
# fsGroup: 2000
securityContext:
{}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
service:
type: ClusterIP
port: 80
ingress:
enabled: false
className: ""
annotations:
{}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
hosts:
- host: chart-example.local
paths:
- path: /
pathType: ImplementationSpecific
tls: []
# - secretName: chart-example-tls
# hosts:
# - chart-example.local
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
# targetMemoryUtilizationPercentage: 80
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -1,86 +0,0 @@
# Default values for testchart.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
repository: nginx
pullPolicy: IfNotPresent
# Overrides the image tag whose default is the chart appVersion.
tag: ""
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
# Specifies whether a service account should be created
create: true
# Annotations to add to the service account
annotations: {}
# The name of the service account to use.
# If not set and create is true, a name is generated using the fullname template
name: ""
podAnnotations: {}
podSecurityContext:
{}
# fsGroup: 2000
securityContext:
{}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
service:
type: ClusterIP
port: 80
ingress:
enabled: false
className: ""
annotations:
{}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
hosts:
- host: chart-example.local
paths:
- path: /
pathType: ImplementationSpecific
tls: []
# - secretName: chart-example-tls
# hosts:
# - chart-example.local
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
# targetMemoryUtilizationPercentage: 80
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -1,65 +0,0 @@
package json
import (
"context"
"io"
"os"
"path/filepath"
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func init() {
analyzer.RegisterAnalyzer(&jsonConfigAnalyzer{})
}
const version = 1
var (
requiredExt = ".json"
excludedFiles = []string{types.NpmPkgLock, types.NuGetPkgsLock, types.NuGetPkgsConfig}
)
type jsonConfigAnalyzer struct{}
func (a jsonConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
b, err := io.ReadAll(input.Content)
if err != nil {
return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
}
return &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
// It will be passed to misconfig post handler
types.MisconfPostHandler: {
{
Type: types.JSON,
Path: input.FilePath,
Content: b,
},
},
},
}, nil
}
func (a jsonConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
filename := filepath.Base(filePath)
for _, excludedFile := range excludedFiles {
if filename == excludedFile {
return false
}
}
return filepath.Ext(filePath) == requiredExt
}
func (jsonConfigAnalyzer) Type() analyzer.Type {
return analyzer.TypeJSON
}
func (jsonConfigAnalyzer) Version() int {
return version
}

View File

@@ -1,98 +0,0 @@
package json
import (
"context"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
tests := []struct {
name string
inputFile string
want *analyzer.AnalysisResult
wantErr string
}{
{
name: "happy path",
inputFile: "test.json",
want: &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
types.MisconfPostHandler: {
{
Type: "json",
Path: "test.json",
Content: []byte(`{}`),
},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := strings.NewReader("{}")
s := jsonConfigAnalyzer{}
got, err := s.Analyze(context.Background(), analyzer.AnalysisInput{
FilePath: tt.inputFile,
Content: r,
})
if tt.wantErr != "" {
require.NotNil(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
return
}
assert.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}
}
func Test_jsonConfigAnalyzer_Required(t *testing.T) {
tests := []struct {
name string
filePath string
want bool
}{
{
name: "json",
filePath: "deployment.json",
want: true,
},
{
name: "yaml",
filePath: "deployment.yaml",
want: false,
},
{
name: "npm json",
filePath: "package-lock.json",
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := jsonConfigAnalyzer{}
got := s.Required(tt.filePath, nil)
assert.Equal(t, tt.want, got)
})
}
}
func Test_jsonConfigAnalyzer_Type(t *testing.T) {
s := jsonConfigAnalyzer{}
want := analyzer.TypeJSON
got := s.Type()
assert.Equal(t, want, got)
}

View File

@@ -1,22 +0,0 @@
[
{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 4
}
},
{
"apiVersion": "apps/v2",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 5
}
}
]

View File

@@ -1,10 +0,0 @@
{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 3
}
}

View File

@@ -1,10 +0,0 @@
{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 4
}
}

View File

@@ -0,0 +1,30 @@
package k8s
import (
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/misconf"
)
const (
analyzerType = analyzer.TypeKubernetes
version = 1
)
func init() {
analyzer.RegisterPostAnalyzer(analyzerType, newKubernetesConfigAnalyzer)
}
// kubernetesConfigAnalyzer is an analyzer for detecting misconfigurations in Kubernetes config files.
// It embeds config.Analyzer so it can implement analyzer.PostAnalyzer.
type kubernetesConfigAnalyzer struct {
*config.Analyzer
}
func newKubernetesConfigAnalyzer(opts analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) {
a, err := config.NewAnalyzer(analyzerType, version, misconf.NewKubernetesScanner, opts)
if err != nil {
return nil, err
}
return &kubernetesConfigAnalyzer{Analyzer: a}, nil
}
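
The file above is the whole of the new Kubernetes misconfiguration analyzer: registration happens through RegisterPostAnalyzer, and config.NewAnalyzer wires together the analyzer type, its version, and a misconf scanner constructor. As a rough sketch of how the same pattern would extend to another structured-config type, the following mirrors the code above for CloudFormation; it is illustrative only, not part of this commit, and it assumes a constructor named misconf.NewCloudFormationScanner with the same shape as misconf.NewKubernetesScanner.

package cloudformation

import (
	"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
	"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
	"github.com/aquasecurity/trivy/pkg/misconf"
)

const (
	analyzerType = analyzer.TypeCloudFormation
	version      = 1
)

func init() {
	// Post-analyzers are registered by constructor so they receive options at build time.
	analyzer.RegisterPostAnalyzer(analyzerType, newCloudFormationConfigAnalyzer)
}

// cloudFormationConfigAnalyzer embeds config.Analyzer so it satisfies analyzer.PostAnalyzer,
// just like the Kubernetes analyzer above.
type cloudFormationConfigAnalyzer struct {
	*config.Analyzer
}

func newCloudFormationConfigAnalyzer(opts analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) {
	// Assumption: misconf.NewCloudFormationScanner exists with the same signature
	// as misconf.NewKubernetesScanner used in the file above.
	a, err := config.NewAnalyzer(analyzerType, version, misconf.NewCloudFormationScanner, opts)
	if err != nil {
		return nil, err
	}
	return &cloudFormationConfigAnalyzer{Analyzer: a}, nil
}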

View File

@@ -1,56 +1,45 @@
package terraform
import (
"context"
"io"
"os"
"path/filepath"
"golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/misconf"
)
const (
analyzerType = analyzer.TypeTerraform
version = 1
)
var requiredExts = []string{
".tf",
".tf.json",
}
func init() {
analyzer.RegisterAnalyzer(&terraformConfigAnalyzer{})
analyzer.RegisterPostAnalyzer(analyzerType, newTerraformConfigAnalyzer)
}
const version = 1
// terraformConfigAnalyzer is an analyzer for detecting misconfigurations in Terraform files.
// It embeds config.Analyzer so it can implement analyzer.PostAnalyzer.
type terraformConfigAnalyzer struct {
*config.Analyzer
}
var requiredExts = []string{".tf", ".tf.json"}
type terraformConfigAnalyzer struct{}
// Analyze returns the name of the Terraform file
func (a terraformConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
b, err := io.ReadAll(input.Content)
func newTerraformConfigAnalyzer(opts analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) {
a, err := config.NewAnalyzer(analyzerType, version, misconf.NewTerraformScanner, opts)
if err != nil {
return nil, xerrors.Errorf("read error (%s): %w", input.FilePath, err)
return nil, err
}
return &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
// It will be passed to misconf post handler
types.MisconfPostHandler: {
{
Type: types.Terraform,
Path: input.FilePath,
Content: b,
},
},
},
}, nil
return &terraformConfigAnalyzer{Analyzer: a}, nil
}
func (a terraformConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
// Required overrides config.Analyzer.Required() and checks if the given file is a Terraform file.
func (*terraformConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
return slices.Contains(requiredExts, filepath.Ext(filePath))
}
func (terraformConfigAnalyzer) Type() analyzer.Type {
return analyzer.TypeTerraform
}
func (terraformConfigAnalyzer) Version() int {
return version
}

View File

@@ -1,55 +1,11 @@
package terraform
import (
"bytes"
"context"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func TestConfigAnalyzer_Analyze(t *testing.T) {
tests := []struct {
name string
input analyzer.AnalysisInput
want *analyzer.AnalysisResult
}{
{
name: "happy path",
input: analyzer.AnalysisInput{
Dir: "path/to/",
FilePath: "main.tf",
Content: bytes.NewReader(nil),
},
want: &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
types.MisconfPostHandler: {
{
Type: types.Terraform,
Path: "main.tf",
Content: []byte{},
},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a := terraformConfigAnalyzer{}
ctx := context.Background()
got, err := a.Analyze(ctx, tt.input)
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}
}
func TestConfigAnalyzer_Required(t *testing.T) {
tests := []struct {
name string

View File

@@ -1,21 +0,0 @@
package users.dockerfile.xyz_100
__rego_metadata__ := {
"id": "XYZ-100",
"title": "Bad Dockerfile",
"version": "v1.0.0",
"severity": "HIGH",
"type": "Docker Security Check",
}
denylist = [
"foo"
]
deny[res] {
input[i].Cmd == "from"
val := input[i].Value
contains(val[i], denylist[_])
res = {"type": "Docker Security Check", "msg": sprintf("deny: image found %s", [val]), "severity": "HIGH", "id": "RULE-100"}
}

View File

@@ -1,35 +0,0 @@
package main.dockerfile
denylist = [
"foo"
]
deny[res] {
input[i].Cmd == "from"
val := input[i].Value
contains(val[i], denylist[_])
res = {
"type": "Docker Security Check",
"msg": sprintf("deny: image found %s", [val]),
"severity": "HIGH",
"id": "RULE-100"
}
}
warnlist = [
"echo"
]
warn[res] {
input[i].Cmd == "run"
val := input[i].Value
contains(val[_], warnlist[_])
res = {
"type": "Docker Security Check",
"msg": sprintf("warn: command %s contains banned: %s", [val, warnlist]),
"severity": "LOW",
"id": "RULE-10"
}
}

View File

@@ -1,20 +0,0 @@
package main.dockerfile
__rego_metadata__ := {
"id": "XYZ-100",
"title": "Bad Dockerfile",
"version": "v1.0.0",
"severity": "HIGH",
"type": "Docker Security Check",
}
denylist = [
]
deny[msg] {
input[i].Cmd == "from"
val := input[i].Value
contains(val[i], denylist[_])
msg = sprintf("deny: image found %s", [val])
}

View File

@@ -1,13 +0,0 @@
package main.dockerfile.id_100
violationlist = [
"foo"
]
violation[{"msg": msg, "details": {}}] {
input[i].Cmd == "from"
val := input[i].Value
contains(val[i], violationlist[_])
msg = sprintf("violation: image found %s", [val])
}

View File

@@ -1,19 +0,0 @@
package main.dockerfile.xyz_100
__rego_metadata__ := {
"id": "XYZ-100",
"title": "Bad Dockerfile",
"version": "v1.0.0",
}
warnlist = [
"foo"
]
warn[msg] {
input[i].Cmd == "from"
val := input[i].Value
contains(val[i], warnlist[_])
msg = sprintf("warn: image found %s", [val])
}

View File

@@ -1,15 +0,0 @@
package main.kubernetes.xyz_100
__rego_metadata__ := {
"id": "XYZ-100",
"title": "Bad Kubernetes Replicas",
"version": "v1.0.0",
"severity": "HIGH",
"type": "Kubernetes Security Check",
}
deny[msg] {
rpl = input.spec.replicas
rpl > 3
msg = sprintf("too many replicas: %d", [rpl])
}

View File

@@ -0,0 +1,32 @@
package user.something
__rego_metadata__ := {
"id": "TEST001",
"avd_id": "AVD-TEST-0001",
"title": "Test policy",
"short_code": "no-buckets",
"severity": "LOW",
"description": "This is a test policy.",
"recommended_actions": "Have a cup of tea.",
"url": "https://trivy.dev/",
}
# taken from defsec rego lib to mimic behaviour
result(msg, cause) = result {
metadata := object.get(cause, "__defsec_metadata", cause)
result := {
"msg": msg,
"startline": object.get(metadata, "startline", object.get(metadata, "StartLine", 0)),
"endline": object.get(metadata, "endline", object.get(metadata, "EndLine", 0)),
"filepath": object.get(metadata, "filepath", object.get(metadata, "Path", "")),
"explicit": object.get(metadata, "explicit", false),
"managed": object.get(metadata, "managed", true),
"fskey": object.get(metadata, "fskey", ""),
"resource": object.get(metadata, "resource", ""),
}
}
deny[res] {
cmd := input.stages[_][_]
res := result("No commands allowed!", cmd)
}

View File

@@ -0,0 +1 @@
FROM ubuntu

View File

@@ -1,15 +0,0 @@
default: &default
line: single line
john: &J
john_name: john
fred: &F
fred_name: fred
main:
<<: *default
name:
<<: [*J, *F]
comment: |
multi
line

View File

@@ -1 +0,0 @@
apiVersion": foo: bar

View File

@@ -1,3 +0,0 @@
circular: &circular
name:
<<: *circular

View File

@@ -1,13 +0,0 @@
package main.yaml.xyz_123
__rego_metadata__ := {
"id": "XYZ-123",
"title": "Bad YAML",
"version": "v1.0.0",
"severity": "CRITICAL",
"type": "YAML Security Check",
}
deny[msg]{
msg := "bad"
}

View File

@@ -1,6 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 3

View File

@@ -1,6 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 4

View File

@@ -1,4 +0,0 @@
replacements:
amd64: 64bit
386: 32bit
arm: ARM

View File

@@ -1,18 +0,0 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 4
---
apiVersion: v1
kind: Service
metadata:
name: hello-kubernetes
spec:
ports:
- protocol: TCP
port: 80
targetPort: 8080

View File

@@ -1,61 +0,0 @@
package yaml
import (
"context"
"io"
"os"
"path/filepath"
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func init() {
analyzer.RegisterAnalyzer(&yamlConfigAnalyzer{})
}
const version = 1
var requiredExts = []string{".yaml", ".yml"}
type yamlConfigAnalyzer struct{}
func (a yamlConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
b, err := io.ReadAll(input.Content)
if err != nil {
return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
}
return &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
// it will be passed to misconfig post handler
types.MisconfPostHandler: {
{
Type: types.YAML,
Path: input.FilePath,
Content: b,
},
},
},
}, nil
}
func (a yamlConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
ext := filepath.Ext(filePath)
for _, required := range requiredExts {
if ext == required {
return true
}
}
return false
}
func (yamlConfigAnalyzer) Type() analyzer.Type {
return analyzer.TypeYaml
}
func (yamlConfigAnalyzer) Version() int {
return version
}

View File

@@ -1,97 +0,0 @@
package yaml
import (
"context"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
tests := []struct {
name string
inputFile string
want *analyzer.AnalysisResult
wantErr string
}{
{
name: "happy path",
inputFile: "test.yaml",
want: &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{
types.MisconfPostHandler: {
{
Type: "yaml",
Path: "test.yaml",
Content: []byte(`- abc`),
},
},
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := strings.NewReader("- abc")
a := yamlConfigAnalyzer{}
ctx := context.Background()
got, err := a.Analyze(ctx, analyzer.AnalysisInput{
FilePath: tt.inputFile,
Content: r,
})
if tt.wantErr != "" {
require.NotNil(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
return
}
assert.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}
}
func Test_yamlConfigAnalyzer_Required(t *testing.T) {
tests := []struct {
name string
filePath string
want bool
}{
{
name: "yaml",
filePath: "deployment.yaml",
want: true,
},
{
name: "yml",
filePath: "deployment.yml",
want: true,
},
{
name: "json",
filePath: "deployment.json",
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s := yamlConfigAnalyzer{}
got := s.Required(tt.filePath, nil)
assert.Equal(t, tt.want, got)
})
}
}
func Test_yamlConfigAnalyzer_Type(t *testing.T) {
s := yamlConfigAnalyzer{}
want := analyzer.TypeYaml
got := s.Type()
assert.Equal(t, want, got)
}

View File

@@ -7,9 +7,9 @@ import (
"golang.org/x/exp/slices"
"golang.org/x/xerrors"
misconf "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/log"
"github.com/aquasecurity/trivy/pkg/misconf"
)
var configAnalyzerConstructors = map[Type]configAnalyzerConstructor{}

View File

@@ -1,5 +1,7 @@
package analyzer
import "github.com/aquasecurity/defsec/pkg/detection"
type Type string
const (
@@ -102,12 +104,12 @@ const (
// =================
// Structured Config
// =================
TypeYaml Type = "yaml"
TypeJSON Type = "json"
TypeDockerfile Type = "dockerfile"
TypeTerraform Type = "terraform"
TypeCloudFormation Type = "cloudFormation"
TypeHelm Type = "helm"
TypeAzureARM Type = Type(detection.FileTypeAzureARM)
TypeCloudFormation Type = Type(detection.FileTypeCloudFormation)
TypeDockerfile Type = Type(detection.FileTypeDockerfile)
TypeHelm Type = Type(detection.FileTypeHelm)
TypeKubernetes Type = Type(detection.FileTypeKubernetes)
TypeTerraform Type = Type(detection.FileTypeTerraform)
// ========
// License
@@ -211,11 +213,11 @@ var (
// TypeConfigFiles has all config file analyzers
TypeConfigFiles = []Type{
TypeYaml,
TypeJSON,
TypeDockerfile,
TypeTerraform,
TypeAzureARM,
TypeCloudFormation,
TypeDockerfile,
TypeHelm,
TypeKubernetes,
TypeTerraform,
}
)
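
With this change the structured-config analyzer types are no longer free-form strings; they alias defsec's detection.FileType constants, so the same value identifies a file both to defsec's detection logic and to the analyzer registry. A minimal sketch of what that aliasing buys, using only the defsec constant visible in this diff (the small program below is illustrative, not part of the commit):

package main

import (
	"fmt"

	"github.com/aquasecurity/defsec/pkg/detection"
)

// Type mirrors analyzer.Type from the diff above.
type Type string

// TypeTerraform is defined via the defsec constant, exactly as in the diff.
const TypeTerraform Type = Type(detection.FileTypeTerraform)

func main() {
	// Because the underlying string values are shared, converting between the
	// analyzer type and the defsec file type is a plain cast.
	fmt.Println(string(TypeTerraform) == string(detection.FileTypeTerraform)) // true
}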

View File

@@ -10,6 +10,7 @@ import (
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/mapfs"
"github.com/aquasecurity/trivy/pkg/misconf"
)
@@ -20,11 +21,11 @@ func init() {
}
type historyAnalyzer struct {
scanner misconf.Scanner
scanner *misconf.Scanner
}
func newHistoryAnalyzer(opts analyzer.ConfigAnalyzerOptions) (analyzer.ConfigAnalyzer, error) {
s, err := misconf.NewScanner(opts.FilePatterns, opts.MisconfScannerOption)
s, err := misconf.NewDockerfileScanner(opts.FilePatterns, opts.MisconfScannerOption)
if err != nil {
return nil, xerrors.Errorf("misconfiguration scanner error: %w", err)
}
@@ -70,15 +71,12 @@ func (a *historyAnalyzer) Analyze(ctx context.Context, input analyzer.ConfigAnal
dockerfile.WriteString(strings.TrimSpace(createdBy) + "\n")
}
files := []types.File{
{
Type: types.Dockerfile,
Path: "Dockerfile",
Content: dockerfile.Bytes(),
},
fsys := mapfs.New()
if err := fsys.WriteVirtualFile("Dockerfile", dockerfile.Bytes(), 0600); err != nil {
return nil, xerrors.Errorf("mapfs write error: %w", err)
}
misconfs, err := a.scanner.Scan(ctx, files)
misconfs, err := a.scanner.Scan(ctx, fsys)
if err != nil {
return nil, xerrors.Errorf("history scan error: %w", err)
}
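
The history analyzer no longer builds a []types.File for the misconfiguration post handler; it writes the synthesized Dockerfile into an in-memory mapfs and hands the whole filesystem to a Dockerfile-specific scanner. A minimal sketch of that flow, using only the calls visible in this diff (mapfs.New, WriteVirtualFile, misconf.NewDockerfileScanner, Scanner.Scan) and assuming a zero-value misconf.ScannerOption is enough to construct the scanner; it is illustrative, not part of the commit.

package main

import (
	"context"
	"fmt"

	"github.com/aquasecurity/trivy/pkg/mapfs"
	"github.com/aquasecurity/trivy/pkg/misconf"
)

func main() {
	// Build the same kind of in-memory filesystem the history analyzer now uses.
	fsys := mapfs.New()
	if err := fsys.WriteVirtualFile("Dockerfile", []byte("FROM alpine:3.17\n"), 0600); err != nil {
		panic(err)
	}

	// Assumption: a zero-value ScannerOption is accepted here; the analyzer itself
	// passes opts.FilePatterns and opts.MisconfScannerOption instead.
	s, err := misconf.NewDockerfileScanner(nil, misconf.ScannerOption{})
	if err != nil {
		panic(err)
	}

	// Scan the whole virtual filesystem rather than individual file contents.
	misconfs, err := s.Scan(context.Background(), fsys)
	if err != nil {
		panic(err)
	}
	fmt.Printf("found %d misconfiguration results\n", len(misconfs))
}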

View File

@@ -5,9 +5,9 @@ import (
"sort"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
misconf "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/fanal/walker"
"github.com/aquasecurity/trivy/pkg/misconf"
)
type Option struct {

View File

@@ -56,6 +56,7 @@ func NewArtifact(img types.Image, c cache.ArtifactCache, opt artifact.Option) (a
Slow: opt.Slow,
FilePatterns: opt.FilePatterns,
DisabledAnalyzers: opt.DisabledAnalyzers,
MisconfScannerOption: opt.MisconfScannerOption,
SecretScannerOption: opt.SecretScannerOption,
LicenseScannerOption: opt.LicenseScannerOption,
})
@@ -327,19 +328,20 @@ func (a Artifact) inspectLayer(ctx context.Context, layerInfo LayerInfo, disable
result.Sort()
blobInfo := types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: layerDigest,
DiffID: layerInfo.DiffID,
CreatedBy: layerInfo.CreatedBy,
OpaqueDirs: opqDirs,
WhiteoutFiles: whFiles,
OS: result.OS,
Repository: result.Repository,
PackageInfos: result.PackageInfos,
Applications: result.Applications,
Secrets: result.Secrets,
Licenses: result.Licenses,
CustomResources: result.CustomResources,
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: layerDigest,
DiffID: layerInfo.DiffID,
CreatedBy: layerInfo.CreatedBy,
OpaqueDirs: opqDirs,
WhiteoutFiles: whFiles,
OS: result.OS,
Repository: result.Repository,
PackageInfos: result.PackageInfos,
Applications: result.Applications,
Misconfigurations: result.Misconfigurations,
Secrets: result.Secrets,
Licenses: result.Licenses,
CustomResources: result.CustomResources,
// For Red Hat
BuildInfo: result.BuildInfo,

View File

@@ -12,6 +12,12 @@ import (
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
image2 "github.com/aquasecurity/trivy/pkg/fanal/artifact/image"
"github.com/aquasecurity/trivy/pkg/fanal/cache"
"github.com/aquasecurity/trivy/pkg/fanal/image"
"github.com/aquasecurity/trivy/pkg/fanal/types"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/imgconf/apk"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/php/composer"
@@ -23,13 +29,7 @@ import (
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/pkg/dpkg"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/repo/apk"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/secret"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
image2 "github.com/aquasecurity/trivy/pkg/fanal/artifact/image"
"github.com/aquasecurity/trivy/pkg/fanal/cache"
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/misconf"
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/sysfile"
"github.com/aquasecurity/trivy/pkg/fanal/image"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func TestArtifact_Inspect(t *testing.T) {
@@ -217,17 +217,17 @@ func TestArtifact_Inspect(t *testing.T) {
missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313",
BlobIDs: []string{"sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3"},
BlobIDs: []string{"sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46"},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingArtifact: true,
MissingBlobIDs: []string{"sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3"},
MissingBlobIDs: []string{"sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46"},
},
},
putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3",
BlobID: "sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -294,7 +294,7 @@ func TestArtifact_Inspect(t *testing.T) {
Name: "../../test/testdata/alpine-311.tar.gz",
Type: types.ArtifactContainerImage,
ID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313",
BlobIDs: []string{"sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3"},
BlobIDs: []string{"sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46"},
ImageMetadata: types.ImageMetadata{
ID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
DiffIDs: []string{
@@ -353,25 +353,25 @@ func TestArtifact_Inspect(t *testing.T) {
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650",
BlobIDs: []string{
"sha256:673f305ef9cede893bc9a1851da8152b1f7597321e06f551a1d875f20f947f5b",
"sha256:2886467019d514a49e74ce4507da571023c97798e3f0f3805e9c9826b5b993ef",
"sha256:f77cea0f8767d9520ea9001de1f1102e0e5e85ccf726c91271e3d63e963ab4d4",
"sha256:c5233a461c9ead1191adfa7a34d9cd66e6b319460939bbf0f085a3fa0faae635",
"sha256:1d02588865377e478a263c4ef2b020d8bf8d9919fdbd14243283b35249b91d4a",
"sha256:7b2d1df7e78b9e5c851676d9cc04bad8d7e86deb2661f0e15ff3d7f37bf53d53",
"sha256:57508fe06ce45edcad30f95a9da631edf746914b0ffa32fa13b83a133529828e",
"sha256:f8d6b5b326b6bad89cf20b94e1c98380187e536ec34795d18c00907f9a35aeb5",
},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingBlobIDs: []string{
"sha256:673f305ef9cede893bc9a1851da8152b1f7597321e06f551a1d875f20f947f5b",
"sha256:2886467019d514a49e74ce4507da571023c97798e3f0f3805e9c9826b5b993ef",
"sha256:f77cea0f8767d9520ea9001de1f1102e0e5e85ccf726c91271e3d63e963ab4d4",
"sha256:c5233a461c9ead1191adfa7a34d9cd66e6b319460939bbf0f085a3fa0faae635",
"sha256:1d02588865377e478a263c4ef2b020d8bf8d9919fdbd14243283b35249b91d4a",
"sha256:7b2d1df7e78b9e5c851676d9cc04bad8d7e86deb2661f0e15ff3d7f37bf53d53",
"sha256:57508fe06ce45edcad30f95a9da631edf746914b0ffa32fa13b83a133529828e",
"sha256:f8d6b5b326b6bad89cf20b94e1c98380187e536ec34795d18c00907f9a35aeb5",
},
},
},
putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:673f305ef9cede893bc9a1851da8152b1f7597321e06f551a1d875f20f947f5b",
BlobID: "sha256:1d02588865377e478a263c4ef2b020d8bf8d9919fdbd14243283b35249b91d4a",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -459,7 +459,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:2886467019d514a49e74ce4507da571023c97798e3f0f3805e9c9826b5b993ef",
BlobID: "sha256:7b2d1df7e78b9e5c851676d9cc04bad8d7e86deb2661f0e15ff3d7f37bf53d53",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -555,7 +555,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:f77cea0f8767d9520ea9001de1f1102e0e5e85ccf726c91271e3d63e963ab4d4",
BlobID: "sha256:57508fe06ce45edcad30f95a9da631edf746914b0ffa32fa13b83a133529828e",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -679,7 +679,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:c5233a461c9ead1191adfa7a34d9cd66e6b319460939bbf0f085a3fa0faae635",
BlobID: "sha256:f8d6b5b326b6bad89cf20b94e1c98380187e536ec34795d18c00907f9a35aeb5",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -1488,10 +1488,10 @@ func TestArtifact_Inspect(t *testing.T) {
Type: types.ArtifactContainerImage,
ID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650",
BlobIDs: []string{
"sha256:673f305ef9cede893bc9a1851da8152b1f7597321e06f551a1d875f20f947f5b",
"sha256:2886467019d514a49e74ce4507da571023c97798e3f0f3805e9c9826b5b993ef",
"sha256:f77cea0f8767d9520ea9001de1f1102e0e5e85ccf726c91271e3d63e963ab4d4",
"sha256:c5233a461c9ead1191adfa7a34d9cd66e6b319460939bbf0f085a3fa0faae635",
"sha256:1d02588865377e478a263c4ef2b020d8bf8d9919fdbd14243283b35249b91d4a",
"sha256:7b2d1df7e78b9e5c851676d9cc04bad8d7e86deb2661f0e15ff3d7f37bf53d53",
"sha256:57508fe06ce45edcad30f95a9da631edf746914b0ffa32fa13b83a133529828e",
"sha256:f8d6b5b326b6bad89cf20b94e1c98380187e536ec34795d18c00907f9a35aeb5",
},
ImageMetadata: types.ImageMetadata{
ID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4",
@@ -1585,25 +1585,25 @@ func TestArtifact_Inspect(t *testing.T) {
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650",
BlobIDs: []string{
"sha256:ce763fafc4c45bc6311188adfcd8b932fa42553f3324bb9ec8649e5f7c3f9f14",
"sha256:b3765fc11963a0c92cc8c8ef0c8a3c54c9a3111100ae69384049b2d7b15419ae",
"sha256:1bd6f23a3c252702080dd0e524f9ef13d8ff918e15b322fd8b5c2ceb9f5b8b4f",
"sha256:9589cedce50fd3d37c19f22a5653dece7a092edff293a598d15125eb2a4d8849",
"sha256:9a7c29b10391bcedce533e9609c58ec0e7b0132692fd287bd40592816d1bfbef",
"sha256:e15c92866a85305a909ae200974937d6febcd7a504aeb32ad0a01371c245c25e",
"sha256:6cfccd64a1b1ead1b517bad7dfda8aa0616f63a2d93e71921ff51cb70f447567",
"sha256:032128f06ff805d1ec38f171ea6ae60639175eb70bc80e2b3abc91f6fbfa343d",
},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingBlobIDs: []string{
"sha256:ce763fafc4c45bc6311188adfcd8b932fa42553f3324bb9ec8649e5f7c3f9f14",
"sha256:b3765fc11963a0c92cc8c8ef0c8a3c54c9a3111100ae69384049b2d7b15419ae",
"sha256:1bd6f23a3c252702080dd0e524f9ef13d8ff918e15b322fd8b5c2ceb9f5b8b4f",
"sha256:9589cedce50fd3d37c19f22a5653dece7a092edff293a598d15125eb2a4d8849",
"sha256:9a7c29b10391bcedce533e9609c58ec0e7b0132692fd287bd40592816d1bfbef",
"sha256:e15c92866a85305a909ae200974937d6febcd7a504aeb32ad0a01371c245c25e",
"sha256:6cfccd64a1b1ead1b517bad7dfda8aa0616f63a2d93e71921ff51cb70f447567",
"sha256:032128f06ff805d1ec38f171ea6ae60639175eb70bc80e2b3abc91f6fbfa343d",
},
},
},
putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:ce763fafc4c45bc6311188adfcd8b932fa42553f3324bb9ec8649e5f7c3f9f14",
BlobID: "sha256:9a7c29b10391bcedce533e9609c58ec0e7b0132692fd287bd40592816d1bfbef",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -1614,7 +1614,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:b3765fc11963a0c92cc8c8ef0c8a3c54c9a3111100ae69384049b2d7b15419ae",
BlobID: "sha256:e15c92866a85305a909ae200974937d6febcd7a504aeb32ad0a01371c245c25e",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -1625,7 +1625,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:1bd6f23a3c252702080dd0e524f9ef13d8ff918e15b322fd8b5c2ceb9f5b8b4f",
BlobID: "sha256:6cfccd64a1b1ead1b517bad7dfda8aa0616f63a2d93e71921ff51cb70f447567",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -1637,7 +1637,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:9589cedce50fd3d37c19f22a5653dece7a092edff293a598d15125eb2a4d8849",
BlobID: "sha256:032128f06ff805d1ec38f171ea6ae60639175eb70bc80e2b3abc91f6fbfa343d",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -1653,10 +1653,10 @@ func TestArtifact_Inspect(t *testing.T) {
Type: types.ArtifactContainerImage,
ID: "sha256:33f9415ed2cd5a9cef5d5144333619745b9ec0f851f0684dd45fa79c6b26a650",
BlobIDs: []string{
"sha256:ce763fafc4c45bc6311188adfcd8b932fa42553f3324bb9ec8649e5f7c3f9f14",
"sha256:b3765fc11963a0c92cc8c8ef0c8a3c54c9a3111100ae69384049b2d7b15419ae",
"sha256:1bd6f23a3c252702080dd0e524f9ef13d8ff918e15b322fd8b5c2ceb9f5b8b4f",
"sha256:9589cedce50fd3d37c19f22a5653dece7a092edff293a598d15125eb2a4d8849",
"sha256:9a7c29b10391bcedce533e9609c58ec0e7b0132692fd287bd40592816d1bfbef",
"sha256:e15c92866a85305a909ae200974937d6febcd7a504aeb32ad0a01371c245c25e",
"sha256:6cfccd64a1b1ead1b517bad7dfda8aa0616f63a2d93e71921ff51cb70f447567",
"sha256:032128f06ff805d1ec38f171ea6ae60639175eb70bc80e2b3abc91f6fbfa343d",
},
ImageMetadata: types.ImageMetadata{
ID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4",
@@ -1739,7 +1739,7 @@ func TestArtifact_Inspect(t *testing.T) {
missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313",
BlobIDs: []string{"sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3"},
BlobIDs: []string{"sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46"},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
Err: xerrors.New("MissingBlobs failed"),
@@ -1753,16 +1753,16 @@ func TestArtifact_Inspect(t *testing.T) {
missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313",
BlobIDs: []string{"sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3"},
BlobIDs: []string{"sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46"},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingBlobIDs: []string{"sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3"},
MissingBlobIDs: []string{"sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46"},
},
},
putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3",
BlobID: "sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",
@@ -1821,17 +1821,17 @@ func TestArtifact_Inspect(t *testing.T) {
missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
Args: cache.ArtifactCacheMissingBlobsArgs{
ArtifactID: "sha256:c232b7d8ac8aa08aa767313d0b53084c4380d1c01a213a5971bdb039e6538313",
BlobIDs: []string{"sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3"},
BlobIDs: []string{"sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46"},
},
Returns: cache.ArtifactCacheMissingBlobsReturns{
MissingArtifact: true,
MissingBlobIDs: []string{"sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3"},
MissingBlobIDs: []string{"sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46"},
},
},
putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:1ee72875fbb6def206801205982d81b4c2be24974906823266224527badad8e3",
BlobID: "sha256:61da8ea7801a711b5fdd7e11c47471bb98bc0537fb50bef3f46e7b67e2d90f46",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Digest: "",

View File

@@ -47,6 +47,7 @@ func NewArtifact(rootPath string, c cache.ArtifactCache, opt artifact.Option) (a
Slow: opt.Slow,
FilePatterns: opt.FilePatterns,
DisabledAnalyzers: opt.DisabledAnalyzers,
MisconfScannerOption: opt.MisconfScannerOption,
SecretScannerOption: opt.SecretScannerOption,
LicenseScannerOption: opt.LicenseScannerOption,
})
@@ -169,14 +170,15 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error)
result.Sort()
blobInfo := types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
OS: result.OS,
Repository: result.Repository,
PackageInfos: result.PackageInfos,
Applications: result.Applications,
Secrets: result.Secrets,
Licenses: result.Licenses,
CustomResources: result.CustomResources,
SchemaVersion: types.BlobJSONSchemaVersion,
OS: result.OS,
Repository: result.Repository,
PackageInfos: result.PackageInfos,
Applications: result.Applications,
Misconfigurations: result.Misconfigurations,
Secrets: result.Secrets,
Licenses: result.Licenses,
CustomResources: result.CustomResources,
}
if err = a.handlerManager.PostHandle(ctx, result, &blobInfo); err != nil {

View File

@@ -12,17 +12,16 @@ import (
"golang.org/x/exp/slices"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
"github.com/aquasecurity/trivy/pkg/fanal/cache"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/misconf"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/python/pip"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os/alpine"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/pkg/apk"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/secret"
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/misconf"
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/sysfile"
)
@@ -34,7 +33,7 @@ func TestArtifact_Inspect(t *testing.T) {
name string
fields fields
artifactOpt artifact.Option
scannerOpt config.ScannerOption
scannerOpt misconf.ScannerOption
disabledAnalyzers []analyzer.Type
disabledHandlers []types.HandlerType
putBlobExpectation cache.ArtifactCachePutBlobExpectation
@@ -48,7 +47,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:40ca14c99b2b22a5f78c1d1a2cbfeeaa3243e3fe1cf150839209ca3b5a897e62",
BlobID: "sha256:fc0c7d225197e1c103784139def1e34b642e8183cf54519cac79dd0cfdd19aba",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
OS: types.OS{
@@ -77,9 +76,9 @@ func TestArtifact_Inspect(t *testing.T) {
want: types.ArtifactReference{
Name: "host",
Type: types.ArtifactFilesystem,
ID: "sha256:40ca14c99b2b22a5f78c1d1a2cbfeeaa3243e3fe1cf150839209ca3b5a897e62",
ID: "sha256:fc0c7d225197e1c103784139def1e34b642e8183cf54519cac79dd0cfdd19aba",
BlobIDs: []string{
"sha256:40ca14c99b2b22a5f78c1d1a2cbfeeaa3243e3fe1cf150839209ca3b5a897e62",
"sha256:fc0c7d225197e1c103784139def1e34b642e8183cf54519cac79dd0cfdd19aba",
},
},
},
@@ -97,7 +96,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:8a4332f0b77c97330369206f2e1d144bfa4cd58ccba42a61d3618da8267435c8",
BlobID: "sha256:0fbf0f996ea580c0a7408a34290f2f061e6577995cd63c475ecf8b262a7622d1",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
},
@@ -107,9 +106,9 @@ func TestArtifact_Inspect(t *testing.T) {
want: types.ArtifactReference{
Name: "host",
Type: types.ArtifactFilesystem,
ID: "sha256:8a4332f0b77c97330369206f2e1d144bfa4cd58ccba42a61d3618da8267435c8",
ID: "sha256:0fbf0f996ea580c0a7408a34290f2f061e6577995cd63c475ecf8b262a7622d1",
BlobIDs: []string{
"sha256:8a4332f0b77c97330369206f2e1d144bfa4cd58ccba42a61d3618da8267435c8",
"sha256:0fbf0f996ea580c0a7408a34290f2f061e6577995cd63c475ecf8b262a7622d1",
},
},
},
@@ -120,7 +119,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:40ca14c99b2b22a5f78c1d1a2cbfeeaa3243e3fe1cf150839209ca3b5a897e62",
BlobID: "sha256:fc0c7d225197e1c103784139def1e34b642e8183cf54519cac79dd0cfdd19aba",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
OS: types.OS{
@@ -164,7 +163,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:45358d29778e36270f6fafd84e45e175e7aae7c0101b72eef99cee6dc598f5d4",
BlobID: "sha256:874588e7714441c06344a11526d73fe4d8c386d85e6d5498eab3cde13cae05ac",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Applications: []types.Application{
@@ -186,9 +185,9 @@ func TestArtifact_Inspect(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/requirements.txt",
Type: types.ArtifactFilesystem,
ID: "sha256:45358d29778e36270f6fafd84e45e175e7aae7c0101b72eef99cee6dc598f5d4",
ID: "sha256:874588e7714441c06344a11526d73fe4d8c386d85e6d5498eab3cde13cae05ac",
BlobIDs: []string{
"sha256:45358d29778e36270f6fafd84e45e175e7aae7c0101b72eef99cee6dc598f5d4",
"sha256:874588e7714441c06344a11526d73fe4d8c386d85e6d5498eab3cde13cae05ac",
},
},
},
@@ -199,7 +198,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:45358d29778e36270f6fafd84e45e175e7aae7c0101b72eef99cee6dc598f5d4",
BlobID: "sha256:874588e7714441c06344a11526d73fe4d8c386d85e6d5498eab3cde13cae05ac",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
Applications: []types.Application{
@@ -221,9 +220,9 @@ func TestArtifact_Inspect(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/requirements.txt",
Type: types.ArtifactFilesystem,
ID: "sha256:45358d29778e36270f6fafd84e45e175e7aae7c0101b72eef99cee6dc598f5d4",
ID: "sha256:874588e7714441c06344a11526d73fe4d8c386d85e6d5498eab3cde13cae05ac",
BlobIDs: []string{
"sha256:45358d29778e36270f6fafd84e45e175e7aae7c0101b72eef99cee6dc598f5d4",
"sha256:874588e7714441c06344a11526d73fe4d8c386d85e6d5498eab3cde13cae05ac",
},
},
},
@@ -367,7 +366,7 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/terraform/single-failure/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/terraform/single-failure/rego"},
@@ -415,9 +414,9 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/terraform/single-failure/src",
Type: types.ArtifactFilesystem,
ID: "sha256:b3ae72efb468a0e17551fa4067c1f9d9dff9a1e520b9f8191f48829ab6e8356d",
ID: "sha256:b17b243265d2c555c049753f42c76d3dc478d55851cc9461cbd23e618cb8f0eb",
BlobIDs: []string{
"sha256:b3ae72efb468a0e17551fa4067c1f9d9dff9a1e520b9f8191f48829ab6e8356d",
"sha256:b17b243265d2c555c049753f42c76d3dc478d55851cc9461cbd23e618cb8f0eb",
},
},
},
@@ -427,7 +426,7 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/terraform/multiple-failures/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/terraform/multiple-failures/rego"},
@@ -525,9 +524,9 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/terraform/multiple-failures/src",
Type: types.ArtifactFilesystem,
ID: "sha256:340197e6c02b644e4d1310647e5b2503c224caeaeb5be01187467b71827614ce",
ID: "sha256:4f9b3fe0f3d7b75fa7120740fe2f179eb5c250646d30186f47bc5eb148a77229",
BlobIDs: []string{
"sha256:340197e6c02b644e4d1310647e5b2503c224caeaeb5be01187467b71827614ce",
"sha256:4f9b3fe0f3d7b75fa7120740fe2f179eb5c250646d30186f47bc5eb148a77229",
},
},
},
@@ -537,7 +536,7 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/terraform/no-results/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/terraform/no-results/rego"},
@@ -555,9 +554,9 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/terraform/no-results/src",
Type: types.ArtifactFilesystem,
ID: "sha256:1694d46ecb8151fde496faca988441a78c4fe40ddb3049f4f59467282ab9853e",
ID: "sha256:cf90e43f7fb29358faf6f486db722ee739122347ec94839c7f3861489f242213",
BlobIDs: []string{
"sha256:1694d46ecb8151fde496faca988441a78c4fe40ddb3049f4f59467282ab9853e",
"sha256:cf90e43f7fb29358faf6f486db722ee739122347ec94839c7f3861489f242213",
},
},
},
@@ -567,7 +566,7 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/terraform/passed/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/terraform/passed/rego"},
@@ -611,9 +610,91 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/terraform/passed/src",
Type: types.ArtifactFilesystem,
ID: "sha256:ee80e0b4d07abe98c88119a1dc3d5cfd3b5f3cfda3b52cf3e566d7abc15f072b",
ID: "sha256:4a7baddfc7b3e06e3f246c0680d879aecf539a07a08b48ae5710e997ec486d75",
BlobIDs: []string{
"sha256:ee80e0b4d07abe98c88119a1dc3d5cfd3b5f3cfda3b52cf3e566d7abc15f072b",
"sha256:4a7baddfc7b3e06e3f246c0680d879aecf539a07a08b48ae5710e997ec486d75",
},
},
},
{
name: "multiple failures busted relative paths",
fields: fields{
dir: "./testdata/misconfig/terraform/busted-relative-paths/src/child/main.tf",
},
artifactOpt: artifact.Option{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/terraform/busted-relative-paths/rego"},
},
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobIDAnything: true,
BlobInfo: types.BlobInfo{
SchemaVersion: 2,
Misconfigurations: []types.Misconfiguration{
{
FileType: "terraform",
FilePath: "main.tf",
Failures: types.MisconfResults{
{
Namespace: "user.something",
Query: "data.user.something.deny",
Message: "No buckets allowed!",
PolicyMetadata: types.PolicyMetadata{
ID: "TEST001",
AVDID: "AVD-TEST-0001",
Type: "Terraform Security Check",
Title: "Test policy",
Description: "This is a test policy.",
Severity: "LOW",
RecommendedActions: "Have a cup of tea.",
References: []string{"https://trivy.dev/"},
},
CauseMetadata: types.CauseMetadata{
Resource: "aws_s3_bucket.one",
Provider: "Generic",
Service: "general",
StartLine: 1,
EndLine: 3,
},
},
{
Namespace: "user.something",
Query: "data.user.something.deny",
Message: "No buckets allowed!",
PolicyMetadata: types.PolicyMetadata{
ID: "TEST001",
AVDID: "AVD-TEST-0001",
Type: "Terraform Security Check",
Title: "Test policy",
Description: "This is a test policy.",
Severity: "LOW",
RecommendedActions: "Have a cup of tea.",
References: []string{"https://trivy.dev/"},
},
CauseMetadata: types.CauseMetadata{
Resource: "aws_s3_bucket.two",
Provider: "Generic",
Service: "general",
StartLine: 5,
EndLine: 7,
},
},
},
},
},
},
},
Returns: cache.ArtifactCachePutBlobReturns{},
},
want: types.ArtifactReference{
Name: "testdata/misconfig/terraform/busted-relative-paths/src/child/main.tf",
Type: types.ArtifactFilesystem,
ID: "sha256:3f85f73698c7f29b181030749808d634575547aecab68d17c114fefaaa67f990",
BlobIDs: []string{
"sha256:3f85f73698c7f29b181030749808d634575547aecab68d17c114fefaaa67f990",
},
},
},
@@ -652,7 +733,7 @@ func TestCloudFormationMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/cloudformation/single-failure/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/cloudformation/single-failure/rego"},
@@ -700,9 +781,9 @@ func TestCloudFormationMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/cloudformation/single-failure/src",
Type: types.ArtifactFilesystem,
ID: "sha256:f276c65a15cc7e54ec100f4abb9446b44da8cc39ff02596109faa44baf3e88b5",
ID: "sha256:3b083e0be1a8bfd270abc53a573d5491c3a39c41b88f4e978b0c48e79754e12a",
BlobIDs: []string{
"sha256:f276c65a15cc7e54ec100f4abb9446b44da8cc39ff02596109faa44baf3e88b5",
"sha256:3b083e0be1a8bfd270abc53a573d5491c3a39c41b88f4e978b0c48e79754e12a",
},
},
},
@@ -712,7 +793,7 @@ func TestCloudFormationMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/cloudformation/multiple-failures/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/cloudformation/multiple-failures/rego"},
@@ -782,9 +863,9 @@ func TestCloudFormationMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/cloudformation/multiple-failures/src",
Type: types.ArtifactFilesystem,
ID: "sha256:e4676a9d8ff2a2da3c63f61e34759cc53f504588c72625f241f6f0ee43db8ef9",
ID: "sha256:e167a0b5edc1a723a6f6e37adfd72ebf5cd05a578d69a564cba2a2954f47ea5e",
BlobIDs: []string{
"sha256:e4676a9d8ff2a2da3c63f61e34759cc53f504588c72625f241f6f0ee43db8ef9",
"sha256:e167a0b5edc1a723a6f6e37adfd72ebf5cd05a578d69a564cba2a2954f47ea5e",
},
},
},
@@ -794,7 +875,7 @@ func TestCloudFormationMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/cloudformation/no-results/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/cloudformation/no-results/rego"},
@@ -812,9 +893,9 @@ func TestCloudFormationMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/cloudformation/no-results/src",
Type: types.ArtifactFilesystem,
ID: "sha256:1694d46ecb8151fde496faca988441a78c4fe40ddb3049f4f59467282ab9853e",
ID: "sha256:cf90e43f7fb29358faf6f486db722ee739122347ec94839c7f3861489f242213",
BlobIDs: []string{
"sha256:1694d46ecb8151fde496faca988441a78c4fe40ddb3049f4f59467282ab9853e",
"sha256:cf90e43f7fb29358faf6f486db722ee739122347ec94839c7f3861489f242213",
},
},
},
@@ -824,7 +905,7 @@ func TestCloudFormationMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/cloudformation/passed/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/cloudformation/passed/rego"},
@@ -868,9 +949,9 @@ func TestCloudFormationMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/cloudformation/passed/src",
Type: types.ArtifactFilesystem,
ID: "sha256:dda92729eda80706a78c3bf2f118948c175a818305c7bd9f71336b9b795d2776",
ID: "sha256:68de62641f1c26e9973cc699aa7f84f3cb02a305d73238eba6cace5d749e4549",
BlobIDs: []string{
"sha256:dda92729eda80706a78c3bf2f118948c175a818305c7bd9f71336b9b795d2776",
"sha256:68de62641f1c26e9973cc699aa7f84f3cb02a305d73238eba6cace5d749e4549",
},
},
},
@@ -909,7 +990,7 @@ func TestDockerfileMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/dockerfile/single-failure/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/dockerfile/single-failure/rego"},
@@ -954,9 +1035,9 @@ func TestDockerfileMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/dockerfile/single-failure/src",
Type: types.ArtifactFilesystem,
ID: "sha256:80337e1de2fb019bd8e43c88cb532f4715cf58384063ef7c63ef5f55e7eb4a5c",
ID: "sha256:998908fee16ac8aa658138e5bda73f5ffba4f1d194e9d3b3e274a8082b4af580",
BlobIDs: []string{
"sha256:80337e1de2fb019bd8e43c88cb532f4715cf58384063ef7c63ef5f55e7eb4a5c",
"sha256:998908fee16ac8aa658138e5bda73f5ffba4f1d194e9d3b3e274a8082b4af580",
},
},
},
@@ -966,7 +1047,7 @@ func TestDockerfileMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/dockerfile/multiple-failures/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/dockerfile/multiple-failures/rego"},
@@ -1011,9 +1092,9 @@ func TestDockerfileMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/dockerfile/multiple-failures/src",
Type: types.ArtifactFilesystem,
ID: "sha256:80337e1de2fb019bd8e43c88cb532f4715cf58384063ef7c63ef5f55e7eb4a5c",
ID: "sha256:998908fee16ac8aa658138e5bda73f5ffba4f1d194e9d3b3e274a8082b4af580",
BlobIDs: []string{
"sha256:80337e1de2fb019bd8e43c88cb532f4715cf58384063ef7c63ef5f55e7eb4a5c",
"sha256:998908fee16ac8aa658138e5bda73f5ffba4f1d194e9d3b3e274a8082b4af580",
},
},
},
@@ -1023,7 +1104,7 @@ func TestDockerfileMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/dockerfile/no-results/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/dockerfile/no-results/rego"},
@@ -1041,9 +1122,9 @@ func TestDockerfileMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/dockerfile/no-results/src",
Type: types.ArtifactFilesystem,
ID: "sha256:1694d46ecb8151fde496faca988441a78c4fe40ddb3049f4f59467282ab9853e",
ID: "sha256:cf90e43f7fb29358faf6f486db722ee739122347ec94839c7f3861489f242213",
BlobIDs: []string{
"sha256:1694d46ecb8151fde496faca988441a78c4fe40ddb3049f4f59467282ab9853e",
"sha256:cf90e43f7fb29358faf6f486db722ee739122347ec94839c7f3861489f242213",
},
},
},
@@ -1053,7 +1134,7 @@ func TestDockerfileMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/dockerfile/passed/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/dockerfile/passed/rego"},
@@ -1100,9 +1181,9 @@ func TestDockerfileMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/dockerfile/passed/src",
Type: types.ArtifactFilesystem,
ID: "sha256:165d6b849191f10ab1e2834cea9da9decbd6bf005efdb2e4afcef6df0ec53955",
ID: "sha256:836ab9fec50d3ff799f01dee1db9d5340294fa0348011370d55848be04696f6b",
BlobIDs: []string{
"sha256:165d6b849191f10ab1e2834cea9da9decbd6bf005efdb2e4afcef6df0ec53955",
"sha256:836ab9fec50d3ff799f01dee1db9d5340294fa0348011370d55848be04696f6b",
},
},
},
@@ -1141,7 +1222,7 @@ func TestKubernetesMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/kubernetes/single-failure/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/kubernetes/single-failure/rego"},
@@ -1191,9 +1272,9 @@ func TestKubernetesMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/kubernetes/single-failure/src",
Type: types.ArtifactFilesystem,
ID: "sha256:6502a485fddeaac944a70b7e25dec5a779ae7dc10a64dbb8acfc08bec5a207a0",
ID: "sha256:c2ff22ba22599a7b5423c1b275b013aab56cc22eb732624db4b1bfbdf6b62743",
BlobIDs: []string{
"sha256:6502a485fddeaac944a70b7e25dec5a779ae7dc10a64dbb8acfc08bec5a207a0",
"sha256:c2ff22ba22599a7b5423c1b275b013aab56cc22eb732624db4b1bfbdf6b62743",
},
},
},
@@ -1203,7 +1284,7 @@ func TestKubernetesMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/kubernetes/multiple-failures/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/kubernetes/multiple-failures/rego"},
@@ -1276,9 +1357,9 @@ func TestKubernetesMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/kubernetes/multiple-failures/src",
Type: types.ArtifactFilesystem,
ID: "sha256:12db0860b146463e15a2e5143742c7268e1de1d3f3655f669891d7f532934734",
ID: "sha256:485e85ab412143ca5ab48f09c2a3dcacf8283c28c4f451a4b63d377ba2a21c15",
BlobIDs: []string{
"sha256:12db0860b146463e15a2e5143742c7268e1de1d3f3655f669891d7f532934734",
"sha256:485e85ab412143ca5ab48f09c2a3dcacf8283c28c4f451a4b63d377ba2a21c15",
},
},
},
@@ -1288,7 +1369,7 @@ func TestKubernetesMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/kubernetes/no-results/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/kubernetes/no-results/rego"},
@@ -1306,9 +1387,9 @@ func TestKubernetesMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/kubernetes/no-results/src",
Type: types.ArtifactFilesystem,
ID: "sha256:20043d42935fe45a25fd24949d6efad9d7fd52674bad6b8d29a4af97ed485e7a",
ID: "sha256:e68b0e0ac19f7ef311025a3dd587cab0512e51cd19f80e3a8f7dde342979933a",
BlobIDs: []string{
"sha256:20043d42935fe45a25fd24949d6efad9d7fd52674bad6b8d29a4af97ed485e7a",
"sha256:e68b0e0ac19f7ef311025a3dd587cab0512e51cd19f80e3a8f7dde342979933a",
},
},
},
@@ -1318,7 +1399,7 @@ func TestKubernetesMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/kubernetes/passed/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/kubernetes/passed/rego"},
@@ -1365,9 +1446,9 @@ func TestKubernetesMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/kubernetes/passed/src",
Type: types.ArtifactFilesystem,
ID: "sha256:7f3de08246eabb3277e4ec95e65d8e15b6fe4b50eb0414fd043690b94c08cbb3",
ID: "sha256:f6d3e8b62915ad822e643fabf146214e8e3a8349ea2ba509366748e858a42159",
BlobIDs: []string{
"sha256:7f3de08246eabb3277e4ec95e65d8e15b6fe4b50eb0414fd043690b94c08cbb3",
"sha256:f6d3e8b62915ad822e643fabf146214e8e3a8349ea2ba509366748e858a42159",
},
},
},
@@ -1406,7 +1487,7 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/azurearm/single-failure/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/azurearm/single-failure/rego"},
@@ -1454,9 +1535,9 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/azurearm/single-failure/src",
Type: types.ArtifactFilesystem,
ID: "sha256:8d6ebb66f1cb65e92f90114b5b4555d53f8c8051a8a27b5a717644668d76e3a3",
ID: "sha256:96697231b9abb6529c3fab2df31316730edd53ec2e8fbb5f7dbd2179e1c8bf3b",
BlobIDs: []string{
"sha256:8d6ebb66f1cb65e92f90114b5b4555d53f8c8051a8a27b5a717644668d76e3a3",
"sha256:96697231b9abb6529c3fab2df31316730edd53ec2e8fbb5f7dbd2179e1c8bf3b",
},
},
},
@@ -1466,7 +1547,7 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/azurearm/multiple-failures/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/azurearm/multiple-failures/rego"},
@@ -1536,9 +1617,9 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/azurearm/multiple-failures/src",
Type: types.ArtifactFilesystem,
ID: "sha256:640e36de479b97e639dc361863540467ad80caf6b0024555f64171e31a055cc5",
ID: "sha256:682771ea4115a19d2835e83c0b5b49caf9a5f97664c69c2f9f5c18eae34cac88",
BlobIDs: []string{
"sha256:640e36de479b97e639dc361863540467ad80caf6b0024555f64171e31a055cc5",
"sha256:682771ea4115a19d2835e83c0b5b49caf9a5f97664c69c2f9f5c18eae34cac88",
},
},
},
@@ -1548,7 +1629,7 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/azurearm/no-results/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/azurearm/no-results/rego"},
@@ -1566,9 +1647,9 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/azurearm/no-results/src",
Type: types.ArtifactFilesystem,
ID: "sha256:1694d46ecb8151fde496faca988441a78c4fe40ddb3049f4f59467282ab9853e",
ID: "sha256:cf90e43f7fb29358faf6f486db722ee739122347ec94839c7f3861489f242213",
BlobIDs: []string{
"sha256:1694d46ecb8151fde496faca988441a78c4fe40ddb3049f4f59467282ab9853e",
"sha256:cf90e43f7fb29358faf6f486db722ee739122347ec94839c7f3861489f242213",
},
},
},
@@ -1578,7 +1659,7 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
dir: "./testdata/misconfig/azurearm/passed/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/azurearm/passed/rego"},
@@ -1622,9 +1703,9 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
want: types.ArtifactReference{
Name: "testdata/misconfig/azurearm/passed/src",
Type: types.ArtifactFilesystem,
ID: "sha256:f90eabc657cedc83e3ebfd3c8f8840d4fea55a73f813e0a904ac4b0ae76f39ce",
ID: "sha256:e266ca6dc704e6d71a55370d65bd72c5a6bcbb38eb2cff19db827863c4af68f3",
BlobIDs: []string{
"sha256:f90eabc657cedc83e3ebfd3c8f8840d4fea55a73f813e0a904ac4b0ae76f39ce",
"sha256:e266ca6dc704e6d71a55370d65bd72c5a6bcbb38eb2cff19db827863c4af68f3",
},
},
},
@@ -1645,3 +1726,121 @@ func TestAzureARMMisconfigurationScan(t *testing.T) {
})
}
}
func TestMixedConfigurationScan(t *testing.T) {
type fields struct {
dir string
}
tests := []struct {
name string
fields fields
putBlobExpectation cache.ArtifactCachePutBlobExpectation
artifactOpt artifact.Option
want types.ArtifactReference
}{
{
name: "single failure each within terraform and cloudformation",
fields: fields{
dir: "./testdata/misconfig/mixed/src",
},
artifactOpt: artifact.Option{
MisconfScannerOption: misconf.ScannerOption{
RegoOnly: true,
Namespaces: []string{"user"},
PolicyPaths: []string{"./testdata/misconfig/mixed/rego"},
},
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobIDAnything: true,
BlobInfo: types.BlobInfo{
SchemaVersion: 2,
Misconfigurations: []types.Misconfiguration{
{
FileType: "terraform",
FilePath: "main.tf",
Failures: types.MisconfResults{
{
Namespace: "user.something",
Query: "data.user.something.deny",
Message: "No buckets allowed!",
PolicyMetadata: types.PolicyMetadata{
ID: "TEST001",
AVDID: "AVD-TEST-0001",
Type: "Terraform Security Check",
Title: "Test policy",
Description: "This is a test policy.",
Severity: "LOW",
RecommendedActions: "Have a cup of tea.",
References: []string{"https://trivy.dev/"},
},
CauseMetadata: types.CauseMetadata{
Resource: "aws_s3_bucket.asd",
Provider: "Generic",
Service: "general",
StartLine: 1,
EndLine: 3,
},
},
},
},
{
FileType: "cloudformation",
FilePath: "main.yaml",
Failures: types.MisconfResults{
{
Namespace: "user.something",
Query: "data.user.something.deny",
Message: "No buckets allowed!",
PolicyMetadata: types.PolicyMetadata{
ID: "TEST001",
AVDID: "AVD-TEST-0001",
Type: "CloudFormation Security Check",
Title: "Test policy",
Description: "This is a test policy.",
Severity: "LOW",
RecommendedActions: "Have a cup of tea.",
References: []string{"https://trivy.dev/"},
},
CauseMetadata: types.CauseMetadata{
Resource: "main.yaml:3-6",
Provider: "Generic",
Service: "general",
StartLine: 3,
EndLine: 6,
},
},
},
},
},
},
},
Returns: cache.ArtifactCachePutBlobReturns{},
},
want: types.ArtifactReference{
Name: "testdata/misconfig/mixed/src",
Type: types.ArtifactFilesystem,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
c := new(cache.MockArtifactCache)
c.ApplyPutBlobExpectation(tt.putBlobExpectation)
tt.artifactOpt.DisabledHandlers = []types.HandlerType{
types.SystemFileFilteringPostHandler,
}
a, err := NewArtifact(tt.fields.dir, c, tt.artifactOpt)
require.NoError(t, err)
got, err := a.Inspect(context.Background())
require.NoError(t, err)
require.NotNil(t, got)
assert.Equal(t, tt.want.Name, got.Name)
assert.Equal(t, tt.want.Type, got.Type)
})
}
}
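The new-file hunks that follow supply the fixtures this test points at: the user.something Rego policy, a one-resource Terraform file, and a CloudFormation template declaring a single S3 bucket. Their file names are not shown in this view, but the options above suggest they live under testdata/misconfig/mixed/. Note that only the artifact Name and Type are asserted here (and BlobIDAnything is set), presumably so the test stays stable as analyzer versions change the calculated digests.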

View File

@@ -0,0 +1,32 @@
package user.something
__rego_metadata__ := {
"id": "TEST001",
"avd_id": "AVD-TEST-0001",
"title": "Test policy",
"short_code": "no-buckets",
"severity": "LOW",
"description": "This is a test policy.",
"recommended_actions": "Have a cup of tea.",
"url": "https://trivy.dev/",
}
# taken from defsec rego lib to mimic behaviour
result(msg, cause) = result {
metadata := object.get(cause, "__defsec_metadata", cause)
result := {
"msg": msg,
"startline": object.get(metadata, "startline", 0),
"endline": object.get(metadata, "endline", 0),
"filepath": object.get(metadata, "filepath", ""),
"explicit": object.get(metadata, "explicit", false),
"managed": object.get(metadata, "managed", true),
"fskey": object.get(metadata, "fskey", ""),
"resource": object.get(metadata, "resource", ""),
}
}
deny[res] {
bucket := input.aws.s3.buckets[_]
res := result("No buckets allowed!", bucket)
}

View File

@@ -0,0 +1,3 @@
resource "aws_s3_bucket" "asd" {
}

View File

@@ -0,0 +1,6 @@
---
Resources:
S3Bucket:
Type: 'AWS::S3::Bucket'
Properties:
BucketName: public-bucket

View File

@@ -0,0 +1,32 @@
package user.something
__rego_metadata__ := {
"id": "TEST001",
"avd_id": "AVD-TEST-0001",
"title": "Test policy",
"short_code": "no-buckets",
"severity": "LOW",
"description": "This is a test policy.",
"recommended_actions": "Have a cup of tea.",
"url": "https://trivy.dev/",
}
# taken from defsec rego lib to mimic behaviour
result(msg, cause) = result {
metadata := object.get(cause, "__defsec_metadata", cause)
result := {
"msg": msg,
"startline": object.get(metadata, "startline", 0),
"endline": object.get(metadata, "endline", 0),
"filepath": object.get(metadata, "filepath", ""),
"explicit": object.get(metadata, "explicit", false),
"managed": object.get(metadata, "managed", true),
"fskey": object.get(metadata, "fskey", ""),
"resource": object.get(metadata, "resource", ""),
}
}
deny[res] {
bucket := input.aws.s3.buckets[_]
res := result("No buckets allowed!", bucket)
}

View File

@@ -0,0 +1,11 @@
resource "aws_s3_bucket" "one" {
}
resource "aws_s3_bucket" "two" {
}
module "module_in_parent_dir" {
source = "../does not exist anywhere/"
}

View File

@@ -0,0 +1,3 @@
resource "aws_s3_bucket" "three" {
}

View File

@@ -184,9 +184,9 @@ func TestArtifact_Inspect(t *testing.T) {
want: types.ArtifactReference{
Name: ts.URL + "/test.git",
Type: types.ArtifactRemoteRepository,
ID: "sha256:43256f1a50997b78fd91690ac248cde42d56ca996201a596282e9d84e1dccaeb",
ID: "sha256:37247f99bb62bd4b866758a2aff29374eba956dc82a73430efbf405f5a2fd60b",
BlobIDs: []string{
"sha256:43256f1a50997b78fd91690ac248cde42d56ca996201a596282e9d84e1dccaeb",
"sha256:37247f99bb62bd4b866758a2aff29374eba956dc82a73430efbf405f5a2fd60b",
},
},
},

View File

@@ -90,6 +90,7 @@ func NewArtifact(target string, c cache.ArtifactCache, opt artifact.Option) (art
Group: opt.AnalyzerGroup,
FilePatterns: opt.FilePatterns,
DisabledAnalyzers: opt.DisabledAnalyzers,
MisconfScannerOption: opt.MisconfScannerOption,
SecretScannerOption: opt.SecretScannerOption,
LicenseScannerOption: opt.LicenseScannerOption,
})

View File

@@ -12,11 +12,11 @@ import (
"github.com/aquasecurity/trivy/internal/testutil"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
"github.com/aquasecurity/trivy/pkg/fanal/artifact/vm"
"github.com/aquasecurity/trivy/pkg/fanal/cache"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/misconf"
ebsfile "github.com/masahiro331/go-ebs-file"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os/amazonlinux"
@@ -72,7 +72,7 @@ func TestArtifact_Inspect(t *testing.T) {
name string
filePath string
artifactOpt artifact.Option
scannerOpt config.ScannerOption
scannerOpt misconf.ScannerOption
disabledAnalyzers []analyzer.Type
disabledHandlers []types.HandlerType
missingBlobsExpectation cache.ArtifactCacheMissingBlobsExpectation

View File

@@ -1,14 +1,14 @@
package cache
import (
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
"github.com/aquasecurity/trivy/pkg/misconf"
)
func TestCalcKey(t *testing.T) {
@@ -183,7 +183,7 @@ func TestCalcKey(t *testing.T) {
SkipDirs: tt.args.skipDirs,
FilePatterns: tt.args.patterns,
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
PolicyPaths: tt.args.policy,
DataPaths: tt.args.data,
},

View File

@@ -5,12 +5,12 @@ import (
"errors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/applier"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
"github.com/aquasecurity/trivy/pkg/fanal/artifact/local"
"github.com/aquasecurity/trivy/pkg/fanal/cache"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/misconf"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all"
)
@@ -41,7 +41,7 @@ func NewConfigScanner(cacheDir string, policyPaths, dataPaths, namespaces []stri
func (s ConfigScanner) Scan(dir string) ([]types.Misconfiguration, error) {
art, err := local.NewArtifact(dir, s.cache, artifact.Option{
MisconfScannerOption: config.ScannerOption{
MisconfScannerOption: misconf.ScannerOption{
PolicyPaths: s.policyPaths,
DataPaths: s.dataPaths,
Namespaces: s.namespaces,

View File

@@ -8,8 +8,9 @@ import (
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/external"
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/misconf"
"github.com/aquasecurity/trivy/pkg/fanal/types"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all"
)
func TestConfigScanner_Scan(t *testing.T) {

View File

@@ -1,7 +1,6 @@
package all
import (
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/misconf"
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/sysfile"
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/unpackaged"
)

View File

@@ -1,63 +0,0 @@
package misconf
import (
"context"
_ "embed"
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
"github.com/aquasecurity/trivy/pkg/fanal/handler"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/misconf"
)
func init() {
handler.RegisterPostHandlerInit(types.MisconfPostHandler, newMisconfPostHandler)
}
const version = 1
type misconfPostHandler struct {
scanner misconf.Scanner
}
func newMisconfPostHandler(artifactOpt artifact.Option) (handler.PostHandler, error) {
s, err := misconf.NewScanner(artifactOpt.FilePatterns, artifactOpt.MisconfScannerOption)
if err != nil {
return nil, xerrors.Errorf("scanner init error: %w", err)
}
return misconfPostHandler{
scanner: s,
}, nil
}
// Handle detects misconfigurations.
func (h misconfPostHandler) Handle(ctx context.Context, result *analyzer.AnalysisResult, blob *types.BlobInfo) error {
files, ok := result.Files[h.Type()]
if !ok {
return nil
}
misconfs, err := h.scanner.Scan(ctx, files)
if err != nil {
return xerrors.Errorf("misconfiguration scan error: %w", err)
}
blob.Misconfigurations = misconfs
return nil
}
func (h misconfPostHandler) Version() int {
return version
}
func (h misconfPostHandler) Type() types.HandlerType {
return types.MisconfPostHandler
}
func (h misconfPostHandler) Priority() int {
return types.MisconfPostHandlerPriority
}

View File

@@ -4,13 +4,11 @@ type HandlerType string
const (
SystemFileFilteringPostHandler HandlerType = "system-file-filter"
MisconfPostHandler HandlerType = "misconf"
UnpackagedPostHandler HandlerType = "unpackaged"
// SystemFileFilteringPostHandlerPriority should be higher than other handlers.
// Otherwise, other handlers need to process unnecessary files.
SystemFileFilteringPostHandlerPriority = 100
MisconfPostHandlerPriority = 50
UnpackagedPostHandlerPriority = 50
)

View File

@@ -84,10 +84,14 @@ func (r MisconfResults) Less(i, j int) bool {
switch {
case r[i].Type != r[j].Type:
return r[i].Type < r[j].Type
case r[i].AVDID != r[j].AVDID:
return r[i].AVDID < r[j].AVDID
case r[i].ID != r[j].ID:
return r[i].ID < r[j].ID
case r[i].Severity != r[j].Severity:
return r[i].Severity < r[j].Severity
case r[i].Resource != r[j].Resource:
return r[i].Resource < r[j].Resource
}
return r[i].Message < r[j].Message
}

View File

@@ -21,10 +21,10 @@ var separator = "/"
// - a virtual file
// - a virtual dir
type file struct {
path string // underlying file path
data []byte // virtual file, only either of 'path' or 'data' has a value.
stat fileStat
files syncx.Map[string, *file]
underlyingPath string // underlying file path
data []byte // virtual file, only either of 'underlyingPath' or 'data' has a value.
stat fileStat
files syncx.Map[string, *file]
}
func (f *file) isVirtual() bool {
@@ -55,7 +55,7 @@ func (f *file) open() (fs.File, error) {
return nil, xerrors.Errorf("read dir error: %w", err)
}
return &mapDir{
path: f.path,
path: f.underlyingPath,
fileStat: f.stat,
entry: entries,
}, nil
@@ -66,7 +66,7 @@ func (f *file) open() (fs.File, error) {
offset: 0,
}, nil
default: // Real file
return os.Open(f.path)
return os.Open(f.underlyingPath)
}
}
@@ -140,7 +140,7 @@ func (f *file) ReadDir(name string) ([]fs.DirEntry, error) {
entries = append(entries, &value.stat)
} else {
var fi os.FileInfo
fi, err = os.Stat(value.path)
fi, err = os.Stat(value.underlyingPath)
if err != nil {
return false
}
@@ -208,7 +208,7 @@ func (f *file) WriteFile(path, underlyingPath string) error {
if len(parts) == 1 {
f.files.Store(parts[0], &file{
path: underlyingPath,
underlyingPath: underlyingPath,
})
return nil
}
@@ -345,7 +345,7 @@ func (f *openMapFile) ReadAt(b []byte, offset int64) (int, error) {
return n, nil
}
// A mapDir is a directory fs.File (so also an fs.ReadDirFile) open for reading.
// A mapDir is a directory fs.File (so also fs.ReadDirFile) open for reading.
type mapDir struct {
path string
fileStat

View File

@@ -50,6 +50,13 @@ func (m *FS) Filter(skippedFiles []string) (*FS, error) {
if len(skippedFiles) == 0 {
return m, nil
}
filter := func(path string, _ fs.DirEntry) (bool, error) {
return slices.Contains(skippedFiles, path), nil
}
return m.FilterFunc(filter)
}
func (m *FS) FilterFunc(fn func(path string, d fs.DirEntry) (bool, error)) (*FS, error) {
newFS := New()
err := fs.WalkDir(m, ".", func(path string, d fs.DirEntry, err error) error {
if err != nil {
@@ -60,7 +67,9 @@ func (m *FS) Filter(skippedFiles []string) (*FS, error) {
return newFS.MkdirAll(path, d.Type().Perm())
}
if slices.Contains(skippedFiles, path) {
if filtered, err := fn(path, d); err != nil {
return err
} else if filtered {
return nil
}
@@ -68,7 +77,11 @@ func (m *FS) Filter(skippedFiles []string) (*FS, error) {
if err != nil {
return xerrors.Errorf("unable to get %s: %w", path, err)
}
return newFS.WriteFile(path, f.path)
// Virtual file
if f.underlyingPath == "" {
return newFS.WriteVirtualFile(path, f.data, f.stat.mode)
}
return newFS.WriteFile(path, f.underlyingPath)
})
if err != nil {
return nil, xerrors.Errorf("walk error", err)
@@ -102,7 +115,7 @@ func (m *FS) Stat(name string) (fs.FileInfo, error) {
if f.isVirtual() {
return &f.stat, nil
}
return os.Stat(f.path)
return os.Stat(f.underlyingPath)
}
// ReadDir reads the named directory

View File

@@ -1,12 +1,11 @@
package misconf
import (
"bytes"
"context"
_ "embed"
"fmt"
"io"
"io/fs"
"os"
"path/filepath"
"sort"
"strings"
@@ -25,8 +24,6 @@ import (
k8sscanner "github.com/aquasecurity/defsec/pkg/scanners/kubernetes"
"github.com/aquasecurity/defsec/pkg/scanners/options"
tfscanner "github.com/aquasecurity/defsec/pkg/scanners/terraform"
"github.com/aquasecurity/memoryfs"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/log"
"github.com/aquasecurity/trivy/pkg/mapfs"
@@ -41,12 +38,155 @@ var enabledDefsecTypes = map[detection.FileType]string{
detection.FileTypeHelm: types.Helm,
}
type Scanner struct {
filePatterns []string
scanners map[string]scanners.FSScanner
type ScannerOption struct {
Trace bool
RegoOnly bool
Namespaces []string
PolicyPaths []string
DataPaths []string
DisableEmbeddedPolicies bool
HelmValues []string
HelmValueFiles []string
HelmFileValues []string
HelmStringValues []string
TerraformTFVars []string
K8sVersion string
}
func NewScanner(filePatterns []string, opt config.ScannerOption) (Scanner, error) {
func (o *ScannerOption) Sort() {
sort.Strings(o.Namespaces)
sort.Strings(o.PolicyPaths)
sort.Strings(o.DataPaths)
}
type Scanner struct {
fileType detection.FileType
scanner scanners.FSScanner
hasFilePattern bool
}
func NewAzureARMScanner(filePatterns []string, opt ScannerOption) (*Scanner, error) {
return newScanner(detection.FileTypeAzureARM, filePatterns, opt)
}
func NewCloudFormationScanner(filePatterns []string, opt ScannerOption) (*Scanner, error) {
return newScanner(detection.FileTypeCloudFormation, filePatterns, opt)
}
func NewDockerfileScanner(filePatterns []string, opt ScannerOption) (*Scanner, error) {
return newScanner(detection.FileTypeDockerfile, filePatterns, opt)
}
func NewHelmScanner(filePatterns []string, opt ScannerOption) (*Scanner, error) {
return newScanner(detection.FileTypeHelm, filePatterns, opt)
}
func NewKubernetesScanner(filePatterns []string, opt ScannerOption) (*Scanner, error) {
return newScanner(detection.FileTypeKubernetes, filePatterns, opt)
}
func NewTerraformScanner(filePatterns []string, opt ScannerOption) (*Scanner, error) {
return newScanner(detection.FileTypeTerraform, filePatterns, opt)
}
func newScanner(t detection.FileType, filePatterns []string, opt ScannerOption) (*Scanner, error) {
opts, err := scannerOptions(t, opt)
if err != nil {
return nil, err
}
var scanner scanners.FSScanner
switch t {
case detection.FileTypeAzureARM:
scanner = arm.New(opts...)
case detection.FileTypeCloudFormation:
scanner = cfscanner.New(opts...)
case detection.FileTypeDockerfile:
scanner = dfscanner.NewScanner(opts...)
case detection.FileTypeHelm:
scanner = helm.New(opts...)
case detection.FileTypeKubernetes:
scanner = k8sscanner.NewScanner(opts...)
case detection.FileTypeTerraform:
scanner = tfscanner.New(opts...)
}
return &Scanner{
fileType: t,
scanner: scanner,
hasFilePattern: hasFilePattern(t, filePatterns),
}, nil
}
func (s *Scanner) Scan(ctx context.Context, fsys fs.FS) ([]types.Misconfiguration, error) {
newfs, err := s.filterFS(fsys)
if err != nil {
return nil, xerrors.Errorf("fs filter error: %w", err)
} else if newfs == nil {
// Skip scanning if no relevant files are found
return nil, nil
}
log.Logger.Debugf("Scanning %s files for misconfigurations...", s.scanner.Name())
results, err := s.scanner.ScanFS(ctx, newfs, ".")
if err != nil {
if _, ok := err.(*cfparser.InvalidContentError); ok {
log.Logger.Errorf("scan %q was broken with InvalidContentError: %v", s.scanner.Name(), err)
return nil, nil
}
return nil, xerrors.Errorf("scan config error: %w", err)
}
configType := enabledDefsecTypes[s.fileType]
misconfs := ResultsToMisconf(configType, s.scanner.Name(), results)
// Sort misconfigurations
for _, misconf := range misconfs {
sort.Sort(misconf.Successes)
sort.Sort(misconf.Warnings)
sort.Sort(misconf.Failures)
}
return misconfs, nil
}
func (s *Scanner) filterFS(fsys fs.FS) (fs.FS, error) {
mfs, ok := fsys.(*mapfs.FS)
if !ok {
// Unable to filter this filesystem
return fsys, nil
}
var foundRelevantFile bool
filter := func(path string, d fs.DirEntry) (bool, error) {
file, err := fsys.Open(path)
if err != nil {
return false, err
}
defer file.Close()
rs, ok := file.(io.ReadSeeker)
if !ok {
return false, xerrors.Errorf("%s is not an io.ReadSeeker", path)
}
if !s.hasFilePattern && !detection.IsType(path, rs, s.fileType) {
return true, nil
}
foundRelevantFile = true
return false, nil
}
newfs, err := mfs.FilterFunc(filter)
if err != nil {
return nil, xerrors.Errorf("fs filter error: %w", err)
}
if !foundRelevantFile {
return nil, nil
}
return newfs, nil
}
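For orientation, a minimal sketch of how a caller might drive the new per-type API; it mirrors the updated Dockerfile test further below rather than any code in this commit, the empty ScannerOption leans on the embedded policies, and the printed fields are just examples.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/aquasecurity/trivy/pkg/mapfs"
	"github.com/aquasecurity/trivy/pkg/misconf"
)

func main() {
	// No file patterns and default options: embedded policies stay enabled
	// because DisableEmbeddedPolicies is false by default.
	s, err := misconf.NewDockerfileScanner(nil, misconf.ScannerOption{})
	if err != nil {
		log.Fatal(err)
	}

	// The post-analyzers hand Scan an fs.FS; a mapfs with a virtual file mimics that.
	fsys := mapfs.New()
	if err := fsys.WriteVirtualFile("Dockerfile", []byte("FROM alpine"), 0o644); err != nil {
		log.Fatal(err)
	}

	misconfs, err := s.Scan(context.Background(), fsys)
	if err != nil {
		log.Fatal(err)
	}
	for _, m := range misconfs {
		fmt.Println(m.FileType, m.FilePath, len(m.Failures), "failures")
	}
}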
func scannerOptions(t detection.FileType, opt ScannerOption) ([]options.ScannerOption, error) {
opts := []options.ScannerOption{
options.ScannerWithSkipRequiredCheck(true),
options.ScannerWithEmbeddedPolicies(!opt.DisableEmbeddedPolicies),
@@ -54,7 +194,7 @@ func NewScanner(filePatterns []string, opt config.ScannerOption) (Scanner, error
policyFS, policyPaths, err := createPolicyFS(opt.PolicyPaths)
if err != nil {
return Scanner{}, err
return nil, err
}
if policyFS != nil {
opts = append(opts, options.ScannerWithPolicyFilesystem(policyFS))
@@ -62,7 +202,7 @@ func NewScanner(filePatterns []string, opt config.ScannerOption) (Scanner, error
dataFS, dataPaths, err := createDataFS(opt.DataPaths, opt.K8sVersion)
if err != nil {
return Scanner{}, err
return nil, err
}
opts = append(opts, options.ScannerWithDataDirs(dataPaths...))
opts = append(opts, options.ScannerWithDataFilesystem(dataFS))
@@ -87,23 +227,26 @@ func NewScanner(filePatterns []string, opt config.ScannerOption) (Scanner, error
opts = append(opts, options.ScannerWithPolicyNamespaces(opt.Namespaces...))
}
helmOpts := addHelmOpts(opts, opt)
tfOpts := addTFOpts(opts, opt)
return Scanner{
filePatterns: filePatterns,
scanners: map[string]scanners.FSScanner{
types.AzureARM: arm.New(opts...),
types.Terraform: tfscanner.New(tfOpts...),
types.CloudFormation: cfscanner.New(opts...),
types.Dockerfile: dfscanner.NewScanner(opts...),
types.Kubernetes: k8sscanner.NewScanner(opts...),
types.Helm: helm.New(helmOpts...),
},
}, nil
switch t {
case detection.FileTypeHelm:
return addHelmOpts(opts, opt), nil
case detection.FileTypeTerraform:
return addTFOpts(opts, opt), nil
default:
return opts, nil
}
}
func addTFOpts(opts []options.ScannerOption, scannerOption config.ScannerOption) []options.ScannerOption {
func hasFilePattern(t detection.FileType, filePatterns []string) bool {
for _, pattern := range filePatterns {
if strings.HasPrefix(pattern, fmt.Sprintf("%s:", t)) {
return true
}
}
return false
}
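One detail worth noting: hasFilePattern only checks whether some custom file pattern (of the form &lt;type&gt;:&lt;pattern&gt;, e.g. dockerfile:dockerf in the updated test below) targets this scanner's type. When it does, filterFS keeps every file instead of consulting detection.IsType, since a file with a non-standard name such as dockerf would otherwise be dropped before the scanner ever sees it.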
func addTFOpts(opts []options.ScannerOption, scannerOption ScannerOption) []options.ScannerOption {
if len(scannerOption.TerraformTFVars) > 0 {
opts = append(opts, tfscanner.ScannerWithTFVarsPaths(scannerOption.TerraformTFVars...))
}
@@ -111,7 +254,7 @@ func addTFOpts(opts []options.ScannerOption, scannerOption config.ScannerOption)
return opts
}
func addHelmOpts(opts []options.ScannerOption, scannerOption config.ScannerOption) []options.ScannerOption {
func addHelmOpts(opts []options.ScannerOption, scannerOption ScannerOption) []options.ScannerOption {
if len(scannerOption.HelmValueFiles) > 0 {
opts = append(opts, helm.ScannerWithValuesFile(scannerOption.HelmValueFiles...))
}
@@ -178,84 +321,7 @@ func createDataFS(dataPaths []string, k8sVersion string) (fs.FS, []string, error
return fsys, dataPaths, nil
}
func (s *Scanner) hasCustomPatternForType(t string) bool {
for _, pattern := range s.filePatterns {
if strings.HasPrefix(pattern, t+":") {
return true
}
}
return false
}
// Scan detects misconfigurations.
func (s *Scanner) Scan(ctx context.Context, files []types.File) ([]types.Misconfiguration, error) {
mapMemoryFS := make(map[string]*memoryfs.FS)
for t := range s.scanners {
mapMemoryFS[t] = memoryfs.New()
}
for _, file := range files {
for defsecType, localType := range enabledDefsecTypes {
buffer := bytes.NewReader(file.Content)
if !s.hasCustomPatternForType(localType) && !detection.IsType(file.Path, buffer, defsecType) {
continue
}
// Replace with more detailed config type
file.Type = localType
if memfs, ok := mapMemoryFS[file.Type]; ok {
if filepath.Dir(file.Path) != "." {
if err := memfs.MkdirAll(filepath.Dir(file.Path), os.ModePerm); err != nil {
return nil, xerrors.Errorf("memoryfs mkdir error: %w", err)
}
}
if err := memfs.WriteFile(file.Path, file.Content, os.ModePerm); err != nil {
return nil, xerrors.Errorf("memoryfs write error: %w", err)
}
}
}
}
var misconfs []types.Misconfiguration
for t, scanner := range s.scanners {
results, err := scanner.ScanFS(ctx, mapMemoryFS[t], ".")
if err != nil {
if _, ok := err.(*cfparser.InvalidContentError); ok {
log.Logger.Errorf("scan %q was broken with InvalidContentError: %v", scanner.Name(), err)
continue
}
return nil, xerrors.Errorf("scan config error: %w", err)
}
misconfs = append(misconfs, ResultsToMisconf(t, scanner.Name(), results)...)
}
// Sort misconfigurations
for _, misconf := range misconfs {
sort.Slice(misconf.Successes, func(i, j int) bool {
if misconf.Successes[i].AVDID == misconf.Successes[j].AVDID {
return misconf.Successes[i].StartLine < misconf.Successes[j].StartLine
}
return misconf.Successes[i].AVDID < misconf.Successes[j].AVDID
})
sort.Slice(misconf.Warnings, func(i, j int) bool {
if misconf.Warnings[i].AVDID == misconf.Warnings[j].AVDID {
return misconf.Warnings[i].StartLine < misconf.Warnings[j].StartLine
}
return misconf.Warnings[i].AVDID < misconf.Warnings[j].AVDID
})
sort.Slice(misconf.Failures, func(i, j int) bool {
if misconf.Failures[i].AVDID == misconf.Failures[j].AVDID {
return misconf.Failures[i].StartLine < misconf.Failures[j].StartLine
}
return misconf.Failures[i].AVDID < misconf.Failures[j].AVDID
})
}
return misconfs, nil
}
// This function is exported for trivy-plugin-aqua purposes only
// ResultsToMisconf is exported for trivy-plugin-aqua purposes only
func ResultsToMisconf(configType string, scannerName string, results scan.Results) []types.Misconfiguration {
misconfs := map[string]types.Misconfiguration{}

View File

@@ -9,25 +9,85 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/mapfs"
)
func TestScannerOption_Sort(t *testing.T) {
type fields struct {
Namespaces []string
PolicyPaths []string
DataPaths []string
}
tests := []struct {
name string
fields fields
want ScannerOption
}{
{
name: "happy path",
fields: fields{
Namespaces: []string{"main", "custom", "default"},
PolicyPaths: []string{"policy"},
DataPaths: []string{"data/b", "data/c", "data/a"},
},
want: ScannerOption{
Namespaces: []string{"custom", "default", "main"},
PolicyPaths: []string{"policy"},
DataPaths: []string{"data/a", "data/b", "data/c"},
},
},
{
name: "missing some fields",
fields: fields{
Namespaces: []string{"main"},
PolicyPaths: nil,
DataPaths: nil,
},
want: ScannerOption{
Namespaces: []string{"main"},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
o := ScannerOption{
Namespaces: tt.fields.Namespaces,
PolicyPaths: tt.fields.PolicyPaths,
DataPaths: tt.fields.DataPaths,
}
o.Sort()
assert.Equal(t, tt.want, o)
})
}
}
func TestScanner_Scan(t *testing.T) {
type fields struct {
filePatterns []string
opt ScannerOption
}
type file struct {
path string
content []byte
}
tests := []struct {
name string
files []types.File
filePatterns []string
fields fields
files []file
wantFilePath string
wantFileType string
}{
{
name: "happy path. Dockerfile",
files: []types.File{
fields: fields{
opt: ScannerOption{},
},
files: []file{
{
Path: "Dockerfile",
Type: types.Dockerfile,
Content: []byte(`FROM alpine`),
path: "Dockerfile",
content: []byte(`FROM alpine`),
},
},
wantFilePath: "Dockerfile",
@@ -35,26 +95,35 @@ func TestScanner_Scan(t *testing.T) {
},
{
name: "happy path. Dockerfile with custom file name",
files: []types.File{
fields: fields{
filePatterns: []string{"dockerfile:dockerf"},
opt: ScannerOption{},
},
files: []file{
{
Path: "dockerf",
Type: types.Dockerfile,
Content: []byte(`FROM alpine`),
path: "dockerf",
content: []byte(`FROM alpine`),
},
},
filePatterns: []string{"dockerfile:dockerf"},
wantFilePath: "dockerf",
wantFileType: types.Dockerfile,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
s, err := NewScanner(tt.filePatterns, config.ScannerOption{})
// Create a virtual filesystem for testing
fsys := mapfs.New()
for _, f := range tt.files {
err := fsys.WriteVirtualFile(f.path, f.content, 0666)
require.NoError(t, err)
}
s, err := NewDockerfileScanner(tt.fields.filePatterns, tt.fields.opt)
require.NoError(t, err)
misconfs, err := s.Scan(context.Background(), tt.files)
misconfs, err := s.Scan(context.Background(), fsys)
require.NoError(t, err)
assert.Equal(t, 1, len(misconfs), "wrong number of misconfigurations found")
require.Equal(t, 1, len(misconfs), "wrong number of misconfigurations found")
assert.Equal(t, tt.wantFilePath, misconfs[0].FilePath, "filePaths don't equal")
assert.Equal(t, tt.wantFileType, misconfs[0].FileType, "fileTypes don't equal")
})