refactor: Merge fanal into Trivy

This commit is contained in:
Liam Galvin
2022-06-20 09:40:05 +01:00
453 changed files with 205533 additions and 0 deletions

16
pkg/fanal/.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,16 @@
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: monthly
- package-ecosystem: gomod
open-pull-requests-limit: 10
directory: /
schedule:
interval: monthly

59
pkg/fanal/.github/workflows/test.yml vendored Normal file
View File

@@ -0,0 +1,59 @@
name: Test
on:
push:
branches:
- main
pull_request:
env:
GO_VERSION: "1.18"
jobs:
lint:
name: Lint
runs-on: ubuntu-latest
steps:
- name: Setup go
uses: actions/setup-go@v3
with:
go-version: ${{ env.GO_VERSION }}
- name: Checkout repository
uses: actions/checkout@v3
- name: Setup golangci-lint
uses: golangci/golangci-lint-action@v3.2.0
with:
version: v1.45
args: --verbose
skip-go-installation: true
unittest:
name: Unit Test
runs-on: ubuntu-latest
steps:
- name: Set up Go
uses: actions/setup-go@v3
with:
go-version: ${{ env.GO_VERSION }}
- name: Check out code into the Go module directory
uses: actions/checkout@v3
- name: Install dependencies
run: sudo apt-get update && sudo apt-get install libdb-dev
- name: Run unit tests
run: make test
integration:
name: Integration Test
runs-on: ubuntu-latest
steps:
- name: Set up Go
uses: actions/setup-go@v3
with:
go-version: ${{ env.GO_VERSION }}
id: go
- name: Check out code into the Go module directory
uses: actions/checkout@v3
- name: Install dependencies
run: sudo apt-get update && sudo apt-get install libdb-dev
- name: Run integration tests
run: make test-integration

22
pkg/fanal/.gitignore vendored Normal file
View File

@@ -0,0 +1,22 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
.idea
.vscode
main
vendor
test/integration/testdata/fixtures
*.tar
*.gz
/fanal

14
pkg/fanal/.golangci.yml Normal file
View File

@@ -0,0 +1,14 @@
run:
go: 1.18
timeout: 5m
linters:
enable:
- gofmt
disable:
- errcheck
- gosimple
- govet
- ineffassign
- staticcheck
- structcheck
- unused

201
pkg/fanal/LICENSE Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

34
pkg/fanal/Makefile Normal file
View File

@@ -0,0 +1,34 @@
GOPATH=$(shell go env GOPATH)
GOBIN=$(GOPATH)/bin
.PHONY: deps
deps:
go get -d
.PHONY: test
test:
go test ./...
.PHONY: lint
lint: devel-deps
go vet ./...
golint -set_exit_status
.PHONY: cover
cover: devel-deps
goveralls
$(GOBIN)/crane:
go install github.com/google/go-containerregistry/cmd/crane@v0.9.0
test/integration/testdata/fixtures/*.tar.gz: $(GOBIN)/crane
mkdir -p test/integration/testdata/fixtures/
test/integration/scripts/download-images.sh
.PHONY: test-integration
test-integration: test/integration/testdata/fixtures/*.tar.gz
go test -v -tags="integration" ./test/integration/...
.PHONY: test-performance
test-performance: test/integration/testdata/fixtures/*.tar.gz
go test -v -benchtime=10x -run=^$$ -tags="performance" -bench=. ./test/integration/...

4
pkg/fanal/NOTICE Normal file
View File

@@ -0,0 +1,4 @@
Fanal
Copyright 2019-2020 Aqua Security Software Ltd.
This product includes software developed by Aqua Security (https://aquasec.com).

130
pkg/fanal/README.md Normal file
View File

@@ -0,0 +1,130 @@
# fanal
Static Analysis Library for Containers
[![GoDoc](https://godoc.org/github.com/aquasecurity/fanal?status.svg)](https://godoc.org/github.com/aquasecurity/fanal)
![Test](https://github.com/aquasecurity/fanal/workflows/Test/badge.svg)
[![Go Report Card](https://goreportcard.com/badge/github.com/aquasecurity/fanal)](https://goreportcard.com/report/github.com/aquasecurity/fanal)
[![License: Apache-2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/aquasecurity/fanal/blob/main/LICENSE)
## Features
- Detect OS
- Extract OS packages
- Extract libraries used by an application
- Bundler, Composer, npm, Yarn, Pipenv, Poetry, Cargo, Go Binary, Java Archive (JAR/WAR/EAR), NuGet
## Example
See [`cmd/fanal/`](cmd/fanal)
```go
package main
import (
"context"
"flag"
"fmt"
"log"
"os"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/cache"
"github.com/aquasecurity/fanal/analyzer"
_ "github.com/aquasecurity/fanal/analyzer/library/bundler"
_ "github.com/aquasecurity/fanal/analyzer/library/composer"
_ "github.com/aquasecurity/fanal/analyzer/library/npm"
_ "github.com/aquasecurity/fanal/analyzer/library/pipenv"
_ "github.com/aquasecurity/fanal/analyzer/library/poetry"
_ "github.com/aquasecurity/fanal/analyzer/library/yarn"
_ "github.com/aquasecurity/fanal/analyzer/library/cargo"
_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
_ "github.com/aquasecurity/fanal/analyzer/os/amazonlinux"
_ "github.com/aquasecurity/fanal/analyzer/os/debianbase"
_ "github.com/aquasecurity/fanal/analyzer/os/suse"
_ "github.com/aquasecurity/fanal/analyzer/os/redhatbase"
_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
_ "github.com/aquasecurity/fanal/analyzer/pkg/dpkg"
_ "github.com/aquasecurity/fanal/analyzer/pkg/rpm"
"github.com/aquasecurity/fanal/extractor"
"golang.org/x/crypto/ssh/terminal"
)
func main() {
if err := run(); err != nil {
log.Fatal(err)
}
}
func run() (err error) {
ctx := context.Background()
tarPath := flag.String("f", "-", "layer.tar path")
clearCache := flag.Bool("clear", false, "clear cache")
flag.Parse()
if *clearCache {
if err = cache.Clear(); err != nil {
return xerrors.Errorf("error in cache clear: %w", err)
}
}
args := flag.Args()
var files extractor.FileMap
if len(args) > 0 {
files, err = analyzer.Analyze(ctx, args[0])
if err != nil {
return err
}
} else {
rc, err := openStream(*tarPath)
if err != nil {
return err
}
files, err = analyzer.AnalyzeFromFile(ctx, rc)
if err != nil {
return err
}
}
os, err := analyzer.GetOS(files)
if err != nil {
return err
}
fmt.Printf("%+v\n", os)
pkgs, err := analyzer.GetPackages(files)
if err != nil {
return err
}
fmt.Printf("Packages: %d\n", len(pkgs))
libs, err := analyzer.GetLibraries(files)
if err != nil {
return err
}
for filepath, libList := range libs {
fmt.Printf("%s: %d\n", filepath, len(libList))
}
return nil
}
func openStream(path string) (*os.File, error) {
if path == "-" {
if terminal.IsTerminal(0) {
flag.Usage()
os.Exit(64)
} else {
return os.Stdin, nil
}
}
return os.Open(path)
}
```
## Notes
When using the `latest` tag, that image will be cached. After the `latest` tag is updated, you need to clear the cache.

View File

@@ -0,0 +1,34 @@
package all
import (
_ "github.com/aquasecurity/fanal/analyzer/buildinfo"
_ "github.com/aquasecurity/fanal/analyzer/command/apk"
_ "github.com/aquasecurity/fanal/analyzer/language/dotnet/nuget"
_ "github.com/aquasecurity/fanal/analyzer/language/golang/binary"
_ "github.com/aquasecurity/fanal/analyzer/language/golang/mod"
_ "github.com/aquasecurity/fanal/analyzer/language/java/jar"
_ "github.com/aquasecurity/fanal/analyzer/language/java/pom"
_ "github.com/aquasecurity/fanal/analyzer/language/nodejs/npm"
_ "github.com/aquasecurity/fanal/analyzer/language/nodejs/pkg"
_ "github.com/aquasecurity/fanal/analyzer/language/nodejs/yarn"
_ "github.com/aquasecurity/fanal/analyzer/language/php/composer"
_ "github.com/aquasecurity/fanal/analyzer/language/python/packaging"
_ "github.com/aquasecurity/fanal/analyzer/language/python/pip"
_ "github.com/aquasecurity/fanal/analyzer/language/python/pipenv"
_ "github.com/aquasecurity/fanal/analyzer/language/python/poetry"
_ "github.com/aquasecurity/fanal/analyzer/language/ruby/bundler"
_ "github.com/aquasecurity/fanal/analyzer/language/ruby/gemspec"
_ "github.com/aquasecurity/fanal/analyzer/language/rust/cargo"
_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
_ "github.com/aquasecurity/fanal/analyzer/os/amazonlinux"
_ "github.com/aquasecurity/fanal/analyzer/os/debian"
_ "github.com/aquasecurity/fanal/analyzer/os/mariner"
_ "github.com/aquasecurity/fanal/analyzer/os/redhatbase"
_ "github.com/aquasecurity/fanal/analyzer/os/release"
_ "github.com/aquasecurity/fanal/analyzer/os/ubuntu"
_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
_ "github.com/aquasecurity/fanal/analyzer/pkg/dpkg"
_ "github.com/aquasecurity/fanal/analyzer/pkg/rpm"
_ "github.com/aquasecurity/fanal/analyzer/repo/apk"
_ "github.com/aquasecurity/fanal/analyzer/secret"
)

View File

@@ -0,0 +1,354 @@
package analyzer
import (
"context"
"errors"
"io/fs"
"os"
"sort"
"strings"
"sync"
"golang.org/x/exp/slices"
"golang.org/x/sync/semaphore"
"golang.org/x/xerrors"
aos "github.com/aquasecurity/fanal/analyzer/os"
"github.com/aquasecurity/fanal/log"
"github.com/aquasecurity/fanal/types"
dio "github.com/aquasecurity/go-dep-parser/pkg/io"
)
var (
analyzers = map[Type]analyzer{}
configAnalyzers = map[Type]configAnalyzer{}
// ErrUnknownOS occurs when unknown OS is analyzed.
ErrUnknownOS = xerrors.New("unknown OS")
// ErrPkgAnalysis occurs when the analysis of packages is failed.
ErrPkgAnalysis = xerrors.New("failed to analyze packages")
// ErrNoPkgsDetected occurs when the required files for an OS package manager are not detected
ErrNoPkgsDetected = xerrors.New("no packages detected")
)
type AnalysisInput struct {
Dir string
FilePath string
Info os.FileInfo
Content dio.ReadSeekerAt
Options AnalysisOptions
}
type AnalysisOptions struct {
Offline bool
}
type analyzer interface {
Type() Type
Version() int
Analyze(ctx context.Context, input AnalysisInput) (*AnalysisResult, error)
Required(filePath string, info os.FileInfo) bool
}
type configAnalyzer interface {
Type() Type
Version() int
Analyze(targetOS types.OS, content []byte) ([]types.Package, error)
Required(osFound types.OS) bool
}
type Group string
const GroupBuiltin Group = "builtin"
// RegisterAnalyzer adds the given analyzer to the package-level registry,
// keyed by its Type. A later registration with the same Type wins.
func RegisterAnalyzer(a analyzer) {
	analyzers[a.Type()] = a
}
// DeregisterAnalyzer removes the analyzer registered under t.
// It is mainly for testing.
func DeregisterAnalyzer(t Type) {
	delete(analyzers, t)
}
// RegisterConfigAnalyzer adds the given config analyzer to the
// package-level registry, keyed by its Type.
func RegisterConfigAnalyzer(ca configAnalyzer) {
	configAnalyzers[ca.Type()] = ca
}
// DeregisterConfigAnalyzer removes the config analyzer registered under t.
// It is mainly for testing.
func DeregisterConfigAnalyzer(t Type) {
	delete(configAnalyzers, t)
}
// CustomGroup returns a group name for custom analyzers
// This is mainly intended to be used in Aqua products.
type CustomGroup interface {
Group() Group
}
type Opener func() (dio.ReadSeekCloserAt, error)
type AnalyzerGroup struct {
analyzers []analyzer
configAnalyzers []configAnalyzer
}
// AnalysisResult accumulates the findings of all analyzers that ran
// against a target. Merge folds partial results in under the mutex, so a
// single AnalysisResult can be shared by concurrent analyzer goroutines.
type AnalysisResult struct {
	m sync.Mutex // guards all fields during Merge; do not copy this struct
	// OS is the detected operating system, if any analyzer found one.
	OS *types.OS
	// Repository is an extra package-repository hint (e.g. apk repositories).
	Repository *types.Repository
	// PackageInfos holds OS packages grouped by the file they were parsed from.
	PackageInfos []types.PackageInfo
	// Applications holds language-specific dependencies (bundler, npm, ...).
	Applications []types.Application
	// Secrets holds secret-scanning findings per file.
	Secrets []types.Secret
	SystemInstalledFiles []string // A list of files installed by OS package manager
	// Files maps a post-processing handler type to the files it should receive.
	Files map[types.HandlerType][]types.File
	// For Red Hat
	BuildInfo *types.BuildInfo
	// CustomResources hold analysis results from custom analyzers.
	// It is for extensibility and not used in OSS.
	CustomResources []types.CustomResource
}
// NewAnalysisResult returns an empty result with the Files map
// initialized, ready for Merge calls.
func NewAnalysisResult() *AnalysisResult {
	return &AnalysisResult{
		Files: map[types.HandlerType][]types.File{},
	}
}
// isEmpty reports whether the result carries no findings at all.
func (r *AnalysisResult) isEmpty() bool {
	switch {
	case r.OS != nil, r.Repository != nil, r.BuildInfo != nil:
		return false
	case len(r.PackageInfos) > 0, len(r.Applications) > 0:
		return false
	case len(r.Secrets) > 0, len(r.SystemInstalledFiles) > 0:
		return false
	case len(r.Files) > 0, len(r.CustomResources) > 0:
		return false
	}
	return true
}
// Sort orders every collection in the result deterministically so that
// identical inputs always produce identical output (important for caching
// and test stability).
func (r *AnalysisResult) Sort() {
	// OS packages: files by path, then packages by name within each file.
	sort.Slice(r.PackageInfos, func(a, b int) bool {
		return r.PackageInfos[a].FilePath < r.PackageInfos[b].FilePath
	})
	for _, info := range r.PackageInfos {
		pkgs := info.Packages
		sort.Slice(pkgs, func(a, b int) bool {
			return pkgs[a].Name < pkgs[b].Name
		})
	}

	// Applications: files by path, then libraries by (name, version).
	sort.Slice(r.Applications, func(a, b int) bool {
		return r.Applications[a].FilePath < r.Applications[b].FilePath
	})
	for _, app := range r.Applications {
		libs := app.Libraries
		sort.Slice(libs, func(a, b int) bool {
			if libs[a].Name == libs[b].Name {
				return libs[a].Version < libs[b].Version
			}
			return libs[a].Name < libs[b].Name
		})
	}

	// Handler files: by path within each handler type.
	for _, fileList := range r.Files {
		sort.Slice(fileList, func(a, b int) bool {
			return fileList[a].Path < fileList[b].Path
		})
	}

	// Secrets: files by path, then findings by (rule ID, start line).
	sort.Slice(r.Secrets, func(a, b int) bool {
		return r.Secrets[a].FilePath < r.Secrets[b].FilePath
	})
	for _, sec := range r.Secrets {
		findings := sec.Findings
		sort.Slice(findings, func(a, b int) bool {
			if findings[a].RuleID == findings[b].RuleID {
				return findings[a].StartLine < findings[b].StartLine
			}
			return findings[a].RuleID < findings[b].RuleID
		})
	}
}
// Merge folds another analysis result into r under r's mutex, so it is
// safe to call from the concurrent analyzer goroutines in AnalyzeFile.
// A nil or empty argument is a no-op.
//
// The parameter was renamed from `new` to `other`: `new` shadows the
// predeclared Go identifier (flagged by the predeclared linter).
func (r *AnalysisResult) Merge(other *AnalysisResult) {
	if other == nil || other.isEmpty() {
		return
	}

	// this struct is accessed by multiple goroutines
	r.m.Lock()
	defer r.m.Unlock()

	if other.OS != nil {
		// OLE also has /etc/redhat-release and it detects OLE as RHEL by mistake.
		// In that case, OS must be overwritten with the content of /etc/oracle-release.
		// There is the same problem between Debian and Ubuntu.
		if r.OS == nil || r.OS.Family == aos.RedHat || r.OS.Family == aos.Debian {
			r.OS = other.OS
		}
	}

	if other.Repository != nil {
		r.Repository = other.Repository
	}

	if len(other.PackageInfos) > 0 {
		r.PackageInfos = append(r.PackageInfos, other.PackageInfos...)
	}

	if len(other.Applications) > 0 {
		r.Applications = append(r.Applications, other.Applications...)
	}

	for t, files := range other.Files {
		if v, ok := r.Files[t]; ok {
			r.Files[t] = append(v, files...)
		} else {
			r.Files[t] = files
		}
	}

	r.Secrets = append(r.Secrets, other.Secrets...)
	r.SystemInstalledFiles = append(r.SystemInstalledFiles, other.SystemInstalledFiles...)

	if other.BuildInfo != nil {
		if r.BuildInfo == nil {
			r.BuildInfo = other.BuildInfo
		} else {
			// We don't need to merge build info here
			// because there is theoretically only one file about build info in each layer.
			if other.BuildInfo.Nvr != "" || other.BuildInfo.Arch != "" {
				r.BuildInfo.Nvr = other.BuildInfo.Nvr
				r.BuildInfo.Arch = other.BuildInfo.Arch
			}
			if len(other.BuildInfo.ContentSets) > 0 {
				r.BuildInfo.ContentSets = other.BuildInfo.ContentSets
			}
		}
	}

	r.CustomResources = append(r.CustomResources, other.CustomResources...)
}
// belongToGroup reports whether the given analyzer is enabled and belongs
// to groupName. Analyzers default to the builtin group unless they
// implement CustomGroup.
func belongToGroup(groupName Group, analyzerType Type, disabledAnalyzers []Type, analyzer any) bool {
	if slices.Contains(disabledAnalyzers, analyzerType) {
		return false
	}

	group := GroupBuiltin
	if custom, ok := analyzer.(CustomGroup); ok {
		group = custom.Group()
	}
	return group == groupName
}
// NewAnalyzerGroup collects the registered analyzers that belong to
// groupName (defaulting to the builtin group) and are not disabled, plus
// all enabled config analyzers.
func NewAnalyzerGroup(groupName Group, disabledAnalyzers []Type) AnalyzerGroup {
	if groupName == "" {
		groupName = GroupBuiltin
	}

	group := AnalyzerGroup{}
	for t, a := range analyzers {
		if belongToGroup(groupName, t, disabledAnalyzers, a) {
			group.analyzers = append(group.analyzers, a)
		}
	}
	for t, ca := range configAnalyzers {
		if slices.Contains(disabledAnalyzers, t) {
			continue
		}
		group.configAnalyzers = append(group.configAnalyzers, ca)
	}
	return group
}
// AnalyzerVersions returns analyzer version identifier used for cache keys.
func (ag AnalyzerGroup) AnalyzerVersions() map[string]int {
	versions := make(map[string]int, len(ag.analyzers))
	for _, member := range ag.analyzers {
		versions[string(member.Type())] = member.Version()
	}
	return versions
}
// ImageConfigAnalyzerVersions returns analyzer version identifier used for cache keys.
func (ag AnalyzerGroup) ImageConfigAnalyzerVersions() map[string]int {
	versions := make(map[string]int, len(ag.configAnalyzers))
	for _, member := range ag.configAnalyzers {
		versions[string(member.Type())] = member.Version()
	}
	return versions
}
// AnalyzeFile runs every analyzer in the group whose Required predicate
// matches filePath, each in its own goroutine, merging their findings into
// result (AnalysisResult.Merge locks internally). Concurrency is bounded
// by the caller-supplied weighted semaphore, and wg lets the caller wait
// for all spawned goroutines. Directories and analyzers listed in
// disabled are skipped. Returns an error only for open/acquire failures;
// individual analyzer errors are logged at debug level and dropped.
func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, limit *semaphore.Weighted, result *AnalysisResult,
	dir, filePath string, info os.FileInfo, opener Opener, disabled []Type, opts AnalysisOptions) error {
	if info.IsDir() {
		return nil
	}
	for _, a := range ag.analyzers {
		// Skip disabled analyzers
		if slices.Contains(disabled, a.Type()) {
			continue
		}

		// filepath extracted from tar file doesn't have the prefix "/"
		if !a.Required(strings.TrimLeft(filePath, "/"), info) {
			continue
		}
		rc, err := opener()
		if errors.Is(err, fs.ErrPermission) {
			// NOTE(review): `break` (not `continue`) aborts the remaining
			// analyzers for this file on a permission error — presumably
			// intentional since every later opener() call would fail too.
			log.Logger.Debugf("Permission error: %s", filePath)
			break
		} else if err != nil {
			return xerrors.Errorf("unable to open %s: %w", filePath, err)
		}

		// Acquire the semaphore slot and register with the WaitGroup
		// before spawning, so the caller's Wait/limit accounting is
		// always consistent with the number of live goroutines.
		if err = limit.Acquire(ctx, 1); err != nil {
			return xerrors.Errorf("semaphore acquire: %w", err)
		}
		wg.Add(1)

		// a and rc are passed as arguments to pin this iteration's values.
		go func(a analyzer, rc dio.ReadSeekCloserAt) {
			defer limit.Release(1)
			defer wg.Done()
			defer rc.Close()

			ret, err := a.Analyze(ctx, AnalysisInput{
				Dir:      dir,
				FilePath: filePath,
				Info:     info,
				Content:  rc,
				Options:  opts,
			})
			// OS-detection misses are expected; everything else is logged
			// at debug level and the partial result discarded.
			if err != nil && !xerrors.Is(err, aos.AnalyzeOSError) {
				log.Logger.Debugf("Analysis error: %s", err)
				return
			}
			if ret != nil {
				result.Merge(ret)
			}
		}(a, rc)
	}
	return nil
}
// AnalyzeImageConfig runs the config analyzers against the image config
// blob and returns the packages reported by the first applicable analyzer
// that succeeds; analyzers that fail are silently skipped. Returns nil
// when no analyzer applies or all of them fail.
func (ag AnalyzerGroup) AnalyzeImageConfig(targetOS types.OS, configBlob []byte) []types.Package {
	for _, ca := range ag.configAnalyzers {
		if !ca.Required(targetOS) {
			continue
		}
		if pkgs, err := ca.Analyze(targetOS, configBlob); err == nil {
			return pkgs
		}
	}
	return nil
}

View File

@@ -0,0 +1,557 @@
package analyzer_test
import (
"context"
"errors"
"fmt"
"os"
"sync"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"golang.org/x/sync/semaphore"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
aos "github.com/aquasecurity/fanal/analyzer/os"
"github.com/aquasecurity/fanal/types"
dio "github.com/aquasecurity/go-dep-parser/pkg/io"
_ "github.com/aquasecurity/fanal/analyzer/command/apk"
_ "github.com/aquasecurity/fanal/analyzer/language/ruby/bundler"
_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
_ "github.com/aquasecurity/fanal/analyzer/os/ubuntu"
_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
_ "github.com/aquasecurity/fanal/analyzer/repo/apk"
_ "github.com/aquasecurity/fanal/handler/all"
)
// mockConfigAnalyzer is a stub config analyzer registered in TestMain.
// It applies only to alpine and succeeds only for the config blob `foo`.
type mockConfigAnalyzer struct{}

// Required reports whether this analyzer applies to the target OS.
func (mockConfigAnalyzer) Required(targetOS types.OS) bool {
	return targetOS.Family == "alpine"
}

// Analyze returns a fixed package list for the blob `foo`, an error otherwise.
func (mockConfigAnalyzer) Analyze(targetOS types.OS, configBlob []byte) ([]types.Package, error) {
	if string(configBlob) == `foo` {
		return []types.Package{
			{Name: "musl", Version: "1.1.24-r2"},
		}, nil
	}
	return nil, errors.New("error")
}

func (mockConfigAnalyzer) Type() analyzer.Type {
	return analyzer.Type("test")
}

func (mockConfigAnalyzer) Version() int {
	return 1
}
// TestMain registers the mock config analyzer once for the whole package
// before running the tests.
func TestMain(m *testing.M) {
	analyzer.RegisterConfigAnalyzer(mockConfigAnalyzer{})
	os.Exit(m.Run())
}
// TestAnalysisResult_Merge verifies AnalysisResult.Merge: packages and
// applications are appended, and the OS is overwritten only when the
// receiver's family is RedHat or Debian (the misdetection cases handled
// in Merge); other families such as alpine must be preserved.
func TestAnalysisResult_Merge(t *testing.T) {
	type fields struct {
		m            sync.Mutex // unused here; mirrors the struct under test
		OS           *types.OS
		PackageInfos []types.PackageInfo
		Applications []types.Application
	}
	type args struct {
		new *analyzer.AnalysisResult
	}
	tests := []struct {
		name   string
		fields fields
		args   args
		want   analyzer.AnalysisResult
	}{
		{
			name: "happy path",
			fields: fields{
				OS: &types.OS{
					Family: aos.Debian,
					Name:   "9.8",
				},
				PackageInfos: []types.PackageInfo{
					{
						FilePath: "var/lib/dpkg/status.d/libc",
						Packages: []types.Package{
							{Name: "libc", Version: "1.2.3"},
						},
					},
				},
				Applications: []types.Application{
					{
						Type:     "bundler",
						FilePath: "app/Gemfile.lock",
						Libraries: []types.Package{
							{
								Name:    "rails",
								Version: "5.0.0",
							},
						},
					},
				},
			},
			args: args{
				new: &analyzer.AnalysisResult{
					PackageInfos: []types.PackageInfo{
						{
							FilePath: "var/lib/dpkg/status.d/openssl",
							Packages: []types.Package{
								{Name: "openssl", Version: "1.1.1"},
							},
						},
					},
					Applications: []types.Application{
						{
							Type:     "bundler",
							FilePath: "app2/Gemfile.lock",
							Libraries: []types.Package{
								{
									Name:    "nokogiri",
									Version: "1.0.0",
								},
							},
						},
					},
				},
			},
			// Merged result keeps the original OS and appends the new
			// package and application entries.
			want: analyzer.AnalysisResult{
				OS: &types.OS{
					Family: aos.Debian,
					Name:   "9.8",
				},
				PackageInfos: []types.PackageInfo{
					{
						FilePath: "var/lib/dpkg/status.d/libc",
						Packages: []types.Package{
							{Name: "libc", Version: "1.2.3"},
						},
					},
					{
						FilePath: "var/lib/dpkg/status.d/openssl",
						Packages: []types.Package{
							{Name: "openssl", Version: "1.1.1"},
						},
					},
				},
				Applications: []types.Application{
					{
						Type:     "bundler",
						FilePath: "app/Gemfile.lock",
						Libraries: []types.Package{
							{
								Name:    "rails",
								Version: "5.0.0",
							},
						},
					},
					{
						Type:     "bundler",
						FilePath: "app2/Gemfile.lock",
						Libraries: []types.Package{
							{
								Name:    "nokogiri",
								Version: "1.0.0",
							},
						},
					},
				},
			},
		},
		{
			name: "redhat must be replaced with oracle",
			fields: fields{
				OS: &types.OS{
					Family: aos.RedHat, // this must be overwritten
					Name:   "8.0",
				},
			},
			args: args{
				new: &analyzer.AnalysisResult{
					OS: &types.OS{
						Family: aos.Oracle,
						Name:   "8.0",
					},
				},
			},
			want: analyzer.AnalysisResult{
				OS: &types.OS{
					Family: aos.Oracle,
					Name:   "8.0",
				},
			},
		},
		{
			name: "debian must be replaced with ubuntu",
			fields: fields{
				OS: &types.OS{
					Family: aos.Debian, // this must be overwritten
					Name:   "9.0",
				},
			},
			args: args{
				new: &analyzer.AnalysisResult{
					OS: &types.OS{
						Family: aos.Ubuntu,
						Name:   "18.04",
					},
				},
			},
			want: analyzer.AnalysisResult{
				OS: &types.OS{
					Family: aos.Ubuntu,
					Name:   "18.04",
				},
			},
		},
		{
			name: "alpine OS needs to be extended with apk repositories",
			fields: fields{
				OS: &types.OS{
					Family: aos.Alpine,
					Name:   "3.15.3",
				},
			},
			args: args{
				new: &analyzer.AnalysisResult{
					Repository: &types.Repository{
						Family:  aos.Alpine,
						Release: "edge",
					},
				},
			},
			want: analyzer.AnalysisResult{
				OS: &types.OS{
					Family: aos.Alpine,
					Name:   "3.15.3",
				},
				Repository: &types.Repository{
					Family:  aos.Alpine,
					Release: "edge",
				},
			},
		},
		{
			name: "alpine must not be replaced with oracle",
			fields: fields{
				OS: &types.OS{
					Family: aos.Alpine, // this must not be overwritten
					Name:   "3.11",
				},
			},
			args: args{
				new: &analyzer.AnalysisResult{
					OS: &types.OS{
						Family: aos.Oracle,
						Name:   "8.0",
					},
				},
			},
			want: analyzer.AnalysisResult{
				OS: &types.OS{
					Family: aos.Alpine, // this must not be overwritten
					Name:   "3.11",
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			r := analyzer.AnalysisResult{
				OS:           tt.fields.OS,
				PackageInfos: tt.fields.PackageInfos,
				Applications: tt.fields.Applications,
			}
			r.Merge(tt.args.new)
			assert.Equal(t, tt.want, r)
		})
	}
}
// TestAnalyzeFile exercises AnalyzerGroup.AnalyzeFile end-to-end against
// fixtures under testdata/, covering OS, package and library analyzers,
// analyzer disabling, directories, permission errors and opener failures.
func TestAnalyzeFile(t *testing.T) {
	type args struct {
		filePath          string // virtual path handed to the analyzers
		testFilePath      string // actual fixture on disk backing filePath
		disabledAnalyzers []analyzer.Type
	}
	tests := []struct {
		name    string
		args    args
		want    *analyzer.AnalysisResult
		wantErr string
	}{
		{
			name: "happy path with os analyzer",
			args: args{
				filePath:     "/etc/alpine-release",
				testFilePath: "testdata/etc/alpine-release",
			},
			want: &analyzer.AnalysisResult{
				OS: &types.OS{
					Family: "alpine",
					Name:   "3.11.6",
				},
			},
		},
		{
			name: "happy path with disabled os analyzer",
			args: args{
				filePath:          "/etc/alpine-release",
				testFilePath:      "testdata/etc/alpine-release",
				disabledAnalyzers: []analyzer.Type{analyzer.TypeAlpine},
			},
			want: &analyzer.AnalysisResult{},
		},
		{
			name: "happy path with package analyzer",
			args: args{
				filePath:     "/lib/apk/db/installed",
				testFilePath: "testdata/lib/apk/db/installed",
			},
			want: &analyzer.AnalysisResult{
				PackageInfos: []types.PackageInfo{
					{
						FilePath: "/lib/apk/db/installed",
						Packages: []types.Package{
							{Name: "musl", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2", License: "MIT"},
						},
					},
				},
				SystemInstalledFiles: []string{
					"lib/libc.musl-x86_64.so.1",
					"lib/ld-musl-x86_64.so.1",
				},
			},
		},
		{
			name: "happy path with disabled package analyzer",
			args: args{
				filePath:          "/lib/apk/db/installed",
				testFilePath:      "testdata/lib/apk/db/installed",
				disabledAnalyzers: []analyzer.Type{analyzer.TypeApk},
			},
			want: &analyzer.AnalysisResult{},
		},
		{
			name: "happy path with library analyzer",
			args: args{
				filePath:     "/app/Gemfile.lock",
				testFilePath: "testdata/app/Gemfile.lock",
			},
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     "bundler",
						FilePath: "/app/Gemfile.lock",
						Libraries: []types.Package{
							{
								Name:    "actioncable",
								Version: "5.2.3",
							},
						},
					},
				},
			},
		},
		{
			name: "happy path with invalid os information",
			args: args{
				filePath:     "/etc/lsb-release",
				testFilePath: "testdata/etc/hostname",
			},
			want: &analyzer.AnalysisResult{},
		},
		{
			name: "happy path with a directory",
			args: args{
				filePath:     "/etc/lsb-release",
				testFilePath: "testdata/etc",
			},
			want: &analyzer.AnalysisResult{},
		},
		{
			name: "ignore permission error",
			args: args{
				filePath:     "/etc/alpine-release",
				testFilePath: "testdata/no-permission",
			},
			want: &analyzer.AnalysisResult{},
		},
		{
			name: "sad path with opener error",
			args: args{
				filePath:     "/lib/apk/db/installed",
				testFilePath: "testdata/error",
			},
			wantErr: "unable to open /lib/apk/db/installed",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var wg sync.WaitGroup
			limit := semaphore.NewWeighted(3)

			got := new(analyzer.AnalysisResult)
			a := analyzer.NewAnalyzerGroup(analyzer.GroupBuiltin, tt.args.disabledAnalyzers)
			info, err := os.Stat(tt.args.testFilePath)
			require.NoError(t, err)

			ctx := context.Background()
			err = a.AnalyzeFile(ctx, &wg, limit, got, "", tt.args.filePath, info,
				func() (dio.ReadSeekCloserAt, error) {
					if tt.args.testFilePath == "testdata/error" {
						return nil, xerrors.New("error")
					} else if tt.args.testFilePath == "testdata/no-permission" {
						// Make the fixture unreadable and restore it afterwards.
						// Chmod failures must fail the test; previously they were
						// silently ignored, which could mask a broken fixture.
						require.NoError(t, os.Chmod(tt.args.testFilePath, 0000))
						t.Cleanup(func() {
							require.NoError(t, os.Chmod(tt.args.testFilePath, 0644))
						})
					}
					return os.Open(tt.args.testFilePath)
				},
				nil, analyzer.AnalysisOptions{},
			)

			// Analyzers run in goroutines; wait for all of them to finish
			// before inspecting the aggregated result.
			wg.Wait()

			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}
			require.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}
// TestAnalyzeConfig checks AnalyzeImageConfig: packages are only derived for
// the matching target OS, and analyzer failures yield no packages.
func TestAnalyzeConfig(t *testing.T) {
	type args struct {
		targetOS          types.OS
		configBlob        []byte
		disabledAnalyzers []analyzer.Type
	}
	testCases := []struct {
		name string
		args args
		want []types.Package
	}{
		{
			name: "happy path",
			args: args{
				targetOS:   types.OS{Family: "alpine", Name: "3.11.6"},
				configBlob: []byte("foo"),
			},
			want: []types.Package{
				{Name: "musl", Version: "1.1.24-r2"},
			},
		},
		{
			name: "non-target OS",
			args: args{
				targetOS:   types.OS{Family: "debian", Name: "9.2"},
				configBlob: []byte("foo"),
			},
		},
		{
			name: "Analyze returns an error",
			args: args{
				targetOS:   types.OS{Family: "alpine", Name: "3.11.6"},
				configBlob: []byte("bar"),
			},
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			group := analyzer.NewAnalyzerGroup(analyzer.GroupBuiltin, tc.args.disabledAnalyzers)
			assert.Equal(t, tc.want, group.AnalyzeImageConfig(tc.args.targetOS, tc.args.configBlob))
		})
	}
}
// TestAnalyzer_AnalyzerVersions verifies that AnalyzerVersions reports the
// version of every registered analyzer and omits disabled ones.
func TestAnalyzer_AnalyzerVersions(t *testing.T) {
	tests := []struct {
		name     string
		disabled []analyzer.Type
		want     map[string]int
	}{
		{
			name:     "happy path",
			disabled: []analyzer.Type{},
			want: map[string]int{
				"alpine":   1,
				"apk-repo": 1,
				"apk":      1,
				"bundler":  1,
				"ubuntu":   1,
			},
		},
		{
			name:     "disable analyzers",
			disabled: []analyzer.Type{analyzer.TypeAlpine, analyzer.TypeApkRepo, analyzer.TypeUbuntu},
			want: map[string]int{
				"apk":     1,
				"bundler": 1,
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			a := analyzer.NewAnalyzerGroup(analyzer.GroupBuiltin, tt.disabled)
			got := a.AnalyzerVersions()
			// Removed a leftover debug fmt.Printf that polluted test output.
			assert.Equal(t, tt.want, got)
		})
	}
}
// TestAnalyzer_ImageConfigAnalyzerVersions verifies version reporting for the
// image-config analyzers, including the effect of disabling analyzers.
func TestAnalyzer_ImageConfigAnalyzerVersions(t *testing.T) {
	testCases := []struct {
		name     string
		disabled []analyzer.Type
		want     map[string]int
	}{
		{
			name:     "happy path",
			disabled: []analyzer.Type{},
			want: map[string]int{
				"apk-command": 1,
				"test":        1,
			},
		},
		{
			name:     "disable analyzers",
			disabled: []analyzer.Type{analyzer.TypeAlpine, analyzer.TypeApkCommand},
			want: map[string]int{
				"test": 1,
			},
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			group := analyzer.NewAnalyzerGroup(analyzer.GroupBuiltin, tc.disabled)
			assert.Equal(t, tc.want, group.ImageConfigAnalyzerVersions())
		})
	}
}

View File

@@ -0,0 +1,55 @@
package buildinfo
import (
"context"
"encoding/json"
"os"
"path/filepath"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
func init() {
	// Self-register so the analyzer group picks this analyzer up automatically.
	analyzer.RegisterAnalyzer(&contentManifestAnalyzer{})
}

// contentManifestAnalyzerVersion is bumped whenever the analysis logic
// changes, invalidating cached results.
const contentManifestAnalyzerVersion = 1

// contentManifest is the subset of a Red Hat content manifest JSON file that
// this analyzer cares about: the list of enabled content sets.
type contentManifest struct {
	ContentSets []string `json:"content_sets"`
}

// For Red Hat products
type contentManifestAnalyzer struct{}
// Analyze decodes a Red Hat content manifest JSON file and reports the
// content sets it declares as build information.
func (a contentManifestAnalyzer) Analyze(_ context.Context, target analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	manifest := contentManifest{}
	err := json.NewDecoder(target.Content).Decode(&manifest)
	if err != nil {
		return nil, xerrors.Errorf("invalid content manifest: %w", err)
	}

	result := &analyzer.AnalysisResult{
		BuildInfo: &types.BuildInfo{
			ContentSets: manifest.ContentSets,
		},
	}
	return result, nil
}
// Required matches JSON files directly under root/buildinfo/content_manifests/,
// where the Red Hat build system stores content manifests.
func (a contentManifestAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	dir, file := filepath.Split(filepath.ToSlash(filePath))
	return dir == "root/buildinfo/content_manifests/" && filepath.Ext(file) == ".json"
}
// Type identifies this analyzer as the Red Hat content manifest analyzer.
func (a contentManifestAnalyzer) Type() analyzer.Type {
	return analyzer.TypeRedHatContentManifestType
}

// Version returns the analyzer version used for cache invalidation.
func (a contentManifestAnalyzer) Version() int {
	return contentManifestAnalyzerVersion
}

View File

@@ -0,0 +1,88 @@
package buildinfo
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
// Test_contentManifestAnalyzer_Analyze verifies content-set extraction from
// content manifest fixtures, including a malformed JSON case.
func Test_contentManifestAnalyzer_Analyze(t *testing.T) {
	testCases := []struct {
		name    string
		input   string
		want    *analyzer.AnalysisResult
		wantErr string
	}{
		{
			name:  "happy path",
			input: "testdata/content_manifests/ubi8-minimal-container-8.5-218.json",
			want: &analyzer.AnalysisResult{
				BuildInfo: &types.BuildInfo{
					ContentSets: []string{
						"rhel-8-for-x86_64-baseos-rpms",
						"rhel-8-for-x86_64-appstream-rpms",
					},
				},
			},
		},
		{
			name:    "broken json",
			input:   "testdata/content_manifests/broken.json",
			wantErr: "invalid content manifest",
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			f, err := os.Open(tc.input)
			require.NoError(t, err)
			defer f.Close()

			got, err := contentManifestAnalyzer{}.Analyze(context.Background(), analyzer.AnalysisInput{
				FilePath: tc.input,
				Content:  f,
			})
			if tc.wantErr != "" {
				require.Error(t, err)
				assert.Contains(t, err.Error(), tc.wantErr)
				return
			}
			require.NoError(t, err)
			assert.Equal(t, tc.want, got)
		})
	}
}
func Test_contentManifestAnalyzer_Required(t *testing.T) {
tests := []struct {
name string
filePath string
want bool
}{
{
name: "happy path",
filePath: "root/buildinfo/content_manifests/nodejs-12-container-1-66.json",
want: true,
},
{
name: "sad path",
filePath: "root/buildinfo/content_manifests/nodejs-12-container-1-66.xml",
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a := contentManifestAnalyzer{}
got := a.Required(tt.filePath, nil)
assert.Equal(t, tt.want, got)
})
}
}

View File

@@ -0,0 +1,136 @@
package buildinfo
import (
"context"
"os"
"path/filepath"
"strings"
"github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"github.com/moby/buildkit/frontend/dockerfile/shell"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
func init() {
	// Self-register so the analyzer group picks this analyzer up automatically.
	analyzer.RegisterAnalyzer(&dockerfileAnalyzer{})
}

// dockerfileAnalyzerVersion is bumped whenever the analysis logic changes,
// invalidating cached results.
const dockerfileAnalyzerVersion = 1

// For Red Hat products
type dockerfileAnalyzer struct{}
// Analyze extracts the NVR (name-version-release) and architecture of a Red
// Hat image from the labels of a Dockerfile saved under root/buildinfo/.
// The component name comes from labels; the version-release comes from the
// Dockerfile's file name (see parseVersion).
func (a dockerfileAnalyzer) Analyze(_ context.Context, target analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	// ported from https://github.com/moby/buildkit/blob/b33357bcd2e3319b0323037c900c13b45a228df1/frontend/dockerfile/dockerfile2llb/convert.go#L73
	dockerfile, err := parser.Parse(target.Content)
	if err != nil {
		return nil, xerrors.Errorf("dockerfile parse error: %w", err)
	}
	stages, metaArgs, err := instructions.Parse(dockerfile.AST)
	if err != nil {
		return nil, xerrors.Errorf("instruction parse error: %w", err)
	}
	// Collect top-level ARG instructions so they can be expanded in labels.
	var args []instructions.KeyValuePairOptional
	for _, cmd := range metaArgs {
		for _, metaArg := range cmd.Args {
			args = append(args, setKVValue(metaArg, nil))
		}
	}
	shlex := shell.NewLex(dockerfile.EscapeToken)
	env := metaArgsToMap(args)
	var component, arch string
	for _, st := range stages {
		for _, cmd := range st.Commands {
			switch c := cmd.(type) {
			case *instructions.EnvCommand:
				// ENV values may be referenced by later LABEL instructions.
				for _, kvp := range c.Env {
					env[kvp.Key] = kvp.Value
				}
			case *instructions.LabelCommand:
				for _, kvp := range c.Labels {
					key, err := shlex.ProcessWordWithMap(kvp.Key, env)
					if err != nil {
						return nil, xerrors.Errorf("unable to evaluate the label '%s': %w", kvp.Key, err)
					}
					key = strings.ToLower(key)
					// "com.redhat.component" is the current label;
					// "bzcomponent" appears in older Red Hat images.
					if key == "com.redhat.component" || key == "bzcomponent" {
						component, err = shlex.ProcessWordWithMap(kvp.Value, env)
					} else if key == "architecture" {
						arch, err = shlex.ProcessWordWithMap(kvp.Value, env)
					}
					// err is set by either branch above; check once after both.
					if err != nil {
						return nil, xerrors.Errorf("failed to process the label '%s': %w", key, err)
					}
				}
			}
		}
	}
	if component == "" {
		return nil, xerrors.New("no component found")
	} else if arch == "" {
		return nil, xerrors.New("no arch found")
	}
	// e.g. Dockerfile-ubi8-8.3-227 => "ubi8-container" + "-" + "8.3-227"
	return &analyzer.AnalysisResult{
		BuildInfo: &types.BuildInfo{
			Nvr:  component + "-" + parseVersion(target.FilePath),
			Arch: arch,
		},
	}, nil
}
// Required matches Dockerfiles saved by the Red Hat build system directly
// under root/buildinfo/ (e.g. root/buildinfo/Dockerfile-ubi8-8.3-227).
func (a dockerfileAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	dir, file := filepath.Split(filepath.ToSlash(filePath))
	return dir == "root/buildinfo/" && strings.HasPrefix(file, "Dockerfile")
}
// Type identifies this analyzer as the Red Hat Dockerfile analyzer.
func (a dockerfileAnalyzer) Type() analyzer.Type {
	return analyzer.TypeRedHatDockerfileType
}

// Version returns the analyzer version used for cache invalidation.
func (a dockerfileAnalyzer) Version() int {
	return dockerfileAnalyzerVersion
}
// parseVersion extracts the trailing "<version>-<release>" portion of a file
// name such as "Dockerfile-ubi8-8.3-227" (=> "8.3-227"). It returns "" when
// the name contains no hyphen at all.
func parseVersion(nvr string) string {
	release := strings.LastIndex(nvr, "-")
	if release < 0 {
		return ""
	}
	version := strings.LastIndex(nvr[:release], "-")
	return nvr[version+1:]
}
// metaArgsToMap flattens top-level ARG key/value pairs into a plain map.
// https://github.com/moby/buildkit/blob/b33357bcd2e3319b0323037c900c13b45a228df1/frontend/dockerfile/dockerfile2llb/convert.go#L474-L482
func metaArgsToMap(metaArgs []instructions.KeyValuePairOptional) map[string]string {
	out := make(map[string]string, len(metaArgs))
	for _, kv := range metaArgs {
		out[kv.Key] = kv.ValueString()
	}
	return out
}
// setKVValue overrides kvpo's value with the one from values when present.
func setKVValue(kvpo instructions.KeyValuePairOptional, values map[string]string) instructions.KeyValuePairOptional {
	v, ok := values[kvpo.Key]
	if ok {
		kvpo.Value = &v
	}
	return kvpo
}

View File

@@ -0,0 +1,95 @@
package buildinfo
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
// Test_dockerfileAnalyzer_Analyze verifies NVR/arch extraction from Red Hat
// build-system Dockerfiles, covering the current "com.redhat.component" label
// and the legacy "BZComponent" label.
func Test_dockerfileAnalyzer_Analyze(t *testing.T) {
	tests := []struct {
		name      string
		inputFile string
		want      *analyzer.AnalysisResult
		wantErr   string
	}{
		{
			name:      "com.redhat.component",
			inputFile: "testdata/dockerfile/Dockerfile-ubi8-8.3-227",
			want: &analyzer.AnalysisResult{
				BuildInfo: &types.BuildInfo{
					Nvr:  "ubi8-container-8.3-227",
					Arch: "x86_64",
				},
			},
		},
		{
			name:      "BZcomponent",
			inputFile: "testdata/dockerfile/Dockerfile-jboss-base-7-base-1.1-3",
			want: &analyzer.AnalysisResult{
				BuildInfo: &types.BuildInfo{
					Nvr:  "jboss-base-7-docker-1.1-3",
					Arch: "x86_64",
				},
			},
		},
		{
			name:      "missing architecture",
			inputFile: "testdata/dockerfile/Dockerfile.sad",
			wantErr:   "no arch found",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer f.Close()

			a := dockerfileAnalyzer{}
			got, err := a.Analyze(context.Background(), analyzer.AnalysisInput{
				FilePath: tt.inputFile,
				Content:  f,
			})
			if tt.wantErr != "" {
				require.Error(t, err)
				// Fixed argument order: testify's Equal is (t, expected, actual);
				// the original passed them swapped, garbling failure output.
				assert.Equal(t, tt.wantErr, err.Error())
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}
func Test_dockerfileAnalyzer_Required(t *testing.T) {
tests := []struct {
name string
filePath string
want bool
}{
{
name: "happy path",
filePath: "root/buildinfo/Dockerfile-ubi8-8.3-227",
want: true,
},
{
name: "sad path",
filePath: "app/Dockerfile-ubi8-8.3-227",
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a := dockerfileAnalyzer{}
got := a.Required(tt.filePath, nil)
assert.Equal(t, tt.want, got)
})
}
}

View File

@@ -0,0 +1 @@
broken

View File

@@ -0,0 +1,12 @@
{
"metadata": {
"icm_version": 1,
"icm_spec": "https://raw.githubusercontent.com/containerbuildsystem/atomic-reactor/master/atomic_reactor/schemas/content_manifest.json",
"image_layer_index": 1
},
"content_sets": [
"rhel-8-for-x86_64-baseos-rpms",
"rhel-8-for-x86_64-appstream-rpms"
],
"image_contents": []
}

View File

@@ -0,0 +1,35 @@
# registry.redhat.io/openshift3/metrics-cassandra:3.1.0
FROM 82ad5fa11820c2889c60f7f748d67aab04400700c581843db0d1e68735327443
MAINTAINER JBoss Cloud Enablement Feedback <cloud-enablement-feedback@redhat.com>
ENV base jboss-base
LABEL BZComponent="${base}-7-docker" \
Architecture="x86_64" \
Name="jboss-base-7/base" \
Version="1.1" \
Release="3"
# Explicitly set the $HOME env variable so it can be referenced in Dockerfiles
ENV HOME /home/jboss
ADD jboss.repo /etc/yum.repos.d/jboss.repo
# Install unzip and tar package which is required to unpack product distributions
# Cleanup the YUM metadata
RUN yum -y --disablerepo \* --enablerepo=jboss install yum-utils unzip tar && \
yum clean all
RUN rm /etc/yum.repos.d/jboss.repo
# Create a user and group used to launch processes
# We use the ID 185 for the group as well as for the user.
# This ID is registered static ID for the JBoss EAP product
# on RHEL which makes it safe to use.
RUN groupadd -r jboss -g 185 && useradd -u 185 -r -g jboss -m -d /home/jboss -s /sbin/nologin -c "JBoss user" jboss
# Set the working directory to jboss' user home directory
WORKDIR /home/jboss
# Specify the user which should be used to execute all commands below
USER jboss

View File

@@ -0,0 +1,30 @@
FROM sha256:4224eead35ea350b4b9d4ac67550e92efb9a50d3855cb3381469fe4c7e3f2053
LABEL maintainer="Red Hat, Inc."
LABEL com.redhat.component="ubi8-container" \
name="ubi8" \
version="8.3"
#label for EULA
LABEL com.redhat.license_terms="https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI"
#labels for container catalog
LABEL summary="Provides the latest release of Red Hat Universal Base Image 8."
LABEL description="The Universal Base Image is designed and engineered to be the base layer for all of your containerized applications, middleware and utilities. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly."
LABEL io.k8s.display-name="Red Hat Universal Base Image 8"
LABEL io.openshift.expose-services=""
LABEL io.openshift.tags="base rhel8"
ENV container oci
ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
ENV arch x86_64
CMD ["/bin/bash"]
RUN rm -rf /var/log/*
#rhbz 1609043
RUN mkdir -p /var/log/rhsm
ADD ubi8-container-8.3-227.json /root/buildinfo/content_manifests/ubi8-container-8.3-227.json
LABEL "release"="227" "distribution-scope"="public" "vendor"="Red Hat, Inc." "build-date"="2020-12-10T01:59:40.343735" "architecture"=$arch "vcs-type"="git" "vcs-ref"="3652f52021079930cba3bf90d27d9f191b18115b" "com.redhat.build-host"="cpt-1002.osbs.prod.upshift.rdu2.redhat.com" "io.k8s.description"="The Universal Base Image is designed and engineered to be the base layer for all of your containerized applications, middleware and utilities. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly." "url"="https://access.redhat.com/containers/#/registry.access.redhat.com/ubi8/images/8.3-227"

View File

@@ -0,0 +1,28 @@
FROM sha256:4224eead35ea350b4b9d4ac67550e92efb9a50d3855cb3381469fe4c7e3f2053
LABEL maintainer="Red Hat, Inc."
LABEL com.redhat.component="ubi8-container" \
name="ubi8" \
version="8.3"
#label for EULA
LABEL com.redhat.license_terms="https://www.redhat.com/en/about/red-hat-end-user-license-agreements#UBI"
#labels for container catalog
LABEL summary="Provides the latest release of Red Hat Universal Base Image 8."
LABEL description="The Universal Base Image is designed and engineered to be the base layer for all of your containerized applications, middleware and utilities. This base image is freely redistributable, but Red Hat only supports Red Hat technologies through subscriptions for Red Hat products. This image is maintained by Red Hat and updated regularly."
LABEL io.k8s.display-name="Red Hat Universal Base Image 8"
LABEL io.openshift.expose-services=""
LABEL io.openshift.tags="base rhel8"
ENV container oci
ENV PATH /usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
CMD ["/bin/bash"]
RUN rm -rf /var/log/*
#rhbz 1609043
RUN mkdir -p /var/log/rhsm
ADD ubi8-container-8.3-227.json /root/buildinfo/content_manifests/ubi8-container-8.3-227.json

View File

@@ -0,0 +1,282 @@
package apk
import (
"encoding/json"
"fmt"
"io"
"log"
"net/http"
builtinos "os"
"sort"
"strings"
"time"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/os"
"github.com/aquasecurity/fanal/applier"
"github.com/aquasecurity/fanal/types"
)
const (
	// envApkIndexArchiveURL optionally overrides apkIndexArchiveURL; a
	// file:// URL is also accepted (used by tests).
	envApkIndexArchiveURL = "FANAL_APK_INDEX_ARCHIVE_URL"
	analyzerVersion       = 1
)

// apkIndexArchiveURL is a format string taking the alpine major.minor version.
var apkIndexArchiveURL = "https://raw.githubusercontent.com/knqyf263/apkIndex-archive/master/alpine/v%s/main/x86_64/history.json"

func init() {
	if builtinos.Getenv(envApkIndexArchiveURL) != "" {
		apkIndexArchiveURL = builtinos.Getenv(envApkIndexArchiveURL)
	}
	analyzer.RegisterConfigAnalyzer(&alpineCmdAnalyzer{})
}

// alpineCmdAnalyzer guesses apk packages installed via commands recorded in
// the image config history (no package database required).
type alpineCmdAnalyzer struct{}

// apkIndex mirrors the schema of the APKINDEX archive JSON.
type apkIndex struct {
	Package map[string]archive
	Provide provide
}

// archive describes one package in the APKINDEX archive.
type archive struct {
	Origin       string
	Versions     version
	Dependencies []string
	Provides     []string
}

type provide struct {
	SO      map[string]pkg // package which provides the shared object
	Package map[string]pkg // package which provides the package
}

type pkg struct {
	Package  string
	Versions version
}

// version maps a version string to its build timestamp (unix seconds).
type version map[string]int
// Analyze derives apk packages installed via `apk add` commands recorded in
// the image config history, guessing versions from the APKINDEX archive.
func (a alpineCmdAnalyzer) Analyze(targetOS types.OS, configBlob []byte) ([]types.Package, error) {
	// Removed the log.Println here: the error is already wrapped and returned,
	// so logging it as well double-reported the same failure.
	apkIndexArchive, err := a.fetchApkIndexArchive(targetOS)
	if err != nil {
		return nil, xerrors.Errorf("failed to fetch apk index archive: %w", err)
	}

	var config applier.Config
	if err = json.Unmarshal(configBlob, &config); err != nil {
		return nil, xerrors.Errorf("failed to unmarshal docker config: %w", err)
	}

	return a.parseConfig(apkIndexArchive, config), nil
}
// fetchApkIndexArchive downloads (or reads, for file:// URLs) the APKINDEX
// archive for the target alpine release and decodes it.
func (a alpineCmdAnalyzer) fetchApkIndexArchive(targetOS types.OS) (*apkIndex, error) {
	// 3.9.3 => 3.9
	osVer := targetOS.Name
	if strings.Count(osVer, ".") > 1 {
		osVer = osVer[:strings.LastIndex(osVer, ".")]
	}

	url := fmt.Sprintf(apkIndexArchiveURL, osVer)

	var reader io.Reader
	if strings.HasPrefix(url, "file://") {
		f, err := builtinos.Open(strings.TrimPrefix(url, "file://"))
		if err != nil {
			return nil, xerrors.Errorf("failed to read APKINDEX archive file: %w", err)
		}
		// Fix: the file handle was previously never closed (resource leak).
		defer f.Close()
		reader = f
	} else {
		resp, err := http.Get(url)
		if err != nil {
			return nil, xerrors.Errorf("failed to fetch APKINDEX archive: %w", err)
		}
		defer resp.Body.Close()
		reader = resp.Body
	}

	apkIndexArchive := &apkIndex{}
	if err := json.NewDecoder(reader).Decode(apkIndexArchive); err != nil {
		return nil, xerrors.Errorf("failed to decode APKINDEX JSON: %w", err)
	}
	return apkIndexArchive, nil
}
// parseConfig walks the image history and returns the unique set of packages
// (including resolved dependencies) installed via apk commands.
func (a alpineCmdAnalyzer) parseConfig(apkIndexArchive *apkIndex, config applier.Config) (packages []types.Package) {
	// Build "$NAME" => value so that $VAR references in commands can expand.
	envs := make(map[string]string)
	for _, env := range config.ContainerConfig.Env {
		eq := strings.Index(env, "=")
		envs["$"+env[:eq]] = env[eq+1:]
	}

	uniqPkgs := make(map[string]types.Package)
	for _, h := range config.History {
		cmdPkgs := a.parseCommand(h.CreatedBy, envs)
		cmdPkgs = a.resolveDependencies(apkIndexArchive, cmdPkgs)
		for _, p := range a.guessVersion(apkIndexArchive, cmdPkgs, h.Created) {
			uniqPkgs[p.Name] = p
		}
	}

	for _, p := range uniqPkgs {
		packages = append(packages, p)
	}
	return packages
}
// parseCommand extracts package names passed to `apk add` within a history
// command, expanding $VAR references using the collected environment.
func (a alpineCmdAnalyzer) parseCommand(command string, envs map[string]string) (pkgs []string) {
	if strings.Contains(command, "#(nop)") {
		// Metadata-only instruction (ENV, LABEL, ...); nothing was executed.
		return nil
	}

	command = strings.TrimPrefix(command, "/bin/sh -c")

	// Split a shell line such as "apk add foo && apk add bar; true" into
	// individual commands.
	var commands []string
	for _, cmd := range strings.Split(command, "&&") {
		for _, c := range strings.Split(cmd, ";") {
			commands = append(commands, strings.TrimSpace(c))
		}
	}
	for _, cmd := range commands {
		if !strings.HasPrefix(cmd, "apk") {
			continue
		}

		var add bool
		for _, field := range strings.Fields(cmd) {
			switch {
			case strings.HasPrefix(field, "-") || strings.HasPrefix(field, "."):
				// Skip flags (e.g. --no-cache) and virtual targets (.build-deps).
				continue
			case field == "add":
				// Everything after "add" (minus flags) is a package name.
				add = true
			case add:
				if strings.HasPrefix(field, "$") {
					// Expand environment references such as "$PKGS" — use a
					// single variadic append instead of a per-element loop.
					pkgs = append(pkgs, strings.Fields(envs[field])...)
					continue
				}
				pkgs = append(pkgs, field)
			}
		}
	}
	return pkgs
}
// resolveDependencies expands the given packages into the unique set that
// includes all of their transitive dependencies.
func (a alpineCmdAnalyzer) resolveDependencies(apkIndexArchive *apkIndex, originalPkgs []string) (pkgs []string) {
	uniqPkgs := make(map[string]struct{})
	for _, name := range originalPkgs {
		if _, done := uniqPkgs[name]; done {
			continue
		}
		// Each top-level package gets its own cycle-detection set.
		seen := map[string]struct{}{}
		for _, dep := range a.resolveDependency(apkIndexArchive, name, seen) {
			uniqPkgs[dep] = struct{}{}
		}
	}
	for name := range uniqPkgs {
		pkgs = append(pkgs, name)
	}
	return pkgs
}
// resolveDependency returns pkgName plus all of its transitive dependencies,
// using seenPkgs to break dependency cycles.
func (a alpineCmdAnalyzer) resolveDependency(apkIndexArchive *apkIndex, pkgName string, seenPkgs map[string]struct{}) (pkgNames []string) {
	pkg, ok := apkIndexArchive.Package[pkgName]
	if !ok {
		// Unknown to the archive (e.g. a virtual target); nothing to resolve.
		return nil
	}
	if _, ok = seenPkgs[pkgName]; ok {
		// Already visited; avoid infinite recursion on dependency cycles.
		return nil
	}
	seenPkgs[pkgName] = struct{}{}
	pkgNames = append(pkgNames, pkgName)
	for _, dependency := range pkg.Dependencies {
		// sqlite-libs=3.26.0-r3 => sqlite-libs
		if strings.Contains(dependency, "=") {
			dependency = dependency[:strings.Index(dependency, "=")]
		}

		if strings.HasPrefix(dependency, "so:") {
			// Shared-object dependency: resolve via the providing package.
			soProvidePkg := apkIndexArchive.Provide.SO[dependency[3:]].Package
			pkgNames = append(pkgNames, a.resolveDependency(apkIndexArchive, soProvidePkg, seenPkgs)...)
			continue
		} else if strings.HasPrefix(dependency, "pc:") || strings.HasPrefix(dependency, "cmd:") {
			// pkg-config and command dependencies don't map to packages here.
			continue
		}
		pkgProvidePkg, ok := apkIndexArchive.Provide.Package[dependency]
		if ok {
			// Virtual name provided by another package.
			pkgNames = append(pkgNames, a.resolveDependency(apkIndexArchive, pkgProvidePkg.Package, seenPkgs)...)
			continue
		}
		pkgNames = append(pkgNames, a.resolveDependency(apkIndexArchive, dependency, seenPkgs)...)
	}
	return pkgNames
}
// historyVersion pairs a package version with its build timestamp (unix
// seconds) from the APKINDEX archive; used to guess which version existed
// when a layer was created.
type historyVersion struct {
	Version string
	BuiltAt int
}
// guessVersion estimates the installed version of each package by picking the
// newest archive version built no later than the layer's creation time. The
// origin (source) package is also reported when it differs.
func (a alpineCmdAnalyzer) guessVersion(apkIndexArchive *apkIndex, originalPkgs []string, createdAt time.Time) (pkgs []types.Package) {
	for _, pkg := range originalPkgs {
		archive, ok := apkIndexArchive.Package[pkg]
		if !ok {
			continue
		}

		var historyVersions []historyVersion
		for version, builtAt := range archive.Versions {
			historyVersions = append(historyVersions, historyVersion{
				Version: version,
				BuiltAt: builtAt,
			})
		}

		sort.Slice(historyVersions, func(i, j int) bool {
			// Tie-break on the version string: archive.Versions is a map with
			// random iteration order and sort.Slice is not stable, so without
			// this the guessed version could differ between runs when two
			// versions share a build timestamp.
			if historyVersions[i].BuiltAt == historyVersions[j].BuiltAt {
				return historyVersions[i].Version < historyVersions[j].Version
			}
			return historyVersions[i].BuiltAt < historyVersions[j].BuiltAt
		})

		createdUnix := int(createdAt.Unix())
		var candidateVersion string
		for _, hv := range historyVersions {
			if hv.BuiltAt <= createdUnix {
				candidateVersion = hv.Version
			} else {
				// Sorted by build time; nothing later can qualify.
				break
			}
		}

		if candidateVersion == "" {
			// Every known version was built after this layer; no reliable guess.
			continue
		}

		pkgs = append(pkgs, types.Package{
			Name:    pkg,
			Version: candidateVersion,
		})

		// Add origin package name
		if archive.Origin != "" && archive.Origin != pkg {
			pkgs = append(pkgs, types.Package{
				Name:    archive.Origin,
				Version: candidateVersion,
			})
		}
	}
	return pkgs
}
// Required runs this config analyzer only for Alpine-based images.
func (a alpineCmdAnalyzer) Required(targetOS types.OS) bool {
	return targetOS.Family == os.Alpine
}

// Type identifies this analyzer as the apk command analyzer.
func (a alpineCmdAnalyzer) Type() analyzer.Type {
	return analyzer.TypeApkCommand
}

// Version returns the analyzer version used for cache invalidation.
func (a alpineCmdAnalyzer) Version() int {
	return analyzerVersion
}

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,73 @@
package config
import (
"regexp"
"sort"
"strings"
"github.com/aquasecurity/fanal/analyzer/config/helm"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/config/dockerfile"
"github.com/aquasecurity/fanal/analyzer/config/json"
"github.com/aquasecurity/fanal/analyzer/config/terraform"
"github.com/aquasecurity/fanal/analyzer/config/yaml"
"github.com/aquasecurity/fanal/types"
)
// separator splits a file pattern into its type and regexp parts,
// e.g. "dockerfile:my_dockerfile_*".
const separator = ":"

// ScannerOption holds the configuration for misconfiguration scanning.
type ScannerOption struct {
	Trace bool
	RegoOnly bool
	Namespaces []string
	FilePatterns []string
	PolicyPaths []string
	DataPaths []string
	DisableEmbeddedPolicies bool
}
// Sort orders every list field in place so that logically equal options
// always compare equal.
func (o *ScannerOption) Sort() {
	for _, ss := range [][]string{o.Namespaces, o.FilePatterns, o.PolicyPaths, o.DataPaths} {
		sort.Strings(ss)
	}
}
// RegisterConfigAnalyzers registers the IaC config analyzers, optionally
// overriding the files each one matches via "type:regexp" patterns
// (e.g. "dockerfile:my_dockerfile_*").
func RegisterConfigAnalyzers(filePatterns []string) error {
	var dockerRe, jsonRe, yamlRe, helmRe *regexp.Regexp
	for _, pattern := range filePatterns {
		parts := strings.SplitN(pattern, separator, 2)
		if len(parts) != 2 {
			return xerrors.Errorf("invalid file pattern (%s)", pattern)
		}

		re, err := regexp.Compile(parts[1])
		if err != nil {
			return xerrors.Errorf("invalid file regexp (%s): %w", pattern, err)
		}

		switch parts[0] {
		case types.Dockerfile:
			dockerRe = re
		case types.JSON:
			jsonRe = re
		case types.YAML:
			yamlRe = re
		case types.Helm:
			helmRe = re
		default:
			return xerrors.Errorf("unknown file type: %s, pattern: %s", parts[0], parts[1])
		}
	}

	analyzer.RegisterAnalyzer(dockerfile.NewConfigAnalyzer(dockerRe))
	analyzer.RegisterAnalyzer(terraform.NewConfigAnalyzer())
	analyzer.RegisterAnalyzer(json.NewConfigAnalyzer(jsonRe))
	analyzer.RegisterAnalyzer(yaml.NewConfigAnalyzer(yamlRe))
	analyzer.RegisterAnalyzer(helm.NewConfigAnalyzer(helmRe))
	return nil
}

View File

@@ -0,0 +1,64 @@
package config_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/aquasecurity/fanal/analyzer/config"
)
// TestScannerOption_Sort checks that Sort orders every list field and leaves
// unset fields nil.
func TestScannerOption_Sort(t *testing.T) {
	type fields struct {
		Namespaces   []string
		FilePatterns []string
		PolicyPaths  []string
		DataPaths    []string
	}
	testCases := []struct {
		name   string
		fields fields
		want   config.ScannerOption
	}{
		{
			name: "happy path",
			fields: fields{
				Namespaces:   []string{"main", "custom", "default"},
				FilePatterns: []string{"dockerfile:foo*", "yaml:yml_*"},
				PolicyPaths:  []string{"policy"},
				DataPaths:    []string{"data/b", "data/c", "data/a"},
			},
			want: config.ScannerOption{
				Namespaces:   []string{"custom", "default", "main"},
				FilePatterns: []string{"dockerfile:foo*", "yaml:yml_*"},
				PolicyPaths:  []string{"policy"},
				DataPaths:    []string{"data/a", "data/b", "data/c"},
			},
		},
		{
			name:   "missing some fields",
			fields: fields{Namespaces: []string{"main"}},
			want:   config.ScannerOption{Namespaces: []string{"main"}},
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			opt := config.ScannerOption{
				Namespaces:   tc.fields.Namespaces,
				FilePatterns: tc.fields.FilePatterns,
				PolicyPaths:  tc.fields.PolicyPaths,
				DataPaths:    tc.fields.DataPaths,
			}
			opt.Sort()
			assert.Equal(t, tc.want, opt)
		})
	}
}

View File

@@ -0,0 +1,78 @@
package dockerfile
import (
"context"
"io"
"os"
"path/filepath"
"regexp"
"strings"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
const version = 1

// requiredFiles are the base names recognized as Dockerfiles; matching is
// case-insensitive and also accepts them as a file extension (see Required).
var requiredFiles = []string{"Dockerfile", "Containerfile"}

// ConfigAnalyzer collects Dockerfiles so they can be scanned for
// misconfigurations by the post handler.
type ConfigAnalyzer struct {
	// filePattern optionally matches additional, user-supplied file paths.
	filePattern *regexp.Regexp
}

// NewConfigAnalyzer returns a Dockerfile config analyzer; filePattern may be nil.
func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
	return ConfigAnalyzer{
		filePattern: filePattern,
	}
}
// Analyze reads the whole Dockerfile and forwards its content verbatim to the
// misconfiguration post handler.
func (s ConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	content, err := io.ReadAll(input.Content)
	if err != nil {
		return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
	}

	file := types.File{
		Type:    types.Dockerfile,
		Path:    input.FilePath,
		Content: content,
	}
	return &analyzer.AnalysisResult{
		Files: map[types.HandlerType][]types.File{
			// It will be passed to misconfig post handler
			types.MisconfPostHandler: {file},
		},
	}, nil
}
// Required does a case-insensitive check for filePath and returns true if
// filePath equals/startsWith/hasExtension requiredFiles
func (s ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	if s.filePattern != nil && s.filePattern.MatchString(filePath) {
		return true
	}

	base := filepath.Base(filePath)
	ext := filepath.Ext(base)
	for _, required := range requiredFiles {
		// Matches "Dockerfile", "Dockerfile.<ext>" and "<name>.dockerfile",
		// all case-insensitively.
		if strings.EqualFold(base, required+ext) || strings.EqualFold(ext, "."+required) {
			return true
		}
	}
	return false
}
// Type identifies this analyzer as the Dockerfile config analyzer.
func (s ConfigAnalyzer) Type() analyzer.Type {
	return analyzer.TypeDockerfile
}

// Version returns the analyzer version used for cache invalidation.
func (s ConfigAnalyzer) Version() int {
	return version
}

View File

@@ -0,0 +1,167 @@
package dockerfile_test
import (
"context"
"os"
"regexp"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/config/dockerfile"
"github.com/aquasecurity/fanal/types"
)
// Test_dockerConfigAnalyzer_Analyze verifies that the Dockerfile config
// analyzer forwards file contents verbatim to the misconfiguration post
// handler. The expected Content values below must match the testdata files
// byte-for-byte.
func Test_dockerConfigAnalyzer_Analyze(t *testing.T) {
	tests := []struct {
		name      string
		inputFile string
		want      *analyzer.AnalysisResult
		wantErr   string
	}{
		{
			name:      "happy path",
			inputFile: "testdata/Dockerfile.deployment",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: types.Dockerfile,
							Path: "testdata/Dockerfile.deployment",
							Content: []byte(`FROM foo
COPY . /
RUN echo hello
`),
						},
					},
				},
			},
		},
		{
			name:      "happy path with multi-stage",
			inputFile: "testdata/Dockerfile.multistage",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: types.Dockerfile,
							Path: "testdata/Dockerfile.multistage",
							Content: []byte(`FROM foo AS build
COPY . /
RUN echo hello
FROM scratch
COPY --from=build /bar /bar
`),
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer f.Close()
			a := dockerfile.NewConfigAnalyzer(nil)
			ctx := context.Background()
			got, err := a.Analyze(ctx, analyzer.AnalysisInput{
				FilePath: tt.inputFile,
				Content:  f,
			})
			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}
// Test_dockerConfigAnalyzer_Required verifies which paths are selected as
// Dockerfiles: the bare name "dockerfile" (case-insensitive), names of the
// form "Dockerfile.<ext>", a ".dockerfile" extension, and any path matching
// an optional custom pattern. Names merely containing "Dockerfile" as a
// prefix/suffix must be rejected.
func Test_dockerConfigAnalyzer_Required(t *testing.T) {
	tests := []struct {
		name        string
		filePattern *regexp.Regexp
		filePath    string
		want        bool
	}{
		{
			name:     "dockerfile",
			filePath: "dockerfile",
			want:     true,
		},
		{
			name:     "Dockerfile",
			filePath: "Dockerfile",
			want:     true,
		},
		{
			name:     "Dockerfile with ext",
			filePath: "Dockerfile.build",
			want:     true,
		},
		{
			name:     "dockerfile as ext",
			filePath: "build.dockerfile",
			want:     true,
		},
		{
			name:     "Dockerfile in dir",
			filePath: "docker/Dockerfile",
			want:     true,
		},
		{
			name:     "Dockerfile as prefix",
			filePath: "Dockerfilebuild",
			want:     false,
		},
		{
			name:     "Dockerfile as suffix",
			filePath: "buildDockerfile",
			want:     false,
		},
		{
			name:     "Dockerfile as prefix with ext",
			filePath: "Dockerfilebuild.sh",
			want:     false,
		},
		{
			name:     "Dockerfile as suffix with ext",
			filePath: "buildDockerfile.sh",
			want:     false,
		},
		{
			name:     "json",
			filePath: "deployment.json",
			want:     false,
		},
		{
			name:        "file pattern",
			filePattern: regexp.MustCompile(`foo*`),
			filePath:    "foo_file",
			want:        true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s := dockerfile.NewConfigAnalyzer(tt.filePattern)
			// nil FileInfo is passed here; the Dockerfile analyzer's
			// Required apparently decides on the path alone.
			got := s.Required(tt.filePath, nil)
			assert.Equal(t, tt.want, got)
		})
	}
}
// Test_dockerConfigAnalyzer_Type checks that the analyzer reports the
// Dockerfile analyzer type.
func Test_dockerConfigAnalyzer_Type(t *testing.T) {
	a := dockerfile.NewConfigAnalyzer(nil)
	assert.Equal(t, analyzer.TypeDockerfile, a.Type())
}

View File

@@ -0,0 +1,3 @@
FROM foo
COPY . /
RUN echo hello

View File

@@ -0,0 +1,6 @@
FROM foo AS build
COPY . /
RUN echo hello
FROM scratch
COPY --from=build /bar /bar

View File

@@ -0,0 +1,139 @@
package helm
import (
"archive/tar"
"compress/gzip"
"context"
"errors"
"io"
"os"
"path/filepath"
"regexp"
"strings"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
dio "github.com/aquasecurity/go-dep-parser/pkg/io"
"golang.org/x/xerrors"
)
// version is this analyzer's version number.
const version = 1

// maxTarSize is the upper bound on file size considered by Required;
// anything larger is skipped.
const maxTarSize = 209_715_200 // 200MB

// ConfigAnalyzer detects Helm chart files and chart archives so their
// contents can be passed on for misconfiguration scanning.
type ConfigAnalyzer struct {
	// filePattern, when non-nil, force-includes any matching path.
	filePattern *regexp.Regexp
}
// NewConfigAnalyzer returns a Helm ConfigAnalyzer. A non-nil filePattern
// force-includes any path matching it, regardless of extension.
func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
	var a ConfigAnalyzer
	a.filePattern = filePattern
	return a
}
// Analyze reads a Helm file (or packaged chart archive) and forwards its
// raw contents to the misconfiguration post handler.
//
// For archives (.tar/.tgz/.tar.gz) the stream is first inspected for a
// Chart.yaml entry; archives that are not Helm charts are silently skipped
// by returning (nil, nil).
func (a ConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	if isArchive(input.FilePath) {
		if !isHelmChart(input.FilePath, input.Content) {
			return nil, nil
		}
		// isHelmChart consumed the stream; rewind before reading it again.
		if _, err := input.Content.Seek(0, io.SeekStart); err != nil {
			return nil, xerrors.Errorf("failed to rewind %s: %w", input.FilePath, err)
		}
	}

	b, err := io.ReadAll(input.Content)
	if err != nil {
		return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
	}

	return &analyzer.AnalysisResult{
		Files: map[types.HandlerType][]types.File{
			// it will be passed to misconfig post handler
			types.MisconfPostHandler: {
				{
					Type:    types.Helm,
					Path:    input.FilePath,
					Content: b,
				},
			},
		},
	}, nil
}
// Required reports whether filePath should be handed to Analyze: any path
// matching the custom pattern, files with Helm-related extensions, or the
// well-known chart file names Chart.yaml and .helmignore.
func (a ConfigAnalyzer) Required(filePath string, info os.FileInfo) bool {
	if a.filePattern != nil && a.filePattern.MatchString(filePath) {
		return true
	}

	// Guard against a nil FileInfo (e.g. when the caller's os.Stat failed);
	// dereferencing it here used to panic.
	if info == nil {
		return false
	}
	if info.Size() > maxTarSize {
		// Too big to be worth inspecting as a Helm chart. Note this gate
		// applies to every candidate file, not only tarballs.
		return false
	}

	lower := strings.ToLower(filePath)
	for _, suffix := range []string{".tpl", ".json", ".yaml", ".tar", ".tgz", ".tar.gz"} {
		if strings.HasSuffix(lower, suffix) {
			return true
		}
	}

	base := filepath.Base(filePath)
	for _, name := range []string{"Chart.yaml", ".helmignore"} {
		if strings.EqualFold(base, name) {
			return true
		}
	}
	return false
}
// Type returns the analyzer type constant identifying this analyzer's
// results as Helm configuration.
func (ConfigAnalyzer) Type() analyzer.Type {
	return analyzer.TypeHelm
}
// Version returns the current version of this analyzer.
func (ConfigAnalyzer) Version() int {
	return version
}
// isHelmChart reports whether the archive at path contains a regular-file
// entry named Chart.yaml, i.e. whether it looks like a packaged Helm chart.
// Unreadable or malformed archives simply report false.
func isHelmChart(path string, file dio.ReadSeekerAt) bool {
	var reader io.Reader = file
	if isGzip(path) {
		gz, err := gzip.NewReader(file)
		if err != nil {
			return false
		}
		reader = gz
	}

	archive := tar.NewReader(reader)
	for {
		header, err := archive.Next()
		switch {
		case errors.Is(err, io.EOF):
			// Walked the whole archive without finding a Chart.yaml.
			return false
		case err != nil:
			return false
		case header.Typeflag == tar.TypeReg && strings.HasSuffix(header.Name, "Chart.yaml"):
			return true
		}
	}
}
// isArchive reports whether path looks like a tarball, plain or gzipped.
func isArchive(path string) bool {
	return strings.HasSuffix(path, ".tar") || isGzip(path)
}
// isGzip reports whether path carries a gzip-compressed tarball extension.
func isGzip(path string) bool {
	return strings.HasSuffix(path, ".tgz") || strings.HasSuffix(path, ".tar.gz")
}

View File

@@ -0,0 +1,434 @@
package helm
import (
"context"
"os"
"regexp"
"testing"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Test_helmConfigAnalyzer_Analyze verifies that Analyze passes raw Helm
// file contents (plain YAML or packaged .tgz charts) to the
// misconfiguration post handler, and returns nothing for tarballs that
// are not Helm charts.
func Test_helmConfigAnalyzer_Analyze(t *testing.T) {
	type args struct {
		namespaces  []string
		policyPaths []string
	}
	tests := []struct {
		name string
		// NOTE(review): args is populated in every case but never used by
		// the test body — candidate for removal.
		args      args
		inputFile string
		want      *analyzer.AnalysisResult
		// wantErr is matched as a substring of the returned error; no case
		// currently sets it.
		wantErr string
	}{
		{
			name: "Chart.yaml",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/Chart.yaml",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "helm",
							Path: "testdata/Chart.yaml",
							// Must match the fixture file byte-for-byte.
							Content: []byte(`apiVersion: v2
name: testchart
description: A Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"
`),
						},
					},
				},
			},
		},
		{
			name: "values.yaml",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/values.yaml",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "helm",
							Path: "testdata/values.yaml",
							// Must match the fixture file byte-for-byte.
							Content: []byte(`# Default values for testchart.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
  repository: nginx
  pullPolicy: IfNotPresent
  # Overrides the image tag whose default is the chart appVersion.
  tag: ""
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
  # Specifies whether a service account should be created
  create: true
  # Annotations to add to the service account
  annotations: {}
  # The name of the service account to use.
  # If not set and create is true, a name is generated using the fullname template
  name: ""
podAnnotations: {}
podSecurityContext:
  {}
  # fsGroup: 2000
securityContext:
  {}
  # capabilities:
  #   drop:
  #   - ALL
  # readOnlyRootFilesystem: true
  # runAsNonRoot: true
  # runAsUser: 1000
service:
  type: ClusterIP
  port: 80
ingress:
  enabled: false
  className: ""
  annotations:
    {}
    # kubernetes.io/ingress.class: nginx
    # kubernetes.io/tls-acme: "true"
  hosts:
    - host: chart-example.local
      paths:
        - path: /
          pathType: ImplementationSpecific
  tls: []
  #  - secretName: chart-example-tls
  #    hosts:
  #      - chart-example.local
resources:
  {}
  # We usually recommend not to specify default resources and to leave this as a conscious
  # choice for the user. This also increases chances charts run on environments with little
  # resources, such as Minikube. If you do want to specify resources, uncomment the following
  # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
  # limits:
  #   cpu: 100m
  #   memory: 128Mi
  # requests:
  #   cpu: 100m
  #   memory: 128Mi
autoscaling:
  enabled: false
  minReplicas: 1
  maxReplicas: 100
  targetCPUUtilizationPercentage: 80
  # targetMemoryUtilizationPercentage: 80
nodeSelector: {}
tolerations: []
affinity: {}
`),
						},
					},
				},
			},
		},
		{
			name: "testchart.tgz",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/testchart.tgz",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "helm",
							Path: "testdata/testchart.tgz",
							// Exact bytes of the gzipped chart archive fixture.
							Content: []uint8{
								0x1f, 0x8b, 0x8, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x3, 0xed, 0x58, 0x5b,
								0x6f, 0xdb, 0x36, 0x14, 0xce, 0xb3, 0x7e, 0x5, 0xd7, 0x3c, 0xa4, 0x2d, 0x1a, 0xd9,
								0x4a, 0xec, 0xa4, 0xf0, 0x5b, 0x90, 0xec, 0x12, 0x2c, 0x69, 0x83, 0xa6, 0xed, 0x30,
								0xc, 0xc3, 0x40, 0x4b, 0xb4, 0xcd, 0x85, 0x22, 0x55, 0x92, 0x72, 0xe2, 0xe, 0xdb,
								0x6f, 0xdf, 0x77, 0x48, 0xc9, 0x76, 0x9c, 0xa6, 0xed, 0x43, 0x9b, 0x62, 0x98, 0xce,
								0x83, 0x6d, 0x51, 0x87, 0xe7, 0x7e, 0xb5, 0x17, 0xce, 0xe7, 0x33, 0x6e, 0x7d, 0x6f,
								0xeb, 0xab, 0x41, 0x1f, 0x70, 0x38, 0x1c, 0xd2, 0x77, 0x76, 0x38, 0xec, 0xc7, 0xef,
								0xfd, 0xf0, 0xdd, 0xc2, 0x56, 0x36, 0xd8, 0x1b, 0x64, 0x7b, 0xd9, 0x60, 0x98, 0x1,
								0x2f, 0xcb, 0xe, 0x7, 0x87, 0x5b, 0x6c, 0xf8, 0xf5, 0x44, 0x5a, 0x41, 0xed, 0x3c, 0xb7,
								0x8c, 0x6d, 0x99, 0x6b, 0xa1, 0x3f, 0x8e, 0x27, 0xac, 0x7b, 0x8, 0x81, 0x1e, 0x16,
								0xfc, 0xd2, 0xff, 0xc7, 0xf4, 0x99, 0x2e, 0x78, 0xa9, 0xbe, 0x34, 0xf, 0x72, 0xf0,
								0xc1, 0x60, 0x70, 0x9f, 0xff, 0xf7, 0xb2, 0xc3, 0x2c, 0xf8, 0xbf, 0x3f, 0x18, 0xec,
								0xed, 0xf, 0xf6, 0xe0, 0xff, 0xfd, 0x83, 0xc3, 0xe1, 0x16, 0xeb, 0x7f, 0x69, 0x41,
								0x3e, 0x4, 0xff, 0x73, 0xff, 0xf3, 0x4a, 0xbe, 0x85, 0x62, 0xd2, 0xe8, 0x11, 0x9b,
								0xef, 0x25, 0x9a, 0x97, 0x62, 0xc4, 0x96, 0x41, 0x91, 0x14, 0xc2, 0xe5, 0x56,
								0x56, 0x3e, 0xbc, 0x3f, 0x62, 0x3f, 0x9, 0x55, 0xb2, 0xf0, 0x86, 0x4d, 0x8c, 0x65,
								0x3f, 0xd7, 0x63, 0x61, 0xb5, 0x0, 0x7a, 0x92, 0x6c, 0xe3, 0x75, 0x7c, 0x93, 0x73,
								0xcd, 0xc6, 0x82, 0x9, 0xe9, 0x67, 0xc2, 0x32, 0x3c, 0xec, 0xf0, 0xaa, 0x52, 0x32,
								0xe7, 0x44, 0x65, 0x87, 0xe1, 0x1e, 0x67, 0x3b, 0x4a, 0x8e, 0x2d, 0xb7, 0x8b, 0x9d,
								0x78, 0x27, 0x4d, 0xb6, 0x89, 0xc0, 0xa, 0x2d, 0x1e, 0x3b, 0xc6, 0xad, 0x0, 0x76, 0x6e,
								0x94, 0x12, 0x79, 0x38, 0x37, 0x13, 0x48, 0x57, 0x56, 0x8a, 0x83, 0x29, 0xf3, 0x33,
								0xbe, 0x64, 0x57, 0xf1, 0xfc, 0x8a, 0x4f, 0x45, 0xc1, 0xa4, 0xf6, 0x86, 0xcd, 0xa3,
								0x4e, 0x78, 0xe4, 0x36, 0x9f, 0xc9, 0x39, 0x24, 0xdc, 0x66, 0x38, 0x7, 0x62, 0x21,
								0x2a, 0x65, 0x16, 0xa2, 0x88, 0x3c, 0xcf, 0xa2, 0x1c, 0x2d, 0xbf, 0xca, 0x9a, 0xb9,
								0x2c, 0x4, 0x83, 0xb7, 0x27, 0xb5, 0x62, 0xb5, 0x97, 0x4a, 0x7a, 0x9, 0x56, 0x90,
								0x7a, 0x52, 0xeb, 0x20, 0x83, 0xb, 0xba, 0x43, 0xb9, 0x46, 0xdf, 0x42, 0xcc, 0x85,
								0x32, 0x95, 0xb0, 0x29, 0x7b, 0x3d, 0x13, 0x8b, 0x1d, 0x88, 0x2c, 0x75, 0xae, 0xea,
								0x82, 0xb8, 0x13, 0x5f, 0x4e, 0x3c, 0x85, 0x2e, 0x84, 0xce, 0x17, 0xa4, 0x0, 0xbf,
								0xab, 0x27, 0x44, 0x93, 0xfa, 0x4f, 0xe8, 0x8, 0xba, 0xc6, 0x89, 0x35, 0xc6, 0x5c,
								0x17, 0x6b, 0x9c, 0x83, 0x72, 0xc4, 0xda, 0x12, 0x3d, 0x2b, 0xf5, 0x14, 0xf4, 0x2b,
								0x59, 0x9, 0x25, 0xb5, 0x48, 0x37, 0xb5, 0x29, 0xc, 0xd3, 0x86, 0x4, 0x9c, 0xe0,
								0x2d, 0x28, 0x2d, 0xd6, 0x6c, 0x47, 0x74, 0xc9, 0x41, 0x2, 0xca, 0x8, 0x32, 0x22,
								0x61, 0xde, 0x32, 0x8f, 0x5f, 0x54, 0x88, 0x85, 0x35, 0x61, 0xc9, 0xc9, 0xaf, 0x67,
								0x12, 0x52, 0xb8, 0x35, 0xf5, 0x1b, 0x53, 0xa7, 0xf1, 0x55, 0xf3, 0xc4, 0x74, 0x5d,
								0x22, 0x36, 0x98, 0x9b, 0x99, 0x5a, 0x15, 0x44, 0x17, 0x26, 0xb1, 0xa2, 0x14, 0xda,
								0xc3, 0x2a, 0x82, 0xe7, 0x33, 0xe6, 0x65, 0x29, 0xd8, 0xc2, 0xd4, 0xac, 0xe4, 0x57,
								0x81, 0x96, 0x9e, 0xb6, 0x5e, 0x5a, 0x11, 0x27, 0x29, 0x25, 0xd9, 0xa7, 0x95, 0xfb,
								0x59, 0x63, 0x5b, 0x68, 0x1e, 0xd0, 0x20, 0xde, 0x52, 0x2, 0x5c, 0x6e, 0x62, 0x39,
								0x6, 0x8e, 0xb8, 0xa9, 0x60, 0x51, 0xf0, 0x3, 0xc9, 0x9, 0x42, 0xc8, 0x5c, 0xb3, 0x4b,
								0x51, 0x72, 0xed, 0x65, 0xde, 0x22, 0x12, 0x99, 0xc7, 0x33, 0xef, 0x2b, 0x37, 0xea,
								0xf5, 0x9c, 0x28, 0x41, 0x2a, 0x35, 0x76, 0xda, 0x7b, 0x92, 0xcc, 0xdb, 0xac, 0xe8,
								0xa7, 0x59, 0xda, 0xdf, 0x54, 0x7d, 0x43, 0x4d, 0x8a, 0xca, 0x28, 0xcc, 0xd2, 0xb1,
								0x63, 0x41, 0xb4, 0x97, 0xd6, 0xfc, 0xb8, 0x75, 0x40, 0xfd, 0xf3, 0xec, 0xd3, 0x5a,
								0x67, 0x8d, 0x53, 0x7a, 0x5b, 0x69, 0x72, 0xe3, 0x9a, 0xe2, 0xa0, 0x7c, 0xbf, 0xea,
								0x31, 0x60, 0x5b, 0x31, 0x10, 0xa, 0x2a, 0x46, 0xe0, 0x4a, 0xbf, 0x4d, 0xad, 0xa0,
								0x43, 0xed, 0xe8, 0x26, 0xe8, 0x9e, 0x7a, 0x7a, 0xb4, 0x22, 0x37, 0x65, 0x49, 0xd1,
								0x18, 0xec, 0x8c, 0xc4, 0x81, 0xc3, 0xd8, 0x35, 0x92, 0x9f, 0xbd, 0xab, 0xd, 0x3c,
								0x96, 0x26, 0x20, 0xb0, 0xac, 0x31, 0x8f, 0xb2, 0x34, 0x3b, 0x48, 0xfb, 0x8f, 0x92,
								0x6f, 0x5d, 0xf8, 0x3a, 0x8, 0xb0, 0xea, 0xff, 0xe9, 0xc, 0xc5, 0x5d, 0x4e, 0x35,
								0xca, 0xc1, 0x17, 0xe6, 0xf1, 0x89, 0xfe, 0xdf, 0x1f, 0xee, 0xf, 0x37, 0xfa, 0xff,
								0x20, 0xeb, 0xf, 0xba, 0xfe, 0xff, 0x10, 0xb0, 0xcd, 0x2e, 0xb8, 0xf7, 0xe8, 0xe2,
								0xb1, 0x7, 0x5, 0xf7, 0xb3, 0xeb, 0x99, 0x40, 0x1, 0xab, 0xa5, 0xa, 0x65, 0xb6, 0xe9,
								0xac, 0x2e, 0x6d, 0x6b, 0xa0, 0xab, 0xab, 0xca, 0x50, 0x7f, 0x71, 0x8, 0x19, 0xc5, 0xa6,
								0xca, 0x8c, 0x51, 0xa4, 0x10, 0x46, 0xc0, 0x7e, 0x86, 0x82, 0x80, 0x42, 0x8d, 0x8e,
								0x8b, 0x7b, 0xa8, 0x1, 0xab, 0x73, 0x94, 0x72, 0x10, 0xd0, 0x62, 0x1a, 0x2b, 0xc9,
								0xe3, 0xa, 0xf5, 0x46, 0xde, 0xa0, 0x6a, 0x84, 0x5a, 0xf1, 0xdd, 0x93, 0x94, 0xbd,
								0xd4, 0xa, 0xfd, 0x51, 0x87, 0x9b, 0x24, 0x12, 0x43, 0x4f, 0x65, 0xa1, 0xb1, 0x25,
								0xe9, 0xc9, 0xe5, 0x1f, 0x97, 0x1e, 0xb2, 0x81, 0xc4, 0x31, 0xea, 0xd, 0x8, 0xbc,
								0x3d, 0xbe, 0x64, 0x85, 0xb4, 0x2e, 0x49, 0xa7, 0xd2, 0xf7, 0xc2, 0x67, 0x14, 0x3f,
								0x49, 0xc7, 0xef, 0x6d, 0x2f, 0x7c, 0xb6, 0x7, 0xb3, 0x69, 0x8f, 0x3e, 0xda, 0x47,
								0x37, 0xd7, 0xbd, 0x15, 0xa1, 0x31, 0xf4, 0xab, 0x2b, 0x36, 0x91, 0xa, 0xfd, 0xe7,
								0x69, 0xea, 0xae, 0x2b, 0x7c, 0x8e, 0xf9, 0x15, 0x3e, 0x7d, 0x49, 0xbf, 0xd, 0xe8,
								0x24, 0x4f, 0xff, 0xa1, 0xf6, 0xc2, 0xad, 0x34, 0xb5, 0x63, 0xa7, 0x27, 0xdf, 0x83,
								0x2f, 0x86, 0x5, 0x6a, 0xd9, 0x49, 0x8a, 0x89, 0x81, 0xf7, 0x22, 0x3a, 0x8e, 0x92,
								0x74, 0xee, 0x72, 0x53, 0x88, 0xde, 0x7f, 0xa1, 0xc6, 0xad, 0xf2, 0x7f, 0xce, 0x55,
								0xd, 0x27, 0x7f, 0x85, 0x5, 0xe0, 0x13, 0xf9, 0xbf, 0x3f, 0xdc, 0x3f, 0xb8, 0x93,
								0xff, 0x83, 0xac, 0xcb, 0xff, 0x87, 0x80, 0x6d, 0x76, 0x22, 0x26, 0xbc, 0x56, 0x98,
								0xe3, 0x82, 0xff, 0xe3, 0x6c, 0xdb, 0x6, 0x45, 0xba, 0x36, 0xf6, 0x70, 0xf6, 0xeb,
								0xd1, 0xf9, 0xd9, 0x2e, 0xde, 0x97, 0x94, 0x9e, 0x45, 0x48, 0x18, 0x42, 0x38, 0x11,
								0xb9, 0xa2, 0xc9, 0x63, 0x8e, 0xe4, 0xe0, 0x63, 0x15, 0x87, 0x94, 0x30, 0x91, 0x3b,
								0xd7, 0xce, 0xe3, 0x98, 0x63, 0xec, 0x6a, 0x88, 0x4b, 0x93, 0xc4, 0x8a, 0x30, 0x54,
								0x1c, 0x9b, 0x5a, 0xfb, 0x11, 0xcb, 0x92, 0x44, 0x96, 0xa8, 0x31, 0xa3, 0x84, 0xa1,
								0x7e, 0x54, 0xc6, 0x49, 0xe4, 0xfa, 0x62, 0xc4, 0xf4, 0x54, 0xea, 0x1b, 0x9c, 0x55,
								0xb5, 0x52, 0x17, 0x6, 0x17, 0x70, 0x76, 0x3a, 0x79, 0x61, 0xfc, 0x85, 0x15, 0xe,
								0xb3, 0x12, 0x5e, 0x6d, 0xb3, 0x97, 0x18, 0x56, 0x2c, 0x52, 0x30, 0x4e, 0x66, 0x81,
								0xe, 0xf3, 0x7c, 0x8a, 0x2a, 0x46, 0x93, 0x74, 0xd1, 0xa8, 0x77, 0x6b, 0x66, 0x5d,
								0x4d, 0x24, 0x29, 0x48, 0x0, 0x19, 0x83, 0xc9, 0xa3, 0x46, 0x86, 0xb, 0xf0, 0xba,
								0x14, 0x18, 0xc6, 0xbc, 0x1b, 0xb1, 0xdf, 0x7e, 0xf, 0x2b, 0x51, 0xcb, 0x22, 0xa0,
								0x61, 0x31, 0x50, 0x77, 0xe, 0x13, 0x84, 0xc7, 0x5c, 0xe6, 0xe2, 0x28, 0xcf, 0x83,
								0x4a, 0x41, 0xb2, 0x4b, 0x8c, 0x61, 0x72, 0x42, 0x53, 0x3c, 0x2a, 0x6a, 0xdc, 0x86,
								0x58, 0x83, 0xc7, 0x78, 0x44, 0x5c, 0x9b, 0x90, 0xc1, 0x12, 0xc6, 0x29, 0x70, 0x33,
								0xfe, 0xc2, 0x1e, 0x66, 0x6b, 0x11, 0x8, 0x1d, 0xd1, 0x74, 0xce, 0xe3, 0xa, 0x0, 0x6b,
								0xf2, 0xa2, 0x68, 0x7, 0xc1, 0xd, 0x72, 0xc0, 0xe6, 0x2b, 0xdc, 0x11, 0xfb, 0xeb, 0xef,
								0x70, 0x1f, 0x63, 0x1e, 0x23, 0x91, 0xdb, 0x41, 0x75, 0x53, 0x88, 0x38, 0xb8, 0xa5,
								0x1, 0xf7, 0x74, 0x12, 0x66, 0x48, 0x27, 0xe2, 0xf4, 0x1d, 0x65, 0x9, 0xe6, 0x83,
								0x34, 0x28, 0xe3, 0x91, 0x10, 0x9e, 0xa7, 0x42, 0xb, 0x4b, 0x12, 0xc7, 0x91, 0x30,
								0x10, 0x6e, 0x6d, 0xb3, 0xf4, 0x35, 0x48, 0xc6, 0x9d, 0x92, 0x6c, 0x54, 0x99, 0xe2,
								0x68, 0x43, 0x3a, 0x3a, 0x83, 0xb5, 0x6b, 0x2b, 0xfd, 0xe2, 0xd8, 0x60, 0xfa, 0xbd,
								0x9, 0xb6, 0x6b, 0xe4, 0x9e, 0xb8, 0x1f, 0xad, 0xa9, 0xab, 0x11, 0xdb, 0x43, 0x9d,
								0x20, 0x1b, 0xdf, 0x87, 0x98, 0xf3, 0x8a, 0x8f, 0x9b, 0x95, 0x29, 0xda, 0x9e, 0xb1,
								0xc2, 0x9a, 0xaa, 0xfd, 0xbd, 0xcb, 0x8e, 0xce, 0xce, 0xc2, 0x6f, 0xa8, 0x53, 0x50,
								0xa3, 0x79, 0x65, 0x8c, 0xff, 0x81, 0x4a, 0xfe, 0xc2, 0x41, 0xd6, 0x35, 0x5b, 0xdb,
								0x5a, 0x1f, 0xb9, 0x17, 0x46, 0x13, 0xc2, 0xe6, 0xf1, 0x1b, 0x18, 0xe, 0xd1, 0xda, 0x8,
								0x13, 0x6c, 0x48, 0x1c, 0xe2, 0xa6, 0x74, 0xac, 0x50, 0x4b, 0x84, 0x3d, 0xbd, 0xa0,
								0x80, 0x45, 0x7f, 0x1c, 0xb1, 0xe7, 0x40, 0x83, 0x65, 0x10, 0xab, 0x41, 0x28, 0xa1,
								0x29, 0x3d, 0x8a, 0x11, 0x9b, 0x70, 0xe5, 0x88, 0x2a, 0xf2, 0xc6, 0xb9, 0x17, 0xad,
								0x79, 0x6e, 0xbb, 0xe, 0x8f, 0x8d, 0x76, 0xc4, 0xfd, 0x6a, 0xb9, 0x69, 0xa7, 0xd2, 0xf4,
								0x1a, 0x9a, 0x69, 0xb8, 0xbf, 0xca, 0x92, 0xbb, 0x88, 0x5e, 0xb9, 0x5d, 0x9e, 0x7, 0xfa,
								0xa4, 0x8, 0xf1, 0x40, 0x52, 0xf8, 0x86, 0xfa, 0x6e, 0x78, 0x18, 0xc5, 0x9c, 0xd8, 0x15,
								0x37, 0x1c, 0x2e, 0x13, 0xa9, 0x32, 0x39, 0x57, 0xe1, 0x3d, 0xb, 0x2d, 0xbc, 0x41, 0x8e,
								0x17, 0xe8, 0x60, 0xc4, 0x7a, 0xcb, 0x93, 0x88, 0xf2, 0x3a, 0xe8, 0x7f, 0x4a, 0xd7,
								0x69, 0x83, 0x9, 0x1a, 0x34, 0xb1, 0x9f, 0x93, 0x79, 0x54, 0xcc, 0xa4, 0xe0, 0x8a, 0x5d,
								0x44, 0x16, 0x25, 0x57, 0x54, 0xfb, 0x16, 0xeb, 0x5d, 0x20, 0x36, 0xfe, 0x5a, 0x89,
								0xb9, 0xdd, 0xb2, 0xfe, 0x90, 0x94, 0x28, 0x23, 0xe, 0x95, 0x25, 0x8f, 0x4e, 0x6f, 0x62,
								0xe1, 0x17, 0x5a, 0xdf, 0x6b, 0xae, 0x30, 0x4c, 0x2c, 0x97, 0x93, 0x10, 0xd2, 0x88,
								0x72, 0x17, 0xa4, 0x5a, 0x2c, 0xab, 0xc2, 0xf2, 0x7e, 0xdc, 0x87, 0xd, 0x53, 0x82, 0x63,
								0x74, 0xf1, 0x54, 0xf3, 0xb8, 0xb, 0x7f, 0x3d, 0x68, 0x97, 0x53, 0xcf, 0x8f, 0x61, 0x36,
								0x33, 0x94, 0x36, 0xed, 0xfe, 0x4f, 0x3d, 0xa1, 0xd9, 0xec, 0xe0, 0x51, 0x13, 0x77,
								0x38, 0xee, 0x40, 0x8d, 0xd6, 0xb5, 0x3c, 0x7e, 0xd3, 0xa0, 0x84, 0xe0, 0xc1, 0x60,
								0x83, 0x0, 0x98, 0x4b, 0x6b, 0x34, 0xd9, 0xc8, 0xc5, 0xb1, 0x7, 0x11, 0xeb, 0x55, 0x13,
								0x60, 0xad, 0x28, 0xcf, 0x30, 0x60, 0x61, 0xff, 0x3, 0xfb, 0x73, 0xa9, 0x25, 0x39, 0x34,
								0xa5, 0xa4, 0xa4, 0x55, 0x10, 0xb, 0xfd, 0x35, 0xd7, 0xb7, 0x34, 0x59, 0xbb, 0x56, 0xeb,
								0xa8, 0x6d, 0x5c, 0xe1, 0xe2, 0xda, 0x47, 0xff, 0x10, 0x10, 0x75, 0x1a, 0xa3, 0x80,
								0xc2, 0x8b, 0x3f, 0x11, 0xa4, 0xf4, 0xbe, 0x24, 0x6, 0x5a, 0xe0, 0xa2, 0xe3, 0x76, 0x11,
								0xc6, 0x33, 0xd0, 0x2a, 0x4d, 0xd0, 0x1e, 0xc5, 0xa8, 0xb6, 0xb0, 0xdf, 0xd8, 0xf2,
								0x60, 0x9b, 0x9, 0xe2, 0x9a, 0xed, 0xac, 0x8c, 0xbd, 0x93, 0x36, 0x44, 0x4b, 0xb9, 0xf2,
								0x52, 0x5e, 0xd5, 0x21, 0x37, 0xca, 0xe6, 0xb9, 0x4, 0x35, 0x2a, 0xe2, 0xd9, 0xde, 0xf3,
								0x73, 0xd9, 0xa8, 0xf8, 0xe, 0x2d, 0xe6, 0x73, 0x6f, 0x24, 0xbc, 0xf6, 0xc6, 0xc1, 0xcd,
								0xd0, 0xe1, 0x83, 0xe9, 0x53, 0x4a, 0xfd, 0x2a, 0xb6, 0x11, 0x47, 0x2d, 0x4, 0x7, 0xfc,
								0x66, 0xed, 0x0, 0x49, 0x4a, 0xb5, 0xdd, 0x4e, 0x85, 0x3f, 0xbe, 0x78, 0xf3, 0x86, 0xfe,
								0x51, 0x79, 0x1f, 0x42, 0xf3, 0x42, 0x40, 0x9, 0x84, 0x29, 0xfa, 0xd, 0xa5, 0x28, 0xf1,
								0x8e, 0x68, 0xe7, 0x81, 0xff, 0xfd, 0x98, 0x89, 0xc6, 0x58, 0x77, 0x29, 0x68, 0x4b,
								0x36, 0x36, 0x96, 0x2f, 0x6f, 0x14, 0x15, 0xc1, 0x58, 0xcf, 0x10, 0xe3, 0x9, 0x9f, 0x4c,
								0xe0, 0x35, 0xbf, 0x8, 0xaf, 0xbf, 0x75, 0x77, 0xef, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0,
								0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0,
								0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xd8, 0xda,
								0xfa, 0x17, 0xe2, 0x8a, 0xf9, 0x39, 0x0, 0x28, 0x0, 0x0,
							},
						},
					},
				},
			},
		},
		{
			// A tarball that is not a Helm chart is skipped entirely.
			name: "nope.tgz",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/nope.tgz",
			want:      nil,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer func() {
				_ = f.Close()
			}()
			info, err := os.Stat(tt.inputFile)
			require.NoError(t, err)
			a := NewConfigAnalyzer(nil)
			ctx := context.Background()
			got, err := a.Analyze(ctx, analyzer.AnalysisInput{
				FilePath: tt.inputFile,
				Info:     info,
				Content:  f,
			})
			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}
// Test_helmConfigAnalyzer_Required verifies which file paths the Helm
// analyzer selects: Helm-related extensions and archives, plus any path
// matching an optional custom pattern.
func Test_helmConfigAnalyzer_Required(t *testing.T) {
	tests := []struct {
		name        string
		filePattern *regexp.Regexp
		filePath    string
		want        bool
	}{
		{
			name:     "yaml",
			filePath: "testdata/testchart/Chart.yaml",
			want:     true,
		},
		{
			name:     "tpl",
			filePath: "testdata/testchart/templates/_helpers.tpl",
			want:     true,
		},
		{
			// NOTE(review): the name says "json" but the path is YAML —
			// label looks stale.
			name:     "json",
			filePath: "testdata/testchart/values.yaml",
			want:     true,
		},
		{
			name:     "NOTES.txt",
			filePath: "testdata/testchart/templates/NOTES.txt",
			want:     false,
		},
		{
			name:     ".helmignore",
			filePath: "testdata/testchart/.helmignore",
			want:     true,
		},
		{
			name:     "testchart.tgz",
			filePath: "testdata/testchart.tgz",
			want:     true,
		},
		{
			name:     "testchart.tar.gz",
			filePath: "testdata/testchart.tar.gz",
			want:     true,
		},
		{
			name:     "nope.tgz",
			filePath: "testdata/nope.tgz",
			want:     true, // it's a tarball after all
		},
		{
			name:        "file pattern",
			filePattern: regexp.MustCompile(`foo*`),
			filePath:    "foo_file",
			want:        true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s := NewConfigAnalyzer(tt.filePattern)
			// The stat error is deliberately ignored: "foo_file" does not
			// exist, so info is nil for that case, but the file-pattern
			// branch in Required returns before the FileInfo is consulted.
			info, _ := os.Stat(tt.filePath)
			got := s.Required(tt.filePath, info)
			assert.Equal(t, tt.want, got)
		})
	}
}
// Test_helmConfigAnalyzer_Type checks that the analyzer reports the Helm
// analyzer type.
func Test_helmConfigAnalyzer_Type(t *testing.T) {
	a := NewConfigAnalyzer(nil)
	assert.Equal(t, analyzer.TypeHelm, a.Type())
}

View File

@@ -0,0 +1,24 @@
apiVersion: v2
name: testchart
description: A Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,23 @@
# Patterns to ignore when building packages.
# This supports shell glob matching, relative path matching, and
# negation (prefixed with !). Only one pattern per line.
.DS_Store
# Common VCS dirs
.git/
.gitignore
.bzr/
.bzrignore
.hg/
.hgignore
.svn/
# Common backup files
*.swp
*.bak
*.tmp
*.orig
*~
# Various IDEs
.project
.idea/
*.tmproj
.vscode/

View File

@@ -0,0 +1,24 @@
apiVersion: v2
name: testchart
description: A Helm chart for Kubernetes
# A chart can be either an 'application' or a 'library' chart.
#
# Application charts are a collection of templates that can be packaged into versioned archives
# to be deployed.
#
# Library charts provide useful utilities or functions for the chart developer. They're included as
# a dependency of application charts to inject those utilities and functions into the rendering
# pipeline. Library charts do not define any templates and therefore cannot be deployed.
type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 0.1.0
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application. Versions are not expected to
# follow Semantic Versioning. They should reflect the version the application is using.
# It is recommended to use it with quotes.
appVersion: "1.16.0"

View File

@@ -0,0 +1,22 @@
1. Get the application URL by running these commands:
{{- if .Values.ingress.enabled }}
{{- range $host := .Values.ingress.hosts }}
{{- range .paths }}
http{{ if $.Values.ingress.tls }}s{{ end }}://{{ $host.host }}{{ .path }}
{{- end }}
{{- end }}
{{- else if contains "NodePort" .Values.service.type }}
export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "testchart.fullname" . }})
export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}")
echo http://$NODE_IP:$NODE_PORT
{{- else if contains "LoadBalancer" .Values.service.type }}
NOTE: It may take a few minutes for the LoadBalancer IP to be available.
You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "testchart.fullname" . }}'
export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "testchart.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}")
echo http://$SERVICE_IP:{{ .Values.service.port }}
{{- else if contains "ClusterIP" .Values.service.type }}
export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "testchart.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}")
export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}")
echo "Visit http://127.0.0.1:8080 to use your application"
kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT
{{- end }}

View File

@@ -0,0 +1,62 @@
{{/*
Expand the name of the chart.
*/}}
{{- define "testchart.name" -}}
{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Create a default fully qualified app name.
We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
If release name contains chart name it will be used as a full name.
*/}}
{{- define "testchart.fullname" -}}
{{- if .Values.fullnameOverride }}
{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- $name := default .Chart.Name .Values.nameOverride }}
{{- if contains $name .Release.Name }}
{{- .Release.Name | trunc 63 | trimSuffix "-" }}
{{- else }}
{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
{{- end }}
{{- end }}
{{- end }}
{{/*
Create chart name and version as used by the chart label.
*/}}
{{- define "testchart.chart" -}}
{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
{{- end }}
{{/*
Common labels
*/}}
{{- define "testchart.labels" -}}
helm.sh/chart: {{ include "testchart.chart" . }}
{{ include "testchart.selectorLabels" . }}
{{- if .Chart.AppVersion }}
app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
{{- end }}
app.kubernetes.io/managed-by: {{ .Release.Service }}
{{- end }}
{{/*
Selector labels
*/}}
{{- define "testchart.selectorLabels" -}}
app.kubernetes.io/name: {{ include "testchart.name" . }}
app.kubernetes.io/instance: {{ .Release.Name }}
{{- end }}
{{/*
Create the name of the service account to use
*/}}
{{- define "testchart.serviceAccountName" -}}
{{- if .Values.serviceAccount.create }}
{{- default (include "testchart.fullname" .) .Values.serviceAccount.name }}
{{- else }}
{{- default "default" .Values.serviceAccount.name }}
{{- end }}
{{- end }}

View File

@@ -0,0 +1,61 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "testchart.fullname" . }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
spec:
{{- if not .Values.autoscaling.enabled }}
replicas: {{ .Values.replicaCount }}
{{- end }}
selector:
matchLabels:
{{- include "testchart.selectorLabels" . | nindent 6 }}
template:
metadata:
{{- with .Values.podAnnotations }}
annotations:
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "testchart.selectorLabels" . | nindent 8 }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
serviceAccountName: {{ include "testchart.serviceAccountName" . }}
securityContext:
{{- toYaml .Values.podSecurityContext | nindent 8 }}
containers:
- name: {{ .Chart.Name }}
securityContext:
{{- toYaml .Values.securityContext | nindent 12 }}
image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.image.pullPolicy }}
ports:
- name: http
containerPort: 80
protocol: TCP
livenessProbe:
httpGet:
path: /
port: http
readinessProbe:
httpGet:
path: /
port: http
resources:
{{- toYaml .Values.resources | nindent 12 }}
{{- with .Values.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}

View File

@@ -0,0 +1,28 @@
{{- if .Values.autoscaling.enabled }}
apiVersion: autoscaling/v2beta1
kind: HorizontalPodAutoscaler
metadata:
name: {{ include "testchart.fullname" . }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
spec:
scaleTargetRef:
apiVersion: apps/v1
kind: Deployment
name: {{ include "testchart.fullname" . }}
minReplicas: {{ .Values.autoscaling.minReplicas }}
maxReplicas: {{ .Values.autoscaling.maxReplicas }}
metrics:
{{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
- type: Resource
resource:
name: cpu
targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
{{- end }}
{{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
- type: Resource
resource:
name: memory
targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
{{- end }}
{{- end }}

View File

@@ -0,0 +1,61 @@
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "testchart.fullname" . -}}
{{- $svcPort := .Values.service.port -}}
{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
{{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }}
{{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}}
{{- end }}
{{- end }}
{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1
{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}}
apiVersion: networking.k8s.io/v1beta1
{{- else -}}
apiVersion: extensions/v1beta1
{{- end }}
kind: Ingress
metadata:
name: {{ $fullName }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
{{- with .Values.ingress.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
{{- if and .Values.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }}
ingressClassName: {{ .Values.ingress.className }}
{{- end }}
{{- if .Values.ingress.tls }}
tls:
{{- range .Values.ingress.tls }}
- hosts:
{{- range .hosts }}
- {{ . | quote }}
{{- end }}
secretName: {{ .secretName }}
{{- end }}
{{- end }}
rules:
{{- range .Values.ingress.hosts }}
- host: {{ .host | quote }}
http:
paths:
{{- range .paths }}
- path: {{ .path }}
{{- if and .pathType (semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion) }}
pathType: {{ .pathType }}
{{- end }}
backend:
{{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }}
service:
name: {{ $fullName }}
port:
number: {{ $svcPort }}
{{- else }}
serviceName: {{ $fullName }}
servicePort: {{ $svcPort }}
{{- end }}
{{- end }}
{{- end }}
{{- end }}

View File

@@ -0,0 +1,15 @@
apiVersion: v1
kind: Service
metadata:
name: {{ include "testchart.fullname" . }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
spec:
type: {{ .Values.service.type }}
ports:
- port: {{ .Values.service.port }}
targetPort: http
protocol: TCP
name: http
selector:
{{- include "testchart.selectorLabels" . | nindent 4 }}

View File

@@ -0,0 +1,12 @@
{{- if .Values.serviceAccount.create -}}
apiVersion: v1
kind: ServiceAccount
metadata:
name: {{ include "testchart.serviceAccountName" . }}
labels:
{{- include "testchart.labels" . | nindent 4 }}
{{- with .Values.serviceAccount.annotations }}
annotations:
{{- toYaml . | nindent 4 }}
{{- end }}
{{- end }}

View File

@@ -0,0 +1,15 @@
apiVersion: v1
kind: Pod
metadata:
name: "{{ include "testchart.fullname" . }}-test-connection"
labels:
{{- include "testchart.labels" . | nindent 4 }}
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: ['{{ include "testchart.fullname" . }}:{{ .Values.service.port }}']
restartPolicy: Never

View File

@@ -0,0 +1,86 @@
# Default values for testchart.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
repository: nginx
pullPolicy: IfNotPresent
# Overrides the image tag whose default is the chart appVersion.
tag: ""
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
# Specifies whether a service account should be created
create: true
# Annotations to add to the service account
annotations: {}
# The name of the service account to use.
# If not set and create is true, a name is generated using the fullname template
name: ""
podAnnotations: {}
podSecurityContext:
{}
# fsGroup: 2000
securityContext:
{}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
service:
type: ClusterIP
port: 80
ingress:
enabled: false
className: ""
annotations:
{}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
hosts:
- host: chart-example.local
paths:
- path: /
pathType: ImplementationSpecific
tls: []
# - secretName: chart-example-tls
# hosts:
# - chart-example.local
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
# targetMemoryUtilizationPercentage: 80
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -0,0 +1,86 @@
# Default values for testchart.
# This is a YAML-formatted file.
# Declare variables to be passed into your templates.
replicaCount: 1
image:
repository: nginx
pullPolicy: IfNotPresent
# Overrides the image tag whose default is the chart appVersion.
tag: ""
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
serviceAccount:
# Specifies whether a service account should be created
create: true
# Annotations to add to the service account
annotations: {}
# The name of the service account to use.
# If not set and create is true, a name is generated using the fullname template
name: ""
podAnnotations: {}
podSecurityContext:
{}
# fsGroup: 2000
securityContext:
{}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
service:
type: ClusterIP
port: 80
ingress:
enabled: false
className: ""
annotations:
{}
# kubernetes.io/ingress.class: nginx
# kubernetes.io/tls-acme: "true"
hosts:
- host: chart-example.local
paths:
- path: /
pathType: ImplementationSpecific
tls: []
# - secretName: chart-example-tls
# hosts:
# - chart-example.local
resources:
{}
# We usually recommend not to specify default resources and to leave this as a conscious
# choice for the user. This also increases chances charts run on environments with little
# resources, such as Minikube. If you do want to specify resources, uncomment the following
# lines, adjust them as necessary, and remove the curly braces after 'resources:'.
# limits:
# cpu: 100m
# memory: 128Mi
# requests:
# cpu: 100m
# memory: 128Mi
autoscaling:
enabled: false
minReplicas: 1
maxReplicas: 100
targetCPUUtilizationPercentage: 80
# targetMemoryUtilizationPercentage: 80
nodeSelector: {}
tolerations: []
affinity: {}

View File

@@ -0,0 +1,74 @@
package json
import (
"context"
"io"
"os"
"path/filepath"
"regexp"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
// version is the analyzer version; bump it when the analysis output changes
// so that previously cached results are invalidated.
const version = 1

var (
	// requiredExt is the file extension this analyzer reacts to.
	requiredExt = ".json"
	// excludedFiles are JSON files owned by dedicated package analyzers
	// (lock/config files), so they must not be treated as generic JSON config.
	excludedFiles = []string{types.NpmPkgLock, types.NuGetPkgsLock, types.NuGetPkgsConfig}
)

// ConfigAnalyzer collects JSON files so they can be scanned for
// misconfigurations by the misconfiguration post handler.
type ConfigAnalyzer struct {
	// filePattern, when non-nil, forces files matching the pattern to be
	// analyzed regardless of their extension.
	filePattern *regexp.Regexp
}

// NewConfigAnalyzer returns a ConfigAnalyzer. filePattern may be nil.
func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
	return ConfigAnalyzer{
		filePattern: filePattern,
	}
}
// Analyze reads the whole JSON file and hands its raw content to the
// misconfiguration post handler via the returned AnalysisResult.
func (a ConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	content, err := io.ReadAll(input.Content)
	if err != nil {
		return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
	}

	file := types.File{
		Type:    types.JSON,
		Path:    input.FilePath,
		Content: content,
	}
	result := analyzer.AnalysisResult{
		Files: map[types.HandlerType][]types.File{
			// It will be passed to misconfig post handler
			types.MisconfPostHandler: {file},
		},
	}
	return &result, nil
}
// Required decides whether filePath should be analyzed: any file matching
// the configured pattern is accepted; otherwise the file must carry the
// ".json" extension and must not be one of the excluded lock/config files.
func (a ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	switch {
	case a.filePattern != nil && a.filePattern.MatchString(filePath):
		return true
	case filepath.Ext(filePath) != requiredExt:
		return false
	}

	base := filepath.Base(filePath)
	for _, excluded := range excludedFiles {
		if base == excluded {
			return false
		}
	}
	return true
}
// Type returns the analyzer type.
func (ConfigAnalyzer) Type() analyzer.Type {
	return analyzer.TypeJSON
}

// Version returns the analyzer version.
func (ConfigAnalyzer) Version() int {
	return version
}

View File

@@ -0,0 +1,200 @@
package json_test
import (
"context"
"os"
"regexp"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/config/json"
"github.com/aquasecurity/fanal/types"
)
// Test_jsonConfigAnalyzer_Analyze verifies that Analyze returns the raw file
// content wrapped for the misconfiguration post handler. The expected Content
// must stay byte-identical to the corresponding testdata file.
func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
	type args struct {
		namespaces  []string
		policyPaths []string
	}
	tests := []struct {
		name      string
		args      args
		inputFile string
		want      *analyzer.AnalysisResult
		wantErr   string
	}{
		{
			name: "happy path",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/deployment.json",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "json",
							Path: "testdata/deployment.json",
							Content: []byte(`{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 3
}
}
`),
						},
					},
				},
			},
		},
		{
			name: "deny",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/deployment_deny.json",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "json",
							Path: "testdata/deployment_deny.json",
							Content: []byte(`{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 4
}
}
`),
						},
					},
				},
			},
		},
		{
			name: "json array",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/array.json",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "json",
							Path: "testdata/array.json",
							Content: []byte(`[
{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 4
}
},
{
"apiVersion": "apps/v2",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 5
}
}
]
`),
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer f.Close()

			s := json.NewConfigAnalyzer(nil)
			ctx := context.Background()
			got, err := s.Analyze(ctx, analyzer.AnalysisInput{
				FilePath: tt.inputFile,
				Content:  f,
			})
			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}

// Test_jsonConfigAnalyzer_Required checks extension- and pattern-based file
// selection, including the exclusion of package lock files.
func Test_jsonConfigAnalyzer_Required(t *testing.T) {
	tests := []struct {
		name        string
		filePattern *regexp.Regexp
		filePath    string
		want        bool
	}{
		{
			name:     "json",
			filePath: "deployment.json",
			want:     true,
		},
		{
			name:     "yaml",
			filePath: "deployment.yaml",
			want:     false,
		},
		{
			name:     "npm json",
			filePath: "package-lock.json",
			want:     false,
		},
		{
			name:        "file pattern",
			filePattern: regexp.MustCompile(`foo*`),
			filePath:    "foo_file",
			want:        true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s := json.NewConfigAnalyzer(tt.filePattern)
			got := s.Required(tt.filePath, nil)
			assert.Equal(t, tt.want, got)
		})
	}
}

// Test_jsonConfigAnalyzer_Type checks the analyzer type constant.
func Test_jsonConfigAnalyzer_Type(t *testing.T) {
	s := json.NewConfigAnalyzer(nil)
	want := analyzer.TypeJSON
	got := s.Type()
	assert.Equal(t, want, got)
}

View File

@@ -0,0 +1,22 @@
[
{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 4
}
},
{
"apiVersion": "apps/v2",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 5
}
}
]

View File

@@ -0,0 +1,10 @@
{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 3
}
}

View File

@@ -0,0 +1,10 @@
{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": {
"name": "hello-kubernetes"
},
"spec": {
"replicas": 4
}
}

View File

@@ -0,0 +1,57 @@
package terraform
import (
	"context"
	"io"
	"os"
	"path/filepath"
	"strings"

	"golang.org/x/exp/slices"
	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/types"
)
// version is the analyzer version; bump it when the analysis output changes.
const version = 1

// requiredExts are the Terraform file extensions this analyzer reacts to,
// consumed by Required via filepath.Ext.
var requiredExts = []string{".tf", ".tf.json"}

// ConfigAnalyzer collects Terraform files so they can be scanned for
// misconfigurations by the misconfiguration post handler.
type ConfigAnalyzer struct {
}

// NewConfigAnalyzer returns a ConfigAnalyzer.
func NewConfigAnalyzer() ConfigAnalyzer {
	return ConfigAnalyzer{}
}
// Analyze reads the Terraform file and forwards its raw content to the
// misconfiguration post handler via the returned AnalysisResult.
func (a ConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	content, err := io.ReadAll(input.Content)
	if err != nil {
		return nil, xerrors.Errorf("read error (%s): %w", input.FilePath, err)
	}

	files := []types.File{{
		Type:    types.Terraform,
		Path:    input.FilePath,
		Content: content,
	}}
	return &analyzer.AnalysisResult{
		// It will be passed to misconf post handler
		Files: map[types.HandlerType][]types.File{types.MisconfPostHandler: files},
	}, nil
}
// Required reports whether filePath is a Terraform configuration file:
// either a ".tf" file or a JSON-based Terraform file ending in ".tf.json".
//
// filepath.Ext only returns the suffix after the LAST dot (".json" for
// "main.tf.json"), so the ".tf.json" entry in requiredExts could never match
// through the extension check alone; ".tf.json" files are matched explicitly
// with strings.HasSuffix.
func (a ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	return slices.Contains(requiredExts, filepath.Ext(filePath)) ||
		strings.HasSuffix(filePath, ".tf.json")
}
// Type returns the analyzer type.
func (ConfigAnalyzer) Type() analyzer.Type {
	return analyzer.TypeTerraform
}

// Version returns the analyzer version.
func (ConfigAnalyzer) Version() int {
	return version
}

View File

@@ -0,0 +1,83 @@
package terraform_test
import (
"bytes"
"context"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/config/terraform"
"github.com/aquasecurity/fanal/types"
)
// TestConfigAnalyzer_Analyze verifies that Analyze forwards the raw file
// content to the misconfiguration post handler.
func TestConfigAnalyzer_Analyze(t *testing.T) {
	tests := []struct {
		name  string
		input analyzer.AnalysisInput
		want  *analyzer.AnalysisResult
	}{
		{
			name: "happy path",
			input: analyzer.AnalysisInput{
				Dir:      "path/to/",
				FilePath: "main.tf",
				Content:  bytes.NewReader(nil),
			},
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type:    types.Terraform,
							Path:    "main.tf",
							Content: []byte{},
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			a := terraform.ConfigAnalyzer{}
			ctx := context.Background()
			got, err := a.Analyze(ctx, tt.input)
			require.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}

// TestConfigAnalyzer_Required checks extension-based file selection.
func TestConfigAnalyzer_Required(t *testing.T) {
	tests := []struct {
		name     string
		filePath string
		want     bool
	}{
		{
			name:     "happy path",
			filePath: "/path/to/main.tf",
			want:     true,
		},
		{
			name:     "hcl",
			filePath: "/path/to/main.hcl",
			want:     false,
		},
		{
			name:     "yaml",
			filePath: "deployment.yaml",
			want:     false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			a := terraform.ConfigAnalyzer{}
			got := a.Required(tt.filePath, nil)
			assert.Equal(t, tt.want, got)
		})
	}
}

View File

@@ -0,0 +1,21 @@
# Test policy fixture: flags FROM instructions whose image matches the denylist.
package users.dockerfile.xyz_100

# Metadata consumed by the misconfiguration scanner.
__rego_metadata__ := {
	"id": "XYZ-100",
	"title": "Bad Dockerfile",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Docker Security Check",
}

# Image name fragments that must not appear in FROM instructions.
denylist = [
	"foo"
]

# deny fires when a "from" command's value contains a denylisted image.
# NOTE(review): val[i] reuses the command index i to index into Value —
# presumably Value[0] is intended; confirm against the scanner's input shape.
deny[res] {
	input[i].Cmd == "from"
	val := input[i].Value
	contains(val[i], denylist[_])
	res = {"type": "Docker Security Check", "msg": sprintf("deny: image found %s", [val]), "severity": "HIGH", "id": "RULE-100"}
}

View File

@@ -0,0 +1,35 @@
# Test policy fixture with both a deny rule and a warn rule, no metadata.
package main.dockerfile

# Image name fragments that must not appear in FROM instructions.
denylist = [
	"foo"
]

# deny fires when a "from" command's value contains a denylisted image.
# NOTE(review): val[i] reuses the command index i — the warn rule below uses
# val[_]; confirm which form is intended.
deny[res] {
	input[i].Cmd == "from"
	val := input[i].Value
	contains(val[i], denylist[_])
	res = {
		"type": "Docker Security Check",
		"msg": sprintf("deny: image found %s", [val]),
		"severity": "HIGH",
		"id": "RULE-100"
	}
}

# Command fragments that trigger a low-severity warning in RUN instructions.
warnlist = [
	"echo"
]

# warn fires when any value of a "run" command contains a warnlisted string.
warn[res] {
	input[i].Cmd == "run"
	val := input[i].Value
	contains(val[_], warnlist[_])
	res = {
		"type": "Docker Security Check",
		"msg": sprintf("warn: command %s contains banned: %s", [val, warnlist]),
		"severity": "LOW",
		"id": "RULE-10"
	}
}

View File

@@ -0,0 +1,20 @@
# Test policy fixture with metadata and an empty denylist.
package main.dockerfile

# Metadata consumed by the misconfiguration scanner.
__rego_metadata__ := {
	"id": "XYZ-100",
	"title": "Bad Dockerfile",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Docker Security Check",
}

# Empty denylist: with no candidate images, deny can never fire.
denylist = [
]

deny[msg] {
	input[i].Cmd == "from"
	val := input[i].Value
	contains(val[i], denylist[_])
	msg = sprintf("deny: image found %s", [val])
}

View File

@@ -0,0 +1,13 @@
# Test policy fixture using the "violation" rule style (structured result).
package main.dockerfile.id_100

# Image name fragments that trigger a violation in FROM instructions.
violationlist = [
	"foo"
]

violation[{"msg": msg, "details": {}}] {
	input[i].Cmd == "from"
	val := input[i].Value
	contains(val[i], violationlist[_])
	msg = sprintf("violation: image found %s", [val])
}

View File

@@ -0,0 +1,19 @@
# Test policy fixture whose metadata deliberately omits severity and type.
package main.dockerfile.xyz_100

__rego_metadata__ := {
	"id": "XYZ-100",
	"title": "Bad Dockerfile",
	"version": "v1.0.0",
}

# Image name fragments that trigger a warning in FROM instructions.
warnlist = [
	"foo"
]

warn[msg] {
	input[i].Cmd == "from"
	val := input[i].Value
	contains(val[i], warnlist[_])
	msg = sprintf("warn: image found %s", [val])
}

View File

@@ -0,0 +1,15 @@
# Test policy fixture: denies Kubernetes workloads with too many replicas.
package main.kubernetes.xyz_100

# Metadata consumed by the misconfiguration scanner.
__rego_metadata__ := {
	"id": "XYZ-100",
	"title": "Bad Kubernetes Replicas",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Kubernetes Security Check",
}

# deny fires when spec.replicas exceeds 3.
deny[msg] {
	rpl = input.spec.replicas
	rpl > 3
	msg = sprintf("too many replicas: %d", [rpl])
}

View File

@@ -0,0 +1,15 @@
default: &default
line: single line
john: &J
john_name: john
fred: &F
fred_name: fred
main:
<<: *default
name:
<<: [*J, *F]
comment: |
multi
line

View File

@@ -0,0 +1 @@
apiVersion": foo: bar

View File

@@ -0,0 +1,3 @@
circular: &circular
name:
<<: *circular

View File

@@ -0,0 +1,13 @@
# Test policy fixture: unconditionally denies every input with message "bad".
package main.yaml.xyz_123

# Metadata consumed by the misconfiguration scanner.
__rego_metadata__ := {
	"id": "XYZ-123",
	"title": "Bad YAML",
	"version": "v1.0.0",
	"severity": "CRITICAL",
	"type": "YAML Security Check",
}

deny[msg]{
	msg := "bad"
}

View File

@@ -0,0 +1,6 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 3

View File

@@ -0,0 +1,6 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 4

View File

@@ -0,0 +1,4 @@
replacements:
amd64: 64bit
386: 32bit
arm: ARM

View File

@@ -0,0 +1,18 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 4
---
apiVersion: v1
kind: Service
metadata:
name: hello-kubernetes
spec:
ports:
- protocol: TCP
port: 80
targetPort: 8080

View File

@@ -0,0 +1,70 @@
package yaml
import (
"context"
"io"
"os"
"path/filepath"
"regexp"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
// version is the analyzer version; bump it when the analysis output changes.
const version = 1

// requiredExts are the YAML file extensions this analyzer reacts to.
var requiredExts = []string{".yaml", ".yml"}

// ConfigAnalyzer collects YAML files so they can be scanned for
// misconfigurations by the misconfiguration post handler.
type ConfigAnalyzer struct {
	// filePattern, when non-nil, forces files matching the pattern to be
	// analyzed regardless of their extension.
	filePattern *regexp.Regexp
}

// NewConfigAnalyzer returns a ConfigAnalyzer. filePattern may be nil.
func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
	return ConfigAnalyzer{
		filePattern: filePattern,
	}
}
// Analyze reads the whole YAML file and hands its raw content to the
// misconfiguration post handler via the returned AnalysisResult.
func (a ConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	content, err := io.ReadAll(input.Content)
	if err != nil {
		return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
	}

	file := types.File{
		Type:    types.YAML,
		Path:    input.FilePath,
		Content: content,
	}
	result := analyzer.AnalysisResult{
		Files: map[types.HandlerType][]types.File{
			// it will be passed to misconfig post handler
			types.MisconfPostHandler: {file},
		},
	}
	return &result, nil
}
// Required reports whether filePath should be analyzed: either it matches
// the configured file pattern or it carries one of the YAML extensions.
func (a ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	if pattern := a.filePattern; pattern != nil && pattern.MatchString(filePath) {
		return true
	}

	got := filepath.Ext(filePath)
	for _, want := range requiredExts {
		if got == want {
			return true
		}
	}
	return false
}
// Type returns the analyzer type.
func (ConfigAnalyzer) Type() analyzer.Type {
	return analyzer.TypeYaml
}

// Version returns the analyzer version.
func (ConfigAnalyzer) Version() int {
	return version
}

View File

@@ -0,0 +1,229 @@
package yaml_test
import (
"context"
"os"
"regexp"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/config/yaml"
"github.com/aquasecurity/fanal/types"
)
// Test_yamlConfigAnalyzer_Analyze verifies that Analyze returns the raw file
// content wrapped for the misconfiguration post handler. The expected Content
// must stay byte-identical to the corresponding testdata file.
func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
	type args struct {
		namespaces  []string
		policyPaths []string
	}
	tests := []struct {
		name      string
		args      args
		inputFile string
		want      *analyzer.AnalysisResult
		wantErr   string
	}{
		{
			name: "happy path",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/deployment.yaml",
			want: &analyzer.AnalysisResult{
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "yaml",
							Path: "testdata/deployment.yaml",
							Content: []byte(`apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 3
`),
						},
					},
				},
			},
		},
		{
			name: "deny",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/deployment_deny.yaml",
			want: &analyzer.AnalysisResult{
				OS:           (*types.OS)(nil),
				PackageInfos: []types.PackageInfo(nil),
				Applications: []types.Application(nil),
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "yaml",
							Path: "testdata/deployment_deny.yaml",
							Content: []byte(`apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 4
`),
						},
					},
				},
			},
		},
		{
			name: "happy path using anchors",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"testdata/deny.rego"},
			},
			inputFile: "testdata/anchor.yaml",
			want: &analyzer.AnalysisResult{
				OS:           (*types.OS)(nil),
				PackageInfos: []types.PackageInfo(nil),
				Applications: []types.Application(nil),
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "yaml",
							Path: "testdata/anchor.yaml",
							Content: []byte(`default: &default
line: single line
john: &J
john_name: john
fred: &F
fred_name: fred
main:
<<: *default
name:
<<: [*J, *F]
comment: |
multi
line
`),
						},
					},
				},
			},
		},
		{
			name: "multiple yaml",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/multiple.yaml",
			want: &analyzer.AnalysisResult{
				OS:           (*types.OS)(nil),
				PackageInfos: []types.PackageInfo(nil),
				Applications: []types.Application(nil),
				Files: map[types.HandlerType][]types.File{
					types.MisconfPostHandler: {
						{
							Type: "yaml",
							Path: "testdata/multiple.yaml",
							Content: []byte(`apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 4
---
apiVersion: v1
kind: Service
metadata:
name: hello-kubernetes
spec:
ports:
- protocol: TCP
port: 80
targetPort: 8080
`),
						},
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer f.Close()

			a := yaml.NewConfigAnalyzer(nil)
			ctx := context.Background()
			got, err := a.Analyze(ctx, analyzer.AnalysisInput{
				FilePath: tt.inputFile,
				Content:  f,
			})
			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}

// Test_yamlConfigAnalyzer_Required checks extension- and pattern-based file
// selection.
func Test_yamlConfigAnalyzer_Required(t *testing.T) {
	tests := []struct {
		name        string
		filePattern *regexp.Regexp
		filePath    string
		want        bool
	}{
		{
			name:     "yaml",
			filePath: "deployment.yaml",
			want:     true,
		},
		{
			name:     "yml",
			filePath: "deployment.yml",
			want:     true,
		},
		{
			name:     "json",
			filePath: "deployment.json",
			want:     false,
		},
		{
			name:        "file pattern",
			filePattern: regexp.MustCompile(`foo*`),
			filePath:    "foo_file",
			want:        true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s := yaml.NewConfigAnalyzer(tt.filePattern)
			got := s.Required(tt.filePath, nil)
			assert.Equal(t, tt.want, got)
		})
	}
}

// Test_yamlConfigAnalyzer_Type checks the analyzer type constant.
func Test_yamlConfigAnalyzer_Type(t *testing.T) {
	s := yaml.NewConfigAnalyzer(nil)
	want := analyzer.TypeYaml
	got := s.Type()
	assert.Equal(t, want, got)
}

119
pkg/fanal/analyzer/const.go Normal file
View File

@@ -0,0 +1,119 @@
// Package analyzer: analyzer type identifiers and groupings.
package analyzer

// Type identifies a single analyzer (OS, package, config, secret, ...).
type Type string

const (
	// ======
	//   OS
	// ======
	TypeOSRelease  Type = "os-release"
	TypeAlpine     Type = "alpine"
	TypeAmazon     Type = "amazon"
	TypeCBLMariner Type = "cbl-mariner"
	TypeDebian     Type = "debian"
	TypePhoton     Type = "photon"
	TypeCentOS     Type = "centos"
	TypeRocky      Type = "rocky"
	TypeAlma       Type = "alma"
	TypeFedora     Type = "fedora"
	TypeOracle     Type = "oracle"
	TypeRedHatBase Type = "redhat"
	TypeSUSE       Type = "suse"
	TypeUbuntu     Type = "ubuntu"

	// OS Package
	TypeApk   Type = "apk"
	TypeDpkg  Type = "dpkg"
	TypeRpm   Type = "rpm"
	TypeRpmqa Type = "rpmqa"

	// OS Package Repository
	TypeApkRepo Type = "apk-repo"

	// ============================
	// Programming Language Package
	// ============================
	// Ruby
	TypeBundler Type = "bundler"
	TypeGemSpec Type = "gemspec"
	// Rust
	TypeCargo Type = "cargo"
	// PHP
	TypeComposer Type = "composer"
	// Java
	TypeJar Type = "jar"
	TypePom Type = "pom"
	// Node.js
	TypeNpmPkgLock Type = "npm"
	TypeNodePkg    Type = "node-pkg"
	TypeYarn       Type = "yarn"
	// .NET
	TypeNuget Type = "nuget"
	// Python
	TypePythonPkg Type = "python-pkg"
	TypePip       Type = "pip"
	TypePipenv    Type = "pipenv"
	TypePoetry    Type = "poetry"
	// Go
	TypeGoBinary Type = "gobinary"
	TypeGoMod    Type = "gomod"

	// ============
	// Image Config
	// ============
	TypeApkCommand Type = "apk-command"

	// =================
	// Structured Config
	// =================
	TypeYaml           Type = "yaml"
	TypeJSON           Type = "json"
	TypeDockerfile     Type = "dockerfile"
	TypeTerraform      Type = "terraform"
	TypeCloudFormation Type = "cloudFormation"
	TypeHelm           Type = "helm"

	// ========
	// Secrets
	// ========
	TypeSecret Type = "secret"

	// =======
	// Red Hat
	// =======
	// NOTE(review): these two constants omit the explicit Type and are
	// therefore untyped string constants, unlike every other entry —
	// confirm whether they should be declared as Type.
	TypeRedHatContentManifestType = "redhat-content-manifest"
	TypeRedHatDockerfileType      = "redhat-dockerfile"
)

var (
	// TypeOSes has all OS-related analyzers
	TypeOSes = []Type{TypeAlpine, TypeAmazon, TypeDebian, TypePhoton, TypeCentOS,
		TypeRocky, TypeAlma, TypeFedora, TypeOracle, TypeRedHatBase, TypeSUSE, TypeUbuntu,
		TypeApk, TypeDpkg, TypeRpm,
	}

	// TypeLanguages has all language analyzers
	TypeLanguages = []Type{TypeBundler, TypeGemSpec, TypeCargo, TypeComposer, TypeJar, TypePom,
		TypeNpmPkgLock, TypeNodePkg, TypeYarn, TypeNuget, TypePythonPkg, TypePip, TypePipenv,
		TypePoetry, TypeGoBinary, TypeGoMod,
	}

	// TypeLockfiles has all lock file analyzers
	TypeLockfiles = []Type{TypeBundler, TypeNpmPkgLock, TypeYarn,
		TypePip, TypePipenv, TypePoetry, TypeGoMod, TypePom,
	}

	// TypeIndividualPkgs has all analyzers for individual packages
	TypeIndividualPkgs = []Type{TypeGemSpec, TypeNodePkg, TypePythonPkg, TypeGoBinary, TypeJar}

	// TypeConfigFiles has all config file analyzers
	// NOTE(review): TypeHelm is not included here — confirm whether that is
	// intentional.
	TypeConfigFiles = []Type{TypeYaml, TypeJSON, TypeDockerfile, TypeTerraform, TypeCloudFormation}
)

View File

@@ -0,0 +1,52 @@
package language
import (
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
dio "github.com/aquasecurity/go-dep-parser/pkg/io"
godeptypes "github.com/aquasecurity/go-dep-parser/pkg/types"
)
// Analyze parses a dependency file (e.g. a lock file) with the given parser
// and converts the result into an AnalysisResult.
func Analyze(fileType, filePath string, r dio.ReadSeekerAt, parser godeptypes.Parser) (*analyzer.AnalysisResult, error) {
	libs, deps, err := parser.Parse(r)
	if err != nil {
		return nil, xerrors.Errorf("failed to parse %s: %w", filePath, err)
	}

	// The file path of each library should be empty in case of dependency list such as lock file
	// since they all will be the same path.
	return ToAnalysisResult(fileType, filePath, "", libs, deps), nil
}
// ToAnalysisResult converts parsed libraries and their dependency graph into
// an AnalysisResult holding a single application. It returns nil when no
// libraries were found. libFilePath is stored on every package; pass "" when
// all libraries come from the same file (e.g. a lock file).
func ToAnalysisResult(fileType, filePath, libFilePath string, libs []godeptypes.Library, depGraph []godeptypes.Dependency) *analyzer.AnalysisResult {
	if len(libs) == 0 {
		return nil
	}

	// Index the dependency graph by library ID for O(1) lookups below.
	dependsOn := make(map[string][]string, len(depGraph))
	for _, d := range depGraph {
		dependsOn[d.ID] = d.DependsOn
	}

	var pkgs []types.Package
	for _, lib := range libs {
		pkgs = append(pkgs, types.Package{
			ID:        lib.ID,
			Name:      lib.Name,
			Version:   lib.Version,
			FilePath:  libFilePath,
			Indirect:  lib.Indirect,
			License:   lib.License,
			DependsOn: dependsOn[lib.ID],
		})
	}

	app := types.Application{
		Type:      fileType,
		FilePath:  filePath,
		Libraries: pkgs,
	}
	return &analyzer.AnalysisResult{Applications: []types.Application{app}}
}

View File

@@ -0,0 +1,105 @@
package language_test
import (
"io"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/language"
"github.com/aquasecurity/fanal/types"
dio "github.com/aquasecurity/go-dep-parser/pkg/io"
godeptypes "github.com/aquasecurity/go-dep-parser/pkg/types"
)
// mockParser is a godeptypes.Parser stub driven by the input content:
// "happy" yields one library, "sad" yields an error, anything else yields
// nothing.
type mockParser struct {
	t *testing.T
}

// Parse implements the parser interface for TestAnalyze.
func (p *mockParser) Parse(r dio.ReadSeekerAt) ([]godeptypes.Library, []godeptypes.Dependency, error) {
	b, err := io.ReadAll(r)
	require.NoError(p.t, err)

	switch string(b) {
	case "happy":
		return []godeptypes.Library{{Name: "test", Version: "1.2.3"}}, nil, nil
	case "sad":
		return nil, nil, xerrors.New("unexpected error")
	}
	return nil, nil, nil
}

// TestAnalyze verifies that language.Analyze wraps parser output into an
// AnalysisResult, returns nil for empty results, and propagates parse errors.
func TestAnalyze(t *testing.T) {
	type args struct {
		analyzerType string
		filePath     string
		content      dio.ReadSeekerAt
	}
	tests := []struct {
		name    string
		args    args
		want    *analyzer.AnalysisResult
		wantErr string
	}{
		{
			name: "happy path",
			args: args{
				analyzerType: types.GoBinary,
				filePath:     "app/myweb",
				content:      strings.NewReader("happy"),
			},
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     types.GoBinary,
						FilePath: "app/myweb",
						Libraries: []types.Package{
							{
								Name:    "test",
								Version: "1.2.3",
							},
						},
					},
				},
			},
		},
		{
			name: "empty",
			args: args{
				analyzerType: types.GoBinary,
				filePath:     "app/myweb",
				content:      strings.NewReader(""),
			},
			want: nil,
		},
		{
			name: "sad path",
			args: args{
				analyzerType: types.Jar,
				filePath:     "app/myweb",
				content:      strings.NewReader("sad"),
			},
			wantErr: "unexpected error",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mp := &mockParser{t: t}
			got, err := language.Analyze(tt.args.analyzerType, tt.args.filePath, tt.args.content, mp)
			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}
			require.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}

View File

@@ -0,0 +1,60 @@
package nuget
import (
"context"
"os"
"path/filepath"
"golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/language"
"github.com/aquasecurity/fanal/types"
"github.com/aquasecurity/go-dep-parser/pkg/nuget/config"
"github.com/aquasecurity/go-dep-parser/pkg/nuget/lock"
)
// init registers this analyzer with the global analyzer registry.
func init() {
	analyzer.RegisterAnalyzer(&nugetLibraryAnalyzer{})
}

const (
	// version is the analyzer version; bump it when the output changes.
	version = 2

	// lockFile and configFile are the NuGet dependency file names handled
	// by this analyzer.
	lockFile   = types.NuGetPkgsLock
	configFile = types.NuGetPkgsConfig
)

// requiredFiles lists the base file names this analyzer reacts to.
var requiredFiles = []string{lockFile, configFile}

// nugetLibraryAnalyzer extracts .NET package information from NuGet lock and
// config files.
type nugetLibraryAnalyzer struct{}
// Analyze parses a NuGet dependency file and returns the packages it lists.
// packages.config is handled by the config parser; everything else (i.e.
// packages.lock.json) by the lock-file parser.
func (a nugetLibraryAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	// Pick the parser from the base file name; the lock-file parser is the default.
	parser := lock.NewParser()
	if filepath.Base(input.FilePath) == configFile {
		parser = config.NewParser()
	}

	res, err := language.Analyze(types.NuGet, input.FilePath, input.Content, parser)
	if err != nil {
		return nil, xerrors.Errorf("NuGet analysis error: %w", err)
	}
	return res, nil
}
// Required reports whether the file is one of the supported NuGet dependency
// files (packages.lock.json or packages.config), matched by base name.
func (a nugetLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	fileName := filepath.Base(filePath)
	return slices.Contains(requiredFiles, fileName)
}

// Type returns the analyzer type.
func (a nugetLibraryAnalyzer) Type() analyzer.Type {
	return analyzer.TypeNuget
}

// Version returns the analyzer version.
func (a nugetLibraryAnalyzer) Version() int {
	return version
}

View File

@@ -0,0 +1,134 @@
package nuget
import (
"context"
"os"
"sort"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
// Test_nugetLibraryAnalyzer_Analyze verifies that NuGet lock and config files
// are parsed into the expected package lists and that invalid input produces
// an error. (Renamed from Test_nugetibraryAnalyzer_Analyze to fix the typo.)
func Test_nugetLibraryAnalyzer_Analyze(t *testing.T) {
	tests := []struct {
		name      string
		inputFile string
		want      *analyzer.AnalysisResult
		wantErr   string
	}{
		{
			name:      "happy path config file",
			inputFile: "testdata/packages.config",
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     types.NuGet,
						FilePath: "testdata/packages.config",
						Libraries: []types.Package{
							{
								Name:    "Microsoft.AspNet.WebApi",
								Version: "5.2.2",
							},
							{
								Name:    "Newtonsoft.Json",
								Version: "6.0.4",
							},
						},
					},
				},
			},
		},
		{
			name:      "happy path lock file",
			inputFile: "testdata/packages.lock.json",
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     types.NuGet,
						FilePath: "testdata/packages.lock.json",
						Libraries: []types.Package{
							{
								Name:    "Newtonsoft.Json",
								Version: "12.0.3",
							},
							{
								Name:    "NuGet.Frameworks",
								Version: "5.7.0",
							},
						},
					},
				},
			},
		},
		{
			name:      "sad path",
			inputFile: "testdata/invalid.txt",
			wantErr:   "NuGet analysis error",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer f.Close()

			a := nugetLibraryAnalyzer{}
			ctx := context.Background()
			got, err := a.Analyze(ctx, analyzer.AnalysisInput{
				FilePath: tt.inputFile,
				Content:  f,
			})
			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}

			// Sort libraries for consistency
			for _, app := range got.Applications {
				sort.Slice(app.Libraries, func(i, j int) bool {
					return app.Libraries[i].Name < app.Libraries[j].Name
				})
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}

// Test_nugetLibraryAnalyzer_Required checks base-name based file selection.
func Test_nugetLibraryAnalyzer_Required(t *testing.T) {
	tests := []struct {
		name     string
		filePath string
		want     bool
	}{
		{
			name:     "config",
			filePath: "test/packages.config",
			want:     true,
		},
		{
			name:     "lock",
			filePath: "test/packages.lock.json",
			want:     true,
		},
		{
			name:     "zip",
			filePath: "test.zip",
			want:     false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			a := nugetLibraryAnalyzer{}
			got := a.Required(tt.filePath, nil)
			assert.Equal(t, tt.want, got)
		})
	}
}

View File

@@ -0,0 +1 @@
test

View File

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.AspNet.WebApi" version="5.2.2" targetFramework="net45" />
<package id="Newtonsoft.Json" version="6.0.4" targetFramework="net45" />
</packages>

View File

@@ -0,0 +1,19 @@
{
"version": 1,
"dependencies": {
".NETCoreApp,Version=v5.0": {
"Newtonsoft.Json": {
"type": "Direct",
"requested": "[12.0.3, )",
"resolved": "12.0.3",
"contentHash": "6mgjfnRB4jKMlzHSl+VD+oUc1IebOZabkbyWj2RiTgWwYPPuaK1H97G1sHqGwPlS5npiF5Q0OrxN1wni2n5QWg=="
},
"NuGet.Frameworks": {
"type": "Direct",
"requested": "[5.7.0, )",
"resolved": "5.7.0",
"contentHash": "7Q/wUoB3jCBcq9zoBOBGHFhe78C13jViPmvjvzTwthVV8DAjMfpXnqAYtgwdaRLJMkTXrtdLxfPBIFFhmlsnIQ=="
}
}
}
}

View File

@@ -0,0 +1,55 @@
package binary
import (
"context"
"errors"
"os"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/language"
"github.com/aquasecurity/fanal/types"
"github.com/aquasecurity/go-dep-parser/pkg/golang/binary"
)
// init registers this analyzer with the global analyzer registry.
func init() {
	analyzer.RegisterAnalyzer(&gobinaryLibraryAnalyzer{})
}

// version is the analyzer version; bump it when the output changes.
const version = 1

// gobinaryLibraryAnalyzer extracts module information embedded in Go binaries.
type gobinaryLibraryAnalyzer struct{}
// Analyze parses a (potential) Go binary and reports the modules compiled
// into it. Files that are not recognizable executables or not Go binaries are
// skipped silently (nil result, nil error).
func (a gobinaryLibraryAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	libs, deps, err := binary.NewParser().Parse(input.Content)
	switch {
	case errors.Is(err, binary.ErrUnrecognizedExe), errors.Is(err, binary.ErrNonGoBinary):
		// Not a Go binary: nothing to report, but not an error either.
		return nil, nil
	case err != nil:
		return nil, xerrors.Errorf("go binary parse error: %w", err)
	}

	return language.ToAnalysisResult(types.GoBinary, input.FilePath, "", libs, deps), nil
}
// Required reports whether the file should be analyzed: any regular file
// with at least one executable permission bit set is a candidate Go binary.
// Symlinks and other non-regular files are excluded.
func (a gobinaryLibraryAnalyzer) Required(_ string, fileInfo os.FileInfo) bool {
	mode := fileInfo.Mode()
	return mode.IsRegular() && mode.Perm()&0111 != 0
}
// Type identifies this analyzer as the Go-binary analyzer.
func (a gobinaryLibraryAnalyzer) Type() analyzer.Type {
	return analyzer.TypeGoBinary
}

// Version returns the analyzer version used for cache invalidation.
func (a gobinaryLibraryAnalyzer) Version() int {
	return version
}

View File

@@ -0,0 +1,101 @@
package binary
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
// Test_gobinaryLibraryAnalyzer_Analyze checks that module information is
// extracted from a Go-built executable fixture, and that non-Go or corrupt
// binaries yield a nil result without an error.
func Test_gobinaryLibraryAnalyzer_Analyze(t *testing.T) {
	tests := []struct {
		name      string
		inputFile string
		want      *analyzer.AnalysisResult
	}{
		{
			name:      "happy path",
			inputFile: "testdata/executable_gobinary",
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     types.GoBinary,
						FilePath: "testdata/executable_gobinary",
						Libraries: []types.Package{
							{
								Name:    "github.com/aquasecurity/go-pep440-version",
								Version: "v0.0.0-20210121094942-22b2f8951d46",
							},
							{Name: "github.com/aquasecurity/go-version", Version: "v0.0.0-20210121072130-637058cfe492"},
							{Name: "golang.org/x/xerrors", Version: "v0.0.0-20200804184101-5ec99f83aff1"},
						},
					},
				},
			},
		},
		{
			// A non-Go executable is skipped: want stays nil and no error is expected.
			name:      "not go binary",
			inputFile: "testdata/executable_bash",
		},
		{
			// An unparsable ELF is likewise skipped rather than reported as an error.
			name:      "broken elf",
			inputFile: "testdata/broken_elf",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer f.Close()
			a := gobinaryLibraryAnalyzer{}
			ctx := context.Background()
			got, err := a.Analyze(ctx, analyzer.AnalysisInput{
				FilePath: tt.inputFile,
				Content:  f,
			})
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}
func Test_gobinaryLibraryAnalyzer_Required(t *testing.T) {
tests := []struct {
name string
filePath string
want bool
}{
{
name: "file perm 0755",
filePath: "testdata/0755",
want: true,
},
{
name: "file perm 0644",
filePath: "testdata/0644",
want: false,
},
{
name: "symlink",
filePath: "testdata/symlink",
want: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a := gobinaryLibraryAnalyzer{}
fileInfo, err := os.Lstat(tt.filePath)
require.NoError(t, err)
got := a.Required(tt.filePath, fileInfo)
assert.Equal(t, tt.want, got, fileInfo.Mode().Perm())
})
}
}

View File

View File

@@ -0,0 +1 @@
ELF

View File

@@ -0,0 +1,3 @@
#!/bin/bash
echo "hello"

Binary file not shown.

View File

View File

@@ -0,0 +1 @@
foo

View File

@@ -0,0 +1,58 @@
package mod
import (
"context"
"os"
"path/filepath"
"golang.org/x/exp/slices"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/language"
"github.com/aquasecurity/fanal/types"
"github.com/aquasecurity/go-dep-parser/pkg/golang/mod"
"github.com/aquasecurity/go-dep-parser/pkg/golang/sum"
godeptypes "github.com/aquasecurity/go-dep-parser/pkg/types"
)
// init registers this analyzer with the global analyzer registry at startup.
func init() {
	analyzer.RegisterAnalyzer(&gomodAnalyzer{})
}

// version is the analyzer version; bump it when the analysis logic changes
// so cached results are invalidated.
const version = 2

// requiredFiles lists the base file names this analyzer handles (go.mod, go.sum).
var requiredFiles = []string{types.GoMod, types.GoSum}

// gomodAnalyzer parses go.mod and go.sum files for module dependencies.
type gomodAnalyzer struct{}
// Analyze parses the given go.mod or go.sum file and returns the modules it
// declares. Files with any other base name are ignored (nil result, nil error).
func (a gomodAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	name := filepath.Base(input.FilePath)

	// Pick the parser matching the file name; anything else is out of scope.
	var p godeptypes.Parser
	switch name {
	case types.GoMod:
		p = mod.NewParser()
	case types.GoSum:
		p = sum.NewParser()
	default:
		return nil, nil
	}

	result, err := language.Analyze(types.GoModule, input.FilePath, input.Content, p)
	if err != nil {
		return nil, xerrors.Errorf("failed to analyze %s: %w", input.FilePath, err)
	}
	return result, nil
}
// Required reports whether filePath names a go.mod or go.sum file,
// regardless of directory.
func (a gomodAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	return slices.Contains(requiredFiles, filepath.Base(filePath))
}
// Type identifies this analyzer as the go.mod/go.sum analyzer.
func (a gomodAnalyzer) Type() analyzer.Type {
	return analyzer.TypeGoMod
}

// Version returns the analyzer version used for cache invalidation.
func (a gomodAnalyzer) Version() int {
	return version
}

View File

@@ -0,0 +1,149 @@
package mod
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"golang.org/x/exp/slices"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
// Test_gomodAnalyzer_Analyze checks parsing of go.mod and go.sum fixtures,
// error propagation for a malformed go.mod, and the nil result returned for
// a malformed go.sum.
func Test_gomodAnalyzer_Analyze(t *testing.T) {
	tests := []struct {
		name      string
		filePath  string // path reported in the result (go.mod / go.sum)
		inputFile string // fixture actually opened
		want      *analyzer.AnalysisResult
		wantErr   string
	}{
		{
			name:      "go.mod",
			filePath:  "testdata/go.mod",
			inputFile: "testdata/normal_go.mod",
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     types.GoModule,
						FilePath: "testdata/go.mod",
						Libraries: []types.Package{
							{
								Name:    "github.com/aquasecurity/go-dep-parser",
								Version: "0.0.0-20220406074731-71021a481237",
							},
							{
								Name: "golang.org/x/xerrors", Version: "0.0.0-20200804184101-5ec99f83aff1",
								Indirect: true,
							},
						},
					},
				},
			},
		},
		{
			name:      "go.sum",
			filePath:  "testdata/go.sum",
			inputFile: "testdata/normal_go.sum",
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     types.GoModule,
						FilePath: "testdata/go.sum",
						Libraries: []types.Package{
							{Name: "github.com/BurntSushi/toml", Version: "0.3.1"},
							{Name: "github.com/cpuguy83/go-md2man/v2", Version: "2.0.0-20190314233015-f79a8a8ca69d"},
							{Name: "github.com/davecgh/go-spew", Version: "1.1.0"},
							{Name: "github.com/pmezard/go-difflib", Version: "1.0.0"},
							{Name: "github.com/russross/blackfriday/v2", Version: "2.0.1"},
							{Name: "github.com/shurcooL/sanitized_anchor_name", Version: "1.0.0"},
							{Name: "github.com/stretchr/objx", Version: "0.1.0"},
							{Name: "github.com/stretchr/testify", Version: "1.7.0"},
							{Name: "github.com/urfave/cli", Version: "1.22.5"},
							{Name: "golang.org/x/xerrors", Version: "0.0.0-20200804184101-5ec99f83aff1"},
							{Name: "gopkg.in/check.v1", Version: "0.0.0-20161208181325-20d25e280405"},
							{Name: "gopkg.in/yaml.v2", Version: "2.2.2"},
							{Name: "gopkg.in/yaml.v3", Version: "3.0.0-20200313102051-9f266ea9e77c"},
						},
					},
				},
			},
		},
		{
			name:      "sad go.mod",
			filePath:  "testdata/go.mod",
			inputFile: "testdata/sad_go.mod",
			wantErr:   "unknown directive",
		},
		{
			// A malformed go.sum is tolerated: nil result, no error.
			name:      "sad go.sum",
			filePath:  "testdata/go.sum",
			inputFile: "testdata/sad_go.sum",
			want:      nil,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer f.Close()
			a := gomodAnalyzer{}
			ctx := context.Background()
			got, err := a.Analyze(ctx, analyzer.AnalysisInput{
				FilePath: tt.filePath,
				Content:  f,
			})
			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}
			// Sort libraries by name on both sides so the comparison does not
			// depend on parser output order.
			if got != nil {
				slices.SortFunc(got.Applications[0].Libraries, func(a, b types.Package) bool {
					return a.Name < b.Name
				})
				slices.SortFunc(tt.want.Applications[0].Libraries, func(a, b types.Package) bool {
					return a.Name < b.Name
				})
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}
// Test_gomodAnalyzer_Required verifies that only files literally named
// go.mod or go.sum are selected, regardless of directory depth.
func Test_gomodAnalyzer_Required(t *testing.T) {
	testCases := []struct {
		name     string
		filePath string
		want     bool
	}{
		{name: "go.mod", filePath: "test/go.mod", want: true},
		{name: "go.sum", filePath: "test/foo/go.sum", want: true},
		{name: "sad", filePath: "a/b/c/d/test.sum", want: false},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			got := gomodAnalyzer{}.Required(tc.filePath, nil)
			assert.Equal(t, tc.want, got)
		})
	}
}

View File

@@ -0,0 +1,9 @@
module github.com/org/repo
go 1.17
require github.com/aquasecurity/go-dep-parser v0.0.0-20211110174639-8257534ffed3
require golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
replace github.com/aquasecurity/go-dep-parser => github.com/aquasecurity/go-dep-parser v0.0.0-20220406074731-71021a481237

View File

@@ -0,0 +1,24 @@
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/urfave/cli v1.22.5 h1:lNq9sAHXK2qfdI8W+GRItjCEkI+2oR4d+MEHy1CKXoU=
github.com/urfave/cli v1.22.5/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -0,0 +1 @@
invalid

View File

@@ -0,0 +1 @@
invalid

View File

@@ -0,0 +1,55 @@
package jar
import (
"context"
"os"
"path/filepath"
"strings"
"github.com/aquasecurity/fanal/types"
"golang.org/x/xerrors"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/language"
"github.com/aquasecurity/go-dep-parser/pkg/java/jar"
)
// init registers this analyzer with the global analyzer registry at startup.
func init() {
	analyzer.RegisterAnalyzer(&javaLibraryAnalyzer{})
}

// version is the analyzer version; bump it when the analysis logic changes
// so cached results are invalidated.
const version = 1

// requiredExtensions lists the Java archive extensions this analyzer handles.
var requiredExtensions = []string{".jar", ".war", ".ear", ".par"}

// javaLibraryAnalyzer analyzes jar/war/ear/par files
type javaLibraryAnalyzer struct{}
// Analyze parses a Java archive (jar/war/ear/par) and returns the libraries
// bundled inside it. The parser is given the archive size and path; remote
// lookups are disabled when offline mode is requested.
func (a javaLibraryAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	parser := jar.NewParser(
		jar.WithSize(input.Info.Size()),
		jar.WithFilePath(input.FilePath),
		jar.WithOffline(input.Options.Offline),
	)
	libs, deps, err := parser.Parse(input.Content)
	if err != nil {
		return nil, xerrors.Errorf("jar/war/ear/par parse error: %w", err)
	}
	return language.ToAnalysisResult(types.Jar, input.FilePath, input.FilePath, libs, deps), nil
}
// Required reports whether the file has a supported Java archive extension
// (.jar/.war/.ear/.par), matched case-insensitively.
func (a javaLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	got := filepath.Ext(filePath)
	for _, want := range requiredExtensions {
		if strings.EqualFold(want, got) {
			return true
		}
	}
	return false
}
// Type identifies this analyzer as the Java archive analyzer.
func (a javaLibraryAnalyzer) Type() analyzer.Type {
	return analyzer.TypeJar
}

// Version returns the analyzer version used for cache invalidation.
func (a javaLibraryAnalyzer) Version() int {
	return version
}

View File

@@ -0,0 +1,144 @@
package jar
import (
"context"
"os"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/types"
)
// Test_javaLibraryAnalyzer_Analyze checks library extraction from WAR and PAR
// fixtures and that a non-zip file produces a parse error.
func Test_javaLibraryAnalyzer_Analyze(t *testing.T) {
	tests := []struct {
		name      string
		inputFile string
		want      *analyzer.AnalysisResult
		wantErr   string
	}{
		{
			name:      "happy path (WAR file)",
			inputFile: "testdata/test.war",
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     types.Jar,
						FilePath: "testdata/test.war",
						Libraries: []types.Package{
							{Name: "org.glassfish:javax.el", FilePath: "testdata/test.war", Version: "3.0.0"},
							{
								Name: "com.fasterxml.jackson.core:jackson-databind", FilePath: "testdata/test.war",
								Version: "2.9.10.6",
							},
							{
								Name: "com.fasterxml.jackson.core:jackson-annotations", FilePath: "testdata/test.war",
								Version: "2.9.10",
							},
							{
								Name: "com.fasterxml.jackson.core:jackson-core", FilePath: "testdata/test.war",
								Version: "2.9.10",
							},
							{Name: "org.slf4j:slf4j-api", FilePath: "testdata/test.war", Version: "1.7.30"},
							{Name: "com.cronutils:cron-utils", FilePath: "testdata/test.war", Version: "9.1.2"},
							{Name: "org.apache.commons:commons-lang3", FilePath: "testdata/test.war", Version: "3.11"},
							{Name: "com.example:web-app", FilePath: "testdata/test.war", Version: "1.0-SNAPSHOT"},
						},
					},
				},
			},
		},
		{
			name:      "happy path (PAR file)",
			inputFile: "testdata/test.par",
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     types.Jar,
						FilePath: "testdata/test.par",
						Libraries: []types.Package{
							{
								Name: "com.fasterxml.jackson.core:jackson-core", FilePath: "testdata/test.par",
								Version: "2.9.10",
							},
						},
					},
				},
			},
		},
		{
			name:      "sad path",
			inputFile: "testdata/test.txt",
			wantErr:   "not a valid zip file",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			f, err := os.Open(tt.inputFile)
			require.NoError(t, err)
			defer f.Close()
			// The jar parser needs the archive size, so stat the fixture first.
			stat, err := f.Stat()
			require.NoError(t, err)
			a := javaLibraryAnalyzer{}
			ctx := context.Background()
			got, err := a.Analyze(ctx, analyzer.AnalysisInput{
				FilePath: tt.inputFile,
				Info:     stat,
				Content:  f,
			})
			if tt.wantErr != "" {
				require.NotNil(t, err)
				assert.Contains(t, err.Error(), tt.wantErr)
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, tt.want, got)
		})
	}
}
// Test_javaLibraryAnalyzer_Required verifies the extension filter: jar, war,
// ear and par (in any letter case) are accepted; other extensions are not.
func Test_javaLibraryAnalyzer_Required(t *testing.T) {
	testCases := []struct {
		name     string
		filePath string
		want     bool
	}{
		{name: "war", filePath: "test/test.war", want: true},
		{name: "jar", filePath: "test.jar", want: true},
		{name: "ear", filePath: "a/b/c/d/test.ear", want: true},
		{name: "capital jar", filePath: "a/b/c/d/test.JAR", want: true},
		{name: "zip", filePath: "test.zip", want: false},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			got := javaLibraryAnalyzer{}.Required(tc.filePath, nil)
			assert.Equal(t, tc.want, got)
		})
	}
}

Binary file not shown.

View File

@@ -0,0 +1 @@
test

Binary file not shown.

View File

@@ -0,0 +1,45 @@
package pom
import (
"context"
"os"
"path/filepath"
"golang.org/x/xerrors"
"github.com/aquasecurity/go-dep-parser/pkg/java/pom"
"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/language"
"github.com/aquasecurity/fanal/types"
)
// init registers this analyzer with the global analyzer registry at startup.
func init() {
	analyzer.RegisterAnalyzer(&pomAnalyzer{})
}

// version is the analyzer version; bump it when the analysis logic changes
// so cached results are invalidated.
const version = 1

// pomAnalyzer analyzes pom.xml
type pomAnalyzer struct{}
// Analyze parses a pom.xml and returns the Maven dependencies it declares.
// The parser is given the absolute fixture location (Dir joined with the
// relative FilePath); remote repository lookups are disabled in offline mode.
func (a pomAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
	fullPath := filepath.Join(input.Dir, input.FilePath)
	parser := pom.NewParser(fullPath, pom.WithOffline(input.Options.Offline))
	res, err := language.Analyze(types.Pom, input.FilePath, input.Content, parser)
	if err != nil {
		return nil, xerrors.Errorf("%s parse error: %w", input.FilePath, err)
	}
	return res, nil
}
// Required reports whether the file's base name is the Maven manifest (pom.xml).
func (a pomAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	return filepath.Base(filePath) == types.MavenPom
}

// Type identifies this analyzer as the pom.xml analyzer.
func (a pomAnalyzer) Type() analyzer.Type {
	return analyzer.TypePom
}

// Version returns the analyzer version used for cache invalidation.
func (a pomAnalyzer) Version() int {
	return version
}

Some files were not shown because too many files have changed in this diff Show More