feat(image): scan misconfigurations in image config (#3437)

Author: Teppei Fukuda
Date: 2023-01-30 04:48:29 +02:00
Committed by: GitHub
Parent: 501d424d1f
Commit: fb0d8f3f30
43 changed files with 1851 additions and 1361 deletions

View File

@@ -17,7 +17,7 @@ import (
func TestFilesystem(t *testing.T) { func TestFilesystem(t *testing.T) {
type args struct { type args struct {
scanners string scanner types.Scanner
severity []string severity []string
ignoreIDs []string ignoreIDs []string
policyPaths []string policyPaths []string
@@ -41,7 +41,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "gomod", name: "gomod",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
input: "testdata/fixtures/fs/gomod", input: "testdata/fixtures/fs/gomod",
}, },
golden: "testdata/gomod.json.golden", golden: "testdata/gomod.json.golden",
@@ -49,7 +49,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "gomod with skip files", name: "gomod with skip files",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
input: "testdata/fixtures/fs/gomod", input: "testdata/fixtures/fs/gomod",
skipFiles: []string{"testdata/fixtures/fs/gomod/submod2/go.mod"}, skipFiles: []string{"testdata/fixtures/fs/gomod/submod2/go.mod"},
}, },
@@ -58,7 +58,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "gomod with skip dirs", name: "gomod with skip dirs",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
input: "testdata/fixtures/fs/gomod", input: "testdata/fixtures/fs/gomod",
skipDirs: []string{"testdata/fixtures/fs/gomod/submod2"}, skipDirs: []string{"testdata/fixtures/fs/gomod/submod2"},
}, },
@@ -67,7 +67,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "nodejs", name: "nodejs",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
input: "testdata/fixtures/fs/nodejs", input: "testdata/fixtures/fs/nodejs",
listAllPkgs: true, listAllPkgs: true,
}, },
@@ -76,7 +76,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "yarn", name: "yarn",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
input: "testdata/fixtures/fs/yarn", input: "testdata/fixtures/fs/yarn",
listAllPkgs: true, listAllPkgs: true,
}, },
@@ -85,7 +85,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "pnpm", name: "pnpm",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
input: "testdata/fixtures/fs/pnpm", input: "testdata/fixtures/fs/pnpm",
}, },
golden: "testdata/pnpm.json.golden", golden: "testdata/pnpm.json.golden",
@@ -93,7 +93,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "pip", name: "pip",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
listAllPkgs: true, listAllPkgs: true,
input: "testdata/fixtures/fs/pip", input: "testdata/fixtures/fs/pip",
}, },
@@ -102,7 +102,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "pom", name: "pom",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
input: "testdata/fixtures/fs/pom", input: "testdata/fixtures/fs/pom",
}, },
golden: "testdata/pom.json.golden", golden: "testdata/pom.json.golden",
@@ -110,7 +110,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "gradle", name: "gradle",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
input: "testdata/fixtures/fs/gradle", input: "testdata/fixtures/fs/gradle",
}, },
golden: "testdata/gradle.json.golden", golden: "testdata/gradle.json.golden",
@@ -118,7 +118,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "conan", name: "conan",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
listAllPkgs: true, listAllPkgs: true,
input: "testdata/fixtures/fs/conan", input: "testdata/fixtures/fs/conan",
}, },
@@ -127,7 +127,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "nuget", name: "nuget",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
listAllPkgs: true, listAllPkgs: true,
input: "testdata/fixtures/fs/nuget", input: "testdata/fixtures/fs/nuget",
}, },
@@ -136,7 +136,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "dotnet", name: "dotnet",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
listAllPkgs: true, listAllPkgs: true,
input: "testdata/fixtures/fs/dotnet", input: "testdata/fixtures/fs/dotnet",
}, },
@@ -145,7 +145,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "cocoapods", name: "cocoapods",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
listAllPkgs: true, listAllPkgs: true,
input: "testdata/fixtures/fs/cocoapods", input: "testdata/fixtures/fs/cocoapods",
}, },
@@ -154,7 +154,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "pubspec.lock", name: "pubspec.lock",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
listAllPkgs: true, listAllPkgs: true,
input: "testdata/fixtures/fs/pubspec", input: "testdata/fixtures/fs/pubspec",
}, },
@@ -163,7 +163,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "mix.lock", name: "mix.lock",
args: args{ args: args{
scanners: types.VulnerabilityScanner, scanner: types.VulnerabilityScanner,
listAllPkgs: true, listAllPkgs: true,
input: "testdata/fixtures/fs/mixlock", input: "testdata/fixtures/fs/mixlock",
}, },
@@ -172,7 +172,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "dockerfile", name: "dockerfile",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
input: "testdata/fixtures/fs/dockerfile", input: "testdata/fixtures/fs/dockerfile",
namespaces: []string{"testing"}, namespaces: []string{"testing"},
}, },
@@ -181,7 +181,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "dockerfile with custom file pattern", name: "dockerfile with custom file pattern",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
input: "testdata/fixtures/fs/dockerfile_file_pattern", input: "testdata/fixtures/fs/dockerfile_file_pattern",
namespaces: []string{"testing"}, namespaces: []string{"testing"},
filePatterns: []string{"dockerfile:Customfile"}, filePatterns: []string{"dockerfile:Customfile"},
@@ -191,7 +191,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "dockerfile with rule exception", name: "dockerfile with rule exception",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
policyPaths: []string{"testdata/fixtures/fs/rule-exception/policy"}, policyPaths: []string{"testdata/fixtures/fs/rule-exception/policy"},
input: "testdata/fixtures/fs/rule-exception", input: "testdata/fixtures/fs/rule-exception",
}, },
@@ -200,7 +200,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "dockerfile with namespace exception", name: "dockerfile with namespace exception",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
policyPaths: []string{"testdata/fixtures/fs/namespace-exception/policy"}, policyPaths: []string{"testdata/fixtures/fs/namespace-exception/policy"},
input: "testdata/fixtures/fs/namespace-exception", input: "testdata/fixtures/fs/namespace-exception",
}, },
@@ -209,7 +209,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "dockerfile with custom policies", name: "dockerfile with custom policies",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
policyPaths: []string{"testdata/fixtures/fs/custom-policy/policy"}, policyPaths: []string{"testdata/fixtures/fs/custom-policy/policy"},
namespaces: []string{"user"}, namespaces: []string{"user"},
input: "testdata/fixtures/fs/custom-policy", input: "testdata/fixtures/fs/custom-policy",
@@ -219,7 +219,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "tarball helm chart scanning with builtin policies", name: "tarball helm chart scanning with builtin policies",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
input: "testdata/fixtures/fs/helm", input: "testdata/fixtures/fs/helm",
}, },
golden: "testdata/helm.json.golden", golden: "testdata/helm.json.golden",
@@ -227,7 +227,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "helm chart directory scanning with builtin policies", name: "helm chart directory scanning with builtin policies",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
input: "testdata/fixtures/fs/helm_testchart", input: "testdata/fixtures/fs/helm_testchart",
}, },
golden: "testdata/helm_testchart.json.golden", golden: "testdata/helm_testchart.json.golden",
@@ -235,7 +235,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "helm chart directory scanning with value overrides using set", name: "helm chart directory scanning with value overrides using set",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
input: "testdata/fixtures/fs/helm_testchart", input: "testdata/fixtures/fs/helm_testchart",
helmSet: []string{"securityContext.runAsUser=0"}, helmSet: []string{"securityContext.runAsUser=0"},
}, },
@@ -244,7 +244,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "helm chart directory scanning with value overrides using value file", name: "helm chart directory scanning with value overrides using value file",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
input: "testdata/fixtures/fs/helm_testchart", input: "testdata/fixtures/fs/helm_testchart",
helmValuesFile: []string{"testdata/fixtures/fs/helm_values/values.yaml"}, helmValuesFile: []string{"testdata/fixtures/fs/helm_values/values.yaml"},
}, },
@@ -253,7 +253,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "helm chart directory scanning with builtin policies and non string Chart name", name: "helm chart directory scanning with builtin policies and non string Chart name",
args: args{ args: args{
scanners: types.MisconfigScanner, scanner: types.MisconfigScanner,
input: "testdata/fixtures/fs/helm_badname", input: "testdata/fixtures/fs/helm_badname",
}, },
golden: "testdata/helm_badname.json.golden", golden: "testdata/helm_badname.json.golden",
@@ -261,7 +261,7 @@ func TestFilesystem(t *testing.T) {
{ {
name: "secrets", name: "secrets",
args: args{ args: args{
scanners: "vuln,secret", scanner: "vuln,secret",
input: "testdata/fixtures/fs/secrets", input: "testdata/fixtures/fs/secrets",
secretConfig: "testdata/fixtures/fs/secrets/trivy-secret.yaml", secretConfig: "testdata/fixtures/fs/secrets/trivy-secret.yaml",
}, },
@@ -318,8 +318,8 @@ func TestFilesystem(t *testing.T) {
"--offline-scan", "--offline-scan",
} }
if tt.args.scanners != "" { if tt.args.scanner != "" {
osArgs = append(osArgs, "--scanners", tt.args.scanners) osArgs = append(osArgs, "--scanners", string(tt.args.scanner))
} }
if len(tt.args.policyPaths) != 0 { if len(tt.args.policyPaths) != 0 {

View File

@@ -581,7 +581,7 @@ func NewConfigCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
options.DisabledAnalyzers = append(analyzer.TypeOSes, analyzer.TypeLanguages...) options.DisabledAnalyzers = append(analyzer.TypeOSes, analyzer.TypeLanguages...)
// Scan only for misconfigurations // Scan only for misconfigurations
options.Scanners = []string{types.MisconfigScanner} options.Scanners = types.Scanners{types.MisconfigScanner}
return artifact.Run(cmd.Context(), options, artifact.TargetFilesystem) return artifact.Run(cmd.Context(), options, artifact.TargetFilesystem)
}, },
@@ -989,7 +989,7 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
} }
// Scan vulnerabilities // Scan vulnerabilities
options.Scanners = []string{types.VulnerabilityScanner} options.Scanners = types.Scanners{types.VulnerabilityScanner}
return artifact.Run(cmd.Context(), options, artifact.TargetSBOM) return artifact.Run(cmd.Context(), options, artifact.TargetSBOM)
}, },

View File

@@ -297,7 +297,7 @@ func (r *runner) Report(opts flag.Options, report types.Report) error {
func (r *runner) initDB(opts flag.Options) error { func (r *runner) initDB(opts flag.Options) error {
// When scanning config files or running as client mode, it doesn't need to download the vulnerability database. // When scanning config files or running as client mode, it doesn't need to download the vulnerability database.
if opts.ServerAddr != "" || !slices.Contains(opts.Scanners, types.VulnerabilityScanner) { if opts.ServerAddr != "" || !opts.Scanners.Enabled(types.VulnerabilityScanner) {
return nil return nil
} }
@@ -359,16 +359,6 @@ func (r *runner) initCache(opts flag.Options) error {
return nil return nil
} }
// Run performs artifact scanning
//func Run(cliCtx *cli.Context, targetKind TargetKind) error {
// opt, err := InitOption(cliCtx)
// if err != nil {
// return xerrors.Errorf("InitOption: %w", err)
// }
//
// return run(cliCtx.Context, opt, targetKind)
//}
// Run performs artifact scanning // Run performs artifact scanning
func Run(ctx context.Context, opts flag.Options, targetKind TargetKind) (err error) { func Run(ctx context.Context, opts flag.Options, targetKind TargetKind) (err error) {
ctx, cancel := context.WithTimeout(ctx, opts.Timeout) ctx, cancel := context.WithTimeout(ctx, opts.Timeout)
@@ -452,22 +442,27 @@ func disabledAnalyzers(opts flag.Options) []analyzer.Type {
} }
// Do not perform secret scanning when it is not specified. // Do not perform secret scanning when it is not specified.
if !slices.Contains(opts.Scanners, types.SecretScanner) { if !opts.Scanners.Enabled(types.SecretScanner) {
analyzers = append(analyzers, analyzer.TypeSecret) analyzers = append(analyzers, analyzer.TypeSecret)
} }
// Do not perform misconfiguration scanning when it is not specified. // Do not perform misconfiguration scanning when it is not specified.
if !slices.Contains(opts.Scanners, types.MisconfigScanner) && if !opts.Scanners.AnyEnabled(types.MisconfigScanner, types.RBACScanner) {
!slices.Contains(opts.Scanners, types.RBACScanner) {
analyzers = append(analyzers, analyzer.TypeConfigFiles...) analyzers = append(analyzers, analyzer.TypeConfigFiles...)
} }
// Scanning file headers and license files is expensive. // Scanning file headers and license files is expensive.
// It is performed only when '--scanners license' and '--license-full' are specified. // It is performed only when '--scanners license' and '--license-full' are specified together.
if !slices.Contains(opts.Scanners, types.LicenseScanner) || !opts.LicenseFull { if !opts.Scanners.Enabled(types.LicenseScanner) || !opts.LicenseFull {
analyzers = append(analyzers, analyzer.TypeLicenseFile) analyzers = append(analyzers, analyzer.TypeLicenseFile)
} }
// Do not perform misconfiguration scanning on container image config
// when it is not specified.
if !opts.ImageConfigScanners.Enabled(types.MisconfigScanner) {
analyzers = append(analyzers, analyzer.TypeHistoryDockerfile)
}
if len(opts.SBOMSources) == 0 { if len(opts.SBOMSources) == 0 {
analyzers = append(analyzers, analyzer.TypeExecutable) analyzers = append(analyzers, analyzer.TypeExecutable)
} }
@@ -484,6 +479,7 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi
scanOptions := types.ScanOptions{ scanOptions := types.ScanOptions{
VulnType: opts.VulnType, VulnType: opts.VulnType,
Scanners: opts.Scanners, Scanners: opts.Scanners,
ImageConfigScanners: opts.ImageConfigScanners, // this is valid only for 'image' subcommand
ScanRemovedPackages: opts.ScanRemovedPkgs, // this is valid only for 'image' subcommand ScanRemovedPackages: opts.ScanRemovedPkgs, // this is valid only for 'image' subcommand
Platform: opts.Platform, // this is valid only for 'image' subcommand Platform: opts.Platform, // this is valid only for 'image' subcommand
ListAllPackages: opts.ListAllPkgs, ListAllPackages: opts.ListAllPkgs,
@@ -491,11 +487,20 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi
FilePatterns: opts.FilePatterns, FilePatterns: opts.FilePatterns,
} }
if slices.Contains(opts.Scanners, types.VulnerabilityScanner) { if len(opts.ImageConfigScanners) != 0 {
log.Logger.Infof("Container image config scanners: %q", opts.ImageConfigScanners)
}
if opts.Scanners.Enabled(types.VulnerabilityScanner) {
log.Logger.Info("Vulnerability scanning is enabled") log.Logger.Info("Vulnerability scanning is enabled")
log.Logger.Debugf("Vulnerability type: %s", scanOptions.VulnType) log.Logger.Debugf("Vulnerability type: %s", scanOptions.VulnType)
} }
// ScannerOption is filled only when config scanning is enabled.
var configScannerOptions config.ScannerOption
if opts.Scanners.Enabled(types.MisconfigScanner) || opts.ImageConfigScanners.Enabled(types.MisconfigScanner) {
log.Logger.Info("Misconfiguration scanning is enabled")
var downloadedPolicyPaths []string var downloadedPolicyPaths []string
var disableEmbedded bool var disableEmbedded bool
downloadedPolicyPaths, err := operation.InitBuiltinPolicies(context.Background(), opts.CacheDir, opts.Quiet, opts.SkipPolicyUpdate) downloadedPolicyPaths, err := operation.InitBuiltinPolicies(context.Background(), opts.CacheDir, opts.Quiet, opts.SkipPolicyUpdate)
@@ -508,10 +513,6 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi
disableEmbedded = true disableEmbedded = true
} }
// ScannerOption is filled only when config scanning is enabled.
var configScannerOptions config.ScannerOption
if slices.Contains(opts.Scanners, types.MisconfigScanner) {
log.Logger.Info("Misconfiguration scanning is enabled")
configScannerOptions = config.ScannerOption{ configScannerOptions = config.ScannerOption{
Trace: opts.Trace, Trace: opts.Trace,
Namespaces: append(opts.PolicyNamespaces, defaultPolicyNamespaces...), Namespaces: append(opts.PolicyNamespaces, defaultPolicyNamespaces...),
@@ -527,7 +528,7 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi
} }
// Do not load config file for secret scanning // Do not load config file for secret scanning
if slices.Contains(opts.Scanners, types.SecretScanner) { if opts.Scanners.Enabled(types.SecretScanner) {
ver := canonicalVersion(opts.AppVersion) ver := canonicalVersion(opts.AppVersion)
log.Logger.Info("Secret scanning is enabled") log.Logger.Info("Secret scanning is enabled")
log.Logger.Info("If your scanning is slow, please try '--scanners vuln' to disable secret scanning") log.Logger.Info("If your scanning is slow, please try '--scanners vuln' to disable secret scanning")
@@ -536,7 +537,7 @@ func initScannerConfig(opts flag.Options, cacheClient cache.Cache) (ScannerConfi
opts.SecretConfigPath = "" opts.SecretConfigPath = ""
} }
if slices.Contains(opts.Scanners, types.LicenseScanner) { if opts.Scanners.Enabled(types.LicenseScanner) {
if opts.LicenseFull { if opts.LicenseFull {
log.Logger.Info("Full license scanning is enabled") log.Logger.Info("Full license scanning is enabled")
} else { } else {
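
The hunks above swap slices.Contains(opts.Scanners, ...) for the new types.Scanners helpers Enabled and AnyEnabled. The type itself is not part of this diff; a minimal sketch of what the helpers amount to, assuming Scanner is a string-backed type (the real definitions live elsewhere in the repository and may differ in detail):

// Sketch only: illustrative definitions, not taken from this commit.
type Scanner string
type Scanners []Scanner

// Enabled reports whether the given scanner is selected.
func (ss Scanners) Enabled(s Scanner) bool {
	for _, v := range ss {
		if v == s {
			return true
		}
	}
	return false
}

// AnyEnabled reports whether at least one of the given scanners is selected.
func (ss Scanners) AnyEnabled(wanted ...Scanner) bool {
	for _, s := range wanted {
		if ss.Enabled(s) {
			return true
		}
	}
	return false
}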

View File

@@ -62,12 +62,12 @@ const (
) )
// Scanners reads spec control and determines the scanners by check ID prefix // Scanners reads spec control and determines the scanners by check ID prefix
func (cs *ComplianceSpec) Scanners() ([]types.Scanner, error) { func (cs *ComplianceSpec) Scanners() (types.Scanners, error) {
scannerTypes := map[types.Scanner]struct{}{} scannerTypes := map[types.Scanner]struct{}{}
for _, control := range cs.Spec.Controls { for _, control := range cs.Spec.Controls {
for _, check := range control.Checks { for _, check := range control.Checks {
scannerType := scannerByCheckID(check.ID) scannerType := scannerByCheckID(check.ID)
if scannerType == types.ScannerUnknown { if scannerType == types.UnknownScanner {
return nil, xerrors.Errorf("unsupported check ID: %s", check.ID) return nil, xerrors.Errorf("unsupported check ID: %s", check.ID)
} }
scannerTypes[scannerType] = struct{}{} scannerTypes[scannerType] = struct{}{}
@@ -96,11 +96,11 @@ func scannerByCheckID(checkID string) types.Scanner {
case strings.HasPrefix(checkID, "avd-"): case strings.HasPrefix(checkID, "avd-"):
return types.MisconfigScanner return types.MisconfigScanner
default: default:
return types.ScannerUnknown return types.UnknownScanner
} }
} }
// GetComlianceSpec accepct compliance flag name/path and return builtin or file system loaded spec // GetComplianceSpec accepct compliance flag name/path and return builtin or file system loaded spec
func GetComplianceSpec(specNameOrPath string) ([]byte, error) { func GetComplianceSpec(specNameOrPath string) ([]byte, error) {
if strings.HasPrefix(specNameOrPath, "@") { if strings.HasPrefix(specNameOrPath, "@") {
buf, err := os.ReadFile(strings.TrimPrefix(specNameOrPath, "@")) buf, err := os.ReadFile(strings.TrimPrefix(specNameOrPath, "@"))

View File

@@ -15,7 +15,7 @@ func TestComplianceSpec_Scanners(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
spec spec.Spec spec spec.Spec
want []types.Scanner want types.Scanners
wantErr assert.ErrorAssertionFunc wantErr assert.ErrorAssertionFunc
}{ }{
{ {
@@ -127,7 +127,9 @@ func TestComplianceSpec_Scanners(t *testing.T) {
if !tt.wantErr(t, err, fmt.Sprintf("Scanners()")) { if !tt.wantErr(t, err, fmt.Sprintf("Scanners()")) {
return return
} }
sort.Strings(got) // for consistency sort.Slice(got, func(i, j int) bool {
return got[i] < got[j]
}) // for consistency
assert.Equalf(t, tt.want, got, "Scanners()") assert.Equalf(t, tt.want, got, "Scanners()")
}) })
} }

View File

@@ -2,9 +2,10 @@ package all
import ( import (
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/buildinfo" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/buildinfo"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/command/apk"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/executable" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/executable"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/imgconf/apk"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/imgconf/dockerfile"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/c/conan" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/c/conan"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/conda/meta" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/conda/meta"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/dart/pub" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/dart/pub"

View File

@@ -10,7 +10,6 @@ import (
"strings" "strings"
"sync" "sync"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/samber/lo" "github.com/samber/lo"
"golang.org/x/exp/slices" "golang.org/x/exp/slices"
"golang.org/x/sync/semaphore" "golang.org/x/sync/semaphore"
@@ -24,7 +23,6 @@ import (
var ( var (
analyzers = map[Type]analyzer{} analyzers = map[Type]analyzer{}
configAnalyzers = map[Type]configAnalyzer{}
// ErrUnknownOS occurs when unknown OS is analyzed. // ErrUnknownOS occurs when unknown OS is analyzed.
ErrUnknownOS = xerrors.New("unknown OS") ErrUnknownOS = xerrors.New("unknown OS")
@@ -72,14 +70,6 @@ type analyzer interface {
Required(filePath string, info os.FileInfo) bool Required(filePath string, info os.FileInfo) bool
} }
// configAnalyzer defines an interface for container image config analyzer
type configAnalyzer interface {
Type() Type
Version() int
Analyze(input ConfigAnalysisInput) (*AnalysisResult, error)
Required(osFound types.OS) bool
}
//////////////////// ////////////////////
// Analyzer group // // Analyzer group //
//////////////////// ////////////////////
@@ -97,15 +87,6 @@ func DeregisterAnalyzer(t Type) {
delete(analyzers, t) delete(analyzers, t)
} }
func RegisterConfigAnalyzer(analyzer configAnalyzer) {
configAnalyzers[analyzer.Type()] = analyzer
}
// DeregisterConfigAnalyzer is mainly for testing
func DeregisterConfigAnalyzer(t Type) {
delete(configAnalyzers, t)
}
// CustomGroup returns a group name for custom analyzers // CustomGroup returns a group name for custom analyzers
// This is mainly intended to be used in Aqua products. // This is mainly intended to be used in Aqua products.
type CustomGroup interface { type CustomGroup interface {
@@ -116,7 +97,6 @@ type Opener func() (dio.ReadSeekCloserAt, error)
type AnalyzerGroup struct { type AnalyzerGroup struct {
analyzers []analyzer analyzers []analyzer
configAnalyzers []configAnalyzer
filePatterns map[Type][]*regexp.Regexp filePatterns map[Type][]*regexp.Regexp
} }
@@ -162,11 +142,6 @@ type AnalysisResult struct {
CustomResources []types.CustomResource CustomResources []types.CustomResource
} }
type ConfigAnalysisInput struct {
OS types.OS
Config *v1.ConfigFile
}
func NewAnalysisResult() *AnalysisResult { func NewAnalysisResult() *AnalysisResult {
result := new(AnalysisResult) result := new(AnalysisResult)
result.Files = map[types.HandlerType][]types.File{} result.Files = map[types.HandlerType][]types.File{}
@@ -360,13 +335,6 @@ func NewAnalyzerGroup(opt AnalyzerOptions) (AnalyzerGroup, error) {
group.analyzers = append(group.analyzers, a) group.analyzers = append(group.analyzers, a)
} }
for analyzerType, a := range configAnalyzers {
if slices.Contains(opt.DisabledAnalyzers, analyzerType) {
continue
}
group.configAnalyzers = append(group.configAnalyzers, a)
}
return group, nil return group, nil
} }
@@ -379,15 +347,6 @@ func (ag AnalyzerGroup) AnalyzerVersions() map[string]int {
return versions return versions
} }
// ImageConfigAnalyzerVersions returns analyzer version identifier used for cache keys.
func (ag AnalyzerGroup) ImageConfigAnalyzerVersions() map[string]int {
versions := map[string]int{}
for _, ca := range ag.configAnalyzers {
versions[string(ca.Type())] = ca.Version()
}
return versions
}
func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, limit *semaphore.Weighted, result *AnalysisResult, func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, limit *semaphore.Weighted, result *AnalysisResult,
dir, filePath string, info os.FileInfo, opener Opener, disabled []Type, opts AnalysisOptions) error { dir, filePath string, info os.FileInfo, opener Opener, disabled []Type, opts AnalysisOptions) error {
if info.IsDir() { if info.IsDir() {
@@ -444,28 +403,6 @@ func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, lim
return nil return nil
} }
func (ag AnalyzerGroup) AnalyzeImageConfig(targetOS types.OS, config *v1.ConfigFile) *AnalysisResult {
input := ConfigAnalysisInput{
OS: targetOS,
Config: config,
}
result := NewAnalysisResult()
for _, a := range ag.configAnalyzers {
if !a.Required(targetOS) {
continue
}
r, err := a.Analyze(input)
if err != nil {
log.Logger.Debugf("Image config analysis error: %s", err)
continue
}
result.Merge(r)
}
return result
}
func (ag AnalyzerGroup) filePatternMatch(analyzerType Type, filePath string) bool { func (ag AnalyzerGroup) filePatternMatch(analyzerType Type, filePath string) bool {
for _, pattern := range ag.filePatterns[analyzerType] { for _, pattern := range ag.filePatterns[analyzerType] {
if pattern.MatchString(filePath) { if pattern.MatchString(filePath) {

View File

@@ -2,13 +2,11 @@ package analyzer_test
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"os" "os"
"sync" "sync"
"testing" "testing"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"golang.org/x/sync/semaphore" "golang.org/x/sync/semaphore"
@@ -19,7 +17,7 @@ import (
aos "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os" aos "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os"
"github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/types"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/command/apk" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/imgconf/apk"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/ruby/bundler" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/ruby/bundler"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os/alpine" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os/alpine"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os/ubuntu" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os/ubuntu"
@@ -28,45 +26,6 @@ import (
_ "github.com/aquasecurity/trivy/pkg/fanal/handler/all" _ "github.com/aquasecurity/trivy/pkg/fanal/handler/all"
) )
type mockConfigAnalyzer struct{}
func (mockConfigAnalyzer) Required(targetOS types.OS) bool {
return targetOS.Family == "alpine"
}
func (mockConfigAnalyzer) Analyze(input analyzer.ConfigAnalysisInput) (*analyzer.AnalysisResult, error) {
if input.Config == nil {
return nil, errors.New("error")
}
return &analyzer.AnalysisResult{
PackageInfos: []types.PackageInfo{
{
Packages: types.Packages{
{
Name: "musl",
Version: "1.1.24-r2",
},
},
},
},
}, nil
}
func (mockConfigAnalyzer) Type() analyzer.Type {
return analyzer.Type("test")
}
func (mockConfigAnalyzer) Version() int {
return 1
}
func TestMain(m *testing.M) {
mock := mockConfigAnalyzer{}
analyzer.RegisterConfigAnalyzer(mock)
defer analyzer.DeregisterConfigAnalyzer(mock.Type())
os.Exit(m.Run())
}
func TestAnalysisResult_Merge(t *testing.T) { func TestAnalysisResult_Merge(t *testing.T) {
type fields struct { type fields struct {
m sync.Mutex m sync.Mutex
@@ -325,7 +284,7 @@ func TestAnalysisResult_Merge(t *testing.T) {
} }
} }
func TestAnalyzeFile(t *testing.T) { func TestAnalyzerGroup_AnalyzeFile(t *testing.T) {
type args struct { type args struct {
filePath string filePath string
testFilePath string testFilePath string
@@ -540,81 +499,7 @@ func TestAnalyzeFile(t *testing.T) {
} }
} }
func TestAnalyzeConfig(t *testing.T) { func TestAnalyzerGroup_AnalyzerVersions(t *testing.T) {
type args struct {
targetOS types.OS
config *v1.ConfigFile
disabledAnalyzers []analyzer.Type
filePatterns []string
}
tests := []struct {
name string
args args
want *analyzer.AnalysisResult
}{
{
name: "happy path",
args: args{
targetOS: types.OS{
Family: "alpine",
Name: "3.11.6",
},
config: &v1.ConfigFile{
OS: "linux",
},
},
want: &analyzer.AnalysisResult{
Files: map[types.HandlerType][]types.File{},
PackageInfos: []types.PackageInfo{
{
Packages: []types.Package{
{
Name: "musl",
Version: "1.1.24-r2",
},
},
},
},
},
},
{
name: "non-target OS",
args: args{
targetOS: types.OS{
Family: "debian",
Name: "9.2",
},
config: &v1.ConfigFile{
OS: "linux",
},
},
want: analyzer.NewAnalysisResult(),
},
{
name: "Analyze returns an error",
args: args{
targetOS: types.OS{
Family: "alpine",
Name: "3.11.6",
},
},
want: analyzer.NewAnalysisResult(),
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a, err := analyzer.NewAnalyzerGroup(analyzer.AnalyzerOptions{
FilePatterns: tt.args.filePatterns,
DisabledAnalyzers: tt.args.disabledAnalyzers,
})
require.NoError(t, err)
got := a.AnalyzeImageConfig(tt.args.targetOS, tt.args.config)
assert.Equal(t, tt.want, got)
})
}
}
func TestAnalyzer_AnalyzerVersions(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
disabled []analyzer.Type disabled []analyzer.Type
@@ -656,40 +541,3 @@ func TestAnalyzer_AnalyzerVersions(t *testing.T) {
}) })
} }
} }
func TestAnalyzer_ImageConfigAnalyzerVersions(t *testing.T) {
tests := []struct {
name string
disabled []analyzer.Type
want map[string]int
}{
{
name: "happy path",
disabled: []analyzer.Type{},
want: map[string]int{
"apk-command": 1,
"test": 1,
},
},
{
name: "disable analyzers",
disabled: []analyzer.Type{
analyzer.TypeAlpine,
analyzer.TypeApkCommand,
},
want: map[string]int{
"test": 1,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a, err := analyzer.NewAnalyzerGroup(analyzer.AnalyzerOptions{
DisabledAnalyzers: tt.disabled,
})
require.NoError(t, err)
got := a.ImageConfigAnalyzerVersions()
assert.Equal(t, tt.want, got)
})
}
}

View File

@@ -0,0 +1,117 @@
package analyzer
import (
"context"
v1 "github.com/google/go-containerregistry/pkg/v1"
"golang.org/x/exp/slices"
"golang.org/x/xerrors"
misconf "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/log"
)
var configAnalyzerConstructors = map[Type]configAnalyzerConstructor{}
type configAnalyzerConstructor func(ConfigAnalyzerOptions) (ConfigAnalyzer, error)
// RegisterConfigAnalyzer adds a constructor of config analyzer
func RegisterConfigAnalyzer(t Type, init configAnalyzerConstructor) {
configAnalyzerConstructors[t] = init
}
// DeregisterConfigAnalyzer is mainly for testing
func DeregisterConfigAnalyzer(t Type) {
delete(configAnalyzerConstructors, t)
}
// ConfigAnalyzer defines an interface for analyzer of container image config
type ConfigAnalyzer interface {
Type() Type
Version() int
Analyze(ctx context.Context, input ConfigAnalysisInput) (*ConfigAnalysisResult, error)
Required(osFound types.OS) bool
}
// ConfigAnalyzerOptions is used to initialize config analyzers
type ConfigAnalyzerOptions struct {
FilePatterns []string
DisabledAnalyzers []Type
MisconfScannerOption misconf.ScannerOption
}
type ConfigAnalysisInput struct {
OS types.OS
Config *v1.ConfigFile
}
type ConfigAnalysisResult struct {
Misconfiguration *types.Misconfiguration
HistoryPackages types.Packages
}
func (r *ConfigAnalysisResult) Merge(new *ConfigAnalysisResult) {
if new == nil {
return
}
if new.Misconfiguration != nil {
r.Misconfiguration = new.Misconfiguration
}
if new.HistoryPackages != nil {
r.HistoryPackages = new.HistoryPackages
}
}
type ConfigAnalyzerGroup struct {
configAnalyzers []ConfigAnalyzer
}
func NewConfigAnalyzerGroup(opts ConfigAnalyzerOptions) (ConfigAnalyzerGroup, error) {
var g ConfigAnalyzerGroup
for t, newConfigAnalyzer := range configAnalyzerConstructors {
// Skip the handler if it is disabled
if slices.Contains(opts.DisabledAnalyzers, t) {
continue
}
a, err := newConfigAnalyzer(opts)
if err != nil {
return ConfigAnalyzerGroup{}, xerrors.Errorf("config analyzer %s initialize error: %w", t, err)
}
g.configAnalyzers = append(g.configAnalyzers, a)
}
return g, nil
}
// AnalyzerVersions returns analyzer version identifier used for cache keys.
func (ag *ConfigAnalyzerGroup) AnalyzerVersions() map[string]int {
versions := map[string]int{}
for _, ca := range ag.configAnalyzers {
versions[string(ca.Type())] = ca.Version()
}
return versions
}
func (ag *ConfigAnalyzerGroup) AnalyzeImageConfig(ctx context.Context, targetOS types.OS, config *v1.ConfigFile) *ConfigAnalysisResult {
input := ConfigAnalysisInput{
OS: targetOS,
Config: config,
}
result := new(ConfigAnalysisResult)
for _, a := range ag.configAnalyzers {
if !a.Required(targetOS) {
continue
}
r, err := a.Analyze(ctx, input)
if err != nil {
log.Logger.Debugf("Image config analysis error: %s", err)
continue
}
result.Merge(r)
}
return result
}
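
For orientation, a caller builds the group once and hands it the image config together with the detected OS. A hedged usage sketch based only on the API in this file (identifiers outside it, such as disabled, osFound and imageConfig, are assumptions):

// Sketch: how an artifact implementation might drive the group.
g, err := analyzer.NewConfigAnalyzerGroup(analyzer.ConfigAnalyzerOptions{
	DisabledAnalyzers: disabled, // e.g. analyzer.TypeHistoryDockerfile when image-config scanning is off
})
if err != nil {
	return err
}
res := g.AnalyzeImageConfig(ctx, osFound, imageConfig) // imageConfig is a *v1.ConfigFile
_ = res.Misconfiguration // merged misconfiguration result, if any analyzer produced one
_ = res.HistoryPackages  // packages guessed from the image history (apk analyzer)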

View File

@@ -0,0 +1,160 @@
package analyzer_test
import (
"context"
"errors"
"os"
"testing"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
type mockConfigAnalyzer struct{}
func newMockConfigAnalyzer(_ analyzer.ConfigAnalyzerOptions) (analyzer.ConfigAnalyzer, error) {
return mockConfigAnalyzer{}, nil
}
func (mockConfigAnalyzer) Required(targetOS types.OS) bool {
return targetOS.Family == "alpine"
}
func (mockConfigAnalyzer) Analyze(_ context.Context, input analyzer.ConfigAnalysisInput) (*analyzer.ConfigAnalysisResult, error) {
if input.Config == nil {
return nil, errors.New("error")
}
return &analyzer.ConfigAnalysisResult{
HistoryPackages: types.Packages{
{
Name: "musl",
Version: "1.1.24-r2",
},
},
}, nil
}
func (mockConfigAnalyzer) Type() analyzer.Type {
return analyzer.Type("test")
}
func (mockConfigAnalyzer) Version() int {
return 1
}
func TestMain(m *testing.M) {
mock := mockConfigAnalyzer{}
analyzer.RegisterConfigAnalyzer(mock.Type(), newMockConfigAnalyzer)
defer analyzer.DeregisterConfigAnalyzer(mock.Type())
os.Exit(m.Run())
}
func TestAnalyzeConfig(t *testing.T) {
type args struct {
targetOS types.OS
config *v1.ConfigFile
disabledAnalyzers []analyzer.Type
filePatterns []string
}
tests := []struct {
name string
args args
want *analyzer.ConfigAnalysisResult
}{
{
name: "happy path",
args: args{
targetOS: types.OS{
Family: "alpine",
Name: "3.11.6",
},
config: &v1.ConfigFile{
OS: "linux",
},
},
want: &analyzer.ConfigAnalysisResult{
HistoryPackages: []types.Package{
{
Name: "musl",
Version: "1.1.24-r2",
},
},
},
},
{
name: "non-target OS",
args: args{
targetOS: types.OS{
Family: "debian",
Name: "9.2",
},
config: &v1.ConfigFile{
OS: "linux",
},
},
want: &analyzer.ConfigAnalysisResult{},
},
{
name: "Analyze returns an error",
args: args{
targetOS: types.OS{
Family: "alpine",
Name: "3.11.6",
},
},
want: &analyzer.ConfigAnalysisResult{},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a, err := analyzer.NewConfigAnalyzerGroup(analyzer.ConfigAnalyzerOptions{
FilePatterns: tt.args.filePatterns,
DisabledAnalyzers: tt.args.disabledAnalyzers,
})
require.NoError(t, err)
got := a.AnalyzeImageConfig(context.Background(), tt.args.targetOS, tt.args.config)
assert.Equal(t, tt.want, got)
})
}
}
func TestConfigAnalyzerGroup_AnalyzerVersions(t *testing.T) {
tests := []struct {
name string
disabled []analyzer.Type
want map[string]int
}{
{
name: "happy path",
disabled: []analyzer.Type{},
want: map[string]int{
"apk-command": 1,
"test": 1,
},
},
{
name: "disable analyzers",
disabled: []analyzer.Type{
analyzer.TypeAlpine,
analyzer.TypeApkCommand,
},
want: map[string]int{
"test": 1,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a, err := analyzer.NewConfigAnalyzerGroup(analyzer.ConfigAnalyzerOptions{
DisabledAnalyzers: tt.disabled,
})
require.NoError(t, err)
got := a.AnalyzerVersions()
assert.Equal(t, tt.want, got)
})
}
}

View File

@@ -95,6 +95,7 @@ const (
// Image Config // Image Config
// ============ // ============
TypeApkCommand Type = "apk-command" TypeApkCommand Type = "apk-command"
TypeHistoryDockerfile Type = "history-dockerfile"
// ================= // =================
// Structured Config // Structured Config

View File

@@ -1,6 +1,7 @@
package apk package apk
import ( import (
"context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io" "io"
@@ -24,17 +25,25 @@ const (
analyzerVersion = 1 analyzerVersion = 1
) )
var apkIndexArchiveURL = "https://raw.githubusercontent.com/knqyf263/apkIndex-archive/master/alpine/v%s/main/x86_64/history.json" var defaultApkIndexArchiveURL = "https://raw.githubusercontent." +
"com/knqyf263/apkIndex-archive/master/alpine/v%s/main/x86_64/history.json"
func init() { func init() {
analyzer.RegisterConfigAnalyzer(analyzer.TypeApkCommand, newAlpineCmdAnalyzer)
}
type alpineCmdAnalyzer struct {
apkIndexArchiveURL string
}
func newAlpineCmdAnalyzer(_ analyzer.ConfigAnalyzerOptions) (analyzer.ConfigAnalyzer, error) {
apkIndexArchiveURL := defaultApkIndexArchiveURL
if builtinos.Getenv(envApkIndexArchiveURL) != "" { if builtinos.Getenv(envApkIndexArchiveURL) != "" {
apkIndexArchiveURL = builtinos.Getenv(envApkIndexArchiveURL) apkIndexArchiveURL = builtinos.Getenv(envApkIndexArchiveURL)
} }
analyzer.RegisterConfigAnalyzer(&alpineCmdAnalyzer{}) return alpineCmdAnalyzer{apkIndexArchiveURL: apkIndexArchiveURL}, nil
} }
type alpineCmdAnalyzer struct{}
type apkIndex struct { type apkIndex struct {
Package map[string]archive Package map[string]archive
Provide provide Provide provide
@@ -59,7 +68,7 @@ type pkg struct {
type version map[string]int type version map[string]int
func (a alpineCmdAnalyzer) Analyze(input analyzer.ConfigAnalysisInput) (*analyzer.AnalysisResult, error) { func (a alpineCmdAnalyzer) Analyze(_ context.Context, input analyzer.ConfigAnalysisInput) (*analyzer.ConfigAnalysisResult, error) {
if input.Config == nil { if input.Config == nil {
return nil, nil return nil, nil
} }
@@ -75,13 +84,8 @@ func (a alpineCmdAnalyzer) Analyze(input analyzer.ConfigAnalysisInput) (*analyze
return nil, nil return nil, nil
} }
return &analyzer.AnalysisResult{ return &analyzer.ConfigAnalysisResult{
PackageInfos: []types.PackageInfo{ HistoryPackages: pkgs,
{
FilePath: types.HistoryPkgs,
Packages: pkgs,
},
},
}, nil }, nil
} }
func (a alpineCmdAnalyzer) fetchApkIndexArchive(targetOS types.OS) (*apkIndex, error) { func (a alpineCmdAnalyzer) fetchApkIndexArchive(targetOS types.OS) (*apkIndex, error) {
@@ -91,7 +95,7 @@ func (a alpineCmdAnalyzer) fetchApkIndexArchive(targetOS types.OS) (*apkIndex, e
osVer = osVer[:strings.LastIndex(osVer, ".")] osVer = osVer[:strings.LastIndex(osVer, ".")]
} }
url := fmt.Sprintf(apkIndexArchiveURL, osVer) url := fmt.Sprintf(a.apkIndexArchiveURL, osVer)
var reader io.Reader var reader io.Reader
if strings.HasPrefix(url, "file://") { if strings.HasPrefix(url, "file://") {
var err error var err error
@@ -193,7 +197,8 @@ func (a alpineCmdAnalyzer) resolveDependencies(apkIndexArchive *apkIndex, origin
return pkgs return pkgs
} }
func (a alpineCmdAnalyzer) resolveDependency(apkIndexArchive *apkIndex, pkgName string, seenPkgs map[string]struct{}) (pkgNames []string) { func (a alpineCmdAnalyzer) resolveDependency(apkIndexArchive *apkIndex, pkgName string,
seenPkgs map[string]struct{}) (pkgNames []string) {
pkg, ok := apkIndexArchive.Package[pkgName] pkg, ok := apkIndexArchive.Package[pkgName]
if !ok { if !ok {
return nil return nil
@@ -232,7 +237,8 @@ type historyVersion struct {
BuiltAt int BuiltAt int
} }
func (a alpineCmdAnalyzer) guessVersion(apkIndexArchive *apkIndex, originalPkgs []string, createdAt time.Time) (pkgs []types.Package) { func (a alpineCmdAnalyzer) guessVersion(apkIndexArchive *apkIndex, originalPkgs []string,
createdAt time.Time) (pkgs []types.Package) {
for _, pkg := range originalPkgs { for _, pkg := range originalPkgs {
archive, ok := apkIndexArchive.Package[pkg] archive, ok := apkIndexArchive.Package[pkg]
if !ok { if !ok {

View File

@@ -1,6 +1,7 @@
package apk package apk
import ( import (
"context"
"encoding/json" "encoding/json"
"net/http" "net/http"
"net/http/httptest" "net/http/httptest"
@@ -10,13 +11,14 @@ import (
"testing" "testing"
"time" "time"
v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/samber/lo"
"github.com/kylelemons/godebug/pretty"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/types"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/kylelemons/godebug/pretty"
"github.com/stretchr/testify/assert"
) )
var ( var (
@@ -564,11 +566,7 @@ var (
}, },
} }
wantPkgs = &analyzer.AnalysisResult{ wantPkgs = []types.Package{
PackageInfos: []types.PackageInfo{
{
FilePath: "pkgs-from-history",
Packages: []types.Package{
{ {
Name: "acl", Name: "acl",
Version: "2.2.52-r5", Version: "2.2.52-r5",
@@ -1021,9 +1019,6 @@ var (
Name: "zlib-dev", Name: "zlib-dev",
Version: "1.2.11-r1", Version: "1.2.11-r1",
}, },
},
},
},
} }
) )
@@ -1047,7 +1042,7 @@ func TestAnalyze(t *testing.T) {
var tests = map[string]struct { var tests = map[string]struct {
args args args args
apkIndexArchivePath string apkIndexArchivePath string
want *analyzer.AnalysisResult want types.Packages
}{ }{
"old": { "old": {
args: args{ args: args{
@@ -1085,17 +1080,18 @@ func TestAnalyze(t *testing.T) {
} }
for testName, v := range tests { for testName, v := range tests {
t.Run(testName, func(t *testing.T) { t.Run(testName, func(t *testing.T) {
apkIndexArchiveURL = v.apkIndexArchivePath t.Setenv(envApkIndexArchiveURL, v.apkIndexArchivePath)
a := alpineCmdAnalyzer{} a, err := newAlpineCmdAnalyzer(analyzer.ConfigAnalyzerOptions{})
actual, _ := a.Analyze(analyzer.ConfigAnalysisInput{ require.NoError(t, err)
result, err := a.Analyze(context.Background(), analyzer.ConfigAnalysisInput{
OS: v.args.targetOS, OS: v.args.targetOS,
Config: v.args.config, Config: v.args.config,
}) })
if actual != nil { require.NoError(t, err)
require.Equal(t, 1, len(actual.PackageInfos))
sort.Sort(actual.PackageInfos[0].Packages) got := lo.FromPtr(result)
} sort.Sort(got.HistoryPackages)
assert.Equal(t, v.want, actual) assert.Equal(t, v.want, got.HistoryPackages)
}) })
} }
} }

View File

@@ -0,0 +1,86 @@
package dockerfile
import (
"bytes"
"context"
"strings"
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/misconf"
)
const analyzerVersion = 1
func init() {
analyzer.RegisterConfigAnalyzer(analyzer.TypeHistoryDockerfile, newHistoryAnalyzer)
}
type historyAnalyzer struct {
scanner misconf.Scanner
}
func newHistoryAnalyzer(opts analyzer.ConfigAnalyzerOptions) (analyzer.ConfigAnalyzer, error) {
s, err := misconf.NewScanner(opts.FilePatterns, opts.MisconfScannerOption)
if err != nil {
return nil, xerrors.Errorf("misconfiguration scanner error: %w", err)
}
return &historyAnalyzer{
scanner: s,
}, nil
}
func (a *historyAnalyzer) Analyze(ctx context.Context, input analyzer.ConfigAnalysisInput) (*analyzer.
ConfigAnalysisResult, error) {
if input.Config == nil {
return nil, nil
}
dockerfile := new(bytes.Buffer)
for _, h := range input.Config.History {
var createdBy string
switch {
case strings.HasPrefix(h.CreatedBy, "/bin/sh -c #(nop)"):
// Instruction other than RUN
createdBy = strings.TrimPrefix(h.CreatedBy, "/bin/sh -c #(nop)")
case strings.HasPrefix(h.CreatedBy, "/bin/sh -c"):
// RUN instruction
createdBy = strings.ReplaceAll(h.CreatedBy, "/bin/sh -c", "RUN")
}
dockerfile.WriteString(strings.TrimSpace(createdBy) + "\n")
}
files := []types.File{
{
Type: types.Dockerfile,
Path: "Dockerfile",
Content: dockerfile.Bytes(),
},
}
misconfs, err := a.scanner.Scan(ctx, files)
if err != nil {
return nil, xerrors.Errorf("history scan error: %w", err)
}
// The result should be a single element as it passes one Dockerfile.
if len(misconfs) != 1 {
return nil, nil
}
return &analyzer.ConfigAnalysisResult{
Misconfiguration: &misconfs[0],
}, nil
}
func (a *historyAnalyzer) Required(_ types.OS) bool {
return true
}
func (a *historyAnalyzer) Type() analyzer.Type {
return analyzer.TypeHistoryDockerfile
}
func (a *historyAnalyzer) Version() int {
return analyzerVersion
}
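
To make the history rewrite concrete, a small sketch with a hypothetical history entry, mirroring the switch statement above:

// Hypothetical CreatedBy entries and the Dockerfile lines the analyzer reconstructs:
//   "/bin/sh -c #(nop) COPY file:abc in /app" -> "COPY file:abc in /app"
//   "/bin/sh -c apk add --no-cache curl"      -> "RUN apk add --no-cache curl"
createdBy := "/bin/sh -c apk add --no-cache curl"
line := strings.ReplaceAll(createdBy, "/bin/sh -c", "RUN")
dockerfile.WriteString(strings.TrimSpace(line) + "\n") // dockerfile is the bytes.Buffer built above

The resulting synthetic Dockerfile is then scanned with the same Dockerfile misconfiguration policies, which is how checks such as DS002 (missing USER) can be reported for an image even when no Dockerfile exists in the scanned filesystem.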

View File

@@ -0,0 +1,125 @@
package dockerfile
import (
"context"
"testing"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/types"
)
func Test_historyAnalyzer_Analyze(t *testing.T) {
tests := []struct {
name string
input analyzer.ConfigAnalysisInput
want *analyzer.ConfigAnalysisResult
wantErr bool
}{
{
name: "happy",
input: analyzer.ConfigAnalysisInput{
Config: &v1.ConfigFile{
History: []v1.History{
{
CreatedBy: "/bin/sh -c #(nop) ADD file:e4d600fc4c9c293efe360be7b30ee96579925d1b4634c94332e2ec73f7d8eca1 in /",
EmptyLayer: false,
},
{
CreatedBy: `/bin/sh -c #(nop) CMD [\"/bin/sh\"]`,
EmptyLayer: true,
},
},
},
},
want: &analyzer.ConfigAnalysisResult{
Misconfiguration: &types.Misconfiguration{
FileType: "dockerfile",
FilePath: "Dockerfile",
Failures: types.MisconfResults{
types.MisconfResult{
Namespace: "builtin.dockerfile.DS002",
Query: "data.builtin.dockerfile.DS002.deny",
Message: "Specify at least 1 USER command in Dockerfile with non-root user as argument",
PolicyMetadata: types.PolicyMetadata{
ID: "DS002",
AVDID: "AVD-DS-0002",
Type: "Dockerfile Security Check",
Title: "Image user should not be 'root'",
Description: "Running containers with 'root' user can lead to a container escape situation. It is a best practice to run containers as non-root users, which can be done by adding a 'USER' statement to the Dockerfile.",
Severity: "HIGH",
RecommendedActions: "Add 'USER <non root user name>' line to the Dockerfile",
References: []string{
"https://docs.docker." +
"com/develop/develop-images/dockerfile_best-practices/",
},
},
CauseMetadata: types.CauseMetadata{
Provider: "Dockerfile",
Service: "general",
},
},
types.MisconfResult{
Namespace: "builtin.dockerfile.DS005",
Query: "data.builtin.dockerfile.DS005.deny",
Message: "Consider using 'COPY file:e4d600fc4c9c293efe360be7b30ee96579925d1b4634c94332e2ec73f7d8eca1 in /' command instead of 'ADD file:e4d600fc4c9c293efe360be7b30ee96579925d1b4634c94332e2ec73f7d8eca1 in /'",
PolicyMetadata: types.PolicyMetadata{
ID: "DS005",
AVDID: "AVD-DS-0005",
Type: "Dockerfile Security Check",
Title: "ADD instead of COPY",
Description: "You should use COPY instead of ADD unless you want to extract a tar file. Note that an ADD command will extract a tar file, which adds the risk of Zip-based vulnerabilities. Accordingly, it is advised to use a COPY command, which does not extract tar files.",
Severity: "LOW",
RecommendedActions: "Use COPY instead of ADD",
References: []string{"https://docs.docker.com/engine/reference/builder/#add"},
},
CauseMetadata: types.CauseMetadata{
Provider: "Dockerfile",
Service: "general",
StartLine: 1,
EndLine: 1,
Code: types.Code{
Lines: []types.Line{
{
Number: 1,
Content: "ADD file:e4d600fc4c9c293efe360be7b30ee96579925d1b4634c94332e2ec73f7d8eca1 in /",
IsCause: true,
Truncated: false,
Highlighted: "\x1b[38;5;64mADD\x1b[0m file:e4d600fc4c9c293efe360be7b30ee96579925d1b4634c94332e2ec73f7d8eca1 in /",
FirstCause: true,
LastCause: true,
},
},
},
},
},
},
},
},
},
{
name: "nil config",
input: analyzer.ConfigAnalysisInput{
Config: nil,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
a, err := newHistoryAnalyzer(analyzer.ConfigAnalyzerOptions{})
require.NoError(t, err)
got, err := a.Analyze(context.Background(), tt.input)
if tt.wantErr {
assert.Error(t, err)
return
}
if got != nil && got.Misconfiguration != nil {
got.Misconfiguration.Successes = nil // Not compare successes in this test
}
assert.Equal(t, tt.want, got)
})
}
}

View File

@@ -27,14 +27,16 @@ func (a Applier) ApplyLayers(imageID string, layerKeys []string) (types.Artifact
} }
mergedLayer := ApplyLayers(layers) mergedLayer := ApplyLayers(layers)
imageInfo, _ := a.cache.GetArtifact(imageID) // nolint
mergedLayer.HistoryPackages = imageInfo.HistoryPackages
mergedLayer.ImageMisconfiguration = imageInfo.Misconfiguration
if !mergedLayer.OS.Detected() { if !mergedLayer.OS.Detected() {
return mergedLayer, analyzer.ErrUnknownOS // send back package and apps info regardless return mergedLayer, analyzer.ErrUnknownOS // send back package and apps info regardless
} else if mergedLayer.Packages == nil { } else if mergedLayer.Packages == nil {
return mergedLayer, analyzer.ErrNoPkgsDetected // send back package and apps info regardless return mergedLayer, analyzer.ErrNoPkgsDetected // send back package and apps info regardless
} }
imageInfo, _ := a.cache.GetArtifact(imageID) // nolint
mergedLayer.HistoryPackages = imageInfo.HistoryPackages
return mergedLayer, nil return mergedLayer, nil
} }

View File

@@ -25,7 +25,7 @@ func TestApplier_ApplyLayers(t *testing.T) {
name string name string
args args args args
getLayerExpectations []cache.LocalArtifactCacheGetBlobExpectation getLayerExpectations []cache.LocalArtifactCacheGetBlobExpectation
getImageExpectations []cache.LocalArtifactCacheGetArtifactExpectation getArtifactExpectations []cache.LocalArtifactCacheGetArtifactExpectation
want types.ArtifactDetail want types.ArtifactDetail
wantErr string wantErr string
}{ }{
@@ -126,7 +126,7 @@ func TestApplier_ApplyLayers(t *testing.T) {
}, },
}, },
}, },
getImageExpectations: []cache.LocalArtifactCacheGetArtifactExpectation{ getArtifactExpectations: []cache.LocalArtifactCacheGetArtifactExpectation{
{ {
Args: cache.LocalArtifactCacheGetArtifactArgs{ Args: cache.LocalArtifactCacheGetArtifactArgs{
ArtifactID: "sha256:4791503518dff090d6a82f7a5c1fd71c41146920e2562fb64308e17ab6834b7e", ArtifactID: "sha256:4791503518dff090d6a82f7a5c1fd71c41146920e2562fb64308e17ab6834b7e",
@@ -145,14 +145,20 @@ func TestApplier_ApplyLayers(t *testing.T) {
}, },
Packages: []types.Package{ Packages: []types.Package{
{ {
Name: "libc6", Version: "2.24-11+deb9u4", SrcName: "glibc", SrcVersion: "2.24-11+deb9u4", Name: "libc6",
Version: "2.24-11+deb9u4",
SrcName: "glibc",
SrcVersion: "2.24-11+deb9u4",
Layer: types.Layer{ Layer: types.Layer{
Digest: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5", Digest: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
DiffID: "sha256:aad63a9339440e7c3e1fff2b988991b9bfb81280042fa7f39a5e327023056819", DiffID: "sha256:aad63a9339440e7c3e1fff2b988991b9bfb81280042fa7f39a5e327023056819",
}, },
}, },
{ {
Name: "tzdata", Version: "2019a-0+deb9u1", SrcName: "tzdata", SrcVersion: "2019a-0+deb9u1", Name: "tzdata",
Version: "2019a-0+deb9u1",
SrcName: "tzdata",
SrcVersion: "2019a-0+deb9u1",
Layer: types.Layer{ Layer: types.Layer{
Digest: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02", Digest: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
DiffID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72", DiffID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
@@ -211,11 +217,25 @@ func TestApplier_ApplyLayers(t *testing.T) {
{ {
FilePath: "lib/apk/db/installed", FilePath: "lib/apk/db/installed",
Packages: []types.Package{ Packages: []types.Package{
{Name: "musl", Version: "1.1.22-r3"}, {
{Name: "busybox", Version: "1.30.1-r3"}, Name: "musl",
{Name: "openssl", Version: "1.1.1d-r2"}, Version: "1.1.22-r3",
{Name: "libcrypto1.1", Version: "1.1.1d-r2"}, },
{Name: "libssl1.1", Version: "1.1.1d-r2"}, {
Name: "busybox",
Version: "1.30.1-r3",
},
{
Name: "openssl",
Version: "1.1.1d-r2",
},
{
Name: "libcrypto1.1",
Version: "1.1.1d-r2",
},
{
Name: "libssl1.1",
Version: "1.1.1d-r2",
}, },
}, },
}, },
@@ -223,7 +243,8 @@ func TestApplier_ApplyLayers(t *testing.T) {
}, },
}, },
}, },
getImageExpectations: []cache.LocalArtifactCacheGetArtifactExpectation{ },
getArtifactExpectations: []cache.LocalArtifactCacheGetArtifactExpectation{
{ {
Args: cache.LocalArtifactCacheGetArtifactArgs{ Args: cache.LocalArtifactCacheGetArtifactArgs{
ArtifactID: "sha256:3bb70bd5fb37e05b8ecaaace5d6a6b5ec7834037c07ecb5907355c23ab70352d", ArtifactID: "sha256:3bb70bd5fb37e05b8ecaaace5d6a6b5ec7834037c07ecb5907355c23ab70352d",
@@ -232,14 +253,38 @@ func TestApplier_ApplyLayers(t *testing.T) {
ArtifactInfo: types.ArtifactInfo{ ArtifactInfo: types.ArtifactInfo{
SchemaVersion: 1, SchemaVersion: 1,
HistoryPackages: []types.Package{ HistoryPackages: []types.Package{
{Name: "musl", Version: "1.1.23"}, {
{Name: "busybox", Version: "1.31"}, Name: "musl",
{Name: "ncurses-libs", Version: "6.1_p20190518-r0"}, Version: "1.1.23",
{Name: "ncurses-terminfo-base", Version: "6.1_p20190518-r0"}, },
{Name: "ncurses", Version: "6.1_p20190518-r0"}, {
{Name: "ncurses-terminfo", Version: "6.1_p20190518-r0"}, Name: "busybox",
{Name: "bash", Version: "5.0.0-r0"}, Version: "1.31",
{Name: "readline", Version: "8.0.0-r0"}, },
{
Name: "ncurses-libs",
Version: "6.1_p20190518-r0",
},
{
Name: "ncurses-terminfo-base",
Version: "6.1_p20190518-r0",
},
{
Name: "ncurses",
Version: "6.1_p20190518-r0",
},
{
Name: "ncurses-terminfo",
Version: "6.1_p20190518-r0",
},
{
Name: "bash",
Version: "5.0.0-r0",
},
{
Name: "readline",
Version: "8.0.0-r0",
},
}, },
}, },
}, },
@@ -293,14 +338,38 @@ func TestApplier_ApplyLayers(t *testing.T) {
}, },
}, },
HistoryPackages: []types.Package{ HistoryPackages: []types.Package{
{Name: "musl", Version: "1.1.23"}, {
{Name: "busybox", Version: "1.31"}, Name: "musl",
{Name: "ncurses-libs", Version: "6.1_p20190518-r0"}, Version: "1.1.23",
{Name: "ncurses-terminfo-base", Version: "6.1_p20190518-r0"}, },
{Name: "ncurses", Version: "6.1_p20190518-r0"}, {
{Name: "ncurses-terminfo", Version: "6.1_p20190518-r0"}, Name: "busybox",
{Name: "bash", Version: "5.0.0-r0"}, Version: "1.31",
{Name: "readline", Version: "8.0.0-r0"}, },
{
Name: "ncurses-libs",
Version: "6.1_p20190518-r0",
},
{
Name: "ncurses-terminfo-base",
Version: "6.1_p20190518-r0",
},
{
Name: "ncurses",
Version: "6.1_p20190518-r0",
},
{
Name: "ncurses-terminfo",
Version: "6.1_p20190518-r0",
},
{
Name: "bash",
Version: "5.0.0-r0",
},
{
Name: "readline",
Version: "8.0.0-r0",
},
}, },
}, },
}, },
@@ -348,6 +417,18 @@ func TestApplier_ApplyLayers(t *testing.T) {
"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7", "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7",
}, },
}, },
getArtifactExpectations: []cache.LocalArtifactCacheGetArtifactExpectation{
{
Args: cache.LocalArtifactCacheGetArtifactArgs{
ArtifactID: "sha256:4791503518dff090d6a82f7a5c1fd71c41146920e2562fb64308e17ab6834b7e",
},
Returns: cache.LocalArtifactCacheGetArtifactReturns{
ArtifactInfo: types.ArtifactInfo{
SchemaVersion: 1,
},
},
},
},
getLayerExpectations: []cache.LocalArtifactCacheGetBlobExpectation{ getLayerExpectations: []cache.LocalArtifactCacheGetBlobExpectation{
{ {
Args: cache.LocalArtifactCacheGetBlobArgs{ Args: cache.LocalArtifactCacheGetBlobArgs{
@@ -434,14 +515,20 @@ func TestApplier_ApplyLayers(t *testing.T) {
want: types.ArtifactDetail{
    Packages: []types.Package{
        {
-           Name: "libc6", Version: "2.24-11+deb9u4", SrcName: "glibc", SrcVersion: "2.24-11+deb9u4",
+           Name:       "libc6",
+           Version:    "2.24-11+deb9u4",
+           SrcName:    "glibc",
+           SrcVersion: "2.24-11+deb9u4",
            Layer: types.Layer{
                Digest: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
                DiffID: "sha256:aad63a9339440e7c3e1fff2b988991b9bfb81280042fa7f39a5e327023056819",
            },
        },
        {
-           Name: "tzdata", Version: "2019a-0+deb9u1", SrcName: "tzdata", SrcVersion: "2019a-0+deb9u1",
+           Name:       "tzdata",
+           Version:    "2019a-0+deb9u1",
+           SrcName:    "tzdata",
+           SrcVersion: "2019a-0+deb9u1",
            Layer: types.Layer{
                Digest: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
                DiffID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
@@ -478,10 +565,23 @@ func TestApplier_ApplyLayers(t *testing.T) {
{
    name: "sad path no package detected",
    args: args{
+       imageID: "sha256:4791503518dff090d6a82f7a5c1fd71c41146920e2562fb64308e17ab6834b7e",
        layerIDs: []string{
            "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
        },
    },
+   getArtifactExpectations: []cache.LocalArtifactCacheGetArtifactExpectation{
+       {
+           Args: cache.LocalArtifactCacheGetArtifactArgs{
+               ArtifactID: "sha256:4791503518dff090d6a82f7a5c1fd71c41146920e2562fb64308e17ab6834b7e",
+           },
+           Returns: cache.LocalArtifactCacheGetArtifactReturns{
+               ArtifactInfo: types.ArtifactInfo{
+                   SchemaVersion: 1,
+               },
+           },
+       },
+   },
    getLayerExpectations: []cache.LocalArtifactCacheGetBlobExpectation{
{ {
Args: cache.LocalArtifactCacheGetBlobArgs{ Args: cache.LocalArtifactCacheGetBlobArgs{
@@ -515,6 +615,18 @@ func TestApplier_ApplyLayers(t *testing.T) {
"sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5", "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5",
}, },
}, },
getArtifactExpectations: []cache.LocalArtifactCacheGetArtifactExpectation{
{
Args: cache.LocalArtifactCacheGetArtifactArgs{
ArtifactID: "sha256:4791503518dff090d6a82f7a5c1fd71c41146920e2562fb64308e17ab6834b7e",
},
Returns: cache.LocalArtifactCacheGetArtifactReturns{
ArtifactInfo: types.ArtifactInfo{
SchemaVersion: 1,
},
},
},
},
getLayerExpectations: []cache.LocalArtifactCacheGetBlobExpectation{ getLayerExpectations: []cache.LocalArtifactCacheGetBlobExpectation{
{ {
Args: cache.LocalArtifactCacheGetBlobArgs{ Args: cache.LocalArtifactCacheGetBlobArgs{
@@ -605,7 +717,10 @@ func TestApplier_ApplyLayers(t *testing.T) {
want: types.ArtifactDetail{
    Packages: []types.Package{
        {
-           Name: "tzdata", Version: "2019a-0+deb9u1", SrcName: "tzdata", SrcVersion: "2019a-0+deb9u1",
+           Name:       "tzdata",
+           Version:    "2019a-0+deb9u1",
+           SrcName:    "tzdata",
+           SrcVersion: "2019a-0+deb9u1",
            Layer: types.Layer{
                Digest: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
                DiffID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72",
@@ -679,7 +794,7 @@ func TestApplier_ApplyLayers(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
    c := new(cache.MockLocalArtifactCache)
    c.ApplyGetBlobExpectations(tt.getLayerExpectations)
-   c.ApplyGetArtifactExpectations(tt.getImageExpectations)
+   c.ApplyGetArtifactExpectations(tt.getArtifactExpectations)
    a := applier.NewApplier(c)
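For orientation, this is how a single table entry above drives the applier under test; the ApplyLayers signature is assumed from the args struct (imageID plus layerIDs) and may differ slightly in the real code:

c := new(cache.MockLocalArtifactCache)
c.ApplyGetArtifactExpectations(tt.getArtifactExpectations) // image config info, keyed by the image ID
c.ApplyGetBlobExpectations(tt.getLayerExpectations)        // per-layer blob info, keyed by layer IDs

a := applier.NewApplier(c)
got, err := a.ApplyLayers(tt.args.imageID, tt.args.layerIDs) // assumed signature
require.NoError(t, err)
assert.Equal(t, tt.want, got)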
@@ -28,7 +28,8 @@ type Artifact struct {
    image          types.Image
    cache          cache.ArtifactCache
    walker         walker.LayerTar
-   analyzer       analyzer.AnalyzerGroup
+   analyzer       analyzer.AnalyzerGroup       // analyzer for files in container image
+   configAnalyzer analyzer.ConfigAnalyzerGroup // analyzer for container image config
    handlerManager handler.Manager
    artifactOption artifact.Option
@@ -57,11 +58,21 @@ func NewArtifact(img types.Image, c cache.ArtifactCache, opt artifact.Option) (a
        return nil, xerrors.Errorf("analyzer group error: %w", err)
    }

+   ca, err := analyzer.NewConfigAnalyzerGroup(analyzer.ConfigAnalyzerOptions{
+       FilePatterns:         opt.FilePatterns,
+       DisabledAnalyzers:    opt.DisabledAnalyzers,
+       MisconfScannerOption: opt.MisconfScannerOption,
+   })
+   if err != nil {
+       return nil, xerrors.Errorf("config analyzer group error: %w", err)
+   }

    return Artifact{
        image:          img,
        cache:          c,
        walker:         walker.NewLayerTar(opt.SkipFiles, opt.SkipDirs, opt.Slow),
        analyzer:       a,
+       configAnalyzer: ca,
        handlerManager: handlerManager,
        artifactOption: opt,
@@ -119,7 +130,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error)
missingImageKey = "" missingImageKey = ""
} }
if err = a.inspect(ctx, missingImageKey, missingLayers, baseDiffIDs, layerKeyMap); err != nil { if err = a.inspect(ctx, missingImageKey, missingLayers, baseDiffIDs, layerKeyMap, configFile); err != nil {
return types.ArtifactReference{}, xerrors.Errorf("analyze error: %w", err) return types.ArtifactReference{}, xerrors.Errorf("analyze error: %w", err)
} }
@@ -144,7 +155,7 @@ func (Artifact) Clean(_ types.ArtifactReference) error {
func (a Artifact) calcCacheKeys(imageID string, diffIDs []string) (string, []string, error) {
    // Pass an empty config scanner option so that the cache key can be the same, even when policies are updated.
-   imageKey, err := cache.CalcKey(imageID, a.analyzer.ImageConfigAnalyzerVersions(), nil, artifact.Option{})
+   imageKey, err := cache.CalcKey(imageID, a.configAnalyzer.AnalyzerVersions(), nil, artifact.Option{})
    if err != nil {
        return "", nil, err
    }
@@ -195,7 +206,8 @@ func (a Artifact) consolidateCreatedBy(diffIDs, layerKeys []string, configFile *
    return layerKeyMap
}

-func (a Artifact) inspect(ctx context.Context, missingImage string, layerKeys, baseDiffIDs []string, layerKeyMap map[string]LayerInfo) error {
+func (a Artifact) inspect(ctx context.Context, missingImage string, layerKeys, baseDiffIDs []string,
+   layerKeyMap map[string]LayerInfo, configFile *v1.ConfigFile) error {
    done := make(chan struct{})
    errCh := make(chan error)
    limit := semaphore.New(a.artifactOption.Slow)
@@ -246,7 +258,7 @@ func (a Artifact) inspect(ctx context.Context, missingImage string, layerKeys, b
    }

    if missingImage != "" {
-       if err := a.inspectConfig(missingImage, osFound); err != nil {
+       if err := a.inspectConfig(ctx, missingImage, osFound, configFile); err != nil {
            return xerrors.Errorf("unable to analyze config: %w", err)
        }
    }
@@ -357,22 +369,8 @@ func (a Artifact) isCompressed(l v1.Layer) bool {
    return !uncompressed
}

-func (a Artifact) inspectConfig(imageID string, osFound types.OS) error {
-   config, err := a.image.ConfigFile()
-   if err != nil {
-       return xerrors.Errorf("unable to get config blob: %w", err)
-   }
-   result := lo.FromPtr(a.analyzer.AnalyzeImageConfig(osFound, config))
-   // Identify packages from history.
-   var historyPkgs types.Packages
-   for _, pi := range result.PackageInfos {
-       if pi.FilePath == types.HistoryPkgs {
-           historyPkgs = pi.Packages
-           break
-       }
-   }
+func (a Artifact) inspectConfig(ctx context.Context, imageID string, osFound types.OS, config *v1.ConfigFile) error {
+   result := lo.FromPtr(a.configAnalyzer.AnalyzeImageConfig(ctx, osFound, config))

    info := types.ArtifactInfo{
        SchemaVersion: types.ArtifactJSONSchemaVersion,
@@ -380,10 +378,11 @@ func (a Artifact) inspectConfig(imageID string, osFound types.OS) error {
        Created:       config.Created.Time,
        DockerVersion: config.DockerVersion,
        OS:            config.OS,
-       HistoryPackages: historyPkgs,
+       Misconfiguration: result.Misconfiguration,
+       HistoryPackages:  result.HistoryPackages,
    }
-   if err = a.cache.PutArtifact(imageID, info); err != nil {
+   if err := a.cache.PutArtifact(imageID, info); err != nil {
        return xerrors.Errorf("failed to put image info into the cache: %w", err)
    }
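In short, the image config is now fetched once in Inspect and threaded through inspect into inspectConfig, and the per-image cache entry carries the config-analyzer output directly. A condensed sketch of the resulting flow, grounded in the hunks above:

result := lo.FromPtr(a.configAnalyzer.AnalyzeImageConfig(ctx, osFound, configFile))
info := types.ArtifactInfo{
    SchemaVersion:    types.ArtifactJSONSchemaVersion,
    Misconfiguration: result.Misconfiguration, // Dockerfile-style checks against the image config
    HistoryPackages:  result.HistoryPackages,  // packages recovered from RUN history
}
if err := a.cache.PutArtifact(imageID, info); err != nil {
    return xerrors.Errorf("failed to put image info into the cache: %w", err)
}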
@@ -12,8 +12,8 @@ import (
"golang.org/x/xerrors" "golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/command/apk"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/all"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/imgconf/apk"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/php/composer" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/php/composer"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/ruby/bundler" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/language/ruby/bundler"
_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/licensing" _ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/licensing"
@@ -1,40 +1,16 @@
package misconf package misconf
import ( import (
"bytes"
"context" "context"
_ "embed" _ "embed"
"fmt"
"io/fs"
"os"
"path/filepath"
"sort"
"strings"
"github.com/samber/lo"
"golang.org/x/xerrors" "golang.org/x/xerrors"
"github.com/aquasecurity/memoryfs"
"github.com/aquasecurity/defsec/pkg/scanners/azure/arm"
"github.com/aquasecurity/defsec/pkg/detection"
"github.com/aquasecurity/defsec/pkg/scan"
"github.com/aquasecurity/defsec/pkg/scanners"
cfscanner "github.com/aquasecurity/defsec/pkg/scanners/cloudformation"
cfparser "github.com/aquasecurity/defsec/pkg/scanners/cloudformation/parser"
dfscanner "github.com/aquasecurity/defsec/pkg/scanners/dockerfile"
"github.com/aquasecurity/defsec/pkg/scanners/helm"
k8sscanner "github.com/aquasecurity/defsec/pkg/scanners/kubernetes"
"github.com/aquasecurity/defsec/pkg/scanners/options"
tfscanner "github.com/aquasecurity/defsec/pkg/scanners/terraform"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/artifact" "github.com/aquasecurity/trivy/pkg/fanal/artifact"
"github.com/aquasecurity/trivy/pkg/fanal/handler" "github.com/aquasecurity/trivy/pkg/fanal/handler"
"github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/log" "github.com/aquasecurity/trivy/pkg/misconf"
) )
func init() { func init() {
@@ -44,205 +20,19 @@ func init() {
const version = 1 const version = 1
type misconfPostHandler struct {
-   filePatterns []string
-   scanners     map[string]scanners.FSScanner
+   scanner misconf.Scanner
}
// for a given set of paths, find the most specific filesystem path that contains all the descendants
// the function also returns a filtered version of the input paths that are compatible with a fs.FS
// using the resultant target path. This means they will always use "/" as a separator
func findFSTarget(paths []string) (string, []string, error) {
if len(paths) == 0 {
return "", nil, xerrors.New("must specify at least one path")
}
var absPaths []string
var minSegmentCount int
for _, relPath := range paths {
abs, err := filepath.Abs(relPath)
if err != nil {
return "", nil, xerrors.Errorf("failed to derive absolute path from '%s': %w", relPath, err)
}
count := len(strings.Split(filepath.ToSlash(abs), "/"))
if count < minSegmentCount || minSegmentCount == 0 {
minSegmentCount = count
}
absPaths = append(absPaths, abs)
}
var outputSegments []string
for i := 0; i < minSegmentCount; i++ {
required := strings.Split(absPaths[0], string(filepath.Separator))[i]
match := true
for _, path := range absPaths[1:] {
actual := strings.Split(path, string(filepath.Separator))[i]
if required != actual {
match = false
break
}
}
if !match {
break
}
outputSegments = append(outputSegments, required)
}
slashTarget := strings.Join(outputSegments, "/")
if slashTarget == "" {
slashTarget = string(filepath.Separator)
}
var cleanPaths []string
for _, path := range absPaths {
path := filepath.ToSlash(path)
path = strings.TrimPrefix(path, slashTarget)
path = strings.TrimPrefix(path, "/")
if path == "" {
path = "."
}
cleanPaths = append(cleanPaths, path)
}
// we don't use filepath.Join here as we need to maintain the root "/"
target := strings.Join(outputSegments, string(filepath.Separator))
if target == "" || filepath.VolumeName(target) == target {
target += string(filepath.Separator)
}
return target, cleanPaths, nil
}
func createPolicyFS(policyPaths []string) (fs.FS, []string, error) {
if len(policyPaths) == 0 {
return nil, nil, nil
}
var outsideCWD bool
for _, path := range policyPaths {
if strings.Contains(path, "..") || strings.HasPrefix(path, "/") || (len(path) > 1 && path[1] == ':') {
outsideCWD = true
break
}
}
// all policy paths are inside the CWD, so create a filesystem from CWD to load from
if !outsideCWD {
cwd, err := os.Getwd()
if err != nil {
return nil, nil, err
}
var cleanPaths []string
for _, path := range policyPaths {
path = strings.TrimPrefix(path, ".")
path = strings.TrimPrefix(path, "/")
cleanPaths = append(cleanPaths, path)
}
return os.DirFS(cwd), cleanPaths, nil
}
target, cleanPaths, err := findFSTarget(policyPaths)
if err != nil {
return nil, nil, err
}
return os.DirFS(target), cleanPaths, nil
} }
func newMisconfPostHandler(artifactOpt artifact.Option) (handler.PostHandler, error) {
-   opt := artifactOpt.MisconfScannerOption
-   opts := []options.ScannerOption{
-       options.ScannerWithSkipRequiredCheck(true),
-       options.ScannerWithEmbeddedPolicies(!artifactOpt.MisconfScannerOption.DisableEmbeddedPolicies),
-   }
-   policyFS, policyPaths, err := createPolicyFS(opt.PolicyPaths)
+   s, err := misconf.NewScanner(artifactOpt.FilePatterns, artifactOpt.MisconfScannerOption)
    if err != nil {
-       return nil, err
+       return nil, xerrors.Errorf("scanner init error: %w", err)
    }
if policyFS != nil {
opts = append(opts, options.ScannerWithPolicyFilesystem(policyFS))
}
if opt.Trace {
opts = append(opts, options.ScannerWithPerResultTracing(true))
}
if opt.RegoOnly {
opts = append(opts, options.ScannerWithRegoOnly(true))
}
if len(policyPaths) > 0 {
opts = append(opts, options.ScannerWithPolicyDirs(policyPaths...))
}
if len(opt.DataPaths) > 0 {
opts = append(opts, options.ScannerWithDataDirs(opt.DataPaths...))
}
if len(opt.Namespaces) > 0 {
opts = append(opts, options.ScannerWithPolicyNamespaces(opt.Namespaces...))
}
helmOpts := addHelmOpts(opts, artifactOpt.MisconfScannerOption)
tfOpts := addTFOpts(opts, artifactOpt.MisconfScannerOption)
    return misconfPostHandler{
-       filePatterns: artifactOpt.FilePatterns,
-       scanners: map[string]scanners.FSScanner{
-           types.AzureARM:       arm.New(opts...),
-           types.Terraform:      tfscanner.New(tfOpts...),
-           types.CloudFormation: cfscanner.New(opts...),
-           types.Dockerfile:     dfscanner.NewScanner(opts...),
-           types.Kubernetes:     k8sscanner.NewScanner(opts...),
-           types.Helm:           helm.New(helmOpts...),
-       },
+       scanner: s,
    }, nil
}
func addTFOpts(opts []options.ScannerOption, scannerOption config.ScannerOption) []options.ScannerOption {
if len(scannerOption.TerraformTFVars) > 0 {
opts = append(opts, tfscanner.ScannerWithTFVarsPaths(scannerOption.TerraformTFVars...))
}
return opts
}
func addHelmOpts(opts []options.ScannerOption, scannerOption config.ScannerOption) []options.ScannerOption {
if len(scannerOption.HelmValueFiles) > 0 {
opts = append(opts, helm.ScannerWithValuesFile(scannerOption.HelmValueFiles...))
}
if len(scannerOption.HelmValues) > 0 {
opts = append(opts, helm.ScannerWithValues(scannerOption.HelmValues...))
}
if len(scannerOption.HelmFileValues) > 0 {
opts = append(opts, helm.ScannerWithFileValues(scannerOption.HelmFileValues...))
}
if len(scannerOption.HelmStringValues) > 0 {
opts = append(opts, helm.ScannerWithStringValues(scannerOption.HelmStringValues...))
}
return opts
}
var enabledDefsecTypes = map[detection.FileType]string{
detection.FileTypeAzureARM: types.AzureARM,
detection.FileTypeCloudFormation: types.CloudFormation,
detection.FileTypeTerraform: types.Terraform,
detection.FileTypeDockerfile: types.Dockerfile,
detection.FileTypeKubernetes: types.Kubernetes,
detection.FileTypeHelm: types.Helm,
}
func (h misconfPostHandler) hasCustomPatternForType(t string) bool {
for _, pattern := range h.filePatterns {
if strings.HasPrefix(pattern, t+":") {
return true
}
}
return false
}
// Handle detects misconfigurations. // Handle detects misconfigurations.
func (h misconfPostHandler) Handle(ctx context.Context, result *analyzer.AnalysisResult, blob *types.BlobInfo) error { func (h misconfPostHandler) Handle(ctx context.Context, result *analyzer.AnalysisResult, blob *types.BlobInfo) error {
files, ok := result.Files[h.Type()] files, ok := result.Files[h.Type()]
@@ -250,69 +40,9 @@ func (h misconfPostHandler) Handle(ctx context.Context, result *analyzer.Analysi
return nil return nil
} }
mapMemoryFS := make(map[string]*memoryfs.FS) misconfs, err := h.scanner.Scan(ctx, files)
for t := range h.scanners {
mapMemoryFS[t] = memoryfs.New()
}
for _, file := range files {
for defsecType, localType := range enabledDefsecTypes {
buffer := bytes.NewReader(file.Content)
if !h.hasCustomPatternForType(localType) && !detection.IsType(file.Path, buffer, defsecType) {
continue
}
// Replace with more detailed config type
file.Type = localType
if memfs, ok := mapMemoryFS[file.Type]; ok {
if filepath.Dir(file.Path) != "." {
if err := memfs.MkdirAll(filepath.Dir(file.Path), os.ModePerm); err != nil {
return xerrors.Errorf("memoryfs mkdir error: %w", err)
}
}
if err := memfs.WriteFile(file.Path, file.Content, os.ModePerm); err != nil {
return xerrors.Errorf("memoryfs write error: %w", err)
}
}
}
}
var misconfs []types.Misconfiguration
for t, scanner := range h.scanners {
results, err := scanner.ScanFS(ctx, mapMemoryFS[t], ".")
if err != nil { if err != nil {
if _, ok := err.(*cfparser.InvalidContentError); ok { return xerrors.Errorf("misconfiguration scan error: %w", err)
log.Logger.Errorf("scan %q was broken with InvalidContentError: %v", scanner.Name(), err)
continue
}
return xerrors.Errorf("scan config error: %w", err)
}
misconfs = append(misconfs, ResultsToMisconf(t, scanner.Name(), results)...)
}
// Add misconfigurations
for _, misconf := range misconfs {
sort.Slice(misconf.Successes, func(i, j int) bool {
if misconf.Successes[i].AVDID == misconf.Successes[j].AVDID {
return misconf.Successes[i].StartLine < misconf.Successes[j].StartLine
}
return misconf.Successes[i].AVDID < misconf.Successes[j].AVDID
})
sort.Slice(misconf.Warnings, func(i, j int) bool {
if misconf.Warnings[i].AVDID == misconf.Warnings[j].AVDID {
return misconf.Warnings[i].StartLine < misconf.Warnings[j].StartLine
}
return misconf.Warnings[i].AVDID < misconf.Warnings[j].AVDID
})
sort.Slice(misconf.Failures, func(i, j int) bool {
if misconf.Failures[i].AVDID == misconf.Failures[j].AVDID {
return misconf.Failures[i].StartLine < misconf.Failures[j].StartLine
}
return misconf.Failures[i].AVDID < misconf.Failures[j].AVDID
})
} }
blob.Misconfigurations = misconfs blob.Misconfigurations = misconfs
@@ -331,92 +61,3 @@ func (h misconfPostHandler) Type() types.HandlerType {
func (h misconfPostHandler) Priority() int { func (h misconfPostHandler) Priority() int {
return types.MisconfPostHandlerPriority return types.MisconfPostHandlerPriority
} }
// This function is exported for trivy-plugin-aqua purposes only
func ResultsToMisconf(configType string, scannerName string, results scan.Results) []types.Misconfiguration {
misconfs := map[string]types.Misconfiguration{}
for _, result := range results {
flattened := result.Flatten()
query := fmt.Sprintf("data.%s.%s", result.RegoNamespace(), result.RegoRule())
ruleID := result.Rule().AVDID
if result.RegoNamespace() != "" && len(result.Rule().Aliases) > 0 {
ruleID = result.Rule().Aliases[0]
}
cause := NewCauseWithCode(result)
misconfResult := types.MisconfResult{
Namespace: result.RegoNamespace(),
Query: query,
Message: flattened.Description,
PolicyMetadata: types.PolicyMetadata{
ID: ruleID,
AVDID: result.Rule().AVDID,
Type: fmt.Sprintf("%s Security Check", scannerName),
Title: result.Rule().Summary,
Description: result.Rule().Explanation,
Severity: string(flattened.Severity),
RecommendedActions: flattened.Resolution,
References: flattened.Links,
},
CauseMetadata: cause,
Traces: result.Traces(),
}
filePath := flattened.Location.Filename
misconf, ok := misconfs[filePath]
if !ok {
misconf = types.Misconfiguration{
FileType: configType,
FilePath: filePath,
}
}
if flattened.Warning {
misconf.Warnings = append(misconf.Warnings, misconfResult)
} else {
switch flattened.Status {
case scan.StatusPassed:
misconf.Successes = append(misconf.Successes, misconfResult)
case scan.StatusIgnored:
misconf.Exceptions = append(misconf.Exceptions, misconfResult)
case scan.StatusFailed:
misconf.Failures = append(misconf.Failures, misconfResult)
}
}
misconfs[filePath] = misconf
}
return types.ToMisconfigurations(misconfs)
}
func NewCauseWithCode(underlying scan.Result) types.CauseMetadata {
flat := underlying.Flatten()
cause := types.CauseMetadata{
Resource: flat.Resource,
Provider: flat.RuleProvider.DisplayName(),
Service: flat.RuleService,
StartLine: flat.Location.StartLine,
EndLine: flat.Location.EndLine,
}
if code, err := underlying.GetCode(); err == nil {
cause.Code = types.Code{
Lines: lo.Map(code.Lines, func(l scan.Line, i int) types.Line {
return types.Line{
Number: l.Number,
Content: l.Content,
IsCause: l.IsCause,
Annotation: l.Annotation,
Truncated: l.Truncated,
Highlighted: l.Highlighted,
FirstCause: l.FirstCause,
LastCause: l.LastCause,
}
}),
}
}
return cause
}
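After this refactor the post handler is only a thin adapter around the new pkg/misconf package added later in this commit. Reassembled from the hunks above, with a couple of context lines inferred, the remaining handler is roughly:

type misconfPostHandler struct {
    scanner misconf.Scanner
}

func newMisconfPostHandler(artifactOpt artifact.Option) (handler.PostHandler, error) {
    s, err := misconf.NewScanner(artifactOpt.FilePatterns, artifactOpt.MisconfScannerOption)
    if err != nil {
        return nil, xerrors.Errorf("scanner init error: %w", err)
    }
    return misconfPostHandler{scanner: s}, nil
}

// Handle delegates misconfiguration detection to the shared scanner.
func (h misconfPostHandler) Handle(ctx context.Context, result *analyzer.AnalysisResult, blob *types.BlobInfo) error {
    files, ok := result.Files[h.Type()]
    if !ok {
        return nil
    }
    misconfs, err := h.scanner.Scan(ctx, files)
    if err != nil {
        return xerrors.Errorf("misconfiguration scan error: %w", err)
    }
    blob.Misconfigurations = misconfs
    return nil
}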
@@ -199,6 +199,9 @@ type ArtifactInfo struct {
    DockerVersion string
    OS            string

+   // Misconfiguration holds misconfiguration in container image config
+   Misconfiguration *Misconfiguration `json:",omitempty"`
+
    // HistoryPackages are packages extracted from RUN instructions
    HistoryPackages Packages `json:",omitempty"`
}
@@ -243,6 +246,9 @@ type ArtifactDetail struct {
    Secrets  []Secret      `json:",omitempty"`
    Licenses []LicenseFile `json:",omitempty"`

+   // ImageMisconfiguration holds misconfigurations in container image config
+   ImageMisconfiguration *Misconfiguration `json:",omitempty"`
+
    // HistoryPackages are packages extracted from RUN instructions
    HistoryPackages []Package `json:",omitempty"`
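Both new fields are tagged json:",omitempty", so cached artifact records only grow when the image config scan actually produced output. A purely illustrative ArtifactInfo value (field values invented for the example):

info := types.ArtifactInfo{
    SchemaVersion: 1,
    OS:            "linux",
    Misconfiguration: &types.Misconfiguration{ // set only when image-config checks ran
        FileType: types.Dockerfile,
        FilePath: "Dockerfile",
    },
    HistoryPackages: types.Packages{
        {Name: "bash", Version: "5.0.0-r0"}, // recovered from a RUN instruction
    },
}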
@@ -79,7 +79,4 @@ const (
PubSpecLock = "pubspec.lock" PubSpecLock = "pubspec.lock"
MixLock = "mix.lock" MixLock = "mix.lock"
// Container image config
HistoryPkgs = "pkgs-from-history"
) )
@@ -1,11 +1,23 @@
package flag package flag
import (
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/types"
)
// e.g. config yaml // e.g. config yaml
// image: // image:
// removed-pkgs: true // removed-pkgs: true
// input: "/path/to/alpine" // input: "/path/to/alpine"
var ( var (
ImageConfigScannersFlag = Flag{
Name: "image-config-scanners",
ConfigName: "image.image-config-scanners",
Value: "",
Usage: "comma-separated list of what security issues to detect on container image configurations (config,secret)",
}
ScanRemovedPkgsFlag = Flag{ ScanRemovedPkgsFlag = Flag{
Name: "removed-pkgs", Name: "removed-pkgs",
ConfigName: "image.removed-pkgs", ConfigName: "image.removed-pkgs",
@@ -18,7 +30,6 @@ var (
Value: "", Value: "",
Usage: "input file path instead of image name", Usage: "input file path instead of image name",
} }
PlatformFlag = Flag{ PlatformFlag = Flag{
Name: "platform", Name: "platform",
ConfigName: "image.platform", ConfigName: "image.platform",
@@ -29,12 +40,14 @@ var (
type ImageFlagGroup struct { type ImageFlagGroup struct {
Input *Flag // local image archive Input *Flag // local image archive
ImageConfigScanners *Flag
ScanRemovedPkgs *Flag ScanRemovedPkgs *Flag
Platform *Flag Platform *Flag
} }
type ImageOptions struct { type ImageOptions struct {
Input string Input string
ImageConfigScanners types.Scanners
ScanRemovedPkgs bool ScanRemovedPkgs bool
Platform string Platform string
} }
@@ -42,6 +55,7 @@ type ImageOptions struct {
func NewImageFlagGroup() *ImageFlagGroup { func NewImageFlagGroup() *ImageFlagGroup {
return &ImageFlagGroup{ return &ImageFlagGroup{
Input: &InputFlag, Input: &InputFlag,
ImageConfigScanners: &ImageConfigScannersFlag,
ScanRemovedPkgs: &ScanRemovedPkgsFlag, ScanRemovedPkgs: &ScanRemovedPkgsFlag,
Platform: &PlatformFlag, Platform: &PlatformFlag,
} }
@@ -52,13 +66,18 @@ func (f *ImageFlagGroup) Name() string {
} }
func (f *ImageFlagGroup) Flags() []*Flag {
-   return []*Flag{f.Input, f.ScanRemovedPkgs, f.Platform}
+   return []*Flag{f.Input, f.ImageConfigScanners, f.ScanRemovedPkgs, f.Platform}
}

-func (f *ImageFlagGroup) ToOptions() ImageOptions {
+func (f *ImageFlagGroup) ToOptions() (ImageOptions, error) {
+   scanners, err := parseScanners(getStringSlice(f.ImageConfigScanners), types.AllImageConfigScanners)
+   if err != nil {
+       return ImageOptions{}, xerrors.Errorf("unable to parse image config scanners: %w", err)
+   }
    return ImageOptions{
        Input:               getString(f.Input),
+       ImageConfigScanners: scanners,
        ScanRemovedPkgs:     getBool(f.ScanRemovedPkgs),
        Platform:            getString(f.Platform),
-   }
+   }, nil
}
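A hedged usage sketch for the new flag; it assumes getStringSlice reads the comma-separated value bound under the ConfigName above (the same mechanism the scan.scanners flag uses), so treat the details as illustrative:

// Hypothetical illustration only.
viper.Set("image.image-config-scanners", "config,secret")

group := NewImageFlagGroup()
opts, err := group.ToOptions()
if err != nil {
    // e.g. "unable to parse image config scanners: unknown scanner: foo"
    return err
}
_ = opts.ImageConfigScanners // types.Scanners{types.MisconfigScanner, types.SecretScanner}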
@@ -383,7 +383,10 @@ func (f *Flags) ToOptions(appVersion string, args []string, globalFlags *GlobalF
} }
    if f.ImageFlagGroup != nil {
-       opts.ImageOptions = f.ImageFlagGroup.ToOptions()
+       opts.ImageOptions, err = f.ImageFlagGroup.ToOptions()
+       if err != nil {
+           return Options{}, xerrors.Errorf("image flag error: %w", err)
+       }
    }

    if f.K8sFlagGroup != nil {
@@ -1,7 +1,6 @@
package flag package flag
import ( import (
"os"
"testing" "testing"
"github.com/spf13/viper" "github.com/spf13/viper"
@@ -33,8 +32,8 @@ func Test_getStringSlice(t *testing.T) {
flag: &ScannersFlag, flag: &ScannersFlag,
flagValue: "license,vuln", flagValue: "license,vuln",
want: []string{ want: []string{
types.LicenseScanner, string(types.LicenseScanner),
types.VulnerabilityScanner, string(types.VulnerabilityScanner),
}, },
}, },
{ {
@@ -45,8 +44,8 @@ func Test_getStringSlice(t *testing.T) {
"secret", "secret",
}, },
want: []string{ want: []string{
types.LicenseScanner, string(types.LicenseScanner),
types.SecretScanner, string(types.SecretScanner),
}, },
}, },
{ {
@@ -57,8 +56,8 @@ func Test_getStringSlice(t *testing.T) {
value: "rbac,config", value: "rbac,config",
}, },
want: []string{ want: []string{
types.RBACScanner, string(types.RBACScanner),
types.MisconfigScanner, string(types.MisconfigScanner),
}, },
}, },
} }
@@ -71,10 +70,7 @@ func Test_getStringSlice(t *testing.T) {
        err := viper.BindEnv(tt.flag.ConfigName, tt.env.key)
        assert.NoError(t, err)
-       savedEnvValue := os.Getenv(tt.env.key)
-       err = os.Setenv(tt.env.key, tt.env.value)
-       assert.NoError(t, err)
-       defer os.Setenv(tt.env.key, savedEnvValue)
+       t.Setenv(tt.env.key, tt.env.value)
    }
    sl := getStringSlice(tt.flag)
@@ -29,10 +29,10 @@ var (
    ScannersFlag = Flag{
        Name:       "scanners",
        ConfigName: "scan.scanners",
-       Value: []string{
+       Value: types.Scanners{
            types.VulnerabilityScanner,
            types.SecretScanner,
-       },
+       }.StringSlice(),
        Aliases: []Alias{
            {
                Name: "security-checks",
@@ -84,7 +84,7 @@ type ScanOptions struct {
    SkipDirs     []string
    SkipFiles    []string
    OfflineScan  bool
-   Scanners     []string
+   Scanners     types.Scanners
    FilePatterns []string
    Slow         bool
    SBOMSources  []string
@@ -126,7 +126,7 @@ func (f *ScanFlagGroup) ToOptions(args []string) (ScanOptions, error) {
if len(args) == 1 { if len(args) == 1 {
target = args[0] target = args[0]
} }
scanners, err := parseScanners(getStringSlice(f.Scanners)) scanners, err := parseScanners(getStringSlice(f.Scanners), types.AllScanners)
if err != nil { if err != nil {
return ScanOptions{}, xerrors.Errorf("unable to parse scanners: %w", err) return ScanOptions{}, xerrors.Errorf("unable to parse scanners: %w", err)
} }
@@ -149,13 +149,14 @@ func (f *ScanFlagGroup) ToOptions(args []string) (ScanOptions, error) {
}, nil }, nil
} }
-func parseScanners(scanner []string) ([]string, error) {
-   var scanners []string
+func parseScanners(scanner []string, allowedScanners []types.Scanner) (types.Scanners, error) {
+   var scanners types.Scanners
    for _, v := range scanner {
-       if !slices.Contains(types.Scanners, v) {
+       s := types.Scanner(v)
+       if !slices.Contains(allowedScanners, s) {
            return nil, xerrors.Errorf("unknown scanner: %s", v)
        }
-       scanners = append(scanners, v)
+       scanners = append(scanners, s)
    }
    return scanners, nil
}
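The new allowedScanners parameter is what lets the image flag group accept only the image-config scanners while this scan flag group keeps accepting the full set. Illustrative behaviour, assuming types.AllScanners includes the vulnerability and secret scanners:

got, err := parseScanners([]string{"vuln", "secret"}, types.AllScanners)
// err == nil
// got  == types.Scanners{types.VulnerabilityScanner, types.SecretScanner}

_, err = parseScanners([]string{"telemetry"}, types.AllScanners)
// err != nil: "unknown scanner: telemetry"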
@@ -42,7 +42,7 @@ func TestScanFlagGroup_ToOptions(t *testing.T) {
}, },
want: flag.ScanOptions{ want: flag.ScanOptions{
Target: "alpine:latest", Target: "alpine:latest",
Scanners: []string{types.MisconfigScanner}, Scanners: types.Scanners{types.MisconfigScanner},
}, },
assertion: require.NoError, assertion: require.NoError,
}, },
@@ -34,7 +34,7 @@ type Option struct {
Output io.Writer Output io.Writer
Severities []dbTypes.Severity Severities []dbTypes.Severity
ColumnHeading []string ColumnHeading []string
Scanners []string Scanners types.Scanners
Components []string Components []string
} }
@@ -176,7 +176,7 @@ type reports struct {
// - misconfiguration report // - misconfiguration report
// - rbac report // - rbac report
// - infra checks report // - infra checks report
func separateMisconfigReports(k8sReport Report, scanners, components []string) []reports { func separateMisconfigReports(k8sReport Report, scanners types.Scanners, components []string) []reports {
workloadMisconfig := make([]Resource, 0) workloadMisconfig := make([]Resource, 0)
infraMisconfig := make([]Resource, 0) infraMisconfig := make([]Resource, 0)
@@ -184,7 +184,7 @@ func separateMisconfigReports(k8sReport Report, scanners, components []string) [
for _, misConfig := range k8sReport.Misconfigurations { for _, misConfig := range k8sReport.Misconfigurations {
switch { switch {
case slices.Contains(scanners, types.RBACScanner) && rbacResource(misConfig): case scanners.Enabled(types.RBACScanner) && rbacResource(misConfig):
rbacAssessment = append(rbacAssessment, misConfig) rbacAssessment = append(rbacAssessment, misConfig)
case infraResource(misConfig): case infraResource(misConfig):
workload, infra := splitInfraAndWorkloadResources(misConfig) workload, infra := splitInfraAndWorkloadResources(misConfig)
@@ -197,7 +197,7 @@ func separateMisconfigReports(k8sReport Report, scanners, components []string) [
workloadMisconfig = append(workloadMisconfig, workload) workloadMisconfig = append(workloadMisconfig, workload)
} }
case slices.Contains(scanners, types.MisconfigScanner) && !rbacResource(misConfig): case scanners.Enabled(types.MisconfigScanner) && !rbacResource(misConfig):
if slices.Contains(components, workloadComponent) { if slices.Contains(components, workloadComponent) {
workloadMisconfig = append(workloadMisconfig, misConfig) workloadMisconfig = append(workloadMisconfig, misConfig)
} }
@@ -225,7 +225,7 @@ func separateMisconfigReports(k8sReport Report, scanners, components []string) [
} }
} }
if slices.Contains(scanners, types.RBACScanner) && len(rbacAssessment) > 0 { if scanners.Enabled(types.RBACScanner) && len(rbacAssessment) > 0 {
r = append(r, reports{ r = append(r, reports{
report: Report{ report: Report{
SchemaVersion: 0, SchemaVersion: 0,
@@ -237,7 +237,7 @@ func separateMisconfigReports(k8sReport Report, scanners, components []string) [
}) })
} }
if slices.Contains(scanners, types.MisconfigScanner) && if scanners.Enabled(types.MisconfigScanner) &&
slices.Contains(components, infraComponent) && slices.Contains(components, infraComponent) &&
len(infraMisconfig) > 0 { len(infraMisconfig) > 0 {
@@ -363,8 +363,6 @@ func copyResult(r types.Result, misconfigs []types.DetectedMisconfiguration) typ
} }
} }
-func shouldAddWorkloadReport(scanners []string) bool {
-   return slices.Contains(scanners, types.MisconfigScanner) ||
-       slices.Contains(scanners, types.VulnerabilityScanner) ||
-       slices.Contains(scanners, types.SecretScanner)
+func shouldAddWorkloadReport(scanners types.Scanners) bool {
+   return scanners.AnyEnabled(types.MisconfigScanner, types.VulnerabilityScanner, types.SecretScanner)
}
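The Enabled/AnyEnabled/StringSlice helpers used throughout this commit are not part of the excerpt, so the following is only an assumption about the shape of types.Scanners, kept dependency-free for illustration:

type Scanner string

type Scanners []Scanner

// Enabled reports whether a single scanner is selected.
func (s Scanners) Enabled(t Scanner) bool {
    for _, v := range s {
        if v == t {
            return true
        }
    }
    return false
}

// AnyEnabled reports whether at least one of the given scanners is selected.
func (s Scanners) AnyEnabled(ts ...Scanner) bool {
    for _, t := range ts {
        if s.Enabled(t) {
            return true
        }
    }
    return false
}

// StringSlice converts the scanners for flag defaults and RPC payloads.
func (s Scanners) StringSlice() []string {
    ss := make([]string, 0, len(s))
    for _, v := range s {
        ss = append(ss, string(v))
    }
    return ss
}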
@@ -482,14 +482,14 @@ func Test_separateMisconfigReports(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
k8sReport Report k8sReport Report
scanners []string scanners types.Scanners
components []string components []string
expectedReports []Report expectedReports []Report
}{ }{
{ {
name: "Config, Rbac, and Infra Reports", name: "Config, Rbac, and Infra Reports",
k8sReport: k8sReport, k8sReport: k8sReport,
scanners: []string{ scanners: types.Scanners{
types.MisconfigScanner, types.MisconfigScanner,
types.RBACScanner, types.RBACScanner,
}, },
@@ -513,7 +513,7 @@ func Test_separateMisconfigReports(t *testing.T) {
{ {
name: "Config and Infra for the same resource", name: "Config and Infra for the same resource",
k8sReport: k8sReport, k8sReport: k8sReport,
scanners: []string{types.MisconfigScanner}, scanners: types.Scanners{types.MisconfigScanner},
components: []string{ components: []string{
workloadComponent, workloadComponent,
infraComponent, infraComponent,
@@ -533,7 +533,7 @@ func Test_separateMisconfigReports(t *testing.T) {
{ {
name: "Role Report Only", name: "Role Report Only",
k8sReport: k8sReport, k8sReport: k8sReport,
scanners: []string{types.RBACScanner}, scanners: types.Scanners{types.RBACScanner},
expectedReports: []Report{ expectedReports: []Report{
{Misconfigurations: []Resource{{Kind: "Role"}}}, {Misconfigurations: []Resource{{Kind: "Role"}}},
}, },
@@ -541,7 +541,7 @@ func Test_separateMisconfigReports(t *testing.T) {
{ {
name: "Config Report Only", name: "Config Report Only",
k8sReport: k8sReport, k8sReport: k8sReport,
scanners: []string{types.MisconfigScanner}, scanners: types.Scanners{types.MisconfigScanner},
components: []string{workloadComponent}, components: []string{workloadComponent},
expectedReports: []Report{ expectedReports: []Report{
{ {
@@ -556,7 +556,7 @@ func Test_separateMisconfigReports(t *testing.T) {
{ {
name: "Infra Report Only", name: "Infra Report Only",
k8sReport: k8sReport, k8sReport: k8sReport,
scanners: []string{types.MisconfigScanner}, scanners: types.Scanners{types.MisconfigScanner},
components: []string{infraComponent}, components: []string{infraComponent},
expectedReports: []Report{ expectedReports: []Report{
{Misconfigurations: []Resource{{Kind: "Pod"}}}, {Misconfigurations: []Resource{{Kind: "Pod"}}},
@@ -594,7 +594,7 @@ func TestReportWrite_Summary(t *testing.T) {
name string name string
report Report report Report
opt Option opt Option
scanners []string scanners types.Scanners
components []string components []string
severities []dbTypes.Severity severities []dbTypes.Severity
expectedOutput string expectedOutput string
@@ -605,7 +605,7 @@ func TestReportWrite_Summary(t *testing.T) {
ClusterName: "test", ClusterName: "test",
Misconfigurations: []Resource{deployOrionWithMisconfigs}, Misconfigurations: []Resource{deployOrionWithMisconfigs},
}, },
scanners: []string{types.MisconfigScanner}, scanners: types.Scanners{types.MisconfigScanner},
components: []string{workloadComponent}, components: []string{workloadComponent},
severities: allSeverities, severities: allSeverities,
expectedOutput: `Summary Report for test expectedOutput: `Summary Report for test
@@ -627,7 +627,7 @@ Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
ClusterName: "test", ClusterName: "test",
Vulnerabilities: []Resource{deployOrionWithVulns}, Vulnerabilities: []Resource{deployOrionWithVulns},
}, },
scanners: []string{types.VulnerabilityScanner}, scanners: types.Scanners{types.VulnerabilityScanner},
severities: allSeverities, severities: allSeverities,
expectedOutput: `Summary Report for test expectedOutput: `Summary Report for test
======================= =======================
@@ -648,7 +648,7 @@ Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
ClusterName: "test", ClusterName: "test",
Misconfigurations: []Resource{roleWithMisconfig}, Misconfigurations: []Resource{roleWithMisconfig},
}, },
scanners: []string{types.RBACScanner}, scanners: types.Scanners{types.RBACScanner},
severities: allSeverities, severities: allSeverities,
expectedOutput: `Summary Report for test expectedOutput: `Summary Report for test
======================= =======================
@@ -669,7 +669,7 @@ Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
ClusterName: "test", ClusterName: "test",
Vulnerabilities: []Resource{deployLuaWithSecrets}, Vulnerabilities: []Resource{deployLuaWithSecrets},
}, },
scanners: []string{types.SecretScanner}, scanners: types.Scanners{types.SecretScanner},
severities: allSeverities, severities: allSeverities,
expectedOutput: `Summary Report for test expectedOutput: `Summary Report for test
======================= =======================
@@ -690,7 +690,7 @@ Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
ClusterName: "test", ClusterName: "test",
Misconfigurations: []Resource{apiseverPodWithMisconfigAndInfra}, Misconfigurations: []Resource{apiseverPodWithMisconfigAndInfra},
}, },
scanners: []string{types.MisconfigScanner}, scanners: types.Scanners{types.MisconfigScanner},
components: []string{infraComponent}, components: []string{infraComponent},
severities: allSeverities, severities: allSeverities,
expectedOutput: `Summary Report for test expectedOutput: `Summary Report for test
@@ -712,7 +712,7 @@ Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
ClusterName: "test", ClusterName: "test",
Misconfigurations: []Resource{apiseverPodWithMisconfigAndInfra}, Misconfigurations: []Resource{apiseverPodWithMisconfigAndInfra},
}, },
scanners: []string{ scanners: types.Scanners{
types.VulnerabilityScanner, types.VulnerabilityScanner,
types.MisconfigScanner, types.MisconfigScanner,
types.SecretScanner, types.SecretScanner,
@@ -738,7 +738,7 @@ Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
ClusterName: "test", ClusterName: "test",
Misconfigurations: []Resource{apiseverPodWithMisconfigAndInfra}, Misconfigurations: []Resource{apiseverPodWithMisconfigAndInfra},
}, },
scanners: []string{ scanners: types.Scanners{
types.MisconfigScanner, types.MisconfigScanner,
types.VulnerabilityScanner, types.VulnerabilityScanner,
types.RBACScanner, types.RBACScanner,
@@ -35,7 +35,7 @@ func NewSummaryWriter(output io.Writer, requiredSevs []dbTypes.Severity, columnH
} }
} }
func ColumnHeading(scanners, components, availableColumns []string) []string { func ColumnHeading(scanners types.Scanners, components, availableColumns []string) []string {
columns := []string{ columns := []string{
NamespaceColumn, NamespaceColumn,
ResourceColumn, ResourceColumn,
@@ -10,7 +10,7 @@ import (
) )
func TestReport_ColumnHeading(t *testing.T) { func TestReport_ColumnHeading(t *testing.T) {
allScanners := []string{ allScanners := types.Scanners{
types.VulnerabilityScanner, types.VulnerabilityScanner,
types.MisconfigScanner, types.MisconfigScanner,
types.SecretScanner, types.SecretScanner,
@@ -19,7 +19,7 @@ func TestReport_ColumnHeading(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
scanners []string scanners types.Scanners
components []string components []string
availableColumns []string availableColumns []string
want []string want []string
@@ -67,7 +67,7 @@ func TestReport_ColumnHeading(t *testing.T) {
}, },
{ {
name: "config column only", name: "config column only",
scanners: []string{types.MisconfigScanner}, scanners: types.Scanners{types.MisconfigScanner},
components: []string{ components: []string{
workloadComponent, workloadComponent,
infraComponent, infraComponent,
@@ -81,7 +81,7 @@ func TestReport_ColumnHeading(t *testing.T) {
}, },
{ {
name: "secret column only", name: "secret column only",
scanners: []string{types.SecretScanner}, scanners: types.Scanners{types.SecretScanner},
components: []string{}, components: []string{},
availableColumns: WorkloadColumns(), availableColumns: WorkloadColumns(),
want: []string{ want: []string{
@@ -92,7 +92,7 @@ func TestReport_ColumnHeading(t *testing.T) {
}, },
{ {
name: "vuln column only", name: "vuln column only",
scanners: []string{types.VulnerabilityScanner}, scanners: types.Scanners{types.VulnerabilityScanner},
components: []string{}, components: []string{},
availableColumns: WorkloadColumns(), availableColumns: WorkloadColumns(),
want: []string{ want: []string{
@@ -5,7 +5,6 @@ import (
"io" "io"
"github.com/cheggaaa/pb/v3" "github.com/cheggaaa/pb/v3"
"golang.org/x/exp/slices"
"golang.org/x/xerrors" "golang.org/x/xerrors"
"github.com/aquasecurity/trivy-kubernetes/pkg/artifacts" "github.com/aquasecurity/trivy-kubernetes/pkg/artifacts"
@@ -63,7 +62,7 @@ func (s *Scanner) Scan(ctx context.Context, artifacts []*artifacts.Artifact) (re
for _, artifact := range artifacts { for _, artifact := range artifacts {
bar.Increment() bar.Increment()
if shouldScanVulnsOrSecrets(s.opts.Scanners) { if s.opts.Scanners.AnyEnabled(types.VulnerabilityScanner, types.SecretScanner) {
resources, err := s.scanVulns(ctx, artifact) resources, err := s.scanVulns(ctx, artifact)
if err != nil { if err != nil {
return report.Report{}, xerrors.Errorf("scanning vulnerabilities error: %w", err) return report.Report{}, xerrors.Errorf("scanning vulnerabilities error: %w", err)
@@ -140,8 +139,3 @@ func (s *Scanner) filter(ctx context.Context, r types.Report, artifact *artifact
} }
return report.CreateResource(artifact, r, nil), nil return report.CreateResource(artifact, r, nil), nil
} }
func shouldScanVulnsOrSecrets(scanners []string) bool {
return slices.Contains(scanners, types.VulnerabilityScanner) ||
slices.Contains(scanners, types.SecretScanner)
}
pkg/misconf/scanner.go (new file)
@@ -0,0 +1,387 @@
package misconf
import (
"bytes"
"context"
_ "embed"
"fmt"
"io/fs"
"os"
"path/filepath"
"sort"
"strings"
"github.com/samber/lo"
"golang.org/x/xerrors"
"github.com/aquasecurity/defsec/pkg/detection"
"github.com/aquasecurity/defsec/pkg/scan"
"github.com/aquasecurity/defsec/pkg/scanners"
"github.com/aquasecurity/defsec/pkg/scanners/azure/arm"
cfscanner "github.com/aquasecurity/defsec/pkg/scanners/cloudformation"
cfparser "github.com/aquasecurity/defsec/pkg/scanners/cloudformation/parser"
dfscanner "github.com/aquasecurity/defsec/pkg/scanners/dockerfile"
"github.com/aquasecurity/defsec/pkg/scanners/helm"
k8sscanner "github.com/aquasecurity/defsec/pkg/scanners/kubernetes"
"github.com/aquasecurity/defsec/pkg/scanners/options"
tfscanner "github.com/aquasecurity/defsec/pkg/scanners/terraform"
"github.com/aquasecurity/memoryfs"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/types"
"github.com/aquasecurity/trivy/pkg/log"
)
var enabledDefsecTypes = map[detection.FileType]string{
detection.FileTypeAzureARM: types.AzureARM,
detection.FileTypeCloudFormation: types.CloudFormation,
detection.FileTypeTerraform: types.Terraform,
detection.FileTypeDockerfile: types.Dockerfile,
detection.FileTypeKubernetes: types.Kubernetes,
detection.FileTypeHelm: types.Helm,
}
type Scanner struct {
filePatterns []string
scanners map[string]scanners.FSScanner
}
func NewScanner(filePatterns []string, opt config.ScannerOption) (Scanner, error) {
opts := []options.ScannerOption{
options.ScannerWithSkipRequiredCheck(true),
options.ScannerWithEmbeddedPolicies(!opt.DisableEmbeddedPolicies),
}
policyFS, policyPaths, err := createPolicyFS(opt.PolicyPaths)
if err != nil {
return Scanner{}, err
}
if policyFS != nil {
opts = append(opts, options.ScannerWithPolicyFilesystem(policyFS))
}
if opt.Trace {
opts = append(opts, options.ScannerWithPerResultTracing(true))
}
if opt.RegoOnly {
opts = append(opts, options.ScannerWithRegoOnly(true))
}
if len(policyPaths) > 0 {
opts = append(opts, options.ScannerWithPolicyDirs(policyPaths...))
}
if len(opt.DataPaths) > 0 {
opts = append(opts, options.ScannerWithDataDirs(opt.DataPaths...))
}
if len(opt.Namespaces) > 0 {
opts = append(opts, options.ScannerWithPolicyNamespaces(opt.Namespaces...))
}
helmOpts := addHelmOpts(opts, opt)
tfOpts := addTFOpts(opts, opt)
return Scanner{
filePatterns: filePatterns,
scanners: map[string]scanners.FSScanner{
types.AzureARM: arm.New(opts...),
types.Terraform: tfscanner.New(tfOpts...),
types.CloudFormation: cfscanner.New(opts...),
types.Dockerfile: dfscanner.NewScanner(opts...),
types.Kubernetes: k8sscanner.NewScanner(opts...),
types.Helm: helm.New(helmOpts...),
},
}, nil
}
func addTFOpts(opts []options.ScannerOption, scannerOption config.ScannerOption) []options.ScannerOption {
if len(scannerOption.TerraformTFVars) > 0 {
opts = append(opts, tfscanner.ScannerWithTFVarsPaths(scannerOption.TerraformTFVars...))
}
return opts
}
func addHelmOpts(opts []options.ScannerOption, scannerOption config.ScannerOption) []options.ScannerOption {
if len(scannerOption.HelmValueFiles) > 0 {
opts = append(opts, helm.ScannerWithValuesFile(scannerOption.HelmValueFiles...))
}
if len(scannerOption.HelmValues) > 0 {
opts = append(opts, helm.ScannerWithValues(scannerOption.HelmValues...))
}
if len(scannerOption.HelmFileValues) > 0 {
opts = append(opts, helm.ScannerWithFileValues(scannerOption.HelmFileValues...))
}
if len(scannerOption.HelmStringValues) > 0 {
opts = append(opts, helm.ScannerWithStringValues(scannerOption.HelmStringValues...))
}
return opts
}
// for a given set of paths, find the most specific filesystem path that contains all the descendants
// the function also returns a filtered version of the input paths that are compatible with a fs.FS
// using the resultant target path. This means they will always use "/" as a separator
func findFSTarget(paths []string) (string, []string, error) {
if len(paths) == 0 {
return "", nil, xerrors.New("must specify at least one path")
}
var absPaths []string
var minSegmentCount int
for _, relPath := range paths {
abs, err := filepath.Abs(relPath)
if err != nil {
return "", nil, xerrors.Errorf("failed to derive absolute path from '%s': %w", relPath, err)
}
count := len(strings.Split(filepath.ToSlash(abs), "/"))
if count < minSegmentCount || minSegmentCount == 0 {
minSegmentCount = count
}
absPaths = append(absPaths, abs)
}
var outputSegments []string
for i := 0; i < minSegmentCount; i++ {
required := strings.Split(absPaths[0], string(filepath.Separator))[i]
match := true
for _, path := range absPaths[1:] {
actual := strings.Split(path, string(filepath.Separator))[i]
if required != actual {
match = false
break
}
}
if !match {
break
}
outputSegments = append(outputSegments, required)
}
slashTarget := strings.Join(outputSegments, "/")
if slashTarget == "" {
slashTarget = string(filepath.Separator)
}
var cleanPaths []string
for _, path := range absPaths {
path := filepath.ToSlash(path)
path = strings.TrimPrefix(path, slashTarget)
path = strings.TrimPrefix(path, "/")
if path == "" {
path = "."
}
cleanPaths = append(cleanPaths, path)
}
// we don't use filepath.Join here as we need to maintain the root "/"
target := strings.Join(outputSegments, string(filepath.Separator))
if target == "" || filepath.VolumeName(target) == target {
target += string(filepath.Separator)
}
return target, cleanPaths, nil
}
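// Worked example (illustrative, not part of this file), assuming Unix-style paths:
//
//	target, cleaned, err := findFSTarget([]string{"/a/b/c/policy1.rego", "/a/b/d/policy2.rego"})
//	// err == nil
//	// target  == "/a/b"                                       -- deepest common directory
//	// cleaned == []string{"c/policy1.rego", "d/policy2.rego"}  -- slash-separated, relative to target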
func createPolicyFS(policyPaths []string) (fs.FS, []string, error) {
if len(policyPaths) == 0 {
return nil, nil, nil
}
var outsideCWD bool
for _, path := range policyPaths {
if strings.Contains(path, "..") || strings.HasPrefix(path, "/") || (len(path) > 1 && path[1] == ':') {
outsideCWD = true
break
}
}
// all policy paths are inside the CWD, so create a filesystem from CWD to load from
if !outsideCWD {
cwd, err := os.Getwd()
if err != nil {
return nil, nil, err
}
var cleanPaths []string
for _, path := range policyPaths {
path = strings.TrimPrefix(path, ".")
path = strings.TrimPrefix(path, "/")
cleanPaths = append(cleanPaths, path)
}
return os.DirFS(cwd), cleanPaths, nil
}
target, cleanPaths, err := findFSTarget(policyPaths)
if err != nil {
return nil, nil, err
}
return os.DirFS(target), cleanPaths, nil
}
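// Illustrative behaviour of the two branches above (not part of this file):
//
//	// Paths inside the current working directory: the policy FS is rooted at CWD.
//	fsys, paths, _ := createPolicyFS([]string{"./policies/docker"})
//	// fsys == os.DirFS(cwd), paths == []string{"policies/docker"}
//
//	// A path outside the CWD (absolute or containing ".."): findFSTarget picks the root.
//	fsys, paths, _ = createPolicyFS([]string{"/etc/trivy/policies"})
//	// fsys == os.DirFS("/etc/trivy/policies"), paths == []string{"."}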
func (s *Scanner) hasCustomPatternForType(t string) bool {
for _, pattern := range s.filePatterns {
if strings.HasPrefix(pattern, t+":") {
return true
}
}
return false
}
// Scan detects misconfigurations.
func (s *Scanner) Scan(ctx context.Context, files []types.File) ([]types.Misconfiguration, error) {
mapMemoryFS := make(map[string]*memoryfs.FS)
for t := range s.scanners {
mapMemoryFS[t] = memoryfs.New()
}
for _, file := range files {
for defsecType, localType := range enabledDefsecTypes {
buffer := bytes.NewReader(file.Content)
if !s.hasCustomPatternForType(localType) && !detection.IsType(file.Path, buffer, defsecType) {
continue
}
// Replace with more detailed config type
file.Type = localType
if memfs, ok := mapMemoryFS[file.Type]; ok {
if filepath.Dir(file.Path) != "." {
if err := memfs.MkdirAll(filepath.Dir(file.Path), os.ModePerm); err != nil {
return nil, xerrors.Errorf("memoryfs mkdir error: %w", err)
}
}
if err := memfs.WriteFile(file.Path, file.Content, os.ModePerm); err != nil {
return nil, xerrors.Errorf("memoryfs write error: %w", err)
}
}
}
}
var misconfs []types.Misconfiguration
for t, scanner := range s.scanners {
results, err := scanner.ScanFS(ctx, mapMemoryFS[t], ".")
if err != nil {
if _, ok := err.(*cfparser.InvalidContentError); ok {
log.Logger.Errorf("scan %q was broken with InvalidContentError: %v", scanner.Name(), err)
continue
}
return nil, xerrors.Errorf("scan config error: %w", err)
}
misconfs = append(misconfs, ResultsToMisconf(t, scanner.Name(), results)...)
}
// Sort misconfigurations
for _, misconf := range misconfs {
sort.Slice(misconf.Successes, func(i, j int) bool {
if misconf.Successes[i].AVDID == misconf.Successes[j].AVDID {
return misconf.Successes[i].StartLine < misconf.Successes[j].StartLine
}
return misconf.Successes[i].AVDID < misconf.Successes[j].AVDID
})
sort.Slice(misconf.Warnings, func(i, j int) bool {
if misconf.Warnings[i].AVDID == misconf.Warnings[j].AVDID {
return misconf.Warnings[i].StartLine < misconf.Warnings[j].StartLine
}
return misconf.Warnings[i].AVDID < misconf.Warnings[j].AVDID
})
sort.Slice(misconf.Failures, func(i, j int) bool {
if misconf.Failures[i].AVDID == misconf.Failures[j].AVDID {
return misconf.Failures[i].StartLine < misconf.Failures[j].StartLine
}
return misconf.Failures[i].AVDID < misconf.Failures[j].AVDID
})
}
return misconfs, nil
}
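// Usage sketch from a caller's point of view (mirrors the updated test at the
// end of this commit; the Dockerfile content is illustrative):
//
//	s, err := misconf.NewScanner(nil, config.ScannerOption{})
//	if err != nil {
//		return err
//	}
//	misconfs, err := s.Scan(ctx, []types.File{
//		{Path: "Dockerfile", Type: types.Dockerfile, Content: []byte("FROM alpine:3.17")},
//	})
//	// each types.Misconfiguration groups successes/warnings/failures per file path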
// This function is exported for trivy-plugin-aqua purposes only
func ResultsToMisconf(configType string, scannerName string, results scan.Results) []types.Misconfiguration {
misconfs := map[string]types.Misconfiguration{}
for _, result := range results {
flattened := result.Flatten()
query := fmt.Sprintf("data.%s.%s", result.RegoNamespace(), result.RegoRule())
ruleID := result.Rule().AVDID
if result.RegoNamespace() != "" && len(result.Rule().Aliases) > 0 {
ruleID = result.Rule().Aliases[0]
}
cause := NewCauseWithCode(result)
misconfResult := types.MisconfResult{
Namespace: result.RegoNamespace(),
Query: query,
Message: flattened.Description,
PolicyMetadata: types.PolicyMetadata{
ID: ruleID,
AVDID: result.Rule().AVDID,
Type: fmt.Sprintf("%s Security Check", scannerName),
Title: result.Rule().Summary,
Description: result.Rule().Explanation,
Severity: string(flattened.Severity),
RecommendedActions: flattened.Resolution,
References: flattened.Links,
},
CauseMetadata: cause,
Traces: result.Traces(),
}
filePath := flattened.Location.Filename
misconf, ok := misconfs[filePath]
if !ok {
misconf = types.Misconfiguration{
FileType: configType,
FilePath: filePath,
}
}
if flattened.Warning {
misconf.Warnings = append(misconf.Warnings, misconfResult)
} else {
switch flattened.Status {
case scan.StatusPassed:
misconf.Successes = append(misconf.Successes, misconfResult)
case scan.StatusIgnored:
misconf.Exceptions = append(misconf.Exceptions, misconfResult)
case scan.StatusFailed:
misconf.Failures = append(misconf.Failures, misconfResult)
}
}
misconfs[filePath] = misconf
}
return types.ToMisconfigurations(misconfs)
}
func NewCauseWithCode(underlying scan.Result) types.CauseMetadata {
flat := underlying.Flatten()
cause := types.CauseMetadata{
Resource: flat.Resource,
Provider: flat.RuleProvider.DisplayName(),
Service: flat.RuleService,
StartLine: flat.Location.StartLine,
EndLine: flat.Location.EndLine,
}
if code, err := underlying.GetCode(); err == nil {
cause.Code = types.Code{
Lines: lo.Map(code.Lines, func(l scan.Line, i int) types.Line {
return types.Line{
Number: l.Number,
Content: l.Content,
IsCause: l.IsCause,
Annotation: l.Annotation,
Truncated: l.Truncated,
Highlighted: l.Highlighted,
FirstCause: l.FirstCause,
LastCause: l.LastCause,
}
}),
}
}
return cause
}
@@ -11,44 +11,39 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/aquasecurity/trivy/pkg/fanal/analyzer" "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
"github.com/aquasecurity/trivy/pkg/fanal/artifact"
"github.com/aquasecurity/trivy/pkg/fanal/types" "github.com/aquasecurity/trivy/pkg/fanal/types"
) )
func Test_Handle(t *testing.T) { func TestScanner_Scan(t *testing.T) {
tests := []struct { tests := []struct {
name string name string
files map[types.HandlerType][]types.File files []types.File
filePatterns []string filePatterns []string
wantFilePath string wantFilePath string
wantFileType string wantFileType string
}{ }{
{ {
name: "happy path. Dockerfile", name: "happy path. Dockerfile",
files: map[types.HandlerType][]types.File{ files: []types.File{
types.MisconfPostHandler: {
{ {
Path: "Dockerfile", Path: "Dockerfile",
Type: types.Dockerfile, Type: types.Dockerfile,
Content: []byte(`FROM alpine`), Content: []byte(`FROM alpine`),
}, },
}, },
},
wantFilePath: "Dockerfile", wantFilePath: "Dockerfile",
wantFileType: types.Dockerfile, wantFileType: types.Dockerfile,
}, },
{ {
name: "happy path. Dockerfile with custom file name", name: "happy path. Dockerfile with custom file name",
files: map[types.HandlerType][]types.File{ files: []types.File{
types.MisconfPostHandler: {
{ {
Path: "dockerf", Path: "dockerf",
Type: types.Dockerfile, Type: types.Dockerfile,
Content: []byte(`FROM alpine`), Content: []byte(`FROM alpine`),
}, },
}, },
},
filePatterns: []string{"dockerfile:dockerf"}, filePatterns: []string{"dockerfile:dockerf"},
wantFilePath: "dockerf", wantFilePath: "dockerf",
wantFileType: types.Dockerfile, wantFileType: types.Dockerfile,
@@ -56,18 +51,14 @@ func Test_Handle(t *testing.T) {
} }
for _, tt := range tests { for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) { t.Run(tt.name, func(t *testing.T) {
result := &analyzer.AnalysisResult{ s, err := NewScanner(tt.filePatterns, config.ScannerOption{})
Files: tt.files, require.NoError(t, err)
}
misconfHandler, err := newMisconfPostHandler(artifact.Option{FilePatterns: tt.filePatterns})
assert.NoError(t, err)
blobInfo := &types.BlobInfo{}
err = misconfHandler.Handle(context.Background(), result, blobInfo) misconfs, err := s.Scan(context.Background(), tt.files)
assert.NoError(t, err) require.NoError(t, err)
assert.Equal(t, 1, len(blobInfo.Misconfigurations), "wrong number of misconfigurations found") assert.Equal(t, 1, len(misconfs), "wrong number of misconfigurations found")
assert.Equal(t, tt.wantFilePath, blobInfo.Misconfigurations[0].FilePath, "filePaths don't equal") assert.Equal(t, tt.wantFilePath, misconfs[0].FilePath, "filePaths don't equal")
assert.Equal(t, tt.wantFileType, blobInfo.Misconfigurations[0].FileType, "fileTypes don't equal") assert.Equal(t, tt.wantFileType, misconfs[0].FileType, "fileTypes don't equal")
}) })
} }
} }
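
Read on its own, the rewritten test suggests a simpler driving API: build a Scanner from file patterns and scanner options, then hand it the files to check. The sketch below is hedged: it assumes it lives in the same package as NewScanner (whose import path is not visible in this hunk, so the package name misconf is hypothetical) and that Scan returns the fanal types.Misconfiguration slice the assertions read; only the imports shown in the test are used.

package misconf // hypothetical: whichever package defines NewScanner above

import (
	"context"
	"fmt"

	"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
	"github.com/aquasecurity/trivy/pkg/fanal/types"
)

// scanDockerfile mirrors the call shape exercised by TestScanner_Scan above.
func scanDockerfile(content []byte) error {
	s, err := NewScanner(nil, config.ScannerOption{}) // nil: no custom file patterns
	if err != nil {
		return err
	}

	misconfs, err := s.Scan(context.Background(), []types.File{
		{Path: "Dockerfile", Type: types.Dockerfile, Content: content},
	})
	if err != nil {
		return err
	}

	for _, m := range misconfs {
		fmt.Println(m.FileType, m.FilePath, len(m.Failures), "failed checks")
	}
	return nil
}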

View File

@@ -82,7 +82,7 @@ func (s Scanner) Scan(ctx context.Context, target, artifactKey string, blobKeys
BlobIds: blobKeys, BlobIds: blobKeys,
Options: &rpc.ScanOptions{ Options: &rpc.ScanOptions{
VulnType: opts.VulnType, VulnType: opts.VulnType,
Scanners: opts.Scanners, Scanners: opts.Scanners.StringSlice(),
ListAllPackages: opts.ListAllPackages, ListAllPackages: opts.ListAllPackages,
LicenseCategories: licenseCategories, LicenseCategories: licenseCategories,
}, },

View File

@@ -5,6 +5,7 @@ import (
google_protobuf "github.com/golang/protobuf/ptypes/empty" google_protobuf "github.com/golang/protobuf/ptypes/empty"
"github.com/google/wire" "github.com/google/wire"
"github.com/samber/lo"
"golang.org/x/xerrors" "golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/fanal/cache" "github.com/aquasecurity/trivy/pkg/fanal/cache"
@@ -42,9 +43,12 @@ func teeError(err error) error {
// Scan scans and return response // Scan scans and return response
func (s *ScanServer) Scan(ctx context.Context, in *rpcScanner.ScanRequest) (*rpcScanner.ScanResponse, error) { func (s *ScanServer) Scan(ctx context.Context, in *rpcScanner.ScanRequest) (*rpcScanner.ScanResponse, error) {
scanners := lo.Map(in.Options.Scanners, func(s string, index int) types.Scanner {
return types.Scanner(s)
})
options := types.ScanOptions{ options := types.ScanOptions{
VulnType: in.Options.VulnType, VulnType: in.Options.VulnType,
Scanners: in.Options.Scanners, Scanners: scanners,
ListAllPackages: in.Options.ListAllPackages, ListAllPackages: in.Options.ListAllPackages,
} }
results, os, err := s.localScanner.Scan(ctx, in.Target, in.ArtifactId, in.BlobIds, options) results, os, err := s.localScanner.Scan(ctx, in.Target, in.ArtifactId, in.BlobIds, options)
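
The server-side change above is just a []string-to-types.Scanners conversion. A standalone sketch of that round trip follows; the rpc request is replaced by a plain string slice, and the only assumption is the import path of the `types` package defined later in this change.

package main

import (
	"fmt"

	"github.com/samber/lo"

	"github.com/aquasecurity/trivy/pkg/types" // assumed import path for the `types` package shown later
)

func main() {
	// What the RPC layer carries: scanner names as plain strings.
	incoming := []string{string(types.VulnerabilityScanner), string(types.SecretScanner)}

	// Same conversion as in ScanServer.Scan above; the named Scanners type is what
	// gives access to Enabled and StringSlice.
	var scanners types.Scanners = lo.Map(incoming, func(s string, _ int) types.Scanner {
		return types.Scanner(s)
	})

	fmt.Println(scanners.Enabled(types.SecretScanner)) // true

	// The client goes the other way when it builds rpc.ScanOptions.
	fmt.Println(scanners.StringSlice())
}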

View File

@@ -57,6 +57,7 @@ func TestScanServer_Scan(t *testing.T) {
Target: "alpine:3.11", Target: "alpine:3.11",
ImageID: "sha256:e7d92cdc71feacf90708cb59182d0df1b911f8ae022d29e8e95d75ca6a99776a", ImageID: "sha256:e7d92cdc71feacf90708cb59182d0df1b911f8ae022d29e8e95d75ca6a99776a",
LayerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"}, LayerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"},
OptionsAnything: true,
}, },
Returns: scanner.DriverScanReturns{ Returns: scanner.DriverScanReturns{
Results: types.Results{ Results: types.Results{
@@ -156,6 +157,7 @@ func TestScanServer_Scan(t *testing.T) {
Target: "alpine:3.11", Target: "alpine:3.11",
ImageID: "sha256:e7d92cdc71feacf90708cb59182d0df1b911f8ae022d29e8e95d75ca6a99776a", ImageID: "sha256:e7d92cdc71feacf90708cb59182d0df1b911f8ae022d29e8e95d75ca6a99776a",
LayerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"}, LayerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"},
OptionsAnything: true,
}, },
Returns: scanner.DriverScanReturns{ Returns: scanner.DriverScanReturns{
Err: errors.New("error"), Err: errors.New("error"),

View File

@@ -114,7 +114,7 @@ func (s Scanner) Scan(ctx context.Context, target, artifactKey string, blobKeys
} }
// Scan packages for vulnerabilities // Scan packages for vulnerabilities
if slices.Contains(options.Scanners, types.VulnerabilityScanner) { if options.Scanners.Enabled(types.VulnerabilityScanner) {
var vulnResults types.Results var vulnResults types.Results
vulnResults, eosl, err = s.scanVulnerabilities(target, artifactDetail, options) vulnResults, eosl, err = s.scanVulnerabilities(target, artifactDetail, options)
if err != nil { if err != nil {
@@ -138,17 +138,30 @@ func (s Scanner) Scan(ctx context.Context, target, artifactKey string, blobKeys
} }
// Scan secrets // Scan secrets
if slices.Contains(options.Scanners, types.SecretScanner) { if options.Scanners.Enabled(types.SecretScanner) {
secretResults := s.secretsToResults(artifactDetail.Secrets) secretResults := s.secretsToResults(artifactDetail.Secrets)
results = append(results, secretResults...) results = append(results, secretResults...)
} }
// Scan licenses // Scan licenses
if slices.Contains(options.Scanners, types.LicenseScanner) { if options.Scanners.Enabled(types.LicenseScanner) {
licenseResults := s.scanLicenses(artifactDetail, options.LicenseCategories) licenseResults := s.scanLicenses(artifactDetail, options.LicenseCategories)
results = append(results, licenseResults...) results = append(results, licenseResults...)
} }
// Scan misconfiguration on container image config
if options.ImageConfigScanners.Enabled(types.MisconfigScanner) {
if im := artifactDetail.ImageMisconfiguration; im != nil {
im.FilePath = target // Use the target name as the file path, since the container image config is not a real file.
results = append(results, s.MisconfsToResults([]ftypes.Misconfiguration{*im})...)
}
}
// Scan secrets on container image config
if options.ImageConfigScanners.Enabled(types.SecretScanner) {
// TODO
}
// For WASM plugins and custom analyzers // For WASM plugins and custom analyzers
if len(artifactDetail.CustomResources) != 0 { if len(artifactDetail.CustomResources) != 0 {
results = append(results, types.Result{ results = append(results, types.Result{
@@ -538,7 +551,6 @@ func mergePkgs(pkgs, pkgsFromCommands []ftypes.Package) []ftypes.Package {
return pkgs return pkgs
} }
func ShouldScanMisconfigOrRbac(scanners []string) bool { func ShouldScanMisconfigOrRbac(scanners types.Scanners) bool {
return slices.Contains(scanners, types.MisconfigScanner) || return scanners.AnyEnabled(types.MisconfigScanner, types.RBACScanner)
slices.Contains(scanners, types.RBACScanner)
} }
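
The new control flow in Scanner.Scan can be summarised outside the scanner: each step is now gated by Scanners.Enabled instead of slices.Contains, and the image-config misconfiguration step is gated separately by ImageConfigScanners. The buildSteps helper below is hypothetical and only mirrors the branching; the pkg/types import path is assumed.

package main

import (
	"fmt"

	"github.com/aquasecurity/trivy/pkg/types" // assumed import path
)

// buildSteps is a hypothetical helper that reports which scan steps a given
// option set would switch on, following the branching in the hunk above.
func buildSteps(opts types.ScanOptions) []string {
	var steps []string
	if opts.Scanners.Enabled(types.VulnerabilityScanner) {
		steps = append(steps, "scan packages for vulnerabilities")
	}
	if opts.Scanners.Enabled(types.SecretScanner) {
		steps = append(steps, "scan secrets")
	}
	if opts.Scanners.Enabled(types.LicenseScanner) {
		steps = append(steps, "scan licenses")
	}
	// New in this change: misconfigurations detected in the container image config.
	if opts.ImageConfigScanners.Enabled(types.MisconfigScanner) {
		steps = append(steps, "scan misconfigurations in image config")
	}
	return steps
}

func main() {
	opts := types.ScanOptions{
		Scanners:            types.Scanners{types.VulnerabilityScanner},
		ImageConfigScanners: types.Scanners{types.MisconfigScanner},
	}
	fmt.Println(buildSteps(opts))
}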

View File

@@ -46,7 +46,7 @@ func TestScanner_Scan(t *testing.T) {
types.VulnTypeOS, types.VulnTypeOS,
types.VulnTypeLibrary, types.VulnTypeLibrary,
}, },
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/happy.yaml"}, fixtures: []string{"testdata/fixtures/happy.yaml"},
@@ -156,7 +156,7 @@ func TestScanner_Scan(t *testing.T) {
types.VulnTypeOS, types.VulnTypeOS,
types.VulnTypeLibrary, types.VulnTypeLibrary,
}, },
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
ListAllPackages: true, ListAllPackages: true,
}, },
}, },
@@ -307,7 +307,7 @@ func TestScanner_Scan(t *testing.T) {
types.VulnTypeOS, types.VulnTypeOS,
types.VulnTypeLibrary, types.VulnTypeLibrary,
}, },
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
ListAllPackages: true, ListAllPackages: true,
}, },
}, },
@@ -416,7 +416,7 @@ func TestScanner_Scan(t *testing.T) {
types.VulnTypeOS, types.VulnTypeOS,
types.VulnTypeLibrary, types.VulnTypeLibrary,
}, },
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/happy.yaml"}, fixtures: []string{"testdata/fixtures/happy.yaml"},
@@ -499,7 +499,7 @@ func TestScanner_Scan(t *testing.T) {
types.VulnTypeOS, types.VulnTypeOS,
types.VulnTypeLibrary, types.VulnTypeLibrary,
}, },
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/happy.yaml"}, fixtures: []string{"testdata/fixtures/happy.yaml"},
@@ -582,7 +582,7 @@ func TestScanner_Scan(t *testing.T) {
types.VulnTypeOS, types.VulnTypeOS,
types.VulnTypeLibrary, types.VulnTypeLibrary,
}, },
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/happy.yaml"}, fixtures: []string{"testdata/fixtures/happy.yaml"},
@@ -658,7 +658,7 @@ func TestScanner_Scan(t *testing.T) {
types.VulnTypeOS, types.VulnTypeOS,
types.VulnTypeLibrary, types.VulnTypeLibrary,
}, },
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/happy.yaml"}, fixtures: []string{"testdata/fixtures/happy.yaml"},
@@ -679,7 +679,7 @@ func TestScanner_Scan(t *testing.T) {
layerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"}, layerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"},
options: types.ScanOptions{ options: types.ScanOptions{
VulnType: []string{types.VulnTypeLibrary}, VulnType: []string{types.VulnTypeLibrary},
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/happy.yaml"}, fixtures: []string{"testdata/fixtures/happy.yaml"},
@@ -788,7 +788,7 @@ func TestScanner_Scan(t *testing.T) {
target: "/app/configs", target: "/app/configs",
layerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"}, layerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"},
options: types.ScanOptions{ options: types.ScanOptions{
Scanners: []string{types.MisconfigScanner}, Scanners: types.Scanners{types.MisconfigScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/happy.yaml"}, fixtures: []string{"testdata/fixtures/happy.yaml"},
@@ -941,7 +941,7 @@ func TestScanner_Scan(t *testing.T) {
types.VulnTypeOS, types.VulnTypeOS,
types.VulnTypeLibrary, types.VulnTypeLibrary,
}, },
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/happy.yaml"}, fixtures: []string{"testdata/fixtures/happy.yaml"},
@@ -962,7 +962,7 @@ func TestScanner_Scan(t *testing.T) {
layerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"}, layerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"},
options: types.ScanOptions{ options: types.ScanOptions{
VulnType: []string{types.VulnTypeLibrary}, VulnType: []string{types.VulnTypeLibrary},
Scanners: []string{types.VulnerabilityScanner}, Scanners: types.Scanners{types.VulnerabilityScanner},
}, },
}, },
fixtures: []string{"testdata/fixtures/sad.yaml"}, fixtures: []string{"testdata/fixtures/sad.yaml"},

View File

@@ -7,7 +7,8 @@ import (
// ScanOptions holds the attributes for scanning vulnerabilities // ScanOptions holds the attributes for scanning vulnerabilities
type ScanOptions struct { type ScanOptions struct {
VulnType []string VulnType []string
Scanners []string Scanners Scanners
ImageConfigScanners Scanners // Scanners for container image configuration
ScanRemovedPackages bool ScanRemovedPackages bool
Platform string Platform string
ListAllPackages bool ListAllPackages bool
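
With the struct above, callers describe the two scanning surfaces separately. A hedged construction example follows; the field names come from the diff, while the import path is assumed.

package main

import (
	"fmt"

	"github.com/aquasecurity/trivy/pkg/types" // assumed import path
)

func main() {
	opts := types.ScanOptions{
		VulnType: []string{types.VulnTypeOS, types.VulnTypeLibrary},
		// Scanners apply to the artifact contents (OS and language packages, files).
		Scanners: types.Scanners{types.VulnerabilityScanner, types.SecretScanner},
		// ImageConfigScanners apply only to the container image configuration,
		// which is the new surface added by this change.
		ImageConfigScanners: types.Scanners{types.MisconfigScanner},
	}

	fmt.Println("contents:", opts.Scanners.StringSlice())
	fmt.Println("image config:", opts.ImageConfigScanners.StringSlice())
}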

View File

@@ -1,10 +1,18 @@
package types package types
import (
"github.com/samber/lo"
"golang.org/x/exp/slices"
)
// VulnType represents vulnerability type // VulnType represents vulnerability type
type VulnType = string type VulnType = string
// Scanner represents the type of security scanning // Scanner represents the type of security scanning
type Scanner = string type Scanner string
// Scanners is a slice of scanners
type Scanners []Scanner
const ( const (
// VulnTypeUnknown is a vulnerability type of unknown // VulnTypeUnknown is a vulnerability type of unknown
@@ -16,8 +24,11 @@ const (
// VulnTypeLibrary is a vulnerability type of programming language dependencies // VulnTypeLibrary is a vulnerability type of programming language dependencies
VulnTypeLibrary = VulnType("library") VulnTypeLibrary = VulnType("library")
// ScannerUnknown is the scanner of unknown // UnknownScanner is the scanner of unknown
ScannerUnknown = Scanner("unknown") UnknownScanner = Scanner("unknown")
// NoneScanner is the scanner of none
NoneScanner = Scanner("none")
// VulnerabilityScanner is the scanner of vulnerabilities // VulnerabilityScanner is the scanner of vulnerabilities
VulnerabilityScanner = Scanner("vuln") VulnerabilityScanner = Scanner("vuln")
@@ -40,11 +51,42 @@ var (
VulnTypeOS, VulnTypeOS,
VulnTypeLibrary, VulnTypeLibrary,
} }
Scanners = []string{
AllScanners = Scanners{
VulnerabilityScanner, VulnerabilityScanner,
MisconfigScanner, MisconfigScanner,
RBACScanner, RBACScanner,
SecretScanner, SecretScanner,
LicenseScanner, LicenseScanner,
NoneScanner,
}
// AllImageConfigScanners lists the scanners that can run against a container image config.
// A container image in a registry consists of a manifest, a config and layers.
// Trivy can also detect security issues in the image config itself.
AllImageConfigScanners = Scanners{
MisconfigScanner,
SecretScanner,
NoneScanner,
} }
) )
func (scanners Scanners) Enabled(s Scanner) bool {
return slices.Contains(scanners, s)
}
// AnyEnabled returns true if any of the given scanners is enabled.
func (scanners Scanners) AnyEnabled(ss ...Scanner) bool {
for _, s := range ss {
if scanners.Enabled(s) {
return true
}
}
return false
}
func (scanners Scanners) StringSlice() []string {
return lo.Map(scanners, func(s Scanner, _ int) string {
return string(s)
})
}
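
Putting the new helpers together, a minimal sketch of their behaviour; everything comes from the definitions above except the import path, which is assumed.

package main

import (
	"fmt"

	"github.com/aquasecurity/trivy/pkg/types" // assumed import path for the `types` package above
)

func main() {
	requested := types.Scanners{types.MisconfigScanner, types.SecretScanner}

	// Enabled and AnyEnabled replace the previous slices.Contains calls.
	fmt.Println(requested.Enabled(types.VulnerabilityScanner))                   // false
	fmt.Println(requested.AnyEnabled(types.MisconfigScanner, types.RBACScanner)) // true

	// AllImageConfigScanners enumerates which scanners make sense on the image config.
	for _, s := range requested {
		fmt.Printf("%s usable on image config: %v\n", s, types.AllImageConfigScanners.Enabled(s))
	}
}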