feat(fs): optimize scanning performance by direct file access for known paths (#8525)
@@ -13,3 +13,16 @@ $ trivy rootfs /path/to/rootfs
 Rootfs scanning works differently from the Filesystem scanning.
 You should use `trivy fs` to scan your local projects in CI/CD.
 See [here](../scanner/vulnerability.md) for the differences.
+
+## Performance Optimization
+
+By default, Trivy traverses all files from the specified root directory to find target files for scanning.
+However, when you only need to scan specific files with absolute paths, you can avoid this traversal, which makes scanning faster.
+For example, when scanning only OS packages, no full traversal is performed:
+
+```bash
+$ trivy rootfs --pkg-types os --scanners vuln /
+```
+
+When scanning language-specific packages or secrets, traversal is necessary because the location of these files is unknown.
+If you want to exclude specific directories from scanning for better performance, you can use the [--skip-dirs](../configuration/skipping.md) option.

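To make the documented behaviour concrete, the commands below contrast the two modes. Which invocations can actually skip the traversal depends on the analyzers enabled in a given Trivy version, so treat this as an illustration rather than a guarantee:

```bash
# OS packages only: Trivy can jump straight to the known package-database paths
trivy rootfs --pkg-types os --scanners vuln /

# Language packages and secrets can live anywhere, so the whole tree is still walked
trivy rootfs --scanners vuln,secret /
```
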
@@ -118,6 +118,13 @@ type CustomGroup interface {
     Group() Group
 }
 
+// StaticPathAnalyzer is an interface for analyzers that can specify static file paths
+// instead of traversing the entire filesystem.
+type StaticPathAnalyzer interface {
+    // StaticPaths returns a list of static file paths to analyze
+    StaticPaths() []string
+}
+
 type Opener func() (xio.ReadSeekCloserAt, error)
 
 type AnalyzerGroup struct {

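For reference, this is roughly how an analyzer opts into the new fast path. A minimal, self-contained sketch: `StaticPathAnalyzer` is redefined locally to mirror the interface added above, and `fooAnalyzer` with its `etc/foo-release` path is hypothetical, not one of Trivy's analyzers:

```go
package main

import "fmt"

// StaticPathAnalyzer mirrors the interface added in this commit: an analyzer
// that knows exactly which files it needs can expose those paths up front.
type StaticPathAnalyzer interface {
	StaticPaths() []string
}

// fooAnalyzer is a hypothetical analyzer that only ever reads one file.
type fooAnalyzer struct{}

func (fooAnalyzer) StaticPaths() []string {
	return []string{"etc/foo-release"}
}

func main() {
	var a any = fooAnalyzer{}
	// The analyzer group performs essentially this type assertion to decide
	// whether the filesystem traversal can be skipped.
	if spa, ok := a.(StaticPathAnalyzer); ok {
		fmt.Println(spa.StaticPaths()) // [etc/foo-release]
	}
}
```
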
@@ -527,3 +534,28 @@ func (ag AnalyzerGroup) filePatternMatch(analyzerType Type, filePath string) boo
     }
     return false
 }
+
+// StaticPaths collects static paths from all enabled analyzers
+// It returns the collected paths and a boolean indicating if all enabled analyzers implement StaticPathAnalyzer
+func (ag AnalyzerGroup) StaticPaths(disabled []Type) ([]string, bool) {
+    var paths []string
+
+    for _, a := range ag.analyzers {
+        // Skip disabled analyzers
+        if slices.Contains(disabled, a.Type()) {
+            continue
+        }
+
+        // If any analyzer doesn't implement StaticPathAnalyzer, return false
+        staticPathAnalyzer, ok := a.(StaticPathAnalyzer)
+        if !ok {
+            return nil, false
+        }
+
+        // Collect paths from StaticPathAnalyzer
+        paths = append(paths, staticPathAnalyzer.StaticPaths()...)
+    }
+
+    // Remove duplicates
+    return lo.Uniq(paths), true
+}

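The collection logic added above can be sketched with toy types to highlight its key property: a single enabled analyzer without static paths disables the optimization for the whole group. The `toyAnalyzer` type and the use of the standard `slices` package (instead of `lo.Uniq`) are assumptions made only to keep the sketch standalone:

```go
package main

import (
	"fmt"
	"slices"
)

type analyzerType string

// toyAnalyzer stands in for a real analyzer; a nil paths slice means the
// analyzer cannot name its files up front and needs a full traversal.
type toyAnalyzer struct {
	typ   analyzerType
	paths []string
}

// staticPaths mirrors the shape of AnalyzerGroup.StaticPaths: skip disabled
// analyzers, bail out entirely if any remaining analyzer has no static paths,
// and de-duplicate the result.
func staticPaths(analyzers []toyAnalyzer, disabled []analyzerType) ([]string, bool) {
	var out []string
	for _, a := range analyzers {
		if slices.Contains(disabled, a.typ) {
			continue
		}
		if a.paths == nil {
			return nil, false
		}
		out = append(out, a.paths...)
	}
	slices.Sort(out)
	return slices.Compact(out), true
}

func main() {
	group := []toyAnalyzer{
		{typ: "alpine", paths: []string{"etc/alpine-release"}},
		{typ: "apk", paths: []string{"lib/apk/db/installed"}},
		{typ: "secret"}, // secrets can live anywhere, so no static paths
	}
	fmt.Println(staticPaths(group, []analyzerType{"secret"})) // [etc/alpine-release lib/apk/db/installed] true
	fmt.Println(staticPaths(group, nil))                      // [] false
}
```
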
@@ -63,3 +63,7 @@ func (a contentManifestAnalyzer) Type() analyzer.Type {
 func (a contentManifestAnalyzer) Version() int {
     return contentManifestAnalyzerVersion
 }
+
+func (a contentManifestAnalyzer) StaticPaths() []string {
+    return contentSetsDirs.Items()
+}

@@ -157,3 +157,7 @@ func setKVValue(kvpo instructions.KeyValuePairOptional, values map[string]string
     }
     return kvpo
 }
+
+func (a dockerfileAnalyzer) StaticPaths() []string {
+    return []string{"root/buildinfo"}
+}

@@ -178,6 +178,7 @@ var (
         TypeGemSpec,
         TypeCargo,
         TypeComposer,
+        TypeComposerVendor,
         TypeJar,
         TypePom,
         TypeGradleLock,

@@ -192,6 +193,7 @@ var (
         TypeCondaPkg,
         TypeCondaEnv,
         TypePythonPkg,
+        TypePythonPkgEgg,
         TypePip,
         TypePipenv,
         TypePoetry,

@@ -205,6 +207,7 @@ var (
         TypePubSpecLock,
         TypeMixLock,
         TypeJulia,
+        TypeSBOM,
     }
 
     // TypeLockfiles has all lock file analyzers

@@ -48,3 +48,8 @@ func (a alpineOSAnalyzer) Type() analyzer.Type {
 func (a alpineOSAnalyzer) Version() int {
     return version
 }
+
+// StaticPaths returns the static paths of the alpine analyzer
+func (a alpineOSAnalyzer) StaticPaths() []string {
+    return requiredFiles
+}

@@ -73,3 +73,8 @@ func (a amazonlinuxOSAnalyzer) Type() analyzer.Type {
 func (a amazonlinuxOSAnalyzer) Version() int {
     return version
 }
+
+// StaticPaths returns the static paths of the amazonlinux analyzer
+func (a amazonlinuxOSAnalyzer) StaticPaths() []string {
+    return requiredFiles
+}

@@ -48,3 +48,8 @@ func (a debianOSAnalyzer) Type() analyzer.Type {
 func (a debianOSAnalyzer) Version() int {
     return version
 }
+
+// StaticPaths returns the static paths of the debian analyzer
+func (a debianOSAnalyzer) StaticPaths() []string {
+    return requiredFiles
+}

@@ -60,3 +60,8 @@ func (a almaOSAnalyzer) Type() analyzer.Type {
 func (a almaOSAnalyzer) Version() int {
     return almaAnalyzerVersion
 }
+
+// StaticPaths returns the static paths of the alma analyzer
+func (a almaOSAnalyzer) StaticPaths() []string {
+    return a.requiredFiles()
+}

@@ -60,3 +60,8 @@ func (a centOSAnalyzer) Type() analyzer.Type {
 func (a centOSAnalyzer) Version() int {
     return centosAnalyzerVersion
 }
+
+// StaticPaths returns the static paths of the centos analyzer
+func (a centOSAnalyzer) StaticPaths() []string {
+    return a.requiredFiles()
+}

@@ -62,3 +62,8 @@ func (a fedoraOSAnalyzer) Type() analyzer.Type {
 func (a fedoraOSAnalyzer) Version() int {
     return fedoraAnalyzerVersion
 }
+
+// StaticPaths returns the static paths of the fedora analyzer
+func (a fedoraOSAnalyzer) StaticPaths() []string {
+    return a.requiredFiles()
+}

@@ -56,3 +56,8 @@ func (a oracleOSAnalyzer) Type() analyzer.Type {
 func (a oracleOSAnalyzer) Version() int {
     return oracleAnalyzerVersion
 }
+
+// StaticPaths returns the static paths of the oracle analyzer
+func (a oracleOSAnalyzer) StaticPaths() []string {
+    return a.requiredFiles()
+}

@@ -97,3 +97,8 @@ func (a redhatOSAnalyzer) Type() analyzer.Type {
 func (a redhatOSAnalyzer) Version() int {
     return redhatAnalyzerVersion
 }
+
+// StaticPaths returns the static paths of the redhatbase analyzer
+func (a redhatOSAnalyzer) StaticPaths() []string {
+    return a.requiredFiles()
+}

@@ -60,3 +60,8 @@ func (a rockyOSAnalyzer) Type() analyzer.Type {
 func (a rockyOSAnalyzer) Version() int {
     return rockyAnalyzerVersion
 }
+
+// StaticPaths returns the static paths of the rocky analyzer
+func (a rockyOSAnalyzer) StaticPaths() []string {
+    return a.requiredFiles()
+}

@@ -96,3 +96,8 @@ func (a osReleaseAnalyzer) Type() analyzer.Type {
 func (a osReleaseAnalyzer) Version() int {
     return version
 }
+
+// StaticPaths returns the static paths of the os-release analyzer
+func (a osReleaseAnalyzer) StaticPaths() []string {
+    return requiredFiles
+}

@@ -59,6 +59,11 @@ func (a ubuntuESMAnalyzer) Version() int {
     return ESMAnalyzerVersion
 }
 
+// StaticPaths returns the static paths of the ubuntu ESM analyzer
+func (a ubuntuESMAnalyzer) StaticPaths() []string {
+    return ESMRequiredFiles
+}
+
 // structs to parse ESM status
 type status struct {
     Services []service `json:"services"`

@@ -62,3 +62,8 @@ func (a ubuntuOSAnalyzer) Type() analyzer.Type {
 func (a ubuntuOSAnalyzer) Version() int {
     return version
 }
+
+// StaticPaths returns the static paths of the ubuntu analyzer
+func (a ubuntuOSAnalyzer) StaticPaths() []string {
+    return requiredFiles
+}

@@ -209,6 +209,11 @@ func (a alpinePkgAnalyzer) Version() int {
     return analyzerVersion
 }
 
+// StaticPaths returns a list of static file paths to analyze
+func (a alpinePkgAnalyzer) StaticPaths() []string {
+    return requiredFiles
+}
+
 // decodeChecksumLine decodes checksum line
 func (a alpinePkgAnalyzer) decodeChecksumLine(ctx context.Context, line string) digest.Digest {
     if len(line) < 2 {

@@ -149,3 +149,7 @@ func normalizeLicense(s string) string {
 
     return strings.TrimSpace(s)
 }
+
+func (a *dpkgLicenseAnalyzer) StaticPaths() []string {
+    return []string{"usr/share/doc/"}
+}

@@ -372,3 +372,13 @@ func (a dpkgAnalyzer) Type() analyzer.Type {
 func (a dpkgAnalyzer) Version() int {
     return analyzerVersion
 }
+
+// StaticPaths returns a list of static file paths to analyze
+func (a dpkgAnalyzer) StaticPaths() []string {
+    return []string{
+        statusFile,
+        availableFile,
+        statusDir,
+        infoDir,
+    }
+}

@@ -208,6 +208,11 @@ func (a rpmPkgAnalyzer) Version() int {
     return version
 }
 
+// StaticPaths returns a list of static file paths to analyze
+func (a rpmPkgAnalyzer) StaticPaths() []string {
+    return requiredFiles
+}
+
 // splitFileName returns a name, version, release, epoch, arch:
 //
 // e.g.

@@ -92,3 +92,7 @@ func (a rpmqaPkgAnalyzer) Type() analyzer.Type {
 func (a rpmqaPkgAnalyzer) Version() int {
     return versionRpmqa
 }
+
+func (a rpmqaPkgAnalyzer) StaticPaths() []string {
+    return requiredRpmqaFiles
+}

@@ -96,3 +96,7 @@ func (a apkRepoAnalyzer) Type() analyzer.Type {
 func (a apkRepoAnalyzer) Version() int {
     return version
 }
+
+func (a apkRepoAnalyzer) StaticPaths() []string {
+    return requiredFiles
+}

@@ -5,6 +5,7 @@ import (
     "context"
     "crypto/sha256"
     "errors"
+    "io/fs"
     "os"
     "path"
     "path/filepath"

@@ -13,11 +14,11 @@ import (
 
     "github.com/go-git/go-git/v5"
     "github.com/google/wire"
-    "github.com/opencontainers/go-digest"
     "github.com/samber/lo"
     "golang.org/x/xerrors"
 
     "github.com/aquasecurity/trivy/pkg/cache"
+    "github.com/aquasecurity/trivy/pkg/digest"
     "github.com/aquasecurity/trivy/pkg/fanal/analyzer"
     "github.com/aquasecurity/trivy/pkg/fanal/artifact"
     "github.com/aquasecurity/trivy/pkg/fanal/handler"

@@ -25,6 +26,7 @@ import (
     "github.com/aquasecurity/trivy/pkg/fanal/walker"
     "github.com/aquasecurity/trivy/pkg/log"
     "github.com/aquasecurity/trivy/pkg/semaphore"
+    "github.com/aquasecurity/trivy/pkg/utils/fsutils"
     "github.com/aquasecurity/trivy/pkg/uuid"
 )
 

@@ -167,34 +169,19 @@ func (a Artifact) Inspect(ctx context.Context) (artifact.Reference, error) {
     }
     defer composite.Cleanup()
 
-    err = a.walker.Walk(a.rootPath, a.artifactOption.WalkerOption, func(filePath string, info os.FileInfo, opener analyzer.Opener) error {
-        dir := a.rootPath
-
-        // When the directory is the same as the filePath, a file was given
-        // instead of a directory, rewrite the file path and directory in this case.
-        if filePath == "." {
-            dir, filePath = path.Split(a.rootPath)
-        }
-
-        if err := a.analyzer.AnalyzeFile(ctx, &wg, limit, result, dir, filePath, info, opener, nil, opts); err != nil {
-            return xerrors.Errorf("analyze file (%s): %w", filePath, err)
-        }
-
-        // Skip post analysis if the file is not required
-        analyzerTypes := a.analyzer.RequiredPostAnalyzers(filePath, info)
-        if len(analyzerTypes) == 0 {
-            return nil
-        }
-
-        // Build filesystem for post analysis
-        if err := composite.CreateLink(analyzerTypes, dir, filePath, filepath.Join(dir, filePath)); err != nil {
-            return xerrors.Errorf("failed to create link: %w", err)
-        }
-
-        return nil
-    })
-    if err != nil {
-        return artifact.Reference{}, xerrors.Errorf("walk filesystem: %w", err)
+    // Use static paths instead of traversing the filesystem when all analyzers implement StaticPathAnalyzer
+    // so that we can analyze files faster
+    if paths, canUseStaticPaths := a.analyzer.StaticPaths(a.artifactOption.DisabledAnalyzers); canUseStaticPaths {
+        // Analyze files in static paths
+        a.logger.Debug("Analyzing files in static paths")
+        if err = a.analyzeWithStaticPaths(ctx, &wg, limit, result, composite, opts, paths); err != nil {
+            return artifact.Reference{}, xerrors.Errorf("analyze with static paths: %w", err)
+        }
+    } else {
+        // Analyze files by traversing the root directory
+        if err = a.analyzeWithRootDir(ctx, &wg, limit, result, composite, opts); err != nil {
+            return artifact.Reference{}, xerrors.Errorf("analyze with traversal: %w", err)
+        }
     }
 
     // Wait for all the goroutine to finish.

@@ -246,6 +233,61 @@ func (a Artifact) Inspect(ctx context.Context) (artifact.Reference, error) {
     }, nil
 }
 
+func (a Artifact) analyzeWithRootDir(ctx context.Context, wg *sync.WaitGroup, limit *semaphore.Weighted,
+    result *analyzer.AnalysisResult, composite *analyzer.CompositeFS, opts analyzer.AnalysisOptions) error {
+
+    root := a.rootPath
+    relativePath := ""
+
+    // When the root path is a file, rewrite the root path and relative path
+    if fsutils.FileExists(a.rootPath) {
+        root, relativePath = path.Split(a.rootPath)
+    }
+    return a.analyzeWithTraversal(ctx, root, relativePath, wg, limit, result, composite, opts)
+}
+
+// analyzeWithStaticPaths analyzes files using static paths from analyzers
+func (a Artifact) analyzeWithStaticPaths(ctx context.Context, wg *sync.WaitGroup, limit *semaphore.Weighted,
+    result *analyzer.AnalysisResult, composite *analyzer.CompositeFS, opts analyzer.AnalysisOptions,
+    staticPaths []string) error {
+
+    // Process each static path
+    for _, relativePath := range staticPaths {
+        if err := a.analyzeWithTraversal(ctx, a.rootPath, relativePath, wg, limit, result, composite, opts); errors.Is(err, fs.ErrNotExist) {
+            continue
+        } else if err != nil {
+            return xerrors.Errorf("analyze with traversal: %w", err)
+        }
+    }
+
+    return nil
+}
+
+// analyzeWithTraversal analyzes files by traversing the entire filesystem
+func (a Artifact) analyzeWithTraversal(ctx context.Context, root, relativePath string, wg *sync.WaitGroup, limit *semaphore.Weighted,
+    result *analyzer.AnalysisResult, composite *analyzer.CompositeFS, opts analyzer.AnalysisOptions) error {
+
+    return a.walker.Walk(filepath.Join(root, relativePath), a.artifactOption.WalkerOption, func(filePath string, info os.FileInfo, opener analyzer.Opener) error {
+        filePath = path.Join(relativePath, filePath)
+        if err := a.analyzer.AnalyzeFile(ctx, wg, limit, result, root, filePath, info, opener, nil, opts); err != nil {
+            return xerrors.Errorf("analyze file (%s): %w", filePath, err)
+        }
+
+        // Skip post analysis if the file is not required
+        analyzerTypes := a.analyzer.RequiredPostAnalyzers(filePath, info)
+        if len(analyzerTypes) == 0 {
+            return nil
+        }
+
+        // Build filesystem for post analysis
+        if err := composite.CreateLink(analyzerTypes, root, filePath, filepath.Join(root, filePath)); err != nil {
+            return xerrors.Errorf("failed to create link: %w", err)
+        }
+
+        return nil
+    })
+}
+
 func (a Artifact) Clean(reference artifact.Reference) error {
     // Don't delete cache if it's a clean git repository
     if a.commitHash != "" {

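One detail in `analyzeWithStaticPaths` worth noting: static paths that do not exist under the scan root are skipped rather than treated as errors, since not every distribution ships every known file. A standalone sketch of that error-handling pattern, using `filepath.WalkDir` as a stand-in for Trivy's walker and a temporary directory as a stand-in for the scan root:

```go
package main

import (
	"errors"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
)

// walk stands in for the real filesystem walker; like filepath.WalkDir, it
// fails with an error wrapping fs.ErrNotExist when the starting path is missing.
func walk(root string) error {
	return filepath.WalkDir(root, func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		fmt.Println("visited", path)
		return nil
	})
}

func main() {
	root := os.TempDir()                                                  // stand-in for the scan root
	staticPaths := []string{"etc/alpine-release", "lib/apk/db/installed"} // known package-database paths
	for _, p := range staticPaths {
		err := walk(filepath.Join(root, p))
		if errors.Is(err, fs.ErrNotExist) {
			fmt.Println("skipping missing path:", p) // expected: not every distro ships every file
			continue
		} else if err != nil {
			fmt.Fprintln(os.Stderr, "walk failed:", err)
			return
		}
	}
}
```
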
@@ -2396,3 +2396,117 @@ func TestYAMLConfigScan(t *testing.T) {
         })
     }
 }
+
+// recordingWalker wraps an existing walker and records which paths were walked
+type recordingWalker struct {
+    base        Walker
+    walkedRoots []string
+}
+
+func newRecordingWalker(base Walker) *recordingWalker {
+    return &recordingWalker{
+        base: base,
+    }
+}
+
+func (w *recordingWalker) Walk(root string, option walker.Option, walkFn walker.WalkFunc) error {
+    w.walkedRoots = append(w.walkedRoots, filepath.ToSlash(root))
+    // Call the original walker
+    return w.base.Walk(root, option, walkFn)
+}
+
+// TestArtifact_AnalysisStrategy tests the different analysis strategies
+func TestArtifact_AnalysisStrategy(t *testing.T) {
+    // Use testdata/alpine directly
+    testDir := "testdata/alpine"
+
+    tests := []struct {
+        name              string
+        disabledAnalyzers []analyzer.Type
+        wantRoots         []string
+    }{
+        {
+            name:              "static paths",
+            disabledAnalyzers: append(analyzer.TypeConfigFiles, analyzer.TypePip, analyzer.TypeSecret),
+            wantRoots: []string{
+                "testdata/alpine/etc/alpine-release",
+                "testdata/alpine/lib/apk/db/installed",
+            },
+        },
+        {
+            name: "traversing root dir",
+            wantRoots: []string{
+                testDir, // only the root directory is walked
+            },
+        },
+    }
+
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            // Create a new artifact with the recording walker
+            baseWalker := walker.NewFS()
+            rw := newRecordingWalker(baseWalker)
+
+            // Create artifact with recording walker
+            a, err := NewArtifact(testDir, cache.NewMemoryCache(), rw, artifact.Option{
+                DisabledAnalyzers: tt.disabledAnalyzers,
+            })
+            require.NoError(t, err)
+
+            // Run the inspection
+            _, err = a.Inspect(t.Context())
+            require.NoError(t, err)
+
+            // Check if the walked roots match the expected roots
+            assert.ElementsMatch(t, tt.wantRoots, rw.walkedRoots)
+        })
+    }
+}
+
+// TestAnalyzerGroup_StaticPaths tests the StaticPaths method of AnalyzerGroup
+func TestAnalyzerGroup_StaticPaths(t *testing.T) {
+    tests := []struct {
+        name              string
+        disabledAnalyzers []analyzer.Type
+        want              []string
+        wantAllStatic     bool
+    }{
+        {
+            name:              "all analyzers implement StaticPathAnalyzer",
+            disabledAnalyzers: append(analyzer.TypeConfigFiles, analyzer.TypePip, analyzer.TypeSecret),
+            want: []string{
+                "lib/apk/db/installed",
+                "etc/alpine-release",
+            },
+            wantAllStatic: true,
+        },
+        {
+            name:          "some analyzers don't implement StaticPathAnalyzer",
+            want:          []string{},
+            wantAllStatic: false,
+        },
+        {
+            name:              "disable all analyzers",
+            disabledAnalyzers: append(analyzer.TypeConfigFiles, analyzer.TypePip, analyzer.TypeApk, analyzer.TypeAlpine, analyzer.TypeSecret),
+            want:              []string{},
+            wantAllStatic:     true,
+        },
+    }
+
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            // Create a new analyzer group
+            a, err := analyzer.NewAnalyzerGroup(analyzer.AnalyzerOptions{})
+            require.NoError(t, err)
+
+            // Get static paths
+            gotPaths, gotAllStatic := a.StaticPaths(tt.disabledAnalyzers)
+
+            // Check if all analyzers implement StaticPathAnalyzer
+            assert.Equal(t, tt.wantAllStatic, gotAllStatic)
+
+            // Check paths
+            assert.ElementsMatch(t, tt.want, gotPaths)
+        })
+    }
+}

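To run only the tests added above, something like the following should work, assuming they live alongside the local filesystem artifact (the package containing `NewArtifact`, likely `pkg/fanal/artifact/local`); note that `t.Context()` requires a recent Go toolchain:

```bash
go test ./pkg/fanal/artifact/local/ -run 'TestArtifact_AnalysisStrategy|TestAnalyzerGroup_StaticPaths' -v
```
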
@@ -4,7 +4,9 @@ import "golang.org/x/sync/semaphore"
 
 const defaultSize = 5
 
-func New(parallel int) *semaphore.Weighted {
+type Weighted = semaphore.Weighted
+
+func New(parallel int) *Weighted {
     if parallel == 0 {
         parallel = defaultSize
     }

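The `Weighted` alias lets callers refer to the semaphore type through Trivy's own `semaphore` package instead of importing `golang.org/x/sync/semaphore` everywhere; because it is a type alias, the two names are fully interchangeable. A small self-contained sketch of the pattern; the `semaphore.NewWeighted(int64(parallel))` body is an assumption based on the surrounding context, not part of this hunk:

```go
package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/semaphore"
)

const defaultSize = 5

// Weighted is a type alias, so *Weighted and *semaphore.Weighted are the same type.
type Weighted = semaphore.Weighted

func New(parallel int) *Weighted {
	if parallel == 0 {
		parallel = defaultSize
	}
	return semaphore.NewWeighted(int64(parallel))
}

func main() {
	sem := New(2)
	if err := sem.Acquire(context.Background(), 1); err != nil {
		fmt.Println("acquire:", err)
		return
	}
	defer sem.Release(1)
	fmt.Println("holding one of two slots")
}
```
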