feat: add log.FilePath() function for logger (#7080)
Repository: https://github.com/aquasecurity/trivy.git
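This commit adds a FilePath() helper to the log package and switches call sites that previously built the attribute by hand (log.String("file_path", ...), log.String("path", ...) or log.String("file", ...)) over to the new helper, so the key is always spelled "file_path". A minimal sketch of the before/after pattern; the wrapping function and package name are illustrative, only the log calls are taken from the hunks below:

    package example

    import "github.com/aquasecurity/trivy/pkg/log"

    // logConfigStatus is an illustrative wrapper, not part of the commit.
    func logConfigStatus(configFile string) {
        // Before: each call site spells the attribute key by hand.
        log.Debug("Config file not found", log.String("file_path", configFile))

        // After: log.FilePath pins the key to "file_path" in one place.
        log.Debug("Config file not found", log.FilePath(configFile))
    }
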

@@ -157,12 +157,12 @@ func initConfig(configFile string) error {
    viper.SetConfigType("yaml")
    if err := viper.ReadInConfig(); err != nil {
        if errors.Is(err, os.ErrNotExist) {
-           log.Debug("Config file not found", log.String("file_path", configFile))
+           log.Debug("Config file not found", log.FilePath(configFile))
            return nil
        }
        return xerrors.Errorf("config file %q loading error: %s", configFile, err)
    }
-   log.Info("Loaded", log.String("file_path", configFile))
+   log.Info("Loaded", log.FilePath(configFile))
    return nil
}

@@ -83,7 +83,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
}

func (p *Parser) parseArtifact(filePath string, size int64, r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
-   p.logger.Debug("Parsing Java artifacts...", log.String("file", filePath))
+   p.logger.Debug("Parsing Java artifacts...", log.FilePath(filePath))

    // Try to extract artifactId and version from the file name
    // e.g. spring-core-5.3.4-SNAPSHOT.jar => sprint-core, 5.3.4-SNAPSHOT

@@ -196,7 +196,7 @@ func (p *Parser) parseRoot(root artifact, uniqModules map[string]struct{}) ([]ft
        moduleArtifact, err := p.parseModule(result.filePath, relativePath)
        if err != nil {
            p.logger.Debug("Unable to parse the module",
-               log.String("file_path", result.filePath), log.Err(err))
+               log.FilePath(result.filePath), log.Err(err))
            continue
        }

@@ -413,7 +413,7 @@ func (ag AnalyzerGroup) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, lim
        }
        rc, err := opener()
        if errors.Is(err, fs.ErrPermission) {
-           ag.logger.Debug("Permission error", log.String("file_path", filePath))
+           ag.logger.Debug("Permission error", log.FilePath(filePath))
            break
        } else if err != nil {
            return xerrors.Errorf("unable to open %s: %w", filePath, err)

@@ -87,7 +87,7 @@ func toApplication(fileType types.LangType, filePath, libFilePath string, r xio.
    // Calculate the file digest when one of `spdx` formats is selected
    d, err := calculateDigest(r)
    if err != nil {
-       log.Warn("Unable to get checksum", log.String("file_path", filePath), log.Err(err))
+       log.Warn("Unable to get checksum", log.FilePath(filePath), log.Err(err))
    }

    deps := make(map[string][]string)

@@ -114,7 +114,7 @@ func (a pubSpecLockAnalyzer) findDependsOn() (map[string][]string, error) {
    if err := fsutils.WalkDir(os.DirFS(dir), ".", required, func(path string, d fs.DirEntry, r io.Reader) error {
        id, dependsOn, err := parsePubSpecYaml(r)
        if err != nil {
-           a.logger.Debug("Unable to parse pubspec.yaml", log.String("path", path), log.Err(err))
+           a.logger.Debug("Unable to parse pubspec.yaml", log.FilePath(path), log.Err(err))
            return nil
        }
        if id != "" {

@@ -80,7 +80,7 @@ func (a gradleLockAnalyzer) parsePoms() (map[string]pomXML, error) {
    err := fsutils.WalkDir(os.DirFS(cacheDir), ".", required, func(path string, _ fs.DirEntry, r io.Reader) error {
        pom, err := parsePom(r, path)
        if err != nil {
-           a.logger.Debug("Unable to parse pom", log.String("file_path", path), log.Err(err))
+           a.logger.Debug("Unable to parse pom", log.FilePath(path), log.Err(err))
            return nil
        }

@@ -69,7 +69,7 @@ func (a juliaAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysi
        // Parse Project.toml alongside Manifest.toml to identify the direct dependencies. This mutates `app`.
        if err = a.analyzeDependencies(input.FS, filepath.Dir(path), app); err != nil {
            a.logger.Warn("Unable to parse file to analyze dependencies",
-               log.String("FILEPATH", filepath.Join(filepath.Dir(path), types.JuliaProject)), log.Err(err))
+               log.FilePath(filepath.Join(filepath.Dir(path), types.JuliaProject)), log.Err(err))
        }

        sort.Sort(app.Packages)

@@ -45,7 +45,7 @@ func (l *License) Traverse(fsys fs.FS, root string) (map[string][]string, error)
        }

        l.logger.Debug("License names are missing, an attempt to find them in the license file",
-           log.String("file", pkgJSONPath), log.String("license_file", licenseFileName))
+           log.FilePath(pkgJSONPath), log.String("license_file", licenseFileName))
        licenseFilePath := path.Join(path.Dir(pkgJSONPath), licenseFileName)

        if findings, err := classifyLicense(licenseFilePath, l.classifierConfidenceLevel, fsys); err != nil {

@@ -81,7 +81,7 @@ func (a yarnAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysis
        // Parse package.json alongside yarn.lock to find direct deps and mark dev deps
        if err = a.analyzeDependencies(input.FS, path.Dir(filePath), app); err != nil {
            a.logger.Warn("Unable to parse package.json to remove dev dependencies",
-               log.String("file_path", path.Join(path.Dir(filePath), types.NpmPkg)), log.Err(err))
+               log.FilePath(path.Join(path.Dir(filePath), types.NpmPkg)), log.Err(err))
        }

        // Fill licenses

@@ -157,7 +157,7 @@ func (a yarnAnalyzer) analyzeDependencies(fsys fs.FS, dir string, app *types.App
    packageJsonPath := path.Join(dir, types.NpmPkg)
    directDeps, directDevDeps, err := a.parsePackageJsonDependencies(fsys, packageJsonPath)
    if errors.Is(err, fs.ErrNotExist) {
-       a.logger.Debug("package.json not found", log.String("path", packageJsonPath))
+       a.logger.Debug("package.json not found", log.FilePath(packageJsonPath))
        return nil
    } else if err != nil {
        return xerrors.Errorf("unable to parse %s: %w", dir, err)

@@ -62,7 +62,7 @@ func (a composerAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnal
        // Parse composer.json alongside composer.lock to identify the direct dependencies
        if err = a.mergeComposerJson(input.FS, filepath.Dir(path), app); err != nil {
            log.Warn("Unable to parse composer.json to identify direct dependencies",
-               log.String("path", filepath.Join(filepath.Dir(path), types.ComposerJson)), log.Err(err))
+               log.FilePath(filepath.Join(filepath.Dir(path), types.ComposerJson)), log.Err(err))
        }
        sort.Sort(app.Packages)
        apps = append(apps, *app)

@@ -109,7 +109,7 @@ func (a composerAnalyzer) mergeComposerJson(fsys fs.FS, dir string, app *types.A
    p, err := a.parseComposerJson(fsys, path)
    if errors.Is(err, fs.ErrNotExist) {
        // Assume all the packages are direct dependencies as it cannot identify them from composer.lock
-       log.Debug("Unable to determine the direct dependencies, composer.json not found", log.String("path", path))
+       log.Debug("Unable to determine the direct dependencies, composer.json not found", log.FilePath(path))
        return nil
    } else if err != nil {
        return xerrors.Errorf("unable to parse %s: %w", path, err)

@@ -117,7 +117,7 @@ func (a pipLibraryAnalyzer) pkgLicense(pkgName, pkgVer, spDir string) []string {

    metadataPkg, _, err := a.metadataParser.Parse(metadataFile)
    if err != nil {
-       a.logger.Warn("Unable to parse METADATA file", log.String("path", metadataPath), log.Err(err))
+       a.logger.Warn("Unable to parse METADATA file", log.FilePath(metadataPath), log.Err(err))
        return nil
    }

@@ -59,7 +59,7 @@ func (a poetryAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalys
        // Parse pyproject.toml alongside poetry.lock to identify the direct dependencies
        if err = a.mergePyProject(input.FS, filepath.Dir(path), app); err != nil {
            a.logger.Warn("Unable to parse pyproject.toml to identify direct dependencies",
-               log.String("path", filepath.Join(filepath.Dir(path), types.PyProject)), log.Err(err))
+               log.FilePath(filepath.Join(filepath.Dir(path), types.PyProject)), log.Err(err))
        }
        apps = append(apps, *app)

@@ -97,7 +97,7 @@ func (a poetryAnalyzer) mergePyProject(fsys fs.FS, dir string, app *types.Applic
    p, err := a.parsePyProject(fsys, path)
    if errors.Is(err, fs.ErrNotExist) {
        // Assume all the packages are direct dependencies as it cannot identify them from poetry.lock
-       a.logger.Debug("pyproject.toml not found", log.String("path", path))
+       a.logger.Debug("pyproject.toml not found", log.FilePath(path))
        return nil
    } else if err != nil {
        return xerrors.Errorf("unable to parse %s: %w", path, err)

@@ -72,7 +72,7 @@ func (a cargoAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysi
        // Parse Cargo.toml alongside Cargo.lock to identify the direct dependencies
        if err = a.removeDevDependencies(input.FS, path.Dir(filePath), app); err != nil {
            a.logger.Warn("Unable to parse Cargo.toml q to identify direct dependencies",
-               log.String("path", path.Join(path.Dir(filePath), types.CargoToml)), log.Err(err))
+               log.FilePath(path.Join(path.Dir(filePath), types.CargoToml)), log.Err(err))
        }
        sort.Sort(app.Packages)
        apps = append(apps, *app)

@@ -109,7 +109,7 @@ func (a cargoAnalyzer) removeDevDependencies(fsys fs.FS, dir string, app *types.
    cargoTOMLPath := path.Join(dir, types.CargoToml)
    directDeps, err := a.parseRootCargoTOML(fsys, cargoTOMLPath)
    if errors.Is(err, fs.ErrNotExist) {
-       a.logger.Debug("Cargo.toml not found", log.String("path", cargoTOMLPath))
+       a.logger.Debug("Cargo.toml not found", log.FilePath(cargoTOMLPath))
        return nil
    } else if err != nil {
        return xerrors.Errorf("unable to parse %s: %w", cargoTOMLPath, err)

@@ -92,7 +92,7 @@ func newLicenseFileAnalyzer() *licenseFileAnalyzer {

func (a *licenseFileAnalyzer) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
    ctx = log.WithContextPrefix(ctx, "license")
-   log.DebugContext(ctx, "License scanning", log.String("file_path", input.FilePath))
+   log.DebugContext(ctx, "License scanning", log.FilePath(input.FilePath))

    // need files to be text based, readable files
    readable, err := isHumanReadable(input.Content, input.Info.Size())

@@ -61,7 +61,7 @@ func (a dpkgAnalyzer) PostAnalyze(_ context.Context, input analyzer.PostAnalysis
    // parse `available` file to get digest for packages
    digests, err := a.parseDpkgAvailable(input.FS)
    if err != nil {
-       a.logger.Debug("Unable to parse the available file", log.String("file", availableFile), log.Err(err))
+       a.logger.Debug("Unable to parse the available file", log.FilePath(availableFile), log.Err(err))
    }

    required := func(path string, d fs.DirEntry) bool {

@@ -169,7 +169,7 @@ func (a dpkgAnalyzer) parseDpkgAvailable(fsys fs.FS) (map[string]digest.Digest,
    for scanner.Scan() {
        header, err := scanner.Header()
        if !errors.Is(err, io.EOF) && err != nil {
-           a.logger.Warn("Parse error", log.String("file", availableFile), log.Err(err))
+           a.logger.Warn("Parse error", log.FilePath(availableFile), log.Err(err))
            continue
        }
        name, version, checksum := header.Get("Package"), header.Get("Version"), header.Get("SHA256")

@@ -195,7 +195,7 @@ func (a dpkgAnalyzer) parseDpkgStatus(filePath string, r io.Reader, digests map[
    for scanner.Scan() {
        header, err := scanner.Header()
        if !errors.Is(err, io.EOF) && err != nil {
-           a.logger.Warn("Parse error", log.String("file", filePath), log.Err(err))
+           a.logger.Warn("Parse error", log.FilePath(filePath), log.Err(err))
            continue
        }

@@ -70,7 +70,7 @@ func (h unpackagedHook) Handle(ctx context.Context, res *analyzer.AnalysisResult
    }

    if len(bom.Applications) > 0 {
-       h.logger.Info("Found SBOM attestation in Rekor", log.String("file_path", filePath))
+       h.logger.Info("Found SBOM attestation in Rekor", log.FilePath(filePath))
        // Take the first app since this SBOM should contain a single application.
        app := bom.Applications[0]
        app.FilePath = filePath // Use the original file path rather than the one in the SBOM.

@@ -278,6 +278,11 @@ func Prefix(prefix string) slog.Attr {
    return slog.Any(prefixKey, logPrefix("["+prefix+"] "))
}

+// FilePath returns an Attr that represents a filePath.
+func FilePath(filePath string) slog.Attr {
+   return String("file_path", filePath)
+}
+
func isLogPrefix(a slog.Attr) bool {
    _, ok := a.Value.Any().(logPrefix)
    return ok
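
The hunk above is the entire new helper: FilePath(filePath) simply returns String("file_path", filePath) as a slog.Attr. Since Trivy's log package wraps the standard library's log/slog (slog.Attr and slog.Any appear in this same hunk), the pattern can be reproduced with plain slog; the helper name, handler setup, and example path below are illustrative assumptions, not Trivy code:

    package main

    import (
        "log/slog"
        "os"
    )

    // filePathAttr mirrors the FilePath helper: it fixes the attribute key to
    // "file_path" so call sites cannot drift to "path", "file", and so on.
    func filePathAttr(p string) slog.Attr {
        return slog.String("file_path", p)
    }

    func main() {
        logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
            Level: slog.LevelDebug,
        }))
        logger.Debug("Config file not found", filePathAttr("trivy.yaml"))
    }
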

@@ -445,7 +445,7 @@ func (m *wasmModule) Required(filePath string, _ os.FileInfo) bool {

func (m *wasmModule) Analyze(ctx context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
    filePath := "/" + filepath.ToSlash(input.FilePath)
-   log.Debug("Module analyzing...", log.String("module", m.name), log.String("file_path", filePath))
+   log.Debug("Module analyzing...", log.String("module", m.name), log.FilePath(filePath))

    // Wasm module instances are not Goroutine safe, so we take look here since Analyze might be called concurrently.
    // TODO: This is temporary solution and we could improve the Analyze performance by having module instance pool.

@@ -39,7 +39,7 @@ func WalkDir[T any](ctx context.Context, fsys fs.FS, root string, parallel int,
        if err != nil {
            return err
        } else if info.Size() == 0 {
-           log.Debug("Skip the empty file", log.String("file_path", path))
+           log.Debug("Skip the empty file", log.FilePath(path))
            return nil
        }

@@ -105,7 +105,7 @@ func walk[T any](ctx context.Context, fsys fs.FS, path string, c chan T, onFile
    }
    res, err := onFile(path, info, rsa)
    if err != nil {
-       log.Debug("Walk error", log.String("file_path", path), log.Err(err))
+       log.Debug("Walk error", log.FilePath(path), log.Err(err))
        return nil
    }

@@ -223,7 +223,7 @@ func parseIgnoreYAML(ignoreFile string) (IgnoreConfig, error) {
        return IgnoreConfig{}, xerrors.Errorf("file open error: %w", err)
    }
    defer f.Close()
-   log.Debug("Found an ignore yaml", log.String("path", ignoreFile))
+   log.Debug("Found an ignore yaml", log.FilePath(ignoreFile))

    // Parse the YAML content
    var ignoreConfig IgnoreConfig

@@ -239,7 +239,7 @@ func parseIgnore(ignoreFile string) (IgnoreFindings, error) {
        return nil, xerrors.Errorf("file open error: %w", err)
    }
    defer f.Close()
-   log.Debug("Found an ignore file", log.String("path", ignoreFile))
+   log.Debug("Found an ignore file", log.FilePath(ignoreFile))

    var ignoredFindings IgnoreFindings
    scanner := bufio.NewScanner(f)

@@ -85,7 +85,7 @@ func (s *scanner) scanVulnerabilities(ctx context.Context, app ftypes.Applicatio
        printedTypes[app.Type] = struct{}{}
    }

-   log.DebugContext(ctx, "Scanning packages for vulnerabilities", log.String("file_path", app.FilePath))
+   log.DebugContext(ctx, "Scanning packages for vulnerabilities", log.FilePath(app.FilePath))
    vulns, err := library.Detect(ctx, app.Type, app.Packages)
    if err != nil {
        return nil, xerrors.Errorf("failed vulnerability detection of libraries: %w", err)

@@ -198,7 +198,7 @@ func (s Scanner) MisconfsToResults(misconfs []ftypes.Misconfiguration) types.Res
    log.Info("Detected config files", log.Int("num", len(misconfs)))
    var results types.Results
    for _, misconf := range misconfs {
-       log.Debug("Scanned config file", log.String("path", misconf.FilePath))
+       log.Debug("Scanned config file", log.FilePath(misconf.FilePath))

        var detected []types.DetectedMisconfiguration

@@ -237,7 +237,7 @@ func (s Scanner) secretsToResults(secrets []ftypes.Secret, options types.ScanOpt

    var results types.Results
    for _, secret := range secrets {
-       log.Debug("Secret file", log.String("path", secret.FilePath))
+       log.Debug("Secret file", log.FilePath(secret.FilePath))

        results = append(results, types.Result{
            Target: secret.FilePath,

@@ -88,7 +88,7 @@ func WalkDir(fsys fs.FS, root string, required WalkDirRequiredFunc, fn WalkDirFu
        defer f.Close()

        if err = fn(path, d, f); err != nil {
-           log.Debug("Walk error", log.String("file_path", path), log.Err(err))
+           log.Debug("Walk error", log.FilePath(path), log.Err(err))
        }
        return nil
    })