mirror of
https://github.com/aquasecurity/trivy.git
synced 2025-12-09 06:10:47 -08:00
Compare commits
25 Commits
release/v0
...
refactor/c
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
51cefc4221 | ||
|
|
84eb62340e | ||
|
|
346a6b794d | ||
|
|
4a38d0121b | ||
|
|
e25de25262 | ||
|
|
4b84dabd15 | ||
|
|
9792611b36 | ||
|
|
13608eac24 | ||
|
|
a0dc3b688e | ||
|
|
9dcd06fda7 | ||
|
|
12cf218032 | ||
|
|
86138329cb | ||
|
|
a032ad696a | ||
|
|
36f8d0fd67 | ||
|
|
f1329c7ea1 | ||
|
|
c5e03f7d8f | ||
|
|
a8a7ddb127 | ||
|
|
bff0e9b034 | ||
|
|
cc4771158b | ||
|
|
b9b27fce42 | ||
|
|
bfa99d26fa | ||
|
|
890a360244 | ||
|
|
ad1c37984e | ||
|
|
dd28d4e238 | ||
|
|
1d42969518 |
4
.github/workflows/backport.yaml
vendored
4
.github/workflows/backport.yaml
vendored
@@ -41,8 +41,10 @@ jobs:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Extract branch name
|
||||
env:
|
||||
COMMENT_BODY: ${{ github.event.comment.body }}
|
||||
run: |
|
||||
BRANCH_NAME=$(echo ${{ github.event.comment.body }} | grep -oE '@aqua-bot backport\s+(\S+)' | awk '{print $3}')
|
||||
BRANCH_NAME=$(echo $COMMENT_BODY | grep -oE '@aqua-bot backport\s+(\S+)' | awk '{print $3}')
|
||||
echo "BRANCH_NAME=$BRANCH_NAME" >> $GITHUB_ENV
|
||||
|
||||
- name: Set up Git user
|
||||
|
||||
8
.github/workflows/cache-test-images.yaml
vendored
8
.github/workflows/cache-test-images.yaml
vendored
@@ -27,7 +27,7 @@ jobs:
|
||||
run: |
|
||||
source integration/testimages.ini
|
||||
IMAGE_LIST=$(skopeo list-tags docker://$TEST_IMAGES)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags += ["containerd"] | .Tags | sort' | sha256sum | cut -d' ' -f1)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags += ["containerd"] | .Tags |= sort' | sha256sum | cut -d' ' -f1)
|
||||
echo "digest=$DIGEST" >> $GITHUB_OUTPUT
|
||||
|
||||
## We need to work with test image cache only for main branch
|
||||
@@ -37,8 +37,6 @@ jobs:
|
||||
with:
|
||||
path: integration/testdata/fixtures/images
|
||||
key: cache-test-images-${{ steps.image-digest.outputs.digest }}
|
||||
restore-keys:
|
||||
cache-test-images-
|
||||
|
||||
- name: Download test images
|
||||
if: github.ref_name == 'main'
|
||||
@@ -66,7 +64,7 @@ jobs:
|
||||
run: |
|
||||
source integration/testimages.ini
|
||||
IMAGE_LIST=$(skopeo list-tags docker://$TEST_VM_IMAGES)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags | sort' | sha256sum | cut -d' ' -f1)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags |= sort' | sha256sum | cut -d' ' -f1)
|
||||
echo "digest=$DIGEST" >> $GITHUB_OUTPUT
|
||||
|
||||
## We need to work with test VM image cache only for main branch
|
||||
@@ -76,8 +74,6 @@ jobs:
|
||||
with:
|
||||
path: integration/testdata/fixtures/vm-images
|
||||
key: cache-test-vm-images-${{ steps.image-digest.outputs.digest }}
|
||||
restore-keys:
|
||||
cache-test-vm-images-
|
||||
|
||||
- name: Download test VM images
|
||||
if: github.ref_name == 'main'
|
||||
|
||||
8
.github/workflows/release-please.yaml
vendored
8
.github/workflows/release-please.yaml
vendored
@@ -47,10 +47,12 @@ jobs:
|
||||
- name: Extract version and PR number from commit message
|
||||
id: extract_info
|
||||
shell: bash
|
||||
env:
|
||||
COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
|
||||
run: |
|
||||
echo "version=$( echo "${{ github.event.head_commit.message }}" | sed 's/^release: v\([0-9]\+\.[0-9]\+\.[0-9]\+\).*$/\1/' )" >> $GITHUB_OUTPUT
|
||||
echo "pr_number=$( echo "${{ github.event.head_commit.message }}" | sed 's/.*(\#\([0-9]\+\)).*$/\1/' )" >> $GITHUB_OUTPUT
|
||||
echo "release_branch=release/v$( echo "${{ github.event.head_commit.message }}" | sed 's/^release: v\([0-9]\+\.[0-9]\+\).*$/\1/' )" >> $GITHUB_OUTPUT
|
||||
echo "version=$( echo "$COMMIT_MESSAGE" | sed 's/^release: v\([0-9]\+\.[0-9]\+\.[0-9]\+\).*$/\1/' )" >> $GITHUB_OUTPUT
|
||||
echo "pr_number=$( echo "$COMMIT_MESSAGE" | sed 's/.*(\#\([0-9]\+\)).*$/\1/' )" >> $GITHUB_OUTPUT
|
||||
echo "release_branch=release/v$( echo "$COMMIT_MESSAGE" | sed 's/^release: v\([0-9]\+\.[0-9]\+\).*$/\1/' )" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Tag release
|
||||
if: ${{ steps.extract_info.outputs.version }}
|
||||
|
||||
4
.github/workflows/release-pr-check.yaml
vendored
4
.github/workflows/release-pr-check.yaml
vendored
@@ -11,8 +11,10 @@ jobs:
|
||||
steps:
|
||||
- name: Check PR author
|
||||
id: check_author
|
||||
env:
|
||||
PR_AUTHOR: ${{ github.event.pull_request.user.login }}
|
||||
run: |
|
||||
if [ "${{ github.actor }}" != "aqua-bot" ]; then
|
||||
if [ "$PR_AUTHOR" != "aqua-bot" ]; then
|
||||
echo "::error::This branch is intended for automated backporting by bot. Please refer to the documentation:"
|
||||
echo "::error::https://trivy.dev/latest/community/maintainer/backporting/"
|
||||
exit 1
|
||||
|
||||
84
.github/workflows/semantic-pr.yaml
vendored
84
.github/workflows/semantic-pr.yaml
vendored
@@ -1,22 +1,23 @@
|
||||
name: "Lint PR title"
|
||||
name: "Validate PR Title"
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- edited
|
||||
- synchronize
|
||||
|
||||
jobs:
|
||||
main:
|
||||
validate:
|
||||
name: Validate PR title
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: amannn/action-semantic-pull-request@v5
|
||||
- name: Validate PR title
|
||||
shell: bash
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
types: |
|
||||
PR_TITLE: ${{ github.event.pull_request.title }}
|
||||
# Valid types
|
||||
VALID_TYPES: |
|
||||
feat
|
||||
fix
|
||||
docs
|
||||
@@ -29,13 +30,15 @@ jobs:
|
||||
chore
|
||||
revert
|
||||
release
|
||||
|
||||
scopes: |
|
||||
# Valid scopes categorized by area
|
||||
VALID_SCOPES: |
|
||||
# Scanners
|
||||
vuln
|
||||
misconf
|
||||
secret
|
||||
license
|
||||
|
||||
# Targets
|
||||
image
|
||||
fs
|
||||
repo
|
||||
@@ -46,6 +49,7 @@ jobs:
|
||||
vm
|
||||
plugin
|
||||
|
||||
# OS
|
||||
alpine
|
||||
wolfi
|
||||
chainguard
|
||||
@@ -62,6 +66,7 @@ jobs:
|
||||
distroless
|
||||
windows
|
||||
|
||||
# Languages
|
||||
ruby
|
||||
php
|
||||
python
|
||||
@@ -71,7 +76,7 @@ jobs:
|
||||
java
|
||||
go
|
||||
c
|
||||
c\+\+
|
||||
c++
|
||||
elixir
|
||||
dart
|
||||
swift
|
||||
@@ -79,29 +84,80 @@ jobs:
|
||||
conda
|
||||
julia
|
||||
|
||||
# Package types
|
||||
os
|
||||
lang
|
||||
|
||||
# IaC
|
||||
kubernetes
|
||||
dockerfile
|
||||
terraform
|
||||
cloudformation
|
||||
|
||||
# Container
|
||||
docker
|
||||
podman
|
||||
containerd
|
||||
oci
|
||||
|
||||
# SBOM
|
||||
sbom
|
||||
spdx
|
||||
cyclonedx
|
||||
|
||||
# Misc
|
||||
cli
|
||||
flag
|
||||
|
||||
cyclonedx
|
||||
spdx
|
||||
purl
|
||||
vex
|
||||
|
||||
helm
|
||||
report
|
||||
db
|
||||
parser
|
||||
deps
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
# Convert env vars to regex alternatives, excluding comments and empty lines
|
||||
TYPES_REGEX=$(echo "$VALID_TYPES" | grep -v '^$' | paste -sd '|')
|
||||
SCOPES_REGEX=$(echo "$VALID_SCOPES" | grep -v '^$' | grep -v '^#' | paste -sd '|')
|
||||
|
||||
# Basic format check (should match: type(scope): description or type: description)
|
||||
FORMAT_REGEX="^[a-z]+(\([a-z0-9+]+\))?!?: .+$"
|
||||
if ! echo "$PR_TITLE" | grep -qE "$FORMAT_REGEX"; then
|
||||
echo "Error: Invalid PR title format"
|
||||
echo "Expected format: <type>(<scope>): <description> or <type>: <description>"
|
||||
echo "Examples:"
|
||||
echo " feat(vuln): add new vulnerability detection"
|
||||
echo " fix: correct parsing logic"
|
||||
echo " docs(kubernetes): update installation guide"
|
||||
echo -e "\nCurrent title: $PR_TITLE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Extract type and scope for validation
|
||||
TYPE=$(echo "$PR_TITLE" | sed -E 's/^([a-z]+)(\([a-z0-9+]+\))?!?: .+$/\1/')
|
||||
SCOPE=$(echo "$PR_TITLE" | sed -E 's/^[a-z]+\(([a-z0-9+]+)\)!?: .+$/\1/; t; s/.*//')
|
||||
|
||||
# Validate type
|
||||
if ! echo "$VALID_TYPES" | grep -qx "$TYPE"; then
|
||||
echo "Error: Invalid type '${TYPE}'"
|
||||
echo -e "\nValid types:"
|
||||
echo "$VALID_TYPES" | grep -v '^$' | sed 's/^/- /'
|
||||
echo -e "\nCurrent title: $PR_TITLE"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Validate scope if present
|
||||
if [ -n "$SCOPE" ]; then
|
||||
if ! echo "$VALID_SCOPES" | grep -v '^#' | grep -qx "$SCOPE"; then
|
||||
echo "Error: Invalid scope '${SCOPE}'"
|
||||
echo -e "\nValid scopes:"
|
||||
echo "$VALID_SCOPES" | grep -v '^$' | grep -v '^#' | sed 's/^/- /'
|
||||
echo -e "\nCurrent title: $PR_TITLE"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "PR title validation passed ✅"
|
||||
echo "Current title: $PR_TITLE"
|
||||
|
||||
12
.github/workflows/test.yaml
vendored
12
.github/workflows/test.yaml
vendored
@@ -86,7 +86,7 @@ jobs:
|
||||
run: |
|
||||
source integration/testimages.ini
|
||||
IMAGE_LIST=$(skopeo list-tags docker://$TEST_IMAGES)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags += ["containerd"] | .Tags | sort' | sha256sum | cut -d' ' -f1)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags += ["containerd"] | .Tags |= sort' | sha256sum | cut -d' ' -f1)
|
||||
echo "digest=$DIGEST" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Restore test images from cache
|
||||
@@ -94,8 +94,6 @@ jobs:
|
||||
with:
|
||||
path: integration/testdata/fixtures/images
|
||||
key: cache-test-images-${{ steps.image-digest.outputs.digest }}
|
||||
restore-keys:
|
||||
cache-test-images-
|
||||
|
||||
- name: Run integration tests
|
||||
run: mage test:integration
|
||||
@@ -140,7 +138,7 @@ jobs:
|
||||
run: |
|
||||
source integration/testimages.ini
|
||||
IMAGE_LIST=$(skopeo list-tags docker://$TEST_IMAGES)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags += ["containerd"] | .Tags | sort' | sha256sum | cut -d' ' -f1)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags += ["containerd"] | .Tags |= sort' | sha256sum | cut -d' ' -f1)
|
||||
echo "digest=$DIGEST" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Restore test images from cache
|
||||
@@ -148,8 +146,6 @@ jobs:
|
||||
with:
|
||||
path: integration/testdata/fixtures/images
|
||||
key: cache-test-images-${{ steps.image-digest.outputs.digest }}
|
||||
restore-keys:
|
||||
cache-test-images-
|
||||
|
||||
- name: Run module integration tests
|
||||
shell: bash
|
||||
@@ -177,7 +173,7 @@ jobs:
|
||||
run: |
|
||||
source integration/testimages.ini
|
||||
IMAGE_LIST=$(skopeo list-tags docker://$TEST_VM_IMAGES)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags | sort' | sha256sum | cut -d' ' -f1)
|
||||
DIGEST=$(echo "$IMAGE_LIST" | jq '.Tags |= sort' | sha256sum | cut -d' ' -f1)
|
||||
echo "digest=$DIGEST" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Restore test VM images from cache
|
||||
@@ -185,8 +181,6 @@ jobs:
|
||||
with:
|
||||
path: integration/testdata/fixtures/vm-images
|
||||
key: cache-test-vm-images-${{ steps.image-digest.outputs.digest }}
|
||||
restore-keys:
|
||||
cache-test-vm-images-
|
||||
|
||||
- name: Run vm integration tests
|
||||
run: |
|
||||
|
||||
@@ -12,6 +12,11 @@ For detailed behavior, please refer to [the GitHub Actions configuration][workfl
|
||||
Commits with prefixes like `chore` or `build` are not considered releasable, and no release PR is created.
|
||||
To include such commits in a release, you need to either include commits with `feat` or `fix` prefixes or perform a manual release as described [below](#manual-release-pr-creation).
|
||||
|
||||
!!! tip
|
||||
It's a good idea to check if there are any outstanding vulnerability updates created by dependabot waiting for your review.
|
||||
They can be found in the "Security" tab of the repository.
|
||||
If there are any, please review and merge them before creating a release. This will help to ensure that the release includes the latest security patches.
|
||||
|
||||
## Flow
|
||||
The release flow consists of the following main steps:
|
||||
|
||||
@@ -74,10 +79,20 @@ Replace URLs with appropriate ones.
|
||||
|
||||
Example: https://github.com/aquasecurity/trivy/releases/tag/v0.52.0
|
||||
|
||||
### Merging the auto-generated Helm chart update PR
|
||||
Once the release PR is merged, there will be an auto-generated PR that bumps the Trivy version for the Trivy Helm Chart. An example can be seen [here](https://github.com/aquasecurity/trivy/pull/8638).
|
||||
|
||||
> [!NOTE]
|
||||
> It is possible that the release action takes a while to finish and the Helm chart action runs prior. In such a case the Helm chart action will fail as it will not be able to find the latest Trivy container image.
|
||||
> In such a case, it is advised to manually restart the Helm chart action, once the release action is finished.
|
||||
|
||||
If things look good, approve and merge this PR to further trigger the publishing of the Helm Chart.
|
||||
|
||||
|
||||
The release is now complete 🍻
|
||||
|
||||
The release is now complete.
|
||||
|
||||
[conventional-commits]: https://www.conventionalcommits.org/en/v1.0.0/
|
||||
[release-please]: https://github.com/googleapis/release-please
|
||||
[release-please]: https://github.com/googleapis/release-please
|
||||
[goreleaser]: https://goreleaser.com/
|
||||
[workflows]: https://github.com/aquasecurity/trivy/tree/main/.github/workflows
|
||||
[workflows]: https://github.com/aquasecurity/trivy/tree/main/.github/workflows
|
||||
|
||||
40
go.mod
40
go.mod
@@ -24,16 +24,16 @@ require (
|
||||
github.com/aquasecurity/table v1.8.0
|
||||
github.com/aquasecurity/testdocker v0.0.0-20240730042311-4642e94c7fc8
|
||||
github.com/aquasecurity/tml v0.6.1
|
||||
github.com/aquasecurity/trivy-checks v1.8.0
|
||||
github.com/aquasecurity/trivy-checks v1.8.1
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20250227071930-8bd8a9b89e2d
|
||||
github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48
|
||||
github.com/aquasecurity/trivy-kubernetes v0.8.0
|
||||
github.com/aquasecurity/trivy-kubernetes v0.8.1
|
||||
github.com/aws/aws-sdk-go-v2 v1.36.3
|
||||
github.com/aws/aws-sdk-go-v2/config v1.29.9
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.62
|
||||
github.com/aws/aws-sdk-go-v2/service/ec2 v1.207.1
|
||||
github.com/aws/aws-sdk-go-v2/service/ecr v1.42.1
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.78.1
|
||||
github.com/aws/aws-sdk-go-v2/config v1.29.13
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.66
|
||||
github.com/aws/aws-sdk-go-v2/service/ec2 v1.211.2
|
||||
github.com/aws/aws-sdk-go-v2/service/ecr v1.43.2
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.79.1
|
||||
github.com/aws/smithy-go v1.22.3
|
||||
github.com/bitnami/go-version v0.0.0-20231130084017-bb00604d650c
|
||||
github.com/bmatcuk/doublestar/v4 v4.8.1
|
||||
@@ -42,13 +42,13 @@ require (
|
||||
github.com/containerd/containerd/v2 v2.0.4
|
||||
github.com/containerd/platforms v1.0.0-rc.1
|
||||
github.com/distribution/reference v0.6.0
|
||||
github.com/docker/cli v27.5.0+incompatible
|
||||
github.com/docker/docker v27.5.1+incompatible
|
||||
github.com/docker/cli v28.0.4+incompatible
|
||||
github.com/docker/docker v28.0.4+incompatible
|
||||
github.com/docker/go-connections v0.5.0
|
||||
github.com/docker/go-units v0.5.0
|
||||
github.com/fatih/color v1.18.0
|
||||
github.com/go-git/go-git/v5 v5.14.0
|
||||
github.com/go-json-experiment/json v0.0.0-20250211171154-1ae217ad3535 // Replace with encoding/json/v2 when proposal is accepted. Track https://github.com/golang/go/issues/71497
|
||||
github.com/go-json-experiment/json v0.0.0-20250223041408-d3c622f1b874 // Replace with encoding/json/v2 when proposal is accepted. Track https://github.com/golang/go/issues/71497
|
||||
github.com/go-openapi/runtime v0.28.0 // indirect
|
||||
github.com/go-openapi/strfmt v0.23.0 // indirect
|
||||
github.com/go-redis/redis/v8 v8.11.5
|
||||
@@ -110,8 +110,8 @@ require (
|
||||
github.com/spf13/pflag v1.0.6
|
||||
github.com/spf13/viper v1.20.0
|
||||
github.com/stretchr/testify v1.10.0
|
||||
github.com/testcontainers/testcontainers-go v0.35.0
|
||||
github.com/testcontainers/testcontainers-go/modules/localstack v0.35.0
|
||||
github.com/testcontainers/testcontainers-go v0.36.0
|
||||
github.com/testcontainers/testcontainers-go/modules/localstack v0.36.0
|
||||
github.com/tetratelabs/wazero v1.9.0
|
||||
github.com/twitchtv/twirp v8.1.3+incompatible
|
||||
github.com/xeipuuv/gojsonschema v1.2.0
|
||||
@@ -339,8 +339,6 @@ require (
|
||||
github.com/sassoftware/relic v7.2.1+incompatible // indirect
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
|
||||
github.com/shibumi/go-pathspec v1.3.0 // indirect
|
||||
github.com/shirou/gopsutil/v3 v3.24.2 // indirect
|
||||
github.com/shoenig/go-m1cpu v0.1.6 // indirect
|
||||
github.com/shopspring/decimal v1.4.0 // indirect
|
||||
github.com/sigstore/cosign/v2 v2.2.4 // indirect
|
||||
github.com/sigstore/sigstore v1.8.12 // indirect
|
||||
@@ -376,11 +374,11 @@ require (
|
||||
go.opentelemetry.io/contrib/detectors/gcp v1.32.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.56.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 // indirect
|
||||
go.opentelemetry.io/otel v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.35.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk/metric v1.32.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.34.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.35.0 // indirect
|
||||
go.uber.org/multierr v1.11.0 // indirect
|
||||
go.uber.org/zap v1.27.0 // indirect
|
||||
golang.org/x/oauth2 v0.26.0 // indirect
|
||||
@@ -427,9 +425,10 @@ require (
|
||||
github.com/aws/aws-sdk-go-v2/service/ebs v1.22.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.25.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.33.17 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.25.3 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.33.18 // indirect
|
||||
github.com/ebitengine/purego v0.8.2 // indirect
|
||||
github.com/evanphx/json-patch/v5 v5.6.0 // indirect
|
||||
github.com/go-viper/mapstructure/v2 v2.2.1 // indirect
|
||||
github.com/google/go-github/v31 v31.0.0 // indirect
|
||||
@@ -439,6 +438,7 @@ require (
|
||||
github.com/oklog/ulid/v2 v2.1.0 // indirect
|
||||
github.com/pelletier/go-toml v1.9.5 // indirect
|
||||
github.com/samber/oops v1.15.0 // indirect
|
||||
github.com/shirou/gopsutil/v4 v4.25.1 // indirect
|
||||
github.com/tonglil/versioning v0.0.0-20170205083536-8b2a4334bd1d // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
sigs.k8s.io/kind v0.19.0 // indirect
|
||||
|
||||
87
go.sum
87
go.sum
@@ -802,14 +802,14 @@ github.com/aquasecurity/testdocker v0.0.0-20240730042311-4642e94c7fc8 h1:b43UVqY
|
||||
github.com/aquasecurity/testdocker v0.0.0-20240730042311-4642e94c7fc8/go.mod h1:wXA9k3uuaxY3yu7gxrxZDPo/04FEMJtwyecdAlYrEIo=
|
||||
github.com/aquasecurity/tml v0.6.1 h1:y2ZlGSfrhnn7t4ZJ/0rotuH+v5Jgv6BDDO5jB6A9gwo=
|
||||
github.com/aquasecurity/tml v0.6.1/go.mod h1:OnYMWY5lvI9ejU7yH9LCberWaaTBW7hBFsITiIMY2yY=
|
||||
github.com/aquasecurity/trivy-checks v1.8.0 h1:frMR06SEeDff1oEO6wBaTCqZCTBmZ+j8QAAl5EM1M4w=
|
||||
github.com/aquasecurity/trivy-checks v1.8.0/go.mod h1:zc1DGUFDUP/NUEMXlfaMsnVAEEEsygJrcd4SRQ7Mpko=
|
||||
github.com/aquasecurity/trivy-checks v1.8.1 h1:7df8KhZ0du2WAdGCUNcKYdz74iubAmP89+vaCUmxGbU=
|
||||
github.com/aquasecurity/trivy-checks v1.8.1/go.mod h1:zc1DGUFDUP/NUEMXlfaMsnVAEEEsygJrcd4SRQ7Mpko=
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20250227071930-8bd8a9b89e2d h1:T16WrTi21YsMLQVhtp1r1hOIYK3x4BjnftpL9cp64Eo=
|
||||
github.com/aquasecurity/trivy-db v0.0.0-20250227071930-8bd8a9b89e2d/go.mod h1:4bTsQPtMBN8v+UfUlE1aQBN1imftefnDafHBF85+aT8=
|
||||
github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48 h1:JVgBIuIYbwG+ekC5lUHUpGJboPYiCcxiz06RCtz8neI=
|
||||
github.com/aquasecurity/trivy-java-db v0.0.0-20240109071736-184bd7481d48/go.mod h1:Ldya37FLi0e/5Cjq2T5Bty7cFkzUDwTcPeQua+2M8i8=
|
||||
github.com/aquasecurity/trivy-kubernetes v0.8.0 h1:GWnRPSPCmXnd5NTMZdobQx6xO2XzEtvxcv0gjS2tYnY=
|
||||
github.com/aquasecurity/trivy-kubernetes v0.8.0/go.mod h1:ueJJboNlUyi99DJvO5ZyZ/jnJWvZ0eTA6AzKxHHqMF8=
|
||||
github.com/aquasecurity/trivy-kubernetes v0.8.1 h1:MMy4bUSAoWJ4rQUANPu6dhao2AC81SnIfi3jxIHBk00=
|
||||
github.com/aquasecurity/trivy-kubernetes v0.8.1/go.mod h1:FOrdN3IKBcyRoFPtmTFyDx8U3eBch+djCvmmBW4awM4=
|
||||
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0 h1:jfIu9sQUG6Ig+0+Ap1h4unLjW6YQJpKZVmUzxsD4E/Q=
|
||||
github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE=
|
||||
github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=
|
||||
@@ -821,10 +821,10 @@ github.com/aws/aws-sdk-go v1.55.6 h1:cSg4pvZ3m8dgYcgqB97MrcdjUmZ1BeMYKUxMMB89IPk
|
||||
github.com/aws/aws-sdk-go v1.55.6/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU=
|
||||
github.com/aws/aws-sdk-go-v2 v1.36.3 h1:mJoei2CxPutQVxaATCzDUjcZEjVRdpsiiXi2o38yqWM=
|
||||
github.com/aws/aws-sdk-go-v2 v1.36.3/go.mod h1:LLXuLpgzEbD766Z5ECcRmi8AzSwfZItDtmABVkRLGzg=
|
||||
github.com/aws/aws-sdk-go-v2/config v1.29.9 h1:Kg+fAYNaJeGXp1vmjtidss8O2uXIsXwaRqsQJKXVr+0=
|
||||
github.com/aws/aws-sdk-go-v2/config v1.29.9/go.mod h1:oU3jj2O53kgOU4TXq/yipt6ryiooYjlkqqVaZk7gY/U=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.62 h1:fvtQY3zFzYJ9CfixuAQ96IxDrBajbBWGqjNTCa79ocU=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.62/go.mod h1:ElETBxIQqcxej++Cs8GyPBbgMys5DgQPTwo7cUPDKt8=
|
||||
github.com/aws/aws-sdk-go-v2/config v1.29.13 h1:RgdPqWoE8nPpIekpVpDJsBckbqT4Liiaq9f35pbTh1Y=
|
||||
github.com/aws/aws-sdk-go-v2/config v1.29.13/go.mod h1:NI28qs/IOUIRhsR7GQ/JdexoqRN9tDxkIrYZq0SOF44=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.66 h1:aKpEKaTy6n4CEJeYI1MNj97oSDLi4xro3UzQfwf5RWE=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.17.66/go.mod h1:xQ5SusDmHb/fy55wU0QqTy0yNfLqxzec59YcsRZB+rI=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30 h1:x793wxmUWVDhshP8WW2mlnXuFrO4cOd3HLBroh1paFw=
|
||||
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.30/go.mod h1:Jpne2tDnYiFascUEs2AWHJL9Yp7A5ZVy3TNyxaAjD6M=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.34 h1:ZK5jHhnrioRkUNOc+hOgQKlUL5JeC3S6JgLxtQ+Rm0Q=
|
||||
@@ -835,10 +835,10 @@ github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d
|
||||
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo=
|
||||
github.com/aws/aws-sdk-go-v2/service/ebs v1.22.1 h1:SeDJWG4pmye+/aO6k+zt9clPTUy1MXqUmkW8rbAddQg=
|
||||
github.com/aws/aws-sdk-go-v2/service/ebs v1.22.1/go.mod h1:wRzaW0v9GGQS0h//wpsVDw3Hah5gs5UP+NxoyGeZIGM=
|
||||
github.com/aws/aws-sdk-go-v2/service/ec2 v1.207.1 h1:yIbrcRq0nKF75IlSiUlo4g/Qe3RzGBdDCR+WRZLf5IE=
|
||||
github.com/aws/aws-sdk-go-v2/service/ec2 v1.207.1/go.mod h1:ouvGEfHbLaIlWwpDpOVWPWR+YwO0HDv3vm5tYLq8ImY=
|
||||
github.com/aws/aws-sdk-go-v2/service/ecr v1.42.1 h1:mgtRN6wS2Frq29O7YNTn18ieokL193RFpLhUbsvs7o0=
|
||||
github.com/aws/aws-sdk-go-v2/service/ecr v1.42.1/go.mod h1:iQ1skgw1XRK+6Lgkb0I9ODatAP72WoTILh0zXQ5DtbU=
|
||||
github.com/aws/aws-sdk-go-v2/service/ec2 v1.211.2 h1:KMoQ43HysbPqs1vufMn9h2UcUyc2WCMaKxYhExKJZuo=
|
||||
github.com/aws/aws-sdk-go-v2/service/ec2 v1.211.2/go.mod h1:ouvGEfHbLaIlWwpDpOVWPWR+YwO0HDv3vm5tYLq8ImY=
|
||||
github.com/aws/aws-sdk-go-v2/service/ecr v1.43.2 h1:G6Rj8A8zgukXWSSB8evVE8az2S3Ci/Q3CpSMxJsK4G0=
|
||||
github.com/aws/aws-sdk-go-v2/service/ecr v1.43.2/go.mod h1:iQ1skgw1XRK+6Lgkb0I9ODatAP72WoTILh0zXQ5DtbU=
|
||||
github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.18.2 h1:PpbXaecV3sLAS6rjQiaKw4/jyq3Z8gNzmoJupHAoBp0=
|
||||
github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.18.2/go.mod h1:fUHpGXr4DrXkEDpGAjClPsviWf+Bszeb0daKE0blxv8=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.3 h1:eAh2A4b5IzM/lum78bZ590jy36+d/aFLgKF/4Vd1xPE=
|
||||
@@ -847,14 +847,14 @@ github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15 h1:dM9/92u2
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.15/go.mod h1:SwFBy2vjtA0vZbjjaFtfN045boopadnoVPhu4Fv66vY=
|
||||
github.com/aws/aws-sdk-go-v2/service/kms v1.37.8 h1:KbLZjYqhQ9hyB4HwXiheiflTlYQa0+Fz0Ms/rh5f3mk=
|
||||
github.com/aws/aws-sdk-go-v2/service/kms v1.37.8/go.mod h1:ANs9kBhK4Ghj9z1W+bsr3WsNaPF71qkgd6eE6Ekol/Y=
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.78.1 h1:1M0gSbyP6q06gl3384wpoKPaH9G16NPqZFieEhLboSU=
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.78.1/go.mod h1:4qzsZSzB/KiX2EzDjs9D7A8rI/WGJxZceVJIHqtJjIU=
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.25.1 h1:8JdC7Gr9NROg1Rusk25IcZeTO59zLxsKgE0gkh5O6h0=
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.25.1/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI=
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1 h1:KwuLovgQPcdjNMfFt9OhUd9a2OwcOKhxfvF4glTzLuA=
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.29.1/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs=
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.33.17 h1:PZV5W8yk4OtH1JAuhV2PXwwO9v5G5Aoj+eMCn4T+1Kc=
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.33.17/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4=
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.79.1 h1:2Ku1xwAohSSXHR1tpAnyVDSQSxoDMA+/NZBytW+f4qg=
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.79.1/go.mod h1:U5SNqwhXB3Xe6F47kXvWihPl/ilGaEDe8HD/50Z9wxc=
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.25.3 h1:1Gw+9ajCV1jogloEv1RRnvfRFia2cL6c9cuKV2Ps+G8=
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.25.3/go.mod h1:qs4a9T5EMLl/Cajiw2TcbNt2UNo/Hqlyp+GiuG4CFDI=
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1 h1:hXmVKytPfTy5axZ+fYbR5d0cFmC3JvwLm5kM83luako=
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.1/go.mod h1:MlYRNmYu/fGPoxBQVvBYr9nyr948aY/WLUvwBMBJubs=
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.33.18 h1:xz7WvTMfSStb9Y8NpCT82FXLNC3QasqBfuAFHY4Pk5g=
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.33.18/go.mod h1:cQnB8CUnxbMU82JvlqjKR2HBOm3fe9pWorWBza6MBJ4=
|
||||
github.com/aws/smithy-go v1.22.3 h1:Z//5NuZCSW6R4PhQ93hShNbyBbn8BWCmCVCt+Q8Io5k=
|
||||
github.com/aws/smithy-go v1.22.3/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI=
|
||||
github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20231024185945-8841054dbdb8 h1:SoFYaT9UyGkR0+nogNyD/Lj+bsixB+SNuAS4ABlEs6M=
|
||||
@@ -1013,12 +1013,12 @@ github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5Qvfr
|
||||
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
|
||||
github.com/dlclark/regexp2 v1.4.0 h1:F1rxgk7p4uKjwIQxBs9oAXe5CqrXlCduYEJvrF4u93E=
|
||||
github.com/dlclark/regexp2 v1.4.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||
github.com/docker/cli v27.5.0+incompatible h1:aMphQkcGtpHixwwhAXJT1rrK/detk2JIvDaFkLctbGM=
|
||||
github.com/docker/cli v27.5.0+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/cli v28.0.4+incompatible h1:pBJSJeNd9QeIWPjRcV91RVJihd/TXB77q1ef64XEu4A=
|
||||
github.com/docker/cli v28.0.4+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
|
||||
github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk=
|
||||
github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w=
|
||||
github.com/docker/docker v27.5.1+incompatible h1:4PYU5dnBYqRQi0294d1FBECqT9ECWeQAIfE8q4YnPY8=
|
||||
github.com/docker/docker v27.5.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker v28.0.4+incompatible h1:JNNkBctYKurkw6FrHfKqY0nKIDf5nrbxjVBtS+cdcok=
|
||||
github.com/docker/docker v28.0.4+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
|
||||
github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo=
|
||||
github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M=
|
||||
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
|
||||
@@ -1038,6 +1038,8 @@ github.com/dsnet/golib v0.0.0-20171103203638-1ea166775780/go.mod h1:Lj+Z9rebOhdf
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
|
||||
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
|
||||
github.com/ebitengine/purego v0.8.2 h1:jPPGWs2sZ1UgOSgD2bClL0MJIqu58nOmIcBuXr62z1I=
|
||||
github.com/ebitengine/purego v0.8.2/go.mod h1:iIjxzd6CiRiOG0UyXP+V1+jWqUXVjPKLAI0mRfJZTmQ=
|
||||
github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o=
|
||||
github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE=
|
||||
github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g=
|
||||
@@ -1125,8 +1127,8 @@ github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7
|
||||
github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ=
|
||||
github.com/go-jose/go-jose/v4 v4.0.5 h1:M6T8+mKZl/+fNNuFHvGIzDz7BTLQPIounk/b9dw3AaE=
|
||||
github.com/go-jose/go-jose/v4 v4.0.5/go.mod h1:s3P1lRrkT8igV8D9OjyL4WRyHvjB6a4JSllnOrmmBOA=
|
||||
github.com/go-json-experiment/json v0.0.0-20250211171154-1ae217ad3535 h1:yE7argOs92u+sSCRgqqe6eF+cDaVhSPlioy1UkA0p/w=
|
||||
github.com/go-json-experiment/json v0.0.0-20250211171154-1ae217ad3535/go.mod h1:BWmvoE1Xia34f3l/ibJweyhrT+aROb/FQ6d+37F0e2s=
|
||||
github.com/go-json-experiment/json v0.0.0-20250223041408-d3c622f1b874 h1:F8d1AJ6M9UQCavhwmO6ZsrYLfG8zVFWfEfMS2MXPkSY=
|
||||
github.com/go-json-experiment/json v0.0.0-20250223041408-d3c622f1b874/go.mod h1:TiCD2a1pcmjd7YnhGH0f/zKNcCD06B029pHhzV23c2M=
|
||||
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||
github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U=
|
||||
github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk=
|
||||
@@ -1516,7 +1518,6 @@ github.com/lib/pq v1.10.9 h1:YXG7RB+JIjhP29X+OtkiDnYaXQwpS4JEWq7dtCCRUEw=
|
||||
github.com/lib/pq v1.10.9/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
|
||||
github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=
|
||||
github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE=
|
||||
github.com/lufia/plan9stats v0.0.0-20211012122336-39d0f177ccd0/go.mod h1:zJYVVT2jmtg6P3p1VtQj7WsuWi/y4VnjVBn7F8KPB3I=
|
||||
github.com/lufia/plan9stats v0.0.0-20240226150601-1dcf7310316a h1:3Bm7EwfUQUvhNeKIkUct/gl9eod1TcXuj8stxvi/GoI=
|
||||
github.com/lufia/plan9stats v0.0.0-20240226150601-1dcf7310316a/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k=
|
||||
github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 h1:EnfXoSqDfSNJv0VBNqY/88RNnhSGYkrHaO0mmFGbVsc=
|
||||
@@ -1709,7 +1710,6 @@ github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10/go.mod h1
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U=
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/power-devops/perfstat v0.0.0-20210106213030-5aafc221ea8c/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 h1:o4JXh1EVt9k/+g42oCprj/FisM4qX9L3sZB3upGN2ZU=
|
||||
github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55/go.mod h1:OmDBASR4679mdNQnz2pUhc2G8CO2JrUAVFDRBDP/hJE=
|
||||
github.com/poy/onpar v1.1.2 h1:QaNrNiZx0+Nar5dLgTVp5mXkyoVFIbepjyEoGSnhbAY=
|
||||
@@ -1789,12 +1789,8 @@ github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN
|
||||
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
|
||||
github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI=
|
||||
github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE=
|
||||
github.com/shirou/gopsutil/v3 v3.24.2 h1:kcR0erMbLg5/3LcInpw0X/rrPSqq4CDPyI6A6ZRC18Y=
|
||||
github.com/shirou/gopsutil/v3 v3.24.2/go.mod h1:tSg/594BcA+8UdQU2XcW803GWYgdtauFFPgJCJKZlVk=
|
||||
github.com/shoenig/go-m1cpu v0.1.6 h1:nxdKQNcEB6vzgA2E2bvzKIYRuNj7XNJ4S/aRSwKzFtM=
|
||||
github.com/shoenig/go-m1cpu v0.1.6/go.mod h1:1JJMcUBvfNwpq05QDQVAnx3gUHr9IYF7GNg9SUEw2VQ=
|
||||
github.com/shoenig/test v0.6.4 h1:kVTaSd7WLz5WZ2IaoM0RSzRsUD+m8wRR+5qvntpn4LU=
|
||||
github.com/shoenig/test v0.6.4/go.mod h1:byHiCGXqrVaflBLAMq/srcZIHynQPQgeyvkvXnjqq0k=
|
||||
github.com/shirou/gopsutil/v4 v4.25.1 h1:QSWkTc+fu9LTAWfkZwZ6j8MSUk4A2LV7rbH0ZqmLjXs=
|
||||
github.com/shirou/gopsutil/v4 v4.25.1/go.mod h1:RoUCUpndaJFtT+2zsZzzmhvbfGoDCJ7nFXKJf8GqJbI=
|
||||
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
|
||||
github.com/shopspring/decimal v1.4.0 h1:bxl37RwXBklmTi0C79JfXCEBD1cqqHt0bbgBAGFp81k=
|
||||
github.com/shopspring/decimal v1.4.0/go.mod h1:gawqmDU56v4yIKSwfBSFip1HdCCXN8/+DMd9qYNcwME=
|
||||
@@ -1882,10 +1878,10 @@ github.com/tchap/go-patricia/v2 v2.3.2 h1:xTHFutuitO2zqKAQ5rCROYgUb7Or/+IC3fts9/
|
||||
github.com/tchap/go-patricia/v2 v2.3.2/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k=
|
||||
github.com/terminalstatic/go-xsd-validate v0.1.6 h1:TenYeQ3eY631qNi1/cTmLH/s2slHPRKTTHT+XSHkepo=
|
||||
github.com/terminalstatic/go-xsd-validate v0.1.6/go.mod h1:18lsvYFofBflqCrvo1umpABZ99+GneNTw2kEEc8UPJw=
|
||||
github.com/testcontainers/testcontainers-go v0.35.0 h1:uADsZpTKFAtp8SLK+hMwSaa+X+JiERHtd4sQAFmXeMo=
|
||||
github.com/testcontainers/testcontainers-go v0.35.0/go.mod h1:oEVBj5zrfJTrgjwONs1SsRbnBtH9OKl+IGl3UMcr2B4=
|
||||
github.com/testcontainers/testcontainers-go/modules/localstack v0.35.0 h1:0EbOXcy8XQkyDUs1Y9YPUHOUByNnlGsLi5B3ln8F/RU=
|
||||
github.com/testcontainers/testcontainers-go/modules/localstack v0.35.0/go.mod h1:MlHuaWQimz+15dmQ6R2S1vpYxhGFEpmRZQsL2NVWNng=
|
||||
github.com/testcontainers/testcontainers-go v0.36.0 h1:YpffyLuHtdp5EUsI5mT4sRw8GZhO/5ozyDT1xWGXt00=
|
||||
github.com/testcontainers/testcontainers-go v0.36.0/go.mod h1:yk73GVJ0KUZIHUtFna6MO7QS144qYpoY8lEEtU9Hed0=
|
||||
github.com/testcontainers/testcontainers-go/modules/localstack v0.36.0 h1:zVwbe46NYg2vtC26aF0ndClK5S9J7TgAliQbTLyHm+0=
|
||||
github.com/testcontainers/testcontainers-go/modules/localstack v0.36.0/go.mod h1:rxyzj5nX/OUn7QK5PVxKYHJg1eeNtNzWMX2hSbNNJk0=
|
||||
github.com/tetratelabs/wazero v1.9.0 h1:IcZ56OuxrtaEz8UYNRHBrUa9bYeX9oVY93KspZZBf/I=
|
||||
github.com/tetratelabs/wazero v1.9.0/go.mod h1:TSbcXCfFP0L2FGkRPxHphadXPjo1T6W+CseNNY7EkjM=
|
||||
github.com/thales-e-security/pool v0.0.2 h1:RAPs4q2EbWsTit6tpzuvTFlgFRJ3S8Evf5gtvVDbmPg=
|
||||
@@ -1902,10 +1898,8 @@ github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399 h1:e/5i7d4oYZ+C
|
||||
github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399/go.mod h1:LdwHTNJT99C5fTAzDz0ud328OgXz+gierycbcIx2fRs=
|
||||
github.com/tjfoc/gmsm v1.4.1 h1:aMe1GlZb+0bLjn+cKTPEvvn9oUEBlJitaZiiBwsbgho=
|
||||
github.com/tjfoc/gmsm v1.4.1/go.mod h1:j4INPkHWMrhJb38G+J6W4Tw0AbuN8Thu3PbdVYhVcTE=
|
||||
github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI=
|
||||
github.com/tklauser/go-sysconf v0.3.13 h1:GBUpcahXSpR2xN01jhkNAbTLRk2Yzgggk8IM08lq3r4=
|
||||
github.com/tklauser/go-sysconf v0.3.13/go.mod h1:zwleP4Q4OehZHGn4CYZDipCgg9usW5IJePewFCGVEa0=
|
||||
github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY=
|
||||
github.com/tklauser/numcpus v0.7.0 h1:yjuerZP127QG9m5Zh/mSO4wqurYil27tHrqwRoRjpr4=
|
||||
github.com/tklauser/numcpus v0.7.0/go.mod h1:bb6dMVcj8A42tSE7i32fsIUCbQNllK5iDguyOZRUzAY=
|
||||
github.com/tonglil/versioning v0.0.0-20170205083536-8b2a4334bd1d h1:3H+wrTJTy3PVEeCyrjiCWjrh7pVEodGgJgA8Q1tpcbg=
|
||||
@@ -2000,8 +1994,8 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.5
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.56.0/go.mod h1:n8MR6/liuGB5EmTETUBeU5ZgqMOlqKRxUaqPQBOANZ8=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0 h1:CV7UdSGJt/Ao6Gp4CXckLxVRRsRgDHoI8XjbL3PDl8s=
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.59.0/go.mod h1:FRmFuRJfag1IZ2dPkHnEoSFVgTVPUd2qf5Vi69hLb8I=
|
||||
go.opentelemetry.io/otel v1.34.0 h1:zRLXxLCgL1WyKsPVrgbSdMN4c0FMkDAskSTQP+0hdUY=
|
||||
go.opentelemetry.io/otel v1.34.0/go.mod h1:OWFPOQ+h4G8xpyjgqo4SxJYdDQ/qmRH+wivy7zzx9oI=
|
||||
go.opentelemetry.io/otel v1.35.0 h1:xKWKPxrxB6OtMCbmMY021CqC45J+3Onta9MqjhnusiQ=
|
||||
go.opentelemetry.io/otel v1.35.0/go.mod h1:UEqy8Zp11hpkUrL73gSlELM0DupHoiq72dR+Zqel/+Y=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.34.0 h1:OeNbIYk/2C15ckl7glBlOBp5+WlYsOElzTNmiPW/x60=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.34.0/go.mod h1:7Bept48yIeqxP2OZ9/AqIpYS94h2or0aB4FypJTc8ZM=
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.34.0 h1:tgJ0uaNS4c98WRNUEx5U3aDlrDOI5Rs+1Vifcw4DJ8U=
|
||||
@@ -2010,14 +2004,14 @@ go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0 h1:wpMfg
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.33.0/go.mod h1:wAy0T/dUbs468uOlkT31xjvqQgEVXv58BRFWEgn5v/0=
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0 h1:WDdP9acbMYjbKIyJUhTvtzj601sVJOqgWdUxSdR/Ysc=
|
||||
go.opentelemetry.io/otel/exporters/stdout/stdoutmetric v1.29.0/go.mod h1:BLbf7zbNIONBLPwvFnwNHGj4zge8uTCM/UPIVW1Mq2I=
|
||||
go.opentelemetry.io/otel/metric v1.34.0 h1:+eTR3U0MyfWjRDhmFMxe2SsW64QrZ84AOhvqS7Y+PoQ=
|
||||
go.opentelemetry.io/otel/metric v1.34.0/go.mod h1:CEDrp0fy2D0MvkXE+dPV7cMi8tWZwX3dmaIhwPOaqHE=
|
||||
go.opentelemetry.io/otel/metric v1.35.0 h1:0znxYu2SNyuMSQT4Y9WDWej0VpcsxkuklLa4/siN90M=
|
||||
go.opentelemetry.io/otel/metric v1.35.0/go.mod h1:nKVFgxBZ2fReX6IlyW28MgZojkoAkJGaE8CpgeAU3oE=
|
||||
go.opentelemetry.io/otel/sdk v1.34.0 h1:95zS4k/2GOy069d321O8jWgYsW3MzVV+KuSPKp7Wr1A=
|
||||
go.opentelemetry.io/otel/sdk v1.34.0/go.mod h1:0e/pNiaMAqaykJGKbi+tSjWfNNHMTxoC9qANsCzbyxU=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.32.0 h1:rZvFnvmvawYb0alrYkjraqJq0Z4ZUJAiyYCU9snn1CU=
|
||||
go.opentelemetry.io/otel/sdk/metric v1.32.0/go.mod h1:PWeZlq0zt9YkYAp3gjKZ0eicRYvOh1Gd+X99x6GHpCQ=
|
||||
go.opentelemetry.io/otel/trace v1.34.0 h1:+ouXS2V8Rd4hp4580a8q23bg0azF2nI8cqLYnC8mh/k=
|
||||
go.opentelemetry.io/otel/trace v1.34.0/go.mod h1:Svm7lSjQD7kG7KJ/MUHPVXSDGz2OX4h0M2jHBhmSfRE=
|
||||
go.opentelemetry.io/otel/trace v1.35.0 h1:dPpEfJu1sDIqruz7BHFG3c7528f6ddfSWfFDVt/xgMs=
|
||||
go.opentelemetry.io/otel/trace v1.35.0/go.mod h1:WUk7DtFp1Aw2MkvqGdwiXYDZZNvA/1J8o6xRXLrIkyc=
|
||||
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
|
||||
go.opentelemetry.io/proto/otlp v0.15.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
|
||||
go.opentelemetry.io/proto/otlp v0.19.0/go.mod h1:H7XAot3MsfNsj7EXtrA2q5xSNQ10UqI405h3+duxN4U=
|
||||
@@ -2337,7 +2331,6 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
apiVersion: v2
|
||||
name: trivy
|
||||
version: 0.12.0
|
||||
appVersion: 0.60.0
|
||||
version: 0.13.0
|
||||
appVersion: 0.61.0
|
||||
description: Trivy helm chart
|
||||
keywords:
|
||||
- scanner
|
||||
|
||||
@@ -3,12 +3,12 @@
|
||||
package integration
|
||||
|
||||
import (
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/extension"
|
||||
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
||||
"github.com/aquasecurity/trivy/pkg/scan/post"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
)
|
||||
|
||||
func TestModule(t *testing.T) {
|
||||
@@ -52,7 +52,7 @@ func TestModule(t *testing.T) {
|
||||
|
||||
t.Cleanup(func() {
|
||||
analyzer.DeregisterAnalyzer("spring4shell")
|
||||
post.DeregisterPostScanner("spring4shell")
|
||||
extension.DeregisterHook("spring4shell")
|
||||
})
|
||||
|
||||
// Run Trivy
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
# Configuration file for both shell scripts and Go programs
|
||||
TEST_IMAGES=ghcr.io/knqyf263/trivy-test-images
|
||||
TEST_VM_IMAGES=ghcr.io/knqyf263/trivy-test-vm-images
|
||||
TEST_IMAGES=ghcr.io/aquasecurity/trivy-test-images
|
||||
TEST_VM_IMAGES=ghcr.io/aquasecurity/trivy-test-vm-images
|
||||
|
||||
96
internal/hooktest/hook.go
Normal file
96
internal/hooktest/hook.go
Normal file
@@ -0,0 +1,96 @@
|
||||
package hooktest
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/extension"
|
||||
"github.com/aquasecurity/trivy/pkg/flag"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
)
|
||||
|
||||
type testHook struct{}
|
||||
|
||||
func (*testHook) Name() string {
|
||||
return "test"
|
||||
}
|
||||
|
||||
func (*testHook) Version() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
// RunHook implementation
|
||||
func (*testHook) PreRun(ctx context.Context, opts flag.Options) error {
|
||||
if opts.GlobalOptions.ConfigFile == "bad-config" {
|
||||
return errors.New("bad pre-run")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*testHook) PostRun(ctx context.Context, opts flag.Options) error {
|
||||
if opts.GlobalOptions.ConfigFile == "bad-config" {
|
||||
return errors.New("bad post-run")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ScanHook implementation
|
||||
func (*testHook) PreScan(ctx context.Context, target *types.ScanTarget, options types.ScanOptions) error {
|
||||
if target.Name == "bad-pre" {
|
||||
return errors.New("bad pre-scan")
|
||||
}
|
||||
target.Name += " (pre-scan)"
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*testHook) PostScan(ctx context.Context, results types.Results) (types.Results, error) {
|
||||
for i, r := range results {
|
||||
if r.Target == "bad" {
|
||||
return nil, errors.New("bad")
|
||||
}
|
||||
for j := range r.Vulnerabilities {
|
||||
results[i].Vulnerabilities[j].References = []string{
|
||||
"https://example.com/post-scan",
|
||||
}
|
||||
}
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// ReportHook implementation
|
||||
func (*testHook) PreReport(ctx context.Context, report *types.Report, opts flag.Options) error {
|
||||
if report.ArtifactName == "bad-report" {
|
||||
return errors.New("bad pre-report")
|
||||
}
|
||||
|
||||
// Modify the report
|
||||
for i := range report.Results {
|
||||
for j := range report.Results[i].Vulnerabilities {
|
||||
report.Results[i].Vulnerabilities[j].Title = "Modified by pre-report hook"
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (*testHook) PostReport(ctx context.Context, report *types.Report, opts flag.Options) error {
|
||||
if report.ArtifactName == "bad-report" {
|
||||
return errors.New("bad post-report")
|
||||
}
|
||||
|
||||
// Modify the report
|
||||
for i := range report.Results {
|
||||
for j := range report.Results[i].Vulnerabilities {
|
||||
report.Results[i].Vulnerabilities[j].Description = "Modified by post-report hook"
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func Init(t *testing.T) {
|
||||
h := &testHook{}
|
||||
extension.RegisterHook(h)
|
||||
t.Cleanup(func() {
|
||||
extension.DeregisterHook(h.Name())
|
||||
})
|
||||
}
|
||||
@@ -29,7 +29,7 @@ func (c *DockerClient) ImageLoad(t *testing.T, ctx context.Context, imageFile st
|
||||
defer testfile.Close()
|
||||
|
||||
// Load image into docker engine
|
||||
res, err := c.Client.ImageLoad(ctx, testfile, true)
|
||||
res, err := c.Client.ImageLoad(ctx, testfile, client.ImageLoadWithQuiet(true))
|
||||
require.NoError(t, err)
|
||||
defer res.Body.Close()
|
||||
|
||||
|
||||
2
pkg/cache/redis.go
vendored
2
pkg/cache/redis.go
vendored
@@ -142,7 +142,7 @@ func (c RedisCache) PutBlob(blobID string, blobInfo types.BlobInfo) error {
|
||||
func (c RedisCache) DeleteBlobs(blobIDs []string) error {
|
||||
var errs error
|
||||
for _, blobID := range blobIDs {
|
||||
key := fmt.Sprintf("%s::%s::%s", redisPrefix, artifactBucket, blobID)
|
||||
key := fmt.Sprintf("%s::%s::%s", redisPrefix, blobBucket, blobID)
|
||||
if err := c.client.Del(context.TODO(), key).Err(); err != nil {
|
||||
errs = multierror.Append(errs, xerrors.Errorf("unable to delete blob %s: %w", blobID, err))
|
||||
}
|
||||
|
||||
8
pkg/cache/redis_test.go
vendored
8
pkg/cache/redis_test.go
vendored
@@ -523,8 +523,6 @@ func TestRedisCache_DeleteBlobs(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
defer s.Close()
|
||||
|
||||
s.Set(correctHash, "any string")
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
addr := s.Addr()
|
||||
@@ -535,12 +533,18 @@ func TestRedisCache_DeleteBlobs(t *testing.T) {
|
||||
c, err := cache.NewRedisCache(fmt.Sprintf("redis://%s", addr), "", "", "", false, 0)
|
||||
require.NoError(t, err)
|
||||
|
||||
s.Set(tt.wantKey, "any string")
|
||||
|
||||
err = c.DeleteBlobs(tt.args.blobIDs)
|
||||
if tt.wantErr != "" {
|
||||
require.ErrorContains(t, err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify that the blobs are deleted
|
||||
got := s.Keys()
|
||||
assert.NotContains(t, got, tt.wantKey)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ import (
|
||||
"github.com/aquasecurity/trivy/pkg/commands/clean"
|
||||
"github.com/aquasecurity/trivy/pkg/commands/convert"
|
||||
"github.com/aquasecurity/trivy/pkg/commands/server"
|
||||
"github.com/aquasecurity/trivy/pkg/extension"
|
||||
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
||||
"github.com/aquasecurity/trivy/pkg/flag"
|
||||
k8scommands "github.com/aquasecurity/trivy/pkg/k8s/commands"
|
||||
@@ -211,25 +212,26 @@ func NewRootCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
return err
|
||||
}
|
||||
|
||||
globalOptions, err := globalFlags.ToOptions()
|
||||
flags := flag.Flags{globalFlags}
|
||||
opts, err := flags.ToOptions(args)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Initialize logger
|
||||
log.InitLogger(globalOptions.Debug, globalOptions.Quiet)
|
||||
log.InitLogger(opts.Debug, opts.Quiet)
|
||||
|
||||
return nil
|
||||
},
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
globalOptions, err := globalFlags.ToOptions()
|
||||
flags := flag.Flags{globalFlags}
|
||||
opts, err := flags.ToOptions(args)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if globalOptions.ShowVersion {
|
||||
if opts.ShowVersion {
|
||||
// Customize version output
|
||||
return showVersion(globalOptions.CacheDir, versionFormat, cmd.OutOrStdout())
|
||||
return showVersion(opts.CacheDir, versionFormat, cmd.OutOrStdout())
|
||||
} else {
|
||||
return cmd.Help()
|
||||
}
|
||||
@@ -255,27 +257,31 @@ func NewImageCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
compliance.Values = []string{types.ComplianceDockerCIS160}
|
||||
reportFlagGroup.Compliance = compliance // override usage as the accepted values differ for each subcommand.
|
||||
|
||||
imageFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
DBFlagGroup: flag.NewDBFlagGroup(),
|
||||
ImageFlagGroup: flag.NewImageFlagGroup(), // container image specific
|
||||
LicenseFlagGroup: flag.NewLicenseFlagGroup(),
|
||||
MisconfFlagGroup: flag.NewMisconfFlagGroup(),
|
||||
ModuleFlagGroup: flag.NewModuleFlagGroup(),
|
||||
PackageFlagGroup: flag.NewPackageFlagGroup(),
|
||||
RemoteFlagGroup: flag.NewClientFlags(), // for client/server mode
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(),
|
||||
RegoFlagGroup: flag.NewRegoFlagGroup(),
|
||||
ReportFlagGroup: reportFlagGroup,
|
||||
ScanFlagGroup: flag.NewScanFlagGroup(),
|
||||
SecretFlagGroup: flag.NewSecretFlagGroup(),
|
||||
VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(),
|
||||
}
|
||||
packageFlagGroup := flag.NewPackageFlagGroup()
|
||||
packageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
|
||||
imageFlags.PackageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
imageFlags.MisconfFlagGroup.CloudformationParamVars = nil // disable '--cf-params'
|
||||
imageFlags.MisconfFlagGroup.TerraformTFVars = nil // disable '--tf-vars'
|
||||
misconfFlagGroup := flag.NewMisconfFlagGroup()
|
||||
misconfFlagGroup.CloudformationParamVars = nil // disable '--cf-params'
|
||||
misconfFlagGroup.TerraformTFVars = nil // disable '--tf-vars'
|
||||
|
||||
imageFlags := flag.Flags{
|
||||
globalFlags,
|
||||
flag.NewCacheFlagGroup(),
|
||||
flag.NewDBFlagGroup(),
|
||||
flag.NewImageFlagGroup(), // container image specific flags
|
||||
flag.NewLicenseFlagGroup(),
|
||||
misconfFlagGroup,
|
||||
flag.NewModuleFlagGroup(),
|
||||
packageFlagGroup,
|
||||
flag.NewClientFlags(),
|
||||
flag.NewRegistryFlagGroup(),
|
||||
flag.NewRegoFlagGroup(),
|
||||
reportFlagGroup,
|
||||
flag.NewScanFlagGroup(),
|
||||
flag.NewSecretFlagGroup(),
|
||||
flag.NewVulnerabilityFlagGroup(),
|
||||
}
|
||||
imageFlags = append(imageFlags, extension.CustomFlagGroups("image")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "image [flags] IMAGE_NAME",
|
||||
@@ -335,26 +341,30 @@ func NewImageCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
}
|
||||
|
||||
func NewFilesystemCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
fsFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
DBFlagGroup: flag.NewDBFlagGroup(),
|
||||
LicenseFlagGroup: flag.NewLicenseFlagGroup(),
|
||||
MisconfFlagGroup: flag.NewMisconfFlagGroup(),
|
||||
ModuleFlagGroup: flag.NewModuleFlagGroup(),
|
||||
PackageFlagGroup: flag.NewPackageFlagGroup(),
|
||||
RemoteFlagGroup: flag.NewClientFlags(), // for client/server mode
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(),
|
||||
RegoFlagGroup: flag.NewRegoFlagGroup(),
|
||||
ReportFlagGroup: flag.NewReportFlagGroup(),
|
||||
ScanFlagGroup: flag.NewScanFlagGroup(),
|
||||
SecretFlagGroup: flag.NewSecretFlagGroup(),
|
||||
VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(),
|
||||
}
|
||||
cacheFlagGroup := flag.NewCacheFlagGroup()
|
||||
cacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default
|
||||
|
||||
fsFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default
|
||||
fsFlags.ReportFlagGroup.ReportFormat.Usage = "specify a compliance report format for the output" // @TODO: support --report summary for non compliance reports
|
||||
fsFlags.ReportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol'
|
||||
reportFlagGroup := flag.NewReportFlagGroup()
|
||||
reportFlagGroup.ReportFormat.Usage = "specify a compliance report format for the output" // @TODO: support --report summary for non compliance reports
|
||||
reportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol'
|
||||
|
||||
fsFlags := flag.Flags{
|
||||
globalFlags,
|
||||
cacheFlagGroup,
|
||||
flag.NewDBFlagGroup(),
|
||||
flag.NewLicenseFlagGroup(),
|
||||
flag.NewMisconfFlagGroup(),
|
||||
flag.NewModuleFlagGroup(),
|
||||
flag.NewPackageFlagGroup(),
|
||||
flag.NewClientFlags(), // for client/server mode
|
||||
flag.NewRegistryFlagGroup(),
|
||||
flag.NewRegoFlagGroup(),
|
||||
reportFlagGroup,
|
||||
flag.NewScanFlagGroup(),
|
||||
flag.NewSecretFlagGroup(),
|
||||
flag.NewVulnerabilityFlagGroup(),
|
||||
}
|
||||
fsFlags = append(fsFlags, extension.CustomFlagGroups("filesystem")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "filesystem [flags] PATH",
|
||||
@@ -394,27 +404,34 @@ func NewFilesystemCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
}
|
||||
|
||||
func NewRootfsCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
rootfsFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
DBFlagGroup: flag.NewDBFlagGroup(),
|
||||
LicenseFlagGroup: flag.NewLicenseFlagGroup(),
|
||||
MisconfFlagGroup: flag.NewMisconfFlagGroup(),
|
||||
ModuleFlagGroup: flag.NewModuleFlagGroup(),
|
||||
PackageFlagGroup: flag.NewPackageFlagGroup(),
|
||||
RemoteFlagGroup: flag.NewClientFlags(), // for client/server mode
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(),
|
||||
RegoFlagGroup: flag.NewRegoFlagGroup(),
|
||||
ReportFlagGroup: flag.NewReportFlagGroup(),
|
||||
ScanFlagGroup: flag.NewScanFlagGroup(),
|
||||
SecretFlagGroup: flag.NewSecretFlagGroup(),
|
||||
VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(),
|
||||
reportFlagGroup := flag.NewReportFlagGroup()
|
||||
reportFlagGroup.ReportFormat = nil // TODO: support --report summary
|
||||
reportFlagGroup.Compliance = nil // disable '--compliance'
|
||||
reportFlagGroup.ReportFormat = nil // disable '--report'
|
||||
|
||||
packageFlagGroup := flag.NewPackageFlagGroup()
|
||||
packageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
|
||||
cacheFlagGroup := flag.NewCacheFlagGroup()
|
||||
cacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default
|
||||
|
||||
rootfsFlags := flag.Flags{
|
||||
globalFlags,
|
||||
cacheFlagGroup,
|
||||
flag.NewDBFlagGroup(),
|
||||
flag.NewLicenseFlagGroup(),
|
||||
flag.NewMisconfFlagGroup(),
|
||||
flag.NewModuleFlagGroup(),
|
||||
packageFlagGroup,
|
||||
flag.NewClientFlags(), // for client/server mode
|
||||
flag.NewRegistryFlagGroup(),
|
||||
flag.NewRegoFlagGroup(),
|
||||
reportFlagGroup,
|
||||
flag.NewScanFlagGroup(),
|
||||
flag.NewSecretFlagGroup(),
|
||||
flag.NewVulnerabilityFlagGroup(),
|
||||
}
|
||||
rootfsFlags.ReportFlagGroup.ReportFormat = nil // TODO: support --report summary
|
||||
rootfsFlags.ReportFlagGroup.Compliance = nil // disable '--compliance'
|
||||
rootfsFlags.ReportFlagGroup.ReportFormat = nil // disable '--report'
|
||||
rootfsFlags.PackageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
rootfsFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default
|
||||
rootfsFlags = append(rootfsFlags, extension.CustomFlagGroups("rootfs")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "rootfs [flags] ROOTDIR",
|
||||
@@ -455,28 +472,32 @@ func NewRootfsCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
}

func NewRepositoryCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
    repoFlags := &flag.Flags{
        GlobalFlagGroup: globalFlags,
        CacheFlagGroup: flag.NewCacheFlagGroup(),
        DBFlagGroup: flag.NewDBFlagGroup(),
        LicenseFlagGroup: flag.NewLicenseFlagGroup(),
        MisconfFlagGroup: flag.NewMisconfFlagGroup(),
        ModuleFlagGroup: flag.NewModuleFlagGroup(),
        PackageFlagGroup: flag.NewPackageFlagGroup(),
        RegistryFlagGroup: flag.NewRegistryFlagGroup(),
        RegoFlagGroup: flag.NewRegoFlagGroup(),
        RemoteFlagGroup: flag.NewClientFlags(), // for client/server mode
        ReportFlagGroup: flag.NewReportFlagGroup(),
        ScanFlagGroup: flag.NewScanFlagGroup(),
        SecretFlagGroup: flag.NewSecretFlagGroup(),
        VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(),
        RepoFlagGroup: flag.NewRepoFlagGroup(),
    }
    repoFlags.ReportFlagGroup.ReportFormat = nil // TODO: support --report summary
    repoFlags.ReportFlagGroup.Compliance = nil // disable '--compliance'
    repoFlags.ReportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol'
    reportFlagGroup := flag.NewReportFlagGroup()
    reportFlagGroup.ReportFormat = nil // TODO: support --report summary
    reportFlagGroup.Compliance = nil // disable '--compliance'
    reportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol'

    repoFlags.ScanFlagGroup.DistroFlag = nil // `repo` subcommand doesn't support scanning OS packages, so we can disable `--distro`
    scanFlagGroup := flag.NewScanFlagGroup()
    scanFlagGroup.DistroFlag = nil // repo subcommand doesn't support scanning OS packages

    repoFlags := flag.Flags{
        globalFlags,
        flag.NewCacheFlagGroup(),
        flag.NewDBFlagGroup(),
        flag.NewLicenseFlagGroup(),
        flag.NewMisconfFlagGroup(),
        flag.NewModuleFlagGroup(),
        flag.NewPackageFlagGroup(),
        flag.NewRegistryFlagGroup(),
        flag.NewRegoFlagGroup(),
        flag.NewClientFlags(), // for client/server mode
        reportFlagGroup,
        scanFlagGroup,
        flag.NewSecretFlagGroup(),
        flag.NewVulnerabilityFlagGroup(),
        flag.NewRepoFlagGroup(),
    }
    repoFlags = append(repoFlags, extension.CustomFlagGroups("repository")...)

    cmd := &cobra.Command{
        Use: "repository [flags] (REPO_PATH | REPO_URL)",
@@ -514,18 +535,20 @@ func NewRepositoryCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
}

func NewConvertCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
    convertFlags := &flag.Flags{
        GlobalFlagGroup: globalFlags,
        ScanFlagGroup: &flag.ScanFlagGroup{},
        ReportFlagGroup: flag.NewReportFlagGroup(),
    }

    // To display the summary table, we need to enable scanners (to build columns).
    // We can't get scanner information from the report (we don't include empty licenses and secrets in the report).
    // So we need to ask the user to configure scanners (if needed).
    convertFlags.ScanFlagGroup.Scanners = flag.ScannersFlag.Clone()
    convertFlags.ScanFlagGroup.Scanners.Default = nil // disable default scanners
    convertFlags.ScanFlagGroup.Scanners.Usage = "List of scanners included when generating the json report. Used only for rendering the summary table."
    scanFlagGroup := &flag.ScanFlagGroup{
        Scanners: flag.ScannersFlag.Clone(),
    }
    scanFlagGroup.Scanners.Default = nil // disable default scanners
    scanFlagGroup.Scanners.Usage = "List of scanners included when generating the json report. Used only for rendering the summary table."

    convertFlags := flag.Flags{
        globalFlags,
        scanFlagGroup,
        flag.NewReportFlagGroup(),
    }

    cmd := &cobra.Command{
        Use: "convert [flags] RESULT_JSON",
@@ -575,18 +598,19 @@ func NewClientCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
}
|
||||
remoteFlags.ServerAddr = &remoteAddr // disable '--server' and enable '--remote' instead.
|
||||
|
||||
clientFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
DBFlagGroup: flag.NewDBFlagGroup(),
|
||||
MisconfFlagGroup: flag.NewMisconfFlagGroup(),
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(),
|
||||
RegoFlagGroup: flag.NewRegoFlagGroup(),
|
||||
RemoteFlagGroup: remoteFlags,
|
||||
ReportFlagGroup: flag.NewReportFlagGroup(),
|
||||
ScanFlagGroup: flag.NewScanFlagGroup(),
|
||||
VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(),
|
||||
clientFlags := flag.Flags{
|
||||
globalFlags,
|
||||
flag.NewCacheFlagGroup(),
|
||||
flag.NewDBFlagGroup(),
|
||||
flag.NewMisconfFlagGroup(),
|
||||
flag.NewRegistryFlagGroup(),
|
||||
flag.NewRegoFlagGroup(),
|
||||
remoteFlags,
|
||||
flag.NewReportFlagGroup(),
|
||||
flag.NewScanFlagGroup(),
|
||||
flag.NewVulnerabilityFlagGroup(),
|
||||
}
|
||||
clientFlags = append(clientFlags, extension.CustomFlagGroups("client")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "client [flags] IMAGE_NAME",
|
||||
@@ -621,19 +645,21 @@ func NewClientCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
}
|
||||
|
||||
func NewServerCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
serverFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
DBFlagGroup: flag.NewDBFlagGroup(),
|
||||
ModuleFlagGroup: flag.NewModuleFlagGroup(),
|
||||
RemoteFlagGroup: flag.NewServerFlags(),
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(),
|
||||
}
|
||||
// The Java DB is not needed for the server mode
|
||||
dbFlagGroup := flag.NewDBFlagGroup()
|
||||
dbFlagGroup.DownloadJavaDBOnly = nil // disable '--download-java-db-only'
|
||||
dbFlagGroup.SkipJavaDBUpdate = nil // disable '--skip-java-db-update'
|
||||
dbFlagGroup.JavaDBRepositories = nil // disable '--java-db-repository'
|
||||
|
||||
// java-db only works on client side.
|
||||
serverFlags.DBFlagGroup.DownloadJavaDBOnly = nil // disable '--download-java-db-only'
|
||||
serverFlags.DBFlagGroup.SkipJavaDBUpdate = nil // disable '--skip-java-db-update'
|
||||
serverFlags.DBFlagGroup.JavaDBRepositories = nil // disable '--java-db-repository'
|
||||
serverFlags := flag.Flags{
|
||||
globalFlags,
|
||||
flag.NewCacheFlagGroup(),
|
||||
dbFlagGroup,
|
||||
flag.NewModuleFlagGroup(),
|
||||
flag.NewServerFlags(),
|
||||
flag.NewRegistryFlagGroup(),
|
||||
}
|
||||
serverFlags = append(serverFlags, extension.CustomFlagGroups("server")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "server [flags]",
|
||||
@@ -675,27 +701,31 @@ func NewConfigCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
FilePatterns: flag.FilePatternsFlag.Clone(),
|
||||
}
|
||||
|
||||
configFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
MisconfFlagGroup: flag.NewMisconfFlagGroup(),
|
||||
ModuleFlagGroup: flag.NewModuleFlagGroup(),
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(),
|
||||
RegoFlagGroup: flag.NewRegoFlagGroup(),
|
||||
K8sFlagGroup: &flag.K8sFlagGroup{
|
||||
// disable unneeded flags
|
||||
reportFlagGroup := flag.NewReportFlagGroup()
|
||||
reportFlagGroup.DependencyTree = nil // disable '--dependency-tree'
|
||||
reportFlagGroup.ListAllPkgs = nil // disable '--list-all-pkgs'
|
||||
reportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol'
|
||||
reportFlagGroup.ShowSuppressed = nil // disable '--show-suppressed'
|
||||
reportFlagGroup.ReportFormat.Usage = "specify a compliance report format for the output" // @TODO: support --report summary for non compliance reports
|
||||
|
||||
cacheFlagGroup := flag.NewCacheFlagGroup()
|
||||
cacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory)
|
||||
|
||||
configFlags := flag.Flags{
|
||||
globalFlags,
|
||||
cacheFlagGroup,
|
||||
flag.NewMisconfFlagGroup(),
|
||||
flag.NewModuleFlagGroup(),
|
||||
flag.NewRegistryFlagGroup(),
|
||||
flag.NewRegoFlagGroup(),
|
||||
&flag.K8sFlagGroup{
|
||||
// Keep only --k8s-version flag and disable others
|
||||
K8sVersion: flag.K8sVersionFlag.Clone(),
|
||||
},
|
||||
ReportFlagGroup: flag.NewReportFlagGroup(),
|
||||
ScanFlagGroup: scanFlags,
|
||||
reportFlagGroup,
|
||||
scanFlags,
|
||||
}
|
||||
|
||||
configFlags.ReportFlagGroup.DependencyTree = nil // disable '--dependency-tree'
|
||||
configFlags.ReportFlagGroup.ListAllPkgs = nil // disable '--list-all-pkgs'
|
||||
configFlags.ReportFlagGroup.ExitOnEOL = nil // disable '--exit-on-eol'
|
||||
configFlags.ReportFlagGroup.ShowSuppressed = nil // disable '--show-suppressed'
|
||||
configFlags.ReportFlagGroup.ReportFormat.Usage = "specify a compliance report format for the output" // @TODO: support --report summary for non compliance reports
|
||||
configFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory)
|
||||
configFlags = append(configFlags, extension.CustomFlagGroups("config")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "config [flags] DIR",
|
||||
@@ -738,7 +768,7 @@ func NewConfigCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
func NewPluginCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
var pluginOptions flag.Options
|
||||
pluginFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
globalFlags,
|
||||
}
|
||||
cmd := &cobra.Command{
|
||||
Use: "plugin subcommand",
|
||||
@@ -747,7 +777,8 @@ func NewPluginCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
Short: "Manage plugins",
|
||||
SilenceErrors: true,
|
||||
SilenceUsage: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) (err error) {
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
var err error
|
||||
pluginOptions, err = pluginFlags.ToOptions(args)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -887,8 +918,8 @@ func NewPluginCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
|
||||
func NewModuleCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
moduleFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
ModuleFlagGroup: flag.NewModuleFlagGroup(),
|
||||
globalFlags,
|
||||
flag.NewModuleFlagGroup(),
|
||||
}
|
||||
|
||||
cmd := &cobra.Command{
|
||||
@@ -998,22 +1029,25 @@ func NewKubernetesCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
misconfFlagGroup.CloudformationParamVars = nil // disable '--cf-params'
|
||||
misconfFlagGroup.TerraformTFVars = nil // disable '--tf-vars'
|
||||
|
||||
k8sFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
DBFlagGroup: flag.NewDBFlagGroup(),
|
||||
ImageFlagGroup: imageFlags,
|
||||
K8sFlagGroup: flag.NewK8sFlagGroup(), // kubernetes-specific flags
|
||||
MisconfFlagGroup: misconfFlagGroup,
|
||||
PackageFlagGroup: flag.NewPackageFlagGroup(),
|
||||
RegoFlagGroup: flag.NewRegoFlagGroup(),
|
||||
ReportFlagGroup: reportFlagGroup,
|
||||
ScanFlagGroup: scanFlags,
|
||||
SecretFlagGroup: flag.NewSecretFlagGroup(),
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(),
|
||||
VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(),
|
||||
packageFlagGroup := flag.NewPackageFlagGroup()
|
||||
packageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
|
||||
k8sFlags := flag.Flags{
|
||||
globalFlags,
|
||||
flag.NewCacheFlagGroup(),
|
||||
flag.NewDBFlagGroup(),
|
||||
imageFlags,
|
||||
flag.NewK8sFlagGroup(), // kubernetes-specific flags
|
||||
misconfFlagGroup,
|
||||
packageFlagGroup,
|
||||
flag.NewRegoFlagGroup(),
|
||||
reportFlagGroup,
|
||||
scanFlags,
|
||||
flag.NewSecretFlagGroup(),
|
||||
flag.NewRegistryFlagGroup(),
|
||||
flag.NewVulnerabilityFlagGroup(),
|
||||
}
|
||||
k8sFlags.PackageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
k8sFlags = append(k8sFlags, extension.CustomFlagGroups("kubernetes")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "kubernetes [flags] [CONTEXT]",
|
||||
@@ -1060,19 +1094,29 @@ func NewKubernetesCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
}
|
||||
|
||||
func NewVMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
vmFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
DBFlagGroup: flag.NewDBFlagGroup(),
|
||||
MisconfFlagGroup: flag.NewMisconfFlagGroup(),
|
||||
ModuleFlagGroup: flag.NewModuleFlagGroup(),
|
||||
PackageFlagGroup: flag.NewPackageFlagGroup(),
|
||||
RemoteFlagGroup: flag.NewClientFlags(), // for client/server mode
|
||||
ReportFlagGroup: flag.NewReportFlagGroup(),
|
||||
ScanFlagGroup: flag.NewScanFlagGroup(),
|
||||
SecretFlagGroup: flag.NewSecretFlagGroup(),
|
||||
VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(),
|
||||
AWSFlagGroup: &flag.AWSFlagGroup{
|
||||
reportFlagGroup := flag.NewReportFlagGroup()
|
||||
reportFlagGroup.ReportFormat = nil // disable '--report'
|
||||
|
||||
packageFlagGroup := flag.NewPackageFlagGroup()
|
||||
packageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
|
||||
misconfFlagGroup := flag.NewMisconfFlagGroup()
|
||||
misconfFlagGroup.CloudformationParamVars = nil // disable '--cf-params'
|
||||
misconfFlagGroup.TerraformTFVars = nil // disable '--tf-vars'
|
||||
|
||||
vmFlags := flag.Flags{
|
||||
globalFlags,
|
||||
flag.NewCacheFlagGroup(),
|
||||
flag.NewDBFlagGroup(),
|
||||
misconfFlagGroup,
|
||||
flag.NewModuleFlagGroup(),
|
||||
packageFlagGroup,
|
||||
flag.NewClientFlags(), // for client/server mode
|
||||
reportFlagGroup,
|
||||
flag.NewScanFlagGroup(),
|
||||
flag.NewSecretFlagGroup(),
|
||||
flag.NewVulnerabilityFlagGroup(),
|
||||
&flag.AWSFlagGroup{
|
||||
Region: &flag.Flag[string]{
|
||||
Name: "aws-region",
|
||||
ConfigName: "aws.region",
|
||||
@@ -1080,10 +1124,7 @@ func NewVMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
},
|
||||
},
|
||||
}
|
||||
vmFlags.ReportFlagGroup.ReportFormat = nil // disable '--report'
|
||||
vmFlags.PackageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
vmFlags.MisconfFlagGroup.CloudformationParamVars = nil // disable '--cf-params'
|
||||
vmFlags.MisconfFlagGroup.TerraformTFVars = nil // disable '--tf-vars'
|
||||
vmFlags = append(vmFlags, extension.CustomFlagGroups("vm")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "vm [flags] VM_IMAGE",
|
||||
@@ -1148,21 +1189,25 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
licenseFlagGroup.LicenseFull = nil
|
||||
licenseFlagGroup.LicenseConfidenceLevel = nil
|
||||
|
||||
sbomFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CacheFlagGroup: flag.NewCacheFlagGroup(),
|
||||
DBFlagGroup: flag.NewDBFlagGroup(),
|
||||
PackageFlagGroup: flag.NewPackageFlagGroup(),
|
||||
RemoteFlagGroup: flag.NewClientFlags(), // for client/server mode
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(), // for DBs in private registries
|
||||
ReportFlagGroup: reportFlagGroup,
|
||||
ScanFlagGroup: scanFlagGroup,
|
||||
VulnerabilityFlagGroup: flag.NewVulnerabilityFlagGroup(),
|
||||
LicenseFlagGroup: licenseFlagGroup,
|
||||
}
|
||||
cacheFlagGroup := flag.NewCacheFlagGroup()
|
||||
cacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default
|
||||
|
||||
sbomFlags.CacheFlagGroup.CacheBackend.Default = string(cache.TypeMemory) // Use memory cache by default
|
||||
sbomFlags.PackageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
packageFlagGroup := flag.NewPackageFlagGroup()
|
||||
packageFlagGroup.IncludeDevDeps = nil // disable '--include-dev-deps'
|
||||
|
||||
sbomFlags := flag.Flags{
|
||||
globalFlags,
|
||||
cacheFlagGroup,
|
||||
flag.NewDBFlagGroup(),
|
||||
packageFlagGroup,
|
||||
flag.NewClientFlags(), // for client/server mode
|
||||
flag.NewRegistryFlagGroup(), // for DBs in private registries
|
||||
reportFlagGroup,
|
||||
scanFlagGroup,
|
||||
flag.NewVulnerabilityFlagGroup(),
|
||||
licenseFlagGroup,
|
||||
}
|
||||
sbomFlags = append(sbomFlags, extension.CustomFlagGroups("sbom")...)
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "sbom [flags] SBOM_PATH",
|
||||
@@ -1203,8 +1248,8 @@ func NewSBOMCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
|
||||
func NewCleanCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
cleanFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
CleanFlagGroup: flag.NewCleanFlagGroup(),
|
||||
globalFlags,
|
||||
flag.NewCleanFlagGroup(),
|
||||
}
|
||||
cmd := &cobra.Command{
|
||||
Use: "clean [flags]",
|
||||
@@ -1252,11 +1297,13 @@ func NewRegistryCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
SilenceUsage: true,
|
||||
}
|
||||
|
||||
registryFlagGroup := flag.NewRegistryFlagGroup()
|
||||
registryFlagGroup.RegistryToken = nil
|
||||
|
||||
loginFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
RegistryFlagGroup: flag.NewRegistryFlagGroup(),
|
||||
globalFlags,
|
||||
registryFlagGroup,
|
||||
}
|
||||
loginFlags.RegistryFlagGroup.RegistryToken = nil // disable '--registry-token'
|
||||
loginCmd := &cobra.Command{
|
||||
Use: "login SERVER",
|
||||
Short: "Log in to a registry",
|
||||
@@ -1300,23 +1347,25 @@ func NewRegistryCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
}
|
||||
|
||||
func NewVEXCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
|
||||
vexFlags := &flag.Flags{
|
||||
GlobalFlagGroup: globalFlags,
|
||||
vexFlags := flag.Flags{
|
||||
globalFlags,
|
||||
}
|
||||
var vexOptions flag.Options
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "vex subcommand",
|
||||
GroupID: groupManagement,
|
||||
Short: "[EXPERIMENTAL] VEX utilities",
|
||||
SilenceErrors: true,
|
||||
SilenceUsage: true,
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) (err error) {
|
||||
PersistentPreRunE: func(cmd *cobra.Command, args []string) error {
|
||||
cmd.SetContext(log.WithContextPrefix(cmd.Context(), "vex"))
|
||||
|
||||
vexOptions, err = vexFlags.ToOptions(args)
|
||||
opts, err := vexFlags.ToOptions(args)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
vexOptions = opts
|
||||
return nil
|
||||
},
|
||||
}
|
||||
@@ -1393,11 +1442,12 @@ func NewVersionCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
        GroupID: groupUtility,
        Args: cobra.NoArgs,
        RunE: func(cmd *cobra.Command, args []string) error {
            options, err := globalFlags.ToOptions()
            flags := flag.Flags{globalFlags}
            opts, err := flags.ToOptions(args)
            if err != nil {
                return err
            }
            return showVersion(options.CacheDir, versionFormat, cmd.OutOrStdout())
            return showVersion(opts.CacheDir, versionFormat, cmd.OutOrStdout())
        },
        SilenceErrors: true,
        SilenceUsage: true,

@@ -315,9 +315,9 @@ func TestFlags(t *testing.T) {
    rootCmd.SetOut(io.Discard)

    flags := &flag.Flags{
        GlobalFlagGroup: globalFlags,
        ReportFlagGroup: flag.NewReportFlagGroup(),
        ScanFlagGroup: flag.NewScanFlagGroup(),
        globalFlags,
        flag.NewReportFlagGroup(),
        flag.NewScanFlagGroup(),
    }
    cmd := &cobra.Command{
        Use: "test",

@@ -15,6 +15,7 @@ import (
    "github.com/aquasecurity/trivy/pkg/cache"
    "github.com/aquasecurity/trivy/pkg/commands/operation"
    "github.com/aquasecurity/trivy/pkg/db"
    "github.com/aquasecurity/trivy/pkg/extension"
    "github.com/aquasecurity/trivy/pkg/fanal/analyzer"
    "github.com/aquasecurity/trivy/pkg/fanal/artifact"
    ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
@@ -277,7 +278,6 @@ func (r *runner) Report(ctx context.Context, opts flag.Options, report types.Rep
    if err := pkgReport.Write(ctx, report, opts); err != nil {
        return xerrors.Errorf("unable to write results: %w", err)
    }

    return nil
}

@@ -375,12 +375,32 @@ func Run(ctx context.Context, opts flag.Options, targetKind TargetKind) (err err
        return v.SafeWriteConfigAs("trivy-default.yaml")
    }

    // Call pre-run hooks
    if err := extension.PreRun(ctx, opts); err != nil {
        return xerrors.Errorf("pre run error: %w", err)
    }

    // Run the application
    report, err := run(ctx, opts, targetKind)
    if err != nil {
        return xerrors.Errorf("run error: %w", err)
    }

    // Call post-run hooks
    if err := extension.PostRun(ctx, opts); err != nil {
        return xerrors.Errorf("post run error: %w", err)
    }

    return operation.Exit(opts, report.Results.Failed(), report.Metadata)
}

func run(ctx context.Context, opts flag.Options, targetKind TargetKind) (types.Report, error) {
    r, err := NewRunner(ctx, opts)
    if err != nil {
        if errors.Is(err, SkipScan) {
            return nil
            return types.Report{}, nil
        }
        return xerrors.Errorf("init error: %w", err)
        return types.Report{}, xerrors.Errorf("init error: %w", err)
    }
    defer r.Close(ctx)

@@ -395,24 +415,27 @@ func Run(ctx context.Context, opts flag.Options, targetKind TargetKind) (err err

    scanFunction, exists := scans[targetKind]
    if !exists {
        return xerrors.Errorf("unknown target kind: %s", targetKind)
        return types.Report{}, xerrors.Errorf("unknown target kind: %s", targetKind)
    }

    // 1. Scan the artifact
    report, err := scanFunction(ctx, opts)
    if err != nil {
        return xerrors.Errorf("%s scan error: %w", targetKind, err)
        return types.Report{}, xerrors.Errorf("%s scan error: %w", targetKind, err)
    }

    // 2. Filter the results
    report, err = r.Filter(ctx, opts, report)
    if err != nil {
        return xerrors.Errorf("filter error: %w", err)
        return types.Report{}, xerrors.Errorf("filter error: %w", err)
    }

    // 3. Report the results
    if err = r.Report(ctx, opts, report); err != nil {
        return xerrors.Errorf("report error: %w", err)
        return types.Report{}, xerrors.Errorf("report error: %w", err)
    }

    return operation.Exit(opts, report.Results.Failed(), report.Metadata)
    return report, nil
}

func disabledAnalyzers(opts flag.Options) []analyzer.Type {
@@ -662,7 +685,7 @@ func initMisconfScannerOption(ctx context.Context, opts flag.Options) (misconf.S
        return misconf.ScannerOption{}, xerrors.Errorf("load schemas error: %w", err)
    }

    return misconf.ScannerOption{
    misconfOpts := misconf.ScannerOption{
        Trace: opts.Trace,
        Namespaces: append(opts.CheckNamespaces, rego.BuiltinNamespaces()...),
        PolicyPaths: policyPaths,
@@ -684,5 +707,13 @@ func initMisconfScannerOption(ctx context.Context, opts flag.Options) (misconf.S
        ConfigFileSchemas: configSchemas,
        SkipFiles: opts.SkipFiles,
        SkipDirs: opts.SkipDirs,
    }, nil
}

    regoScanner, err := misconf.InitRegoScanner(misconfOpts)
    if err != nil {
        return misconf.ScannerOption{}, xerrors.Errorf("init Rego scanner: %w", err)
    }

    misconfOpts.RegoScanner = regoScanner
    return misconfOpts, nil
}

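extension.PreRun and extension.PostRun above iterate over hooks registered via extension.RegisterHook (see pkg/extension/hook.go later in this diff). A hedged sketch of a hook that would be picked up by those calls; the package name, the "timing" name, and the timing behaviour are illustrative only and not part of the change.

// Hypothetical RunHook that measures the duration of a whole Trivy run.
// It relies only on the RunHook interface and RegisterHook from pkg/extension.
package myhooks

import (
    "context"
    "fmt"
    "os"
    "time"

    "github.com/aquasecurity/trivy/pkg/extension"
    "github.com/aquasecurity/trivy/pkg/flag"
)

type timingHook struct {
    started time.Time
}

func (h *timingHook) Name() string { return "timing" } // hypothetical hook name

func (h *timingHook) PreRun(_ context.Context, _ flag.Options) error {
    h.started = time.Now() // invoked by extension.PreRun before run()
    return nil
}

func (h *timingHook) PostRun(_ context.Context, _ flag.Options) error {
    // invoked by extension.PostRun after run() and before operation.Exit
    fmt.Fprintf(os.Stderr, "run took %s\n", time.Since(h.started))
    return nil
}

func init() {
    extension.RegisterHook(&timingHook{})
}
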
@@ -1,18 +1,19 @@
package conan

import (
    "io"
    "slices"
    "strings"

    "github.com/go-json-experiment/json"
    "github.com/go-json-experiment/json/jsontext"
    "github.com/samber/lo"
    "golang.org/x/xerrors"

    "github.com/aquasecurity/jfather"
    "github.com/aquasecurity/trivy/pkg/dependency"
    ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
    "github.com/aquasecurity/trivy/pkg/log"
    xio "github.com/aquasecurity/trivy/pkg/x/io"
    xjson "github.com/aquasecurity/trivy/pkg/x/json"
)

type LockFile struct {
@@ -25,19 +26,20 @@ type GraphLock struct {
}

type Node struct {
    Ref string `json:"ref"`
    Requires []string `json:"requires"`
    StartLine int
    EndLine int
    Ref string `json:"ref"`
    Requires []string `json:"requires"`
    xjson.Location
}
type Requires []Require

type Require struct {
    Dependency string
    StartLine int
    EndLine int
    xjson.Location
}

type Requires []Require
func (r *Require) UnmarshalJSONFrom(dec *jsontext.Decoder) error {
    return json.UnmarshalDecode(dec, &r.Dependency)
}

type Parser struct {
    logger *log.Logger
@@ -63,7 +65,7 @@ func (p *Parser) parseV1(lock LockFile) ([]ftypes.Package, []ftypes.Dependency,
    if node.Ref == "" {
        continue
    }
    pkg, err := toPackage(node.Ref, node.StartLine, node.EndLine)
    pkg, err := toPackage(node.Ref, node.Location)
    if err != nil {
        p.logger.Debug("Parse ref error", log.Err(err))
        continue
@@ -105,7 +107,7 @@ func (p *Parser) parseV2(lock LockFile) ([]ftypes.Package, []ftypes.Dependency,
    var pkgs []ftypes.Package

    for _, req := range lock.Requires {
        pkg, err := toPackage(req.Dependency, req.StartLine, req.EndLine)
        pkg, err := toPackage(req.Dependency, req.Location)
        if err != nil {
            p.logger.Debug("Creating package entry from requirement failed", log.Err(err))
            continue
@@ -118,12 +120,7 @@ func (p *Parser) parseV2(lock LockFile) ([]ftypes.Package, []ftypes.Dependenc

func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
    var lock LockFile

    input, err := io.ReadAll(r)
    if err != nil {
        return nil, nil, xerrors.Errorf("failed to read conan lock file: %w", err)
    }
    if err := jfather.Unmarshal(input, &lock); err != nil {
    if err := xjson.UnmarshalRead(r, &lock); err != nil {
        return nil, nil, xerrors.Errorf("failed to decode conan lock file: %w", err)
    }

@@ -152,42 +149,15 @@ func parsePackage(text string) (string, string, error) {
    return ss[0], ss[1], nil
}

func toPackage(pkg string, startLine, endLine int) (ftypes.Package, error) {
func toPackage(pkg string, location xjson.Location) (ftypes.Package, error) {
    name, version, err := parsePackage(pkg)
    if err != nil {
        return ftypes.Package{}, err
    }
    return ftypes.Package{
        ID: dependency.ID(ftypes.Conan, name, version),
        Name: name,
        Version: version,
        Locations: []ftypes.Location{
            {
                StartLine: startLine,
                EndLine: endLine,
            },
        },
        ID: dependency.ID(ftypes.Conan, name, version),
        Name: name,
        Version: version,
        Locations: []ftypes.Location{ftypes.Location(location)},
    }, nil
}

// UnmarshalJSONWithMetadata needed to detect start and end lines of deps
func (n *Node) UnmarshalJSONWithMetadata(node jfather.Node) error {
    if err := node.Decode(&n); err != nil {
        return err
    }
    // Decode func will overwrite line numbers if we save them first
    n.StartLine = node.Range().Start.Line
    n.EndLine = node.Range().End.Line
    return nil
}

func (r *Require) UnmarshalJSONWithMetadata(node jfather.Node) error {
    var dep string
    if err := node.Decode(&dep); err != nil {
        return err
    }
    r.Dependency = dep
    r.StartLine = node.Range().Start.Line
    r.EndLine = node.Range().End.Line
    return nil
}

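The remaining parser changes in this diff (dotnet core-deps, npm, nuget, composer, pipenv, sbt, swift) repeat the same migration: drop the io.ReadAll plus jfather.Unmarshal pair and the hand-written UnmarshalJSONWithMetadata methods, embed xjson.Location in the lock-file structs, and decode with xjson.UnmarshalRead. A condensed sketch of that pattern, using only the calls visible in this diff; the lockDoc/lockEntry types, the "entries" key, and the package name are made up for illustration.

// Hypothetical minimal parser showing the xjson migration pattern used above.
package exampleparser

import (
    "golang.org/x/xerrors"

    ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
    xio "github.com/aquasecurity/trivy/pkg/x/io"
    xjson "github.com/aquasecurity/trivy/pkg/x/json"
)

type lockEntry struct {
    Name    string `json:"name"`
    Version string `json:"version"`
    xjson.Location // filled with the element's start/end lines while decoding
}

type lockDoc struct {
    Entries []lockEntry `json:"entries"`
}

func parseLock(r xio.ReadSeekerAt) ([]ftypes.Package, error) {
    var doc lockDoc
    // xjson.UnmarshalRead replaces the old io.ReadAll + jfather.Unmarshal pair.
    if err := xjson.UnmarshalRead(r, &doc); err != nil {
        return nil, xerrors.Errorf("decode error: %w", err)
    }

    pkgs := make([]ftypes.Package, 0, len(doc.Entries))
    for _, e := range doc.Entries {
        pkgs = append(pkgs, ftypes.Package{
            Name:      e.Name,
            Version:   e.Version,
            Locations: []ftypes.Location{ftypes.Location(e.Location)}, // same conversion as in toPackage above
        })
    }
    return pkgs, nil
}
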
@@ -1,7 +1,6 @@
|
||||
package core_deps
|
||||
|
||||
import (
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
@@ -9,11 +8,11 @@ import (
|
||||
"github.com/samber/lo"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/jfather"
|
||||
"github.com/aquasecurity/trivy/pkg/dependency"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
xio "github.com/aquasecurity/trivy/pkg/x/io"
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
type dotNetDependencies struct {
|
||||
@@ -23,9 +22,8 @@ type dotNetDependencies struct {
|
||||
}
|
||||
|
||||
type dotNetLibrary struct {
|
||||
Type string `json:"type"`
|
||||
StartLine int
|
||||
EndLine int
|
||||
Type string `json:"type"`
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type RuntimeTarget struct {
|
||||
@@ -52,12 +50,7 @@ func NewParser() *Parser {
|
||||
|
||||
func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
|
||||
var depsFile dotNetDependencies
|
||||
|
||||
input, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("read error: %w", err)
|
||||
}
|
||||
if err = jfather.Unmarshal(input, &depsFile); err != nil {
|
||||
if err := xjson.UnmarshalRead(r, &depsFile); err != nil {
|
||||
return nil, nil, xerrors.Errorf("failed to decode .deps.json file: %w", err)
|
||||
}
|
||||
|
||||
@@ -87,15 +80,10 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
|
||||
}
|
||||
|
||||
pkgs = append(pkgs, ftypes.Package{
|
||||
ID: dependency.ID(ftypes.DotNetCore, split[0], split[1]),
|
||||
Name: split[0],
|
||||
Version: split[1],
|
||||
Locations: []ftypes.Location{
|
||||
{
|
||||
StartLine: lib.StartLine,
|
||||
EndLine: lib.EndLine,
|
||||
},
|
||||
},
|
||||
ID: dependency.ID(ftypes.DotNetCore, split[0], split[1]),
|
||||
Name: split[0],
|
||||
Version: split[1],
|
||||
Locations: []ftypes.Location{ftypes.Location(lib.Location)},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -118,14 +106,3 @@ func (p *Parser) isRuntimeLibrary(targetLibs map[string]TargetLib, library strin
|
||||
// Check that `runtime`, `runtimeTarget` and `native` sections are not empty
|
||||
return !lo.IsEmpty(lib)
|
||||
}
|
||||
|
||||
// UnmarshalJSONWithMetadata needed to detect start and end lines of deps
|
||||
func (t *dotNetLibrary) UnmarshalJSONWithMetadata(node jfather.Node) error {
|
||||
if err := node.Decode(&t); err != nil {
|
||||
return err
|
||||
}
|
||||
// Decode func will overwrite line numbers if we save them first
|
||||
t.StartLine = node.Range().Start.Line
|
||||
t.EndLine = node.Range().End.Line
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -82,7 +82,7 @@ func TestParse(t *testing.T) {
|
||||
{
|
||||
name: "sad path",
|
||||
file: "testdata/invalid.deps.json",
|
||||
wantErr: "failed to decode .deps.json file: EOF",
|
||||
wantErr: "failed to decode .deps.json file: jsontext: unexpected EOF within",
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@ package npm
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"maps"
|
||||
"path"
|
||||
"slices"
|
||||
@@ -12,13 +11,13 @@ import (
|
||||
"github.com/samber/lo"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/jfather"
|
||||
"github.com/aquasecurity/trivy/pkg/dependency"
|
||||
"github.com/aquasecurity/trivy/pkg/dependency/parser/utils"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
"github.com/aquasecurity/trivy/pkg/set"
|
||||
xio "github.com/aquasecurity/trivy/pkg/x/io"
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
const nodeModulesDir = "node_modules"
|
||||
@@ -34,8 +33,7 @@ type Dependency struct {
|
||||
Dependencies map[string]Dependency `json:"dependencies"`
|
||||
Requires map[string]string `json:"requires"`
|
||||
Resolved string `json:"resolved"`
|
||||
StartLine int
|
||||
EndLine int
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type Package struct {
|
||||
@@ -49,8 +47,7 @@ type Package struct {
|
||||
Dev bool `json:"dev"`
|
||||
Link bool `json:"link"`
|
||||
Workspaces []string `json:"workspaces"`
|
||||
StartLine int
|
||||
EndLine int
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type Parser struct {
|
||||
@@ -65,11 +62,7 @@ func NewParser() *Parser {
|
||||
|
||||
func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
|
||||
var lockFile LockFile
|
||||
input, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("read error: %w", err)
|
||||
}
|
||||
if err := jfather.Unmarshal(input, &lockFile); err != nil {
|
||||
if err := xjson.UnmarshalRead(r, &lockFile); err != nil {
|
||||
return nil, nil, xerrors.Errorf("decode error: %w", err)
|
||||
}
|
||||
|
||||
@@ -117,10 +110,6 @@ func (p *Parser) parseV2(packages map[string]Package) ([]ftypes.Package, []ftype
|
||||
}
|
||||
|
||||
pkgID := packageID(pkgName, pkg.Version)
|
||||
location := ftypes.Location{
|
||||
StartLine: pkg.StartLine,
|
||||
EndLine: pkg.EndLine,
|
||||
}
|
||||
|
||||
var ref ftypes.ExternalRef
|
||||
if pkg.Resolved != "" {
|
||||
@@ -145,7 +134,7 @@ func (p *Parser) parseV2(packages map[string]Package) ([]ftypes.Package, []ftype
|
||||
sortExternalReferences(savedPkg.ExternalReferences)
|
||||
}
|
||||
|
||||
savedPkg.Locations = append(savedPkg.Locations, location)
|
||||
savedPkg.Locations = append(savedPkg.Locations, ftypes.Location(pkg.Location))
|
||||
sort.Sort(savedPkg.Locations)
|
||||
|
||||
pkgs[pkgID] = savedPkg
|
||||
@@ -159,7 +148,7 @@ func (p *Parser) parseV2(packages map[string]Package) ([]ftypes.Package, []ftype
|
||||
Relationship: lo.Ternary(pkgIndirect, ftypes.RelationshipIndirect, ftypes.RelationshipDirect),
|
||||
Dev: pkg.Dev,
|
||||
ExternalReferences: lo.Ternary(ref.URL != "", []ftypes.ExternalRef{ref}, nil),
|
||||
Locations: []ftypes.Location{location},
|
||||
Locations: []ftypes.Location{ftypes.Location(pkg.Location)},
|
||||
}
|
||||
pkgs[pkgID] = newPkg
|
||||
|
||||
@@ -304,12 +293,7 @@ func (p *Parser) parseV1(dependencies map[string]Dependency, versions map[string
|
||||
URL: dep.Resolved,
|
||||
},
|
||||
},
|
||||
Locations: []ftypes.Location{
|
||||
{
|
||||
StartLine: dep.StartLine,
|
||||
EndLine: dep.EndLine,
|
||||
},
|
||||
},
|
||||
Locations: []ftypes.Location{ftypes.Location(dep.Location)},
|
||||
}
|
||||
pkgs = append(pkgs, pkg)
|
||||
|
||||
@@ -396,28 +380,6 @@ func joinPaths(paths ...string) string {
|
||||
return strings.Join(paths, "/")
|
||||
}
|
||||
|
||||
// UnmarshalJSONWithMetadata needed to detect start and end lines of deps for v1
|
||||
func (t *Dependency) UnmarshalJSONWithMetadata(node jfather.Node) error {
|
||||
if err := node.Decode(&t); err != nil {
|
||||
return err
|
||||
}
|
||||
// Decode func will overwrite line numbers if we save them first
|
||||
t.StartLine = node.Range().Start.Line
|
||||
t.EndLine = node.Range().End.Line
|
||||
return nil
|
||||
}
|
||||
|
||||
// UnmarshalJSONWithMetadata needed to detect start and end lines of deps for v2 or newer
|
||||
func (t *Package) UnmarshalJSONWithMetadata(node jfather.Node) error {
|
||||
if err := node.Decode(&t); err != nil {
|
||||
return err
|
||||
}
|
||||
// Decode func will overwrite line numbers if we save them first
|
||||
t.StartLine = node.Range().Start.Line
|
||||
t.EndLine = node.Range().End.Line
|
||||
return nil
|
||||
}
|
||||
|
||||
func packageID(name, version string) string {
|
||||
return dependency.ID(ftypes.Npm, name, version)
|
||||
}
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
package lock
|
||||
|
||||
import (
|
||||
"io"
|
||||
|
||||
"github.com/samber/lo"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/jfather"
|
||||
"github.com/aquasecurity/trivy/pkg/dependency"
|
||||
"github.com/aquasecurity/trivy/pkg/dependency/parser/utils"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
xio "github.com/aquasecurity/trivy/pkg/x/io"
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
type LockFile struct {
|
||||
@@ -21,11 +19,10 @@ type LockFile struct {
|
||||
type Dependencies map[string]Dependency
|
||||
|
||||
type Dependency struct {
|
||||
Type string `json:"type"`
|
||||
Resolved string `json:"resolved"`
|
||||
StartLine int
|
||||
EndLine int
|
||||
Type string `json:"type"`
|
||||
Resolved string `json:"resolved"`
|
||||
Dependencies map[string]string `json:"dependencies,omitempty"`
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type Parser struct{}
|
||||
@@ -36,11 +33,7 @@ func NewParser() *Parser {
|
||||
|
||||
func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
|
||||
var lockFile LockFile
|
||||
input, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("failed to read packages.lock.json: %w", err)
|
||||
}
|
||||
if err := jfather.Unmarshal(input, &lockFile); err != nil {
|
||||
if err := xjson.UnmarshalRead(r, &lockFile); err != nil {
|
||||
return nil, nil, xerrors.Errorf("failed to decode packages.lock.json: %w", err)
|
||||
}
|
||||
|
||||
@@ -60,12 +53,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
|
||||
Name: packageName,
|
||||
Version: packageContent.Resolved,
|
||||
Relationship: lo.Ternary(packageContent.Type == "Direct", ftypes.RelationshipDirect, ftypes.RelationshipIndirect),
|
||||
Locations: []ftypes.Location{
|
||||
{
|
||||
StartLine: packageContent.StartLine,
|
||||
EndLine: packageContent.EndLine,
|
||||
},
|
||||
},
|
||||
Locations: []ftypes.Location{ftypes.Location(packageContent.Location)},
|
||||
}
|
||||
pkgs = append(pkgs, pkg)
|
||||
|
||||
@@ -97,17 +85,6 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
|
||||
return utils.UniquePackages(pkgs), deps, nil
|
||||
}
|
||||
|
||||
// UnmarshalJSONWithMetadata needed to detect start and end lines of deps
|
||||
func (t *Dependency) UnmarshalJSONWithMetadata(node jfather.Node) error {
|
||||
if err := node.Decode(&t); err != nil {
|
||||
return err
|
||||
}
|
||||
// Decode func will overwrite line numbers if we save them first
|
||||
t.StartLine = node.Range().Start.Line
|
||||
t.EndLine = node.Range().End.Line
|
||||
return nil
|
||||
}
|
||||
|
||||
func packageID(name, version string) string {
|
||||
return dependency.ID(ftypes.NuGet, name, version)
|
||||
}
|
||||
|
||||
@@ -1,31 +1,29 @@
|
||||
package composer
|
||||
|
||||
import (
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/samber/lo"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/jfather"
|
||||
"github.com/aquasecurity/trivy/pkg/dependency"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
"github.com/aquasecurity/trivy/pkg/licensing"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
xio "github.com/aquasecurity/trivy/pkg/x/io"
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
type LockFile struct {
|
||||
Packages []packageInfo `json:"packages"`
|
||||
}
|
||||
type packageInfo struct {
|
||||
Name string `json:"name"`
|
||||
Version string `json:"version"`
|
||||
Require map[string]string `json:"require"`
|
||||
License any `json:"license"`
|
||||
StartLine int
|
||||
EndLine int
|
||||
Name string `json:"name"`
|
||||
Version string `json:"version"`
|
||||
Require map[string]string `json:"require"`
|
||||
License any `json:"license"`
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type Parser struct {
|
||||
@@ -40,11 +38,7 @@ func NewParser() *Parser {
|
||||
|
||||
func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
|
||||
var lockFile LockFile
|
||||
input, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("read error: %w", err)
|
||||
}
|
||||
if err = jfather.Unmarshal(input, &lockFile); err != nil {
|
||||
if err := xjson.UnmarshalRead(r, &lockFile); err != nil {
|
||||
return nil, nil, xerrors.Errorf("decode error: %w", err)
|
||||
}
|
||||
|
||||
@@ -57,12 +51,7 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
|
||||
Version: lpkg.Version,
|
||||
Relationship: ftypes.RelationshipUnknown, // composer.lock file doesn't have info about direct/indirect dependencies
|
||||
Licenses: licenses(lpkg.License),
|
||||
Locations: []ftypes.Location{
|
||||
{
|
||||
StartLine: lpkg.StartLine,
|
||||
EndLine: lpkg.EndLine,
|
||||
},
|
||||
},
|
||||
Locations: []ftypes.Location{ftypes.Location(lpkg.Location)},
|
||||
}
|
||||
pkgs[pkg.Name] = pkg
|
||||
|
||||
@@ -105,17 +94,6 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
|
||||
return pkgSlice, deps, nil
|
||||
}
|
||||
|
||||
// UnmarshalJSONWithMetadata needed to detect start and end lines of deps
|
||||
func (t *packageInfo) UnmarshalJSONWithMetadata(node jfather.Node) error {
|
||||
if err := node.Decode(&t); err != nil {
|
||||
return err
|
||||
}
|
||||
// Decode func will overwrite line numbers if we save them first
|
||||
t.StartLine = node.Range().Start.Line
|
||||
t.EndLine = node.Range().End.Line
|
||||
return nil
|
||||
}
|
||||
|
||||
// licenses returns slice of licenses from string, string with separators (`or`, `and`, etc.) or string array
|
||||
// cf. https://getcomposer.org/doc/04-schema.md#license
|
||||
func licenses(val any) []string {
|
||||
|
||||
@@ -1,23 +1,21 @@
|
||||
package pipenv
|
||||
|
||||
import (
|
||||
"io"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/jfather"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
xio "github.com/aquasecurity/trivy/pkg/x/io"
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
type lockFile struct {
|
||||
Default map[string]dependency `json:"default"`
|
||||
}
|
||||
type dependency struct {
|
||||
Version string `json:"version"`
|
||||
StartLine int
|
||||
EndLine int
|
||||
Version string `json:"version"`
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type Parser struct{}
|
||||
@@ -28,37 +26,17 @@ func NewParser() *Parser {
|
||||
|
||||
func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
|
||||
var lockFile lockFile
|
||||
input, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("failed to read packages.lock.json: %w", err)
|
||||
}
|
||||
if err := jfather.Unmarshal(input, &lockFile); err != nil {
|
||||
if err := xjson.UnmarshalRead(r, &lockFile); err != nil {
|
||||
return nil, nil, xerrors.Errorf("failed to decode Pipenv.lock: %w", err)
|
||||
}
|
||||
|
||||
var pkgs []ftypes.Package
|
||||
for pkgName, dep := range lockFile.Default {
|
||||
pkgs = append(pkgs, ftypes.Package{
|
||||
Name: pkgName,
|
||||
Version: strings.TrimLeft(dep.Version, "="),
|
||||
Locations: []ftypes.Location{
|
||||
{
|
||||
StartLine: dep.StartLine,
|
||||
EndLine: dep.EndLine,
|
||||
},
|
||||
},
|
||||
Name: pkgName,
|
||||
Version: strings.TrimLeft(dep.Version, "="),
|
||||
Locations: []ftypes.Location{ftypes.Location(dep.Location)},
|
||||
})
|
||||
}
|
||||
return pkgs, nil, nil
|
||||
}
|
||||
|
||||
// UnmarshalJSONWithMetadata needed to detect start and end lines of deps
|
||||
func (t *dependency) UnmarshalJSONWithMetadata(node jfather.Node) error {
|
||||
if err := node.Decode(&t); err != nil {
|
||||
return err
|
||||
}
|
||||
// Decode func will overwrite line numbers if we save them first
|
||||
t.StartLine = node.Range().Start.Line
|
||||
t.EndLine = node.Range().End.Line
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -1,16 +1,15 @@
|
||||
package lockfile
|
||||
|
||||
import (
|
||||
"io"
|
||||
"slices"
|
||||
"sort"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/jfather"
|
||||
"github.com/aquasecurity/trivy/pkg/dependency"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
xio "github.com/aquasecurity/trivy/pkg/x/io"
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
// lockfile format defined at: https://stringbean.github.io/sbt-dependency-lock/file-formats/version-1.html
|
||||
@@ -24,8 +23,7 @@ type sbtLockfileDependency struct {
|
||||
Name string `json:"name"`
|
||||
Version string `json:"version"`
|
||||
Configurations []string `json:"configurations"`
|
||||
StartLine int
|
||||
EndLine int
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type Parser struct{}
|
||||
@@ -36,12 +34,7 @@ func NewParser() *Parser {
|
||||
|
||||
func (Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
|
||||
var lockfile sbtLockfile
|
||||
input, err := io.ReadAll(r)
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("failed to read sbt lockfile: %w", err)
|
||||
}
|
||||
if err := jfather.Unmarshal(input, &lockfile); err != nil {
|
||||
if err := xjson.UnmarshalRead(r, &lockfile); err != nil {
|
||||
return nil, nil, xerrors.Errorf("JSON decoding failed: %w", err)
|
||||
}
|
||||
|
||||
@@ -51,15 +44,10 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency,
|
||||
if slices.ContainsFunc(dep.Configurations, isIncludedConfig) {
|
||||
name := dep.Organization + ":" + dep.Name
|
||||
libraries = append(libraries, ftypes.Package{
|
||||
ID: dependency.ID(ftypes.Sbt, name, dep.Version),
|
||||
Name: name,
|
||||
Version: dep.Version,
|
||||
Locations: []ftypes.Location{
|
||||
{
|
||||
StartLine: dep.StartLine,
|
||||
EndLine: dep.EndLine,
|
||||
},
|
||||
},
|
||||
ID: dependency.ID(ftypes.Sbt, name, dep.Version),
|
||||
Name: name,
|
||||
Version: dep.Version,
|
||||
Locations: []ftypes.Location{ftypes.Location(dep.Location)},
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -68,17 +56,6 @@ func (Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency,
|
||||
return libraries, nil, nil
|
||||
}
|
||||
|
||||
// UnmarshalJSONWithMetadata needed to detect start and end lines of deps
|
||||
func (t *sbtLockfileDependency) UnmarshalJSONWithMetadata(node jfather.Node) error {
|
||||
if err := node.Decode(&t); err != nil {
|
||||
return err
|
||||
}
|
||||
// Decode func will overwrite line numbers if we save them first
|
||||
t.StartLine = node.Range().Start.Line
|
||||
t.EndLine = node.Range().End.Line
|
||||
return nil
|
||||
}
|
||||
|
||||
func isIncludedConfig(config string) bool {
|
||||
return config == "compile" || config == "runtime"
|
||||
}
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
package swift
|
||||
|
||||
import (
|
||||
"io"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/samber/lo"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/jfather"
|
||||
"github.com/aquasecurity/trivy/pkg/dependency"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
xio "github.com/aquasecurity/trivy/pkg/x/io"
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
// Parser is a parser for Package.resolved files
|
||||
@@ -28,11 +27,7 @@ func NewParser() *Parser {
|
||||
|
||||
func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, error) {
|
||||
var lockFile LockFile
|
||||
input, err := io.ReadAll(r)
|
||||
if err != nil {
|
||||
return nil, nil, xerrors.Errorf("read error: %w", err)
|
||||
}
|
||||
if err := jfather.Unmarshal(input, &lockFile); err != nil {
|
||||
if err := xjson.UnmarshalRead(r, &lockFile); err != nil {
|
||||
return nil, nil, xerrors.Errorf("decode error: %w", err)
|
||||
}
|
||||
|
||||
@@ -55,15 +50,10 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
|
||||
version := lo.Ternary(pin.State.Version != "", pin.State.Version, pin.State.Branch)
|
||||
|
||||
pkgs = append(pkgs, ftypes.Package{
|
||||
ID: dependency.ID(ftypes.Swift, name, version),
|
||||
Name: name,
|
||||
Version: version,
|
||||
Locations: []ftypes.Location{
|
||||
{
|
||||
StartLine: pin.StartLine,
|
||||
EndLine: pin.EndLine,
|
||||
},
|
||||
},
|
||||
ID: dependency.ID(ftypes.Swift, name, version),
|
||||
Name: name,
|
||||
Version: version,
|
||||
Locations: []ftypes.Location{ftypes.Location(pin.Location)},
|
||||
})
|
||||
}
|
||||
sort.Sort(pkgs)
|
||||
@@ -75,7 +65,7 @@ func pkgName(pin Pin, lockVersion int) string {
|
||||
// v2 uses `Location`
|
||||
name := pin.RepositoryURL
|
||||
if lockVersion > 1 {
|
||||
name = pin.Location
|
||||
name = pin.Loc
|
||||
}
|
||||
// Swift uses `https://github.com/<author>/<package>.git format
|
||||
// `.git` suffix can be omitted (take a look happy test)
|
||||
@@ -84,14 +74,3 @@ func pkgName(pin Pin, lockVersion int) string {
|
||||
name = strings.TrimSuffix(name, ".git")
|
||||
return name
|
||||
}
|
||||
|
||||
// UnmarshalJSONWithMetadata needed to detect start and end lines of deps for v1
|
||||
func (p *Pin) UnmarshalJSONWithMetadata(node jfather.Node) error {
|
||||
if err := node.Decode(&p); err != nil {
|
||||
return err
|
||||
}
|
||||
// Decode func will overwrite line numbers if we save them first
|
||||
p.StartLine = node.Range().Start.Line
|
||||
p.EndLine = node.Range().End.Line
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
package swift
|
||||
|
||||
import (
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
type LockFile struct {
|
||||
Object Object `json:"object"`
|
||||
Pins []Pin `json:"pins"`
|
||||
@@ -13,10 +17,9 @@ type Object struct {
|
||||
type Pin struct {
|
||||
Package string `json:"package"`
|
||||
RepositoryURL string `json:"repositoryURL"` // Package.revision v1
|
||||
Location string `json:"location"` // Package.revision v2
|
||||
Loc string `json:"location"` // Package.revision v2
|
||||
State State `json:"state"`
|
||||
StartLine int
|
||||
EndLine int
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type State struct {
|
||||
|
pkg/extension/flag.go (new file, 40 lines)
@@ -0,0 +1,40 @@
package extension

import (
    "github.com/samber/lo"

    "github.com/aquasecurity/trivy/pkg/flag"
)

var flagExtensions = make(map[string]FlagExtension)

func RegisterFlagExtension(extension FlagExtension) {
    flagExtensions[extension.Name()] = extension
}

func DeregisterFlagExtension(name string) {
    delete(flagExtensions, name)
}

// FlagExtension is an extension that allows adding custom CLI flags.
type FlagExtension interface {
    Name() string

    // CustomFlagGroup returns custom flag group to be added to Trivy CLI.
    // The command parameter specifies which command the flags are for.
    // If the command is empty, the flags will be applied to all commands.
    CustomFlagGroup(command string) flag.FlagGroup
}

// CustomFlagGroups collects all flag groups from registered extensions for a specific command.
func CustomFlagGroups(command string) []flag.FlagGroup {
    var flagGroups []flag.FlagGroup
    for _, e := range flagExtensions {
        group := e.CustomFlagGroup(command)
        if lo.IsNil(group) {
            continue
        }
        flagGroups = append(flagGroups, group)
    }
    return flagGroups
}
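
flag_test.go below shows a complete FlagGroup implementation driven through this interface. For a custom Trivy distribution, the registration itself is the only extra wiring needed; a hedged sketch, where myFlagExtension is a hypothetical type defined by the embedding project:

// Hypothetical wiring in an embedding project: register the flag extension
// before the commands are constructed so CustomFlagGroups can pick it up.
func init() {
    extension.RegisterFlagExtension(&myFlagExtension{}) // myFlagExtension implements FlagExtension
}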
pkg/extension/flag_test.go (new file, 134 lines)
@@ -0,0 +1,134 @@
package extension_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/extension"
|
||||
"github.com/aquasecurity/trivy/pkg/flag"
|
||||
)
|
||||
|
||||
type testOptionKey struct{}
|
||||
|
||||
var foo = flag.Flag[string]{
|
||||
Name: "foo",
|
||||
ConfigName: "foo",
|
||||
Usage: "foo",
|
||||
Default: "default-value",
|
||||
}
|
||||
|
||||
// testFlagGroup is a flag group for testing
|
||||
type testFlagGroup struct {
|
||||
Foo *flag.Flag[string]
|
||||
}
|
||||
|
||||
type testOptions struct {
|
||||
Foo string
|
||||
}
|
||||
|
||||
func (fg *testFlagGroup) Name() string {
|
||||
return "TestFlagGroup"
|
||||
}
|
||||
|
||||
func (fg *testFlagGroup) Flags() []flag.Flagger {
|
||||
return []flag.Flagger{
|
||||
fg.Foo,
|
||||
}
|
||||
}
|
||||
|
||||
func (fg *testFlagGroup) ToOptions(opts *flag.Options) error {
|
||||
if opts.CustomOptions == nil {
|
||||
opts.CustomOptions = make(map[any]any)
|
||||
}
|
||||
opts.CustomOptions[testOptionKey{}] = testOptions{
|
||||
Foo: fg.Foo.Value(),
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// testExtension implements the FlagExtension interface for testing
|
||||
type testExtension struct{}
|
||||
|
||||
func (e *testExtension) Name() string {
|
||||
return "TestExtension"
|
||||
}
|
||||
|
||||
func (e *testExtension) CustomFlagGroup(command string) flag.FlagGroup {
|
||||
if command != "image" {
|
||||
return nil
|
||||
}
|
||||
|
||||
return &testFlagGroup{
|
||||
Foo: foo.Clone(),
|
||||
}
|
||||
}
|
||||
|
||||
func TestCustomFlagGroups(t *testing.T) {
|
||||
// Set up
|
||||
te := &testExtension{}
|
||||
extension.RegisterFlagExtension(te)
|
||||
t.Cleanup(func() {
|
||||
extension.DeregisterFlagExtension(te.Name())
|
||||
})
|
||||
|
||||
t.Run("flag group is set", func(t *testing.T) {
|
||||
t.Cleanup(viper.Reset)
|
||||
flags := flag.Flags(extension.CustomFlagGroups("image"))
|
||||
cmd := &cobra.Command{}
|
||||
flags.AddFlags(cmd)
|
||||
flags.Bind(cmd)
|
||||
|
||||
// Test with no custom value
|
||||
opts, err := flags.ToOptions(nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify CustomOptions has the default value
|
||||
testOpts := extractTestOptions(t, opts)
|
||||
assert.Equal(t, "default-value", testOpts.Foo)
|
||||
|
||||
// Test with environment variable
|
||||
t.Setenv("TRIVY_FOO", "env-value")
|
||||
opts, err = flags.ToOptions(nil)
|
||||
require.NoError(t, err)
|
||||
testOpts = extractTestOptions(t, opts)
|
||||
assert.Equal(t, "env-value", testOpts.Foo)
|
||||
|
||||
// Test with flag
|
||||
viper.Set(foo.ConfigName, "custom-value")
|
||||
opts, err = flags.ToOptions(nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify CustomOptions has the custom value
|
||||
testOpts = extractTestOptions(t, opts)
|
||||
assert.Equal(t, "custom-value", testOpts.Foo)
|
||||
})
|
||||
|
||||
t.Run("flag group is not set", func(t *testing.T) {
|
||||
t.Cleanup(viper.Reset)
|
||||
flags := flag.Flags(extension.CustomFlagGroups("other"))
|
||||
cmd := &cobra.Command{}
|
||||
flags.AddFlags(cmd)
|
||||
flags.Bind(cmd)
|
||||
|
||||
// Test
|
||||
viper.Set(foo.ConfigName, "custom-value")
|
||||
opts, err := flags.ToOptions(nil)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify CustomOptions is not set
|
||||
require.Nil(t, opts.CustomOptions)
|
||||
})
|
||||
}
|
||||
|
||||
func extractTestOptions(t *testing.T, opts flag.Options) testOptions {
|
||||
value, ok := opts.CustomOptions[testOptionKey{}]
|
||||
require.True(t, ok)
|
||||
|
||||
testOpts, ok := value.(testOptions)
|
||||
require.True(t, ok)
|
||||
return testOpts
|
||||
}
|
||||
pkg/extension/hook.go (new file, 162 lines)
@@ -0,0 +1,162 @@
package extension

import (
    "context"
    "sort"

    "github.com/samber/lo"
    "golang.org/x/xerrors"

    "github.com/aquasecurity/trivy/pkg/flag"
    "github.com/aquasecurity/trivy/pkg/types"
)

var hooks = make(map[string]Hook)

func RegisterHook(s Hook) {
    // Avoid duplication
    hooks[s.Name()] = s
}

func DeregisterHook(name string) {
    delete(hooks, name)
}

// Hook is an interface that defines the methods for a hook.
type Hook interface {
    // Name returns the name of the extension.
    Name() string
}

// RunHook is a extension that is called before and after all the processes.
type RunHook interface {
    Hook

    // PreRun is called before all the processes.
    PreRun(ctx context.Context, opts flag.Options) error

    // PostRun is called after all the processes.
    PostRun(ctx context.Context, opts flag.Options) error
}

// ScanHook is a extension that is called before and after the scan.
type ScanHook interface {
    Hook

    // PreScan is called before the scan. It can modify the scan target.
    // It may be called on the server side in client/server mode.
    PreScan(ctx context.Context, target *types.ScanTarget, opts types.ScanOptions) error

    // PostScan is called after the scan. It can modify the results.
    // It may be called on the server side in client/server mode.
    // NOTE: Wasm modules cannot directly modify the passed results,
    // so it returns a copy of the results.
    PostScan(ctx context.Context, results types.Results) (types.Results, error)
}

// ReportHook is a extension that is called before and after the report is written.
type ReportHook interface {
    Hook

    // PreReport is called before the report is written.
    // It can modify the report. It is called on the client side.
    PreReport(ctx context.Context, report *types.Report, opts flag.Options) error

    // PostReport is called after the report is written.
    // It can modify the report. It is called on the client side.
    PostReport(ctx context.Context, report *types.Report, opts flag.Options) error
}

func PreRun(ctx context.Context, opts flag.Options) error {
|
||||
for _, e := range Hooks() {
|
||||
h, ok := e.(RunHook)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if err := h.PreRun(ctx, opts); err != nil {
|
||||
return xerrors.Errorf("%s pre run error: %w", e.Name(), err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// PostRun is a hook that is called after all the processes.
|
||||
func PostRun(ctx context.Context, opts flag.Options) error {
|
||||
for _, e := range Hooks() {
|
||||
h, ok := e.(RunHook)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if err := h.PostRun(ctx, opts); err != nil {
|
||||
return xerrors.Errorf("%s post run error: %w", e.Name(), err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// PreScan is a hook that is called before the scan.
|
||||
func PreScan(ctx context.Context, target *types.ScanTarget, options types.ScanOptions) error {
|
||||
for _, e := range Hooks() {
|
||||
h, ok := e.(ScanHook)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if err := h.PreScan(ctx, target, options); err != nil {
|
||||
return xerrors.Errorf("%s pre scan error: %w", e.Name(), err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// PostScan is a hook that is called after the scan.
|
||||
func PostScan(ctx context.Context, results types.Results) (types.Results, error) {
|
||||
var err error
|
||||
for _, e := range Hooks() {
|
||||
h, ok := e.(ScanHook)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
results, err = h.PostScan(ctx, results)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("%s post scan error: %w", e.Name(), err)
|
||||
}
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
// PreReport is a hook that is called before the report is written.
|
||||
func PreReport(ctx context.Context, report *types.Report, opts flag.Options) error {
|
||||
for _, e := range Hooks() {
|
||||
h, ok := e.(ReportHook)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if err := h.PreReport(ctx, report, opts); err != nil {
|
||||
return xerrors.Errorf("%s pre report error: %w", e.Name(), err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// PostReport is a hook that is called after the report is written.
|
||||
func PostReport(ctx context.Context, report *types.Report, opts flag.Options) error {
|
||||
for _, e := range Hooks() {
|
||||
h, ok := e.(ReportHook)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
if err := h.PostReport(ctx, report, opts); err != nil {
|
||||
return xerrors.Errorf("%s post report error: %w", e.Name(), err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Hooks returns the list of hooks.
|
||||
func Hooks() []Hook {
|
||||
hooks := lo.Values(hooks)
|
||||
sort.Slice(hooks, func(i, j int) bool {
|
||||
return hooks[i].Name() < hooks[j].Name()
|
||||
})
|
||||
return hooks
|
||||
}
|
||||
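Note: the following is a minimal usage sketch, not part of the commit above. It shows how a package might implement and register a hook against the new pkg/extension API; the package name myhook and the auditHook type are hypothetical, and only extension.RegisterHook, Hook and ScanHook come from hook.go.

```go
package myhook

import (
	"context"

	"github.com/aquasecurity/trivy/pkg/extension"
	"github.com/aquasecurity/trivy/pkg/types"
)

// auditHook is a hypothetical hook that only implements ScanHook.
type auditHook struct{}

// Name identifies the hook; Hooks() sorts registered hooks by this name.
func (auditHook) Name() string { return "audit" }

// PreScan could inspect or adjust the scan target before scanning.
func (auditHook) PreScan(_ context.Context, _ *types.ScanTarget, _ types.ScanOptions) error {
	return nil
}

// PostScan receives the results and returns the (possibly modified) results.
func (auditHook) PostScan(_ context.Context, results types.Results) (types.Results, error) {
	return results, nil
}

func init() {
	// Registering makes the hook visible to extension.PreScan/PostScan above.
	extension.RegisterHook(auditHook{})
}
```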
278
pkg/extension/hook_test.go
Normal file
278
pkg/extension/hook_test.go
Normal file
@@ -0,0 +1,278 @@
package extension_test

import (
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

dbTypes "github.com/aquasecurity/trivy-db/pkg/types"
"github.com/aquasecurity/trivy/internal/hooktest"
"github.com/aquasecurity/trivy/pkg/extension"
"github.com/aquasecurity/trivy/pkg/flag"
"github.com/aquasecurity/trivy/pkg/types"
)

func TestPostScan(t *testing.T) {
tests := []struct {
name string
results types.Results
want types.Results
wantErr bool
}{
{
name: "happy path",
results: types.Results{
{
Target: "test",
Vulnerabilities: []types.DetectedVulnerability{
{
VulnerabilityID: "CVE-2022-0001",
PkgName: "musl",
InstalledVersion: "1.2.3",
FixedVersion: "1.2.4",
Vulnerability: dbTypes.Vulnerability{
Severity: "CRITICAL",
},
},
},
},
},
want: types.Results{
{
Target: "test",
Vulnerabilities: []types.DetectedVulnerability{
{
VulnerabilityID: "CVE-2022-0001",
PkgName: "musl",
InstalledVersion: "1.2.3",
FixedVersion: "1.2.4",
Vulnerability: dbTypes.Vulnerability{
Severity: "CRITICAL",
References: []string{
"https://example.com/post-scan",
},
},
},
},
},
},
},
{
name: "sad path",
results: types.Results{
{
Target: "bad",
},
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Initialize the test hook
hooktest.Init(t)

results, err := extension.PostScan(t.Context(), tt.results)
require.Equal(t, tt.wantErr, err != nil)
assert.Equal(t, tt.want, results)
})
}
}

func TestPreScan(t *testing.T) {
tests := []struct {
name string
target *types.ScanTarget
options types.ScanOptions
wantErr bool
}{
{
name: "happy path",
target: &types.ScanTarget{
Name: "test",
},
wantErr: false,
},
{
name: "sad path",
target: &types.ScanTarget{
Name: "bad-pre",
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Initialize the test hook
hooktest.Init(t)

err := extension.PreScan(t.Context(), tt.target, tt.options)
require.Equal(t, tt.wantErr, err != nil)
})
}
}

func TestPreRun(t *testing.T) {
tests := []struct {
name string
opts flag.Options
wantErr bool
}{
{
name: "happy path",
opts: flag.Options{},
wantErr: false,
},
{
name: "sad path",
opts: flag.Options{
GlobalOptions: flag.GlobalOptions{
ConfigFile: "bad-config",
},
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Initialize the test hook
hooktest.Init(t)

err := extension.PreRun(t.Context(), tt.opts)
require.Equal(t, tt.wantErr, err != nil)
})
}
}

func TestPostRun(t *testing.T) {
tests := []struct {
name string
opts flag.Options
wantErr bool
}{
{
name: "happy path",
opts: flag.Options{},
wantErr: false,
},
{
name: "sad path",
opts: flag.Options{
GlobalOptions: flag.GlobalOptions{
ConfigFile: "bad-config",
},
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Initialize the test extension
hooktest.Init(t)

err := extension.PostRun(t.Context(), tt.opts)
require.Equal(t, tt.wantErr, err != nil)
})
}
}

func TestPreReport(t *testing.T) {
tests := []struct {
name string
report *types.Report
opts flag.Options
wantTitle string
wantErr bool
}{
{
name: "happy path",
report: &types.Report{
Results: types.Results{
{
Vulnerabilities: []types.DetectedVulnerability{
{
VulnerabilityID: "CVE-2022-0001",
},
},
},
},
},
wantTitle: "Modified by pre-report hook",
wantErr: false,
},
{
name: "sad path",
report: &types.Report{
ArtifactName: "bad-report",
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Initialize the test hook
hooktest.Init(t)

err := extension.PreReport(t.Context(), tt.report, tt.opts)
if tt.wantErr {
require.Error(t, err)
return
}

require.Len(t, tt.report.Results, 1)
require.Len(t, tt.report.Results[0].Vulnerabilities, 1)
assert.Equal(t, tt.wantTitle, tt.report.Results[0].Vulnerabilities[0].Title)
})
}
}

func TestPostReport(t *testing.T) {
tests := []struct {
name string
report *types.Report
opts flag.Options
wantDescription string
wantErr bool
}{
{
name: "happy path",
report: &types.Report{
Results: types.Results{
{
Vulnerabilities: []types.DetectedVulnerability{
{
VulnerabilityID: "CVE-2022-0001",
},
},
},
},
},
wantDescription: "Modified by post-report hook",
wantErr: false,
},
{
name: "sad path",
report: &types.Report{
ArtifactName: "bad-report",
},
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Initialize the test hook
hooktest.Init(t)

err := extension.PostReport(t.Context(), tt.report, tt.opts)
if tt.wantErr {
require.Error(t, err)
return
}

require.Len(t, tt.report.Results, 1)
require.Len(t, tt.report.Results[0].Vulnerabilities, 1)
assert.Equal(t, tt.wantDescription, tt.report.Results[0].Vulnerabilities[0].Description)
})
}
}
@@ -18,6 +18,7 @@ import (
"github.com/distribution/reference"
api "github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
dockerClient "github.com/docker/docker/client"
"github.com/docker/go-connections/nat"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/opencontainers/go-digest"
@@ -53,7 +54,7 @@ func (n familiarNamed) String() string {
}

func imageWriter(c *client.Client, img client.Image, platform types.Platform) imageSave {
return func(ctx context.Context, ref []string) (io.ReadCloser, error) {
return func(ctx context.Context, ref []string, saveOptions ...dockerClient.ImageSaveOption) (io.ReadCloser, error) {
if len(ref) < 1 {
return nil, xerrors.New("no image reference")
}

@@ -11,6 +11,7 @@ import (
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
dimage "github.com/docker/docker/api/types/image"
"github.com/docker/docker/client"
v1 "github.com/google/go-containerregistry/pkg/v1"
"github.com/google/go-containerregistry/pkg/v1/tarball"
"github.com/samber/lo"
@@ -29,7 +30,7 @@ var mu sync.Mutex

type opener func() (v1.Image, error)

type imageSave func(context.Context, []string) (io.ReadCloser, error)
type imageSave func(context.Context, []string, ...client.ImageSaveOption) (io.ReadCloser, error)

func imageOpener(ctx context.Context, ref string, f *os.File, imageSave imageSave) opener {
return func() (v1.Image, error) {

@@ -12,6 +12,7 @@ import (

api "github.com/docker/docker/api/types"
dimage "github.com/docker/docker/api/types/image"
"github.com/docker/docker/client"
"golang.org/x/xerrors"
)

@@ -98,7 +99,7 @@ func (p podmanClient) imageHistoryInspect(imageName string) ([]dimage.HistoryRes
return history, nil
}

func (p podmanClient) imageSave(_ context.Context, imageNames []string) (io.ReadCloser, error) {
func (p podmanClient) imageSave(_ context.Context, imageNames []string, _ ...client.ImageSaveOption) (io.ReadCloser, error) {
if len(imageNames) < 1 {
return nil, xerrors.Errorf("no specified image")
}

@@ -1,6 +1,13 @@
package secret

var builtinAllowRules = []AllowRule{
{
// `.dist-info` dir contains only metadata files such as version, license, and entry points.
// cf. https://github.com/aquasecurity/trivy/issues/8212
ID: "dist-info",
Description: "Ignore Python .dist-info metadata directories",
Path: MustCompile(`\.dist-info\/`),
},
{
ID: "tests",
Description: "Avoid test files and paths",

@@ -84,7 +84,7 @@ func (d Docker) ReplicateImage(ctx context.Context, imageRef, imagePath string,
defer testfile.Close()

// load image into docker engine
resp, err := d.cli.ImageLoad(ctx, testfile, true)
resp, err := d.cli.ImageLoad(ctx, testfile, client.ImageLoadWithQuiet(true))
if err != nil {
return err
}

@@ -77,16 +77,14 @@ func (f *AWSFlagGroup) Flags() []Flagger {
}
}

func (f *AWSFlagGroup) ToOptions() (AWSOptions, error) {
if err := parseFlags(f); err != nil {
return AWSOptions{}, err
}
return AWSOptions{
func (f *AWSFlagGroup) ToOptions(opts *Options) error {
opts.AWSOptions = AWSOptions{
Region: f.Region.Value(),
Endpoint: f.Endpoint.Value(),
Services: f.Services.Value(),
SkipServices: f.SkipServices.Value(),
Account: f.Account.Value(),
ARN: f.ARN.Value(),
}, nil
}
return nil
}

@@ -106,17 +106,15 @@ func (fg *CacheFlagGroup) Flags() []Flagger {
}
}

func (fg *CacheFlagGroup) ToOptions() (CacheOptions, error) {
if err := parseFlags(fg); err != nil {
return CacheOptions{}, err
}

return CacheOptions{
func (fg *CacheFlagGroup) ToOptions(opts *Options) error {
opts.CacheOptions = CacheOptions{
ClearCache: fg.ClearCache.Value(),
CacheBackend: fg.CacheBackend.Value(),
CacheTTL: fg.CacheTTL.Value(),
RedisTLS: fg.RedisTLS.Value(),
RedisCACert: fg.RedisCACert.Value(),
RedisCert: fg.RedisCert.Value(),
RedisKey: fg.RedisKey.Value(),
}, nil
}
return nil
}

@@ -78,17 +78,14 @@ func (fg *CleanFlagGroup) Flags() []Flagger {
}
}

func (fg *CleanFlagGroup) ToOptions() (CleanOptions, error) {
if err := parseFlags(fg); err != nil {
return CleanOptions{}, err
}

return CleanOptions{
func (fg *CleanFlagGroup) ToOptions(opts *Options) error {
opts.CleanOptions = CleanOptions{
CleanAll: fg.CleanAll.Value(),
CleanVulnerabilityDB: fg.CleanVulnerabilityDB.Value(),
CleanJavaDB: fg.CleanJavaDB.Value(),
CleanChecksBundle: fg.CleanChecksBundle.Value(),
CleanScanCache: fg.CleanScanCache.Value(),
CleanVEXRepositories: fg.CleanVEXRepositories.Value(),
}, nil
}
return nil
}

@@ -127,31 +127,27 @@ func (f *DBFlagGroup) Flags() []Flagger {
}
}

func (f *DBFlagGroup) ToOptions() (DBOptions, error) {
if err := parseFlags(f); err != nil {
return DBOptions{}, err
}

func (f *DBFlagGroup) ToOptions(opts *Options) error {
skipDBUpdate := f.SkipDBUpdate.Value()
skipJavaDBUpdate := f.SkipJavaDBUpdate.Value()
downloadDBOnly := f.DownloadDBOnly.Value()
downloadJavaDBOnly := f.DownloadJavaDBOnly.Value()

if downloadDBOnly && downloadJavaDBOnly {
return DBOptions{}, xerrors.New("--download-db-only and --download-java-db-only options can not be specified both")
return xerrors.New("--download-db-only and --download-java-db-only options can not be specified both")
}
if downloadDBOnly && skipDBUpdate {
return DBOptions{}, xerrors.New("--skip-db-update and --download-db-only options can not be specified both")
return xerrors.New("--skip-db-update and --download-db-only options can not be specified both")
}
if downloadJavaDBOnly && skipJavaDBUpdate {
return DBOptions{}, xerrors.New("--skip-java-db-update and --download-java-db-only options can not be specified both")
return xerrors.New("--skip-java-db-update and --download-java-db-only options can not be specified both")
}

var dbRepositories, javaDBRepositories []name.Reference
for _, repo := range f.DBRepositories.Value() {
ref, err := parseRepository(repo, db.SchemaVersion)
if err != nil {
return DBOptions{}, xerrors.Errorf("invalid DB repository: %w", err)
return xerrors.Errorf("invalid DB repository: %w", err)
}
dbRepositories = append(dbRepositories, ref)
}
@@ -159,12 +155,12 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) {
for _, repo := range f.JavaDBRepositories.Value() {
ref, err := parseRepository(repo, javadb.SchemaVersion)
if err != nil {
return DBOptions{}, xerrors.Errorf("invalid javadb repository: %w", err)
return xerrors.Errorf("invalid javadb repository: %w", err)
}
javaDBRepositories = append(javaDBRepositories, ref)
}

return DBOptions{
opts.DBOptions = DBOptions{
Reset: f.Reset.Value(),
DownloadDBOnly: downloadDBOnly,
SkipDBUpdate: skipDBUpdate,
@@ -173,7 +169,8 @@ func (f *DBFlagGroup) ToOptions() (DBOptions, error) {
NoProgress: f.NoProgress.Value(),
DBRepositories: dbRepositories,
JavaDBRepositories: javaDBRepositories,
}, nil
}
return nil
}

func parseRepository(repo string, dbSchemaVersion int) (name.Reference, error) {

@@ -101,15 +101,15 @@ func TestDBFlagGroup_ToOptions(t *testing.T) {
DBRepositories: flag.DBRepositoryFlag.Clone(),
JavaDBRepositories: flag.JavaDBRepositoryFlag.Clone(),
}
got, err := f.ToOptions()
flags := flag.Flags{f}
got, err := flags.ToOptions(nil)
if tt.wantErr != "" {
require.Error(t, err)
assert.ErrorContains(t, err, tt.wantErr)
return
}
require.NoError(t, err)

assert.EqualExportedValues(t, tt.want, got)
assert.EqualExportedValues(t, tt.want, got.DBOptions)

// Assert log messages
assert.Equal(t, tt.wantLogs, out.Messages(), tt.name)

@@ -137,17 +137,13 @@ func (f *GlobalFlagGroup) Bind(cmd *cobra.Command) error {
return nil
}

func (f *GlobalFlagGroup) ToOptions() (GlobalOptions, error) {
if err := parseFlags(f); err != nil {
return GlobalOptions{}, err
}

func (f *GlobalFlagGroup) ToOptions(opts *Options) error {
// Keep TRIVY_NON_SSL for backward compatibility
insecure := f.Insecure.Value() || os.Getenv("TRIVY_NON_SSL") != ""

log.Debug("Cache dir", log.String("dir", f.CacheDir.Value()))

return GlobalOptions{
opts.GlobalOptions = GlobalOptions{
ConfigFile: f.ConfigFile.Value(),
ShowVersion: f.ShowVersion.Value(),
Quiet: f.Quiet.Value(),
@@ -156,5 +152,6 @@ func (f *GlobalFlagGroup) ToOptions() (GlobalOptions, error) {
Timeout: f.Timeout.Value(),
CacheDir: f.CacheDir.Value(),
GenerateDefaultConfig: f.GenerateDefaultConfig.Value(),
}, nil
}
return nil
}

@@ -119,16 +119,12 @@ func (f *ImageFlagGroup) Flags() []Flagger {
}
}

func (f *ImageFlagGroup) ToOptions() (ImageOptions, error) {
if err := parseFlags(f); err != nil {
return ImageOptions{}, err
}

func (f *ImageFlagGroup) ToOptions(opts *Options) error {
var platform ftypes.Platform
if p := f.Platform.Value(); p != "" {
pl, err := v1.ParsePlatform(p)
if err != nil {
return ImageOptions{}, xerrors.Errorf("unable to parse platform: %w", err)
return xerrors.Errorf("unable to parse platform: %w", err)
}
if pl.OS == "*" {
pl.OS = "" // Empty OS means any OS
@@ -139,12 +135,12 @@ func (f *ImageFlagGroup) ToOptions() (ImageOptions, error) {
if value := f.MaxImageSize.Value(); value != "" {
parsedSize, err := units.FromHumanSize(value)
if err != nil {
return ImageOptions{}, xerrors.Errorf("invalid max image size %q: %w", value, err)
return xerrors.Errorf("invalid max image size %q: %w", value, err)
}
maxSize = parsedSize
}

return ImageOptions{
opts.ImageOptions = ImageOptions{
Input: f.Input.Value(),
ImageConfigScanners: xstrings.ToTSlice[types.Scanner](f.ImageConfigScanners.Value()),
ScanRemovedPkgs: f.ScanRemovedPkgs.Value(),
@@ -153,5 +149,6 @@ func (f *ImageFlagGroup) ToOptions() (ImageOptions, error) {
PodmanHost: f.PodmanHost.Value(),
ImageSources: xstrings.ToTSlice[ftypes.ImageSource](f.ImageSources.Value()),
MaxImageSize: maxSize,
}, nil
}
return nil
}

@@ -79,13 +79,14 @@ func TestImageFlagGroup_ToOptions(t *testing.T) {
Platform: flag.PlatformFlag.Clone(),
}

got, err := f.ToOptions()
flags := flag.Flags{f}
got, err := flags.ToOptions(nil)
if tt.wantErr != "" {
assert.ErrorContains(t, err, tt.wantErr)
return
}
require.NoError(t, err)
assert.EqualExportedValues(t, tt.want, got)
assert.EqualExportedValues(t, tt.want, got.ImageOptions)
})
}
}

@@ -2,11 +2,11 @@ package flag

import (
"errors"
"fmt"
"strconv"
"strings"

"github.com/samber/lo"
"golang.org/x/xerrors"
corev1 "k8s.io/api/core/v1"
)

@@ -173,14 +173,10 @@ func (f *K8sFlagGroup) Flags() []Flagger {
}
}

func (f *K8sFlagGroup) ToOptions() (K8sOptions, error) {
if err := parseFlags(f); err != nil {
return K8sOptions{}, err
}

func (f *K8sFlagGroup) ToOptions(opts *Options) error {
tolerations, err := optionToTolerations(f.Tolerations.Value())
if err != nil {
return K8sOptions{}, err
return err
}

exludeNodeLabels := make(map[string]string)
@@ -188,18 +184,18 @@ func (f *K8sFlagGroup) ToOptions() (K8sOptions, error) {
for _, exludeNodeValue := range exludeNodes {
excludeNodeParts := strings.Split(exludeNodeValue, ":")
if len(excludeNodeParts) != 2 {
return K8sOptions{}, fmt.Errorf("exclude node %s must be a key:value", exludeNodeValue)
return xerrors.Errorf("exclude node %s must be a key:value", exludeNodeValue)
}
exludeNodeLabels[excludeNodeParts[0]] = excludeNodeParts[1]
}
if len(f.ExcludeNamespaces.Value()) > 0 && len(f.IncludeNamespaces.Value()) > 0 {
return K8sOptions{}, errors.New("include-namespaces and exclude-namespaces flags cannot be used together")
return xerrors.New("include-namespaces and exclude-namespaces flags cannot be used together")
}
if len(f.ExcludeKinds.Value()) > 0 && len(f.IncludeKinds.Value()) > 0 {
return K8sOptions{}, errors.New("include-kinds and exclude-kinds flags cannot be used together")
return xerrors.New("include-kinds and exclude-kinds flags cannot be used together")
}

return K8sOptions{
opts.K8sOptions = K8sOptions{
KubeConfig: f.KubeConfig.Value(),
K8sVersion: f.K8sVersion.Value(),
Tolerations: tolerations,
@@ -215,7 +211,8 @@ func (f *K8sFlagGroup) ToOptions() (K8sOptions, error) {
ExcludeNamespaces: f.ExcludeNamespaces.Value(),
IncludeNamespaces: f.IncludeNamespaces.Value(),
Burst: f.Burst.Value(),
}, nil
}
return nil
}

func optionToTolerations(tolerationsOptions []string) ([]corev1.Toleration, error) {

@@ -115,11 +115,7 @@ func (f *LicenseFlagGroup) Flags() []Flagger {
}
}

func (f *LicenseFlagGroup) ToOptions() (LicenseOptions, error) {
if err := parseFlags(f); err != nil {
return LicenseOptions{}, err
}

func (f *LicenseFlagGroup) ToOptions(opts *Options) error {
licenseCategories := make(map[types.LicenseCategory][]string)
licenseCategories[types.CategoryForbidden] = f.LicenseForbidden.Value()
licenseCategories[types.CategoryRestricted] = f.LicenseRestricted.Value()
@@ -128,10 +124,11 @@ func (f *LicenseFlagGroup) ToOptions() (LicenseOptions, error) {
licenseCategories[types.CategoryPermissive] = f.LicensePermissive.Value()
licenseCategories[types.CategoryUnencumbered] = f.LicenseUnencumbered.Value()

return LicenseOptions{
opts.LicenseOptions = LicenseOptions{
LicenseFull: f.LicenseFull.Value(),
IgnoredLicenses: f.IgnoredLicenses.Value(),
LicenseConfidenceLevel: f.LicenseConfidenceLevel.Value(),
LicenseCategories: licenseCategories,
}, nil
}
return nil
}

@@ -204,12 +204,8 @@ func (f *MisconfFlagGroup) Flags() []Flagger {
}
}

func (f *MisconfFlagGroup) ToOptions() (MisconfOptions, error) {
if err := parseFlags(f); err != nil {
return MisconfOptions{}, err
}

return MisconfOptions{
func (f *MisconfFlagGroup) ToOptions(opts *Options) error {
opts.MisconfOptions = MisconfOptions{
IncludeNonFailures: f.IncludeNonFailures.Value(),
ResetChecksBundle: f.ResetChecksBundle.Value(),
ChecksBundleRepository: f.ChecksBundleRepository.Value(),
@@ -225,5 +221,6 @@ func (f *MisconfFlagGroup) ToOptions() (MisconfOptions, error) {
MisconfigScanners: xstrings.ToTSlice[analyzer.Type](f.MisconfigScanners.Value()),
ConfigFileSchemas: f.ConfigFileSchemas.Value(),
RenderCause: xstrings.ToTSlice[types.ConfigType](f.RenderCause.Value()),
}, nil
}
return nil
}

@@ -1,7 +1,9 @@
package flag

import (
"github.com/aquasecurity/trivy/pkg/module"
"path/filepath"

"github.com/aquasecurity/trivy/pkg/utils/fsutils"
)

// e.g. config yaml
@@ -14,7 +16,7 @@ var (
ModuleDirFlag = Flag[string]{
Name: "module-dir",
ConfigName: "module.dir",
Default: module.DefaultDir,
Default: filepath.Join(fsutils.HomeDir(), ".trivy", "modules"),
Usage: "specify directory to the wasm modules that will be loaded",
Persistent: true,
}
@@ -56,13 +58,10 @@ func (f *ModuleFlagGroup) Flags() []Flagger {
}
}

func (f *ModuleFlagGroup) ToOptions() (ModuleOptions, error) {
if err := parseFlags(f); err != nil {
return ModuleOptions{}, err
}

return ModuleOptions{
func (f *ModuleFlagGroup) ToOptions(opts *Options) error {
opts.ModuleOptions = ModuleOptions{
ModuleDir: f.Dir.Value(),
EnabledModules: f.EnabledModules.Value(),
}, nil
}
return nil
}

@@ -344,6 +344,7 @@ func (f *Flag[T]) BindEnv() error {
type FlagGroup interface {
Name() string
Flags() []Flagger
ToOptions(*Options) error
}

type Flagger interface {
@@ -358,27 +359,7 @@ type Flagger interface {
Bind(cmd *cobra.Command) error
}

type Flags struct {
GlobalFlagGroup *GlobalFlagGroup
AWSFlagGroup *AWSFlagGroup
CacheFlagGroup *CacheFlagGroup
CleanFlagGroup *CleanFlagGroup
DBFlagGroup *DBFlagGroup
ImageFlagGroup *ImageFlagGroup
K8sFlagGroup *K8sFlagGroup
LicenseFlagGroup *LicenseFlagGroup
MisconfFlagGroup *MisconfFlagGroup
ModuleFlagGroup *ModuleFlagGroup
PackageFlagGroup *PackageFlagGroup
RemoteFlagGroup *RemoteFlagGroup
RegistryFlagGroup *RegistryFlagGroup
RegoFlagGroup *RegoFlagGroup
RepoFlagGroup *RepoFlagGroup
ReportFlagGroup *ReportFlagGroup
ScanFlagGroup *ScanFlagGroup
SecretFlagGroup *SecretFlagGroup
VulnerabilityFlagGroup *VulnerabilityFlagGroup
}
type Flags []FlagGroup

// Options holds all the runtime configuration
type Options struct {
@@ -402,6 +383,9 @@ type Options struct {
SecretOptions
VulnerabilityOptions

// CustomOptions is a map of custom options.
CustomOptions map[any]any

// Trivy's version, not populated via CLI flags
AppVersion string

@@ -411,15 +395,19 @@ type Options struct {
// outputWriter is not initialized via the CLI.
// It is mainly used for testing purposes or by tools that use Trivy as a library.
outputWriter io.Writer

// args is the arguments passed to the command.
args []string
}

// Align takes consistency of options
func (o *Options) Align(f *Flags) error {
if f.ScanFlagGroup != nil && f.ScanFlagGroup.Scanners != nil {
if scanFlagGroup, ok := findFlagGroup[*ScanFlagGroup](f); ok && scanFlagGroup.Scanners != nil {
o.enableSBOM()
}

if f.PackageFlagGroup != nil && f.PackageFlagGroup.PkgRelationships != nil &&
if packageFlagGroup, ok := findFlagGroup[*PackageFlagGroup](f); ok &&
packageFlagGroup.PkgRelationships != nil &&
slices.Compare(o.PkgRelationships, ftypes.Relationships) != 0 &&
(o.DependencyTree || slices.Contains(types.SupportedSBOMFormats, o.Format) || len(o.VEXSources) != 0) {
return xerrors.Errorf("'--pkg-relationships' cannot be used with '--dependency-tree', '--vex' or SBOM formats")
@@ -601,63 +589,9 @@ func (o *Options) outputPluginWriter(ctx context.Context) (io.Writer, func() err

// groups returns all the flag groups other than global flags
func (f *Flags) groups() []FlagGroup {
var groups []FlagGroup
// This order affects the usage message, so they are sorted by frequency of use.
if f.ScanFlagGroup != nil {
groups = append(groups, f.ScanFlagGroup)
}
if f.ReportFlagGroup != nil {
groups = append(groups, f.ReportFlagGroup)
}
if f.CacheFlagGroup != nil {
groups = append(groups, f.CacheFlagGroup)
}
if f.CleanFlagGroup != nil {
groups = append(groups, f.CleanFlagGroup)
}
if f.DBFlagGroup != nil {
groups = append(groups, f.DBFlagGroup)
}
if f.RegistryFlagGroup != nil {
groups = append(groups, f.RegistryFlagGroup)
}
if f.ImageFlagGroup != nil {
groups = append(groups, f.ImageFlagGroup)
}
if f.VulnerabilityFlagGroup != nil {
groups = append(groups, f.VulnerabilityFlagGroup)
}
if f.MisconfFlagGroup != nil {
groups = append(groups, f.MisconfFlagGroup)
}
if f.ModuleFlagGroup != nil {
groups = append(groups, f.ModuleFlagGroup)
}
if f.SecretFlagGroup != nil {
groups = append(groups, f.SecretFlagGroup)
}
if f.LicenseFlagGroup != nil {
groups = append(groups, f.LicenseFlagGroup)
}
if f.RegoFlagGroup != nil {
groups = append(groups, f.RegoFlagGroup)
}
if f.AWSFlagGroup != nil {
groups = append(groups, f.AWSFlagGroup)
}
if f.K8sFlagGroup != nil {
groups = append(groups, f.K8sFlagGroup)
}
if f.PackageFlagGroup != nil {
groups = append(groups, f.PackageFlagGroup)
}
if f.RemoteFlagGroup != nil {
groups = append(groups, f.RemoteFlagGroup)
}
if f.RepoFlagGroup != nil {
groups = append(groups, f.RepoFlagGroup)
}
return groups
return lo.Filter(*f, func(group FlagGroup, _ int) bool {
return group != nil && group.Name() != "Global"
})
}

func (f *Flags) AddFlags(cmd *cobra.Command) {
@@ -715,141 +649,18 @@ func (f *Flags) Bind(cmd *cobra.Command) error {

// nolint: gocyclo
func (f *Flags) ToOptions(args []string) (Options, error) {
var err error
opts := Options{
AppVersion: app.Version(),
args: args,
}

if f.GlobalFlagGroup != nil {
opts.GlobalOptions, err = f.GlobalFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("global flag error: %w", err)
for _, group := range *f { // Include global flags
if err := parseFlags(group); err != nil {
return Options{}, xerrors.Errorf("unable to parse flags: %w", err)
}
}

if f.AWSFlagGroup != nil {
opts.AWSOptions, err = f.AWSFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("aws flag error: %w", err)
}
}

if f.CacheFlagGroup != nil {
opts.CacheOptions, err = f.CacheFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("cache flag error: %w", err)
}
}

if f.CleanFlagGroup != nil {
opts.CleanOptions, err = f.CleanFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("clean flag error: %w", err)
}
}

if f.DBFlagGroup != nil {
opts.DBOptions, err = f.DBFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("db flag error: %w", err)
}
}

if f.ImageFlagGroup != nil {
opts.ImageOptions, err = f.ImageFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("image flag error: %w", err)
}
}

if f.K8sFlagGroup != nil {
opts.K8sOptions, err = f.K8sFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("k8s flag error: %w", err)
}
}

if f.LicenseFlagGroup != nil {
opts.LicenseOptions, err = f.LicenseFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("license flag error: %w", err)
}
}

if f.MisconfFlagGroup != nil {
opts.MisconfOptions, err = f.MisconfFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("misconfiguration flag error: %w", err)
}
}

if f.ModuleFlagGroup != nil {
opts.ModuleOptions, err = f.ModuleFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("module flag error: %w", err)
}
}

if f.PackageFlagGroup != nil {
opts.PackageOptions, err = f.PackageFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("package flag error: %w", err)
}
}

if f.RegoFlagGroup != nil {
opts.RegoOptions, err = f.RegoFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("rego flag error: %w", err)
}
}

if f.RemoteFlagGroup != nil {
opts.RemoteOptions, err = f.RemoteFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("remote flag error: %w", err)
}
}

if f.RegistryFlagGroup != nil {
opts.RegistryOptions, err = f.RegistryFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("registry flag error: %w", err)
}
}

if f.RepoFlagGroup != nil {
opts.RepoOptions, err = f.RepoFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("rego flag error: %w", err)
}
}

if f.ReportFlagGroup != nil {
opts.ReportOptions, err = f.ReportFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("report flag error: %w", err)
}
}

if f.ScanFlagGroup != nil {
opts.ScanOptions, err = f.ScanFlagGroup.ToOptions(args)
if err != nil {
return Options{}, xerrors.Errorf("scan flag error: %w", err)
}
}

if f.SecretFlagGroup != nil {
opts.SecretOptions, err = f.SecretFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("secret flag error: %w", err)
}
}

if f.VulnerabilityFlagGroup != nil {
opts.VulnerabilityOptions, err = f.VulnerabilityFlagGroup.ToOptions()
if err != nil {
return Options{}, xerrors.Errorf("vulnerability flag error: %w", err)
if err := group.ToOptions(&opts); err != nil {
return Options{}, xerrors.Errorf("unable to convert flags to options: %w", err)
}
}

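Note: the following is a minimal sketch, not part of this commit, of what a flag group looks like against the refactored interface above, where ToOptions receives *Options and writes into it instead of returning its own options struct. The FooFlagGroup name, the my.foo config key and the fooKey type are hypothetical; the Flag, Flagger, Options and CustomOptions types come from the diff, and it assumes Clone returns a pointer as in the tests above.

```go
package myflags

import "github.com/aquasecurity/trivy/pkg/flag"

// fooKey keys the group's result inside Options.CustomOptions.
type fooKey struct{}

type FooOptions struct {
	Foo string
}

var fooFlag = flag.Flag[string]{
	Name:       "foo",
	ConfigName: "my.foo",
	Usage:      "example flag",
}

type FooFlagGroup struct {
	Foo *flag.Flag[string]
}

func NewFooFlagGroup() *FooFlagGroup {
	return &FooFlagGroup{Foo: fooFlag.Clone()}
}

func (g *FooFlagGroup) Name() string { return "Foo" }

func (g *FooFlagGroup) Flags() []flag.Flagger { return []flag.Flagger{g.Foo} }

// ToOptions stores the parsed values on Options; Flags.ToOptions has already
// called parseFlags for every group before this point.
func (g *FooFlagGroup) ToOptions(opts *flag.Options) error {
	if opts.CustomOptions == nil {
		opts.CustomOptions = make(map[any]any)
	}
	opts.CustomOptions[fooKey{}] = FooOptions{Foo: g.Foo.Value()}
	return nil
}
```

Because Flags is now a plain slice of FlagGroup, such a group can be composed with the built-in ones the same way the tests above do with flag.Flags{f}.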
@@ -935,3 +746,16 @@ func HiddenFlags() []string {
}
return hiddenFlags
}

// findFlagGroup finds a flag group by type T
// Note that Go generics doesn't support methods today.
// cf. https://github.com/golang/go/issues/49085
func findFlagGroup[T FlagGroup](f *Flags) (T, bool) {
for _, group := range *f {
if g, ok := group.(T); ok {
return g, true
}
}
var zero T
return zero, false
}
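Note: findFlagGroup is unexported, so it can only be called from inside package flag. The following is a hedged sketch, mirroring how Align uses it above; the scannersEnabled helper is hypothetical and not part of the commit.

```go
// scannersEnabled reports whether a ScanFlagGroup with scanners is present,
// looking the group up by type rather than by a named struct field.
func (o *Options) scannersEnabled(f *Flags) bool {
	scanFlagGroup, ok := findFlagGroup[*ScanFlagGroup](f)
	return ok && scanFlagGroup.Scanners != nil
}
```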

@@ -69,23 +69,20 @@ func (f *PackageFlagGroup) Flags() []Flagger {
}
}

func (f *PackageFlagGroup) ToOptions() (PackageOptions, error) {
if err := parseFlags(f); err != nil {
return PackageOptions{}, err
}

func (f *PackageFlagGroup) ToOptions(opts *Options) error {
var relationships []ftypes.Relationship
for _, r := range f.PkgRelationships.Value() {
relationship, err := ftypes.NewRelationship(r)
if err != nil {
return PackageOptions{}, err
return err
}
relationships = append(relationships, relationship)
}

return PackageOptions{
opts.PackageOptions = PackageOptions{
IncludeDevDeps: f.IncludeDevDeps.Value(),
PkgTypes: f.PkgTypes.Value(),
PkgRelationships: relationships,
}, nil
}
return nil
}

@@ -76,9 +76,10 @@ func TestPackageFlagGroup_ToOptions(t *testing.T) {
PkgRelationships: flag.PkgRelationshipsFlag.Clone(),
}

got, err := f.ToOptions()
flags := flag.Flags{f}
got, err := flags.ToOptions(nil)
require.NoError(t, err)
assert.EqualExportedValuesf(t, tt.want, got, "PackageFlagGroup")
assert.EqualExportedValues(t, tt.want, got.PackageOptions)
})
}
}

@@ -75,27 +75,23 @@ func (f *RegistryFlagGroup) Flags() []Flagger {
}
}

func (f *RegistryFlagGroup) ToOptions() (RegistryOptions, error) {
if err := parseFlags(f); err != nil {
return RegistryOptions{}, err
}

func (f *RegistryFlagGroup) ToOptions(opts *Options) error {
var credentials []types.Credential
users := f.Username.Value()
passwords := f.Password.Value()
if f.PasswordStdin.Value() {
if len(passwords) != 0 {
return RegistryOptions{}, xerrors.New("'--password' and '--password-stdin' can't be used at the same time")
return xerrors.New("'--password' and '--password-stdin' can't be used at the same time")
}
contents, err := io.ReadAll(os.Stdin)
if err != nil {
return RegistryOptions{}, xerrors.Errorf("failed to read from stdin: %w", err)
return xerrors.Errorf("failed to read from stdin: %w", err)
}
// "--password-stdin" doesn't support comma-separated passwords
passwords = []string{strings.TrimRight(string(contents), "\r\n")}
}
if len(users) != len(passwords) {
return RegistryOptions{}, xerrors.New("the number of usernames and passwords must match")
return xerrors.New("the number of usernames and passwords must match")
}
for i, user := range users {
credentials = append(credentials, types.Credential{
@@ -104,9 +100,10 @@ func (f *RegistryFlagGroup) ToOptions() (RegistryOptions, error) {
})
}

return RegistryOptions{
opts.RegistryOptions = RegistryOptions{
Credentials: credentials,
RegistryToken: f.RegistryToken.Value(),
RegistryMirrors: f.RegistryMirrors.Value(),
}, nil
}
return nil
}

@@ -102,17 +102,14 @@ func (f *RegoFlagGroup) Flags() []Flagger {
}
}

func (f *RegoFlagGroup) ToOptions() (RegoOptions, error) {
if err := parseFlags(f); err != nil {
return RegoOptions{}, err
}

return RegoOptions{
func (f *RegoFlagGroup) ToOptions(opts *Options) error {
opts.RegoOptions = RegoOptions{
IncludeDeprecatedChecks: f.IncludeDeprecatedChecks.Value(),
SkipCheckUpdate: f.SkipCheckUpdate.Value(),
Trace: f.Trace.Value(),
CheckPaths: f.CheckPaths.Value(),
DataPaths: f.DataPaths.Value(),
CheckNamespaces: f.CheckNamespaces.Value(),
}, nil
}
return nil
}

@@ -110,11 +110,7 @@ func (f *RemoteFlagGroup) Flags() []Flagger {
}
}

func (f *RemoteFlagGroup) ToOptions() (RemoteOptions, error) {
if err := parseFlags(f); err != nil {
return RemoteOptions{}, err
}

func (f *RemoteFlagGroup) ToOptions(opts *Options) error {
serverAddr := f.ServerAddr.Value()
customHeaders := splitCustomHeaders(f.CustomHeaders.Value())
listen := f.Listen.Value()
@@ -140,14 +136,15 @@ func (f *RemoteFlagGroup) ToOptions() (RemoteOptions, error) {
customHeaders.Set(tokenHeader, token)
}

return RemoteOptions{
opts.RemoteOptions = RemoteOptions{
Token: token,
TokenHeader: tokenHeader,
PathPrefix: f.PathPrefix.Value(),
ServerAddr: serverAddr,
CustomHeaders: customHeaders,
Listen: listen,
}, nil
}
return nil
}

func splitCustomHeaders(headers []string) http.Header {

@@ -110,9 +110,10 @@ func TestRemoteFlagGroup_ToOptions(t *testing.T) {
Token: flag.ServerTokenFlag.Clone(),
TokenHeader: flag.ServerTokenHeaderFlag.Clone(),
}
got, err := f.ToOptions()
flags := flag.Flags{f}
got, err := flags.ToOptions(nil)
require.NoError(t, err)
assert.Equalf(t, tt.want, got, "ToOptions()")
assert.Equal(t, tt.want, got.RemoteOptions)

// Assert log messages
assert.Equal(t, tt.wantLogs, out.Messages(), tt.name)

@@ -50,14 +50,11 @@ func (f *RepoFlagGroup) Flags() []Flagger {
}
}

func (f *RepoFlagGroup) ToOptions() (RepoOptions, error) {
if err := parseFlags(f); err != nil {
return RepoOptions{}, err
}

return RepoOptions{
func (f *RepoFlagGroup) ToOptions(opts *Options) error {
opts.RepoOptions = RepoOptions{
RepoBranch: f.Branch.Value(),
RepoCommit: f.Commit.Value(),
RepoTag: f.Tag.Value(),
}, nil
}
return nil
}

@@ -200,11 +200,7 @@ func (f *ReportFlagGroup) Flags() []Flagger {
}
}

func (f *ReportFlagGroup) ToOptions() (ReportOptions, error) {
if err := parseFlags(f); err != nil {
return ReportOptions{}, err
}

func (f *ReportFlagGroup) ToOptions(opts *Options) error {
format := types.Format(f.Format.Value())
template := f.Template.Value()
dependencyTree := f.DependencyTree.Value()
@@ -241,27 +237,27 @@ func (f *ReportFlagGroup) ToOptions() (ReportOptions, error) {

// "--table-mode" option is available only with "--format table".
if viper.IsSet(TableModeFlag.ConfigName) && format != types.FormatTable {
return ReportOptions{}, xerrors.New(`"--table-mode" can be used only with "--format table".`)
return xerrors.New(`"--table-mode" can be used only with "--format table".`)
}

cs, err := loadComplianceTypes(f.Compliance.Value())
if err != nil {
return ReportOptions{}, xerrors.Errorf("unable to load compliance spec: %w", err)
return xerrors.Errorf("unable to load compliance spec: %w", err)
}

var outputPluginArgs []string
if arg := f.OutputPluginArg.Value(); arg != "" {
outputPluginArgs, err = shellwords.Parse(arg)
if err != nil {
return ReportOptions{}, xerrors.Errorf("unable to parse output plugin argument: %w", err)
return xerrors.Errorf("unable to parse output plugin argument: %w", err)
}
}

if viper.IsSet(f.IgnoreFile.ConfigName) && !fsutils.FileExists(f.IgnoreFile.Value()) {
return ReportOptions{}, xerrors.Errorf("ignore file not found: %s", f.IgnoreFile.Value())
return xerrors.Errorf("ignore file not found: %s", f.IgnoreFile.Value())
}

return ReportOptions{
opts.ReportOptions = ReportOptions{
Format: format,
ReportFormat: f.ReportFormat.Value(),
Template: template,
@@ -277,7 +273,8 @@ func (f *ReportFlagGroup) ToOptions() (ReportOptions, error) {
Compliance: cs,
ShowSuppressed: f.ShowSuppressed.Value(),
TableModes: xstrings.ToTSlice[types.TableMode](tableModes),
}, nil
}
return nil
}

func loadComplianceTypes(compliance string) (spec.ComplianceSpec, error) {

@@ -214,13 +214,14 @@ func TestReportFlagGroup_ToOptions(t *testing.T) {
TableMode: flag.TableModeFlag.Clone(),
}

got, err := f.ToOptions()
flags := flag.Flags{f}
got, err := flags.ToOptions(nil)
if tt.wantErr != "" {
require.Contains(t, err.Error(), tt.wantErr)
require.ErrorContains(t, err, tt.wantErr)
return
}

assert.EqualExportedValuesf(t, tt.want, got, "ToOptions()")
assert.EqualExportedValues(t, tt.want, got.ReportOptions)

// Assert log messages
assert.Equal(t, tt.wantLogs, out.Messages(), tt.name)
@@ -235,7 +236,8 @@ func TestReportFlagGroup_ToOptions(t *testing.T) {
IgnoreFile: flag.IgnoreFileFlag.Clone(),
}

_, err := f.ToOptions()
flags := flag.Flags{f}
_, err := flags.ToOptions(nil)
assert.ErrorContains(t, err, "ignore file not found: doesntexist")
})
}

@@ -184,14 +184,10 @@ func (f *ScanFlagGroup) Flags() []Flagger {
}
}

func (f *ScanFlagGroup) ToOptions(args []string) (ScanOptions, error) {
if err := parseFlags(f); err != nil {
return ScanOptions{}, err
}

func (f *ScanFlagGroup) ToOptions(opts *Options) error {
var target string
if len(args) == 1 {
target = args[0]
if len(opts.args) == 1 {
target = opts.args[0]
}

parallel := f.Parallel.Value()
@@ -204,7 +200,7 @@ func (f *ScanFlagGroup) ToOptions(args []string) (ScanOptions, error) {
if f.DistroFlag != nil && f.DistroFlag.Value() != "" {
family, version, _ := strings.Cut(f.DistroFlag.Value(), "/")
if !slices.Contains(ftypes.OSTypes, ftypes.OSType(family)) {
return ScanOptions{}, xerrors.Errorf("unknown OS family: %s, must be %q", family, ftypes.OSTypes)
return xerrors.Errorf("unknown OS family: %s, must be %q", family, ftypes.OSTypes)
}
distro = ftypes.OS{
Family: ftypes.OSType(family),
@@ -212,7 +208,7 @@ func (f *ScanFlagGroup) ToOptions(args []string) (ScanOptions, error) {
}
}

return ScanOptions{
opts.ScanOptions = ScanOptions{
Target: target,
SkipDirs: f.SkipDirs.Value(),
SkipFiles: f.SkipFiles.Value(),
@@ -224,5 +220,6 @@ func (f *ScanFlagGroup) ToOptions(args []string) (ScanOptions, error) {
RekorURL: f.RekorURL.Value(),
DetectionPriority: ftypes.DetectionPriority(f.DetectionPriority.Value()),
Distro: distro,
}, nil
}
return nil
}

@@ -147,9 +147,10 @@ func TestScanFlagGroup_ToOptions(t *testing.T) {
DistroFlag: flag.DistroFlag.Clone(),
}

got, err := f.ToOptions(tt.args)
flags := flag.Flags{f}
got, err := flags.ToOptions(tt.args)
tt.assertion(t, err)
assert.Equalf(t, tt.want, got, "ToOptions()")
assert.Equal(t, tt.want, got.ScanOptions)
})
}
}

@@ -31,12 +31,9 @@ func (f *SecretFlagGroup) Flags() []Flagger {
return []Flagger{f.SecretConfig}
}

func (f *SecretFlagGroup) ToOptions() (SecretOptions, error) {
if err := parseFlags(f); err != nil {
return SecretOptions{}, err
}

return SecretOptions{
func (f *SecretFlagGroup) ToOptions(opts *Options) error {
opts.SecretOptions = SecretOptions{
SecretConfigPath: f.SecretConfig.Value(),
}, nil
}
return nil
}

@@ -82,11 +82,7 @@ func (f *VulnerabilityFlagGroup) Flags() []Flagger {
}
}

func (f *VulnerabilityFlagGroup) ToOptions() (VulnerabilityOptions, error) {
if err := parseFlags(f); err != nil {
return VulnerabilityOptions{}, err
}

func (f *VulnerabilityFlagGroup) ToOptions(opts *Options) error {
// Just convert string to dbTypes.Status as the validated values are passed here.
ignoreStatuses := lo.Map(f.IgnoreStatus.Value(), func(s string, _ int) dbTypes.Status {
return dbTypes.NewStatus(s)
@@ -110,12 +106,13 @@ func (f *VulnerabilityFlagGroup) ToOptions() (VulnerabilityOptions, error) {
}
log.Debug("Ignore statuses", log.Any("statuses", ignoreStatuses))

return VulnerabilityOptions{
opts.VulnerabilityOptions = VulnerabilityOptions{
IgnoreStatuses: ignoreStatuses,
VEXSources: lo.Map(f.VEX.Value(), func(s string, _ int) vex.Source {
return vex.NewSource(s)
}),
SkipVEXRepoUpdate: f.SkipVEXRepoUpdate.Value(),
VulnSeveritySources: xstrings.ToTSlice[dbTypes.SourceID](f.VulnSeveritySource.Value()),
}, nil
}
return nil
}

@@ -26,7 +26,7 @@ type adapter struct {
func (a *adapter) adaptClusters() []gke.Cluster {
for _, module := range a.modules {
for _, resource := range module.GetResourcesByType("google_container_cluster") {
a.adaptCluster(resource, module)
a.adaptCluster(resource)
}
}

@@ -46,7 +46,7 @@ func (a *adapter) adaptClusters() []gke.Cluster {
return clusters
}

func (a *adapter) adaptCluster(resource *terraform.Block, module *terraform.Module) {
func (a *adapter) adaptCluster(resource *terraform.Block) {

cluster := gke.Cluster{
Metadata: resource.GetMetadata(),
@@ -104,7 +104,7 @@ func (a *adapter) adaptCluster(resource *terraform.Block, module *terraform.Modu
}

if blocks := resource.GetBlocks("master_authorized_networks_config"); len(blocks) > 0 {
cluster.MasterAuthorizedNetworks = adaptMasterAuthNetworksAsBlocks(resource, blocks)
cluster.MasterAuthorizedNetworks = adaptMasterAuthNetworksAsBlocks(blocks)
}

if policyBlock := resource.GetBlock("network_policy"); policyBlock.IsNotNil() {
@@ -129,12 +129,24 @@ func (a *adapter) adaptCluster(resource *terraform.Block, module *terraform.Modu
}

if configBlock := resource.GetBlock("node_config"); configBlock.IsNotNil() {
if configBlock.GetBlock("metadata").IsNotNil() {
cluster.NodeConfig.Metadata = configBlock.GetBlock("metadata").GetMetadata()
}
cluster.NodeConfig = adaptNodeConfig(configBlock)
}

if autoScalingBlock := resource.GetBlock("cluster_autoscaling"); autoScalingBlock.IsNotNil() {
cluster.AutoScaling = gke.AutoScaling{
Metadata: autoScalingBlock.GetMetadata(),
Enabled: autoScalingBlock.GetAttribute("enabled").AsBoolValueOrDefault(false, autoScalingBlock),
}

if b := autoScalingBlock.GetBlock("auto_provisioning_defaults"); b.IsNotNil() {
cluster.AutoScaling.AutoProvisioningDefaults = gke.AutoProvisioningDefaults{
Metadata: b.GetMetadata(),
ServiceAccount: b.GetAttribute("service_account").AsStringValueOrDefault("", b),
Management: adaptManagement(b),
ImageType: b.GetAttribute("image_type").AsStringValueOrDefault("", b),
}
}
}
cluster.EnableShieldedNodes = resource.GetAttribute("enable_shielded_nodes").AsBoolValueOrDefault(true, resource)

enableLegacyABACAttr := resource.GetAttribute("enable_legacy_abac")
@@ -152,6 +164,23 @@ func (a *adapter) adaptCluster(resource *terraform.Block, module *terraform.Modu
a.clusterMap[resource.ID()] = cluster
}

func adaptManagement(parent *terraform.Block) gke.Management {
b := parent.GetBlock("management")
if b.IsNil() {
return gke.Management{
Metadata: parent.GetMetadata(),
EnableAutoRepair: iacTypes.BoolDefault(false, parent.GetMetadata()),
EnableAutoUpgrade: iacTypes.BoolDefault(false, parent.GetMetadata()),
}
}

return gke.Management{
Metadata: b.GetMetadata(),
EnableAutoRepair: b.GetAttribute("auto_repair").AsBoolValueOrDefault(false, b),
EnableAutoUpgrade: b.GetAttribute("auto_upgrade").AsBoolValueOrDefault(false, b),
}
}

func (a *adapter) adaptNodePools() {
for _, nodePoolBlock := range a.modules.GetResourcesByType("google_container_node_pool") {
a.adaptNodePool(nodePoolBlock)
@@ -170,28 +199,13 @@ func (a *adapter) adaptNodePool(resource *terraform.Block) {
EnableLegacyEndpoints: iacTypes.BoolDefault(true, resource.GetMetadata()),
}

management := gke.Management{
Metadata: resource.GetMetadata(),
EnableAutoRepair: iacTypes.BoolDefault(false, resource.GetMetadata()),
EnableAutoUpgrade: iacTypes.BoolDefault(false, resource.GetMetadata()),
}

if managementBlock := resource.GetBlock("management"); managementBlock.IsNotNil() {
management.Metadata = managementBlock.GetMetadata()
autoRepairAttr := managementBlock.GetAttribute("auto_repair")
management.EnableAutoRepair = autoRepairAttr.AsBoolValueOrDefault(false, managementBlock)

autoUpgradeAttr := managementBlock.GetAttribute("auto_upgrade")
management.EnableAutoUpgrade = autoUpgradeAttr.AsBoolValueOrDefault(false, managementBlock)
}

if nodeConfigBlock := resource.GetBlock("node_config"); nodeConfigBlock.IsNotNil() {
nodeConfig = adaptNodeConfig(nodeConfigBlock)
}

nodePool := gke.NodePool{
Metadata: resource.GetMetadata(),
Management: management,
Management: adaptManagement(resource),
NodeConfig: nodeConfig,
}

@@ -270,9 +284,17 @@ func adaptNodeConfig(resource *terraform.Block) gke.NodeConfig {
}

if metadata := resource.GetAttribute("metadata"); metadata.IsNotNil() {
legacyMetadata := metadata.MapValue("disable-legacy-endpoints")
if legacyMetadata.IsWhollyKnown() && legacyMetadata.Type() == cty.Bool {
config.EnableLegacyEndpoints = iacTypes.Bool(legacyMetadata.False(), metadata.GetMetadata())
disableLegacy := metadata.MapValue("disable-legacy-endpoints")
if disableLegacy.IsKnown() {
var enableLegacyEndpoints bool
switch disableLegacy.Type() {
case cty.Bool:
enableLegacyEndpoints = disableLegacy.False()
case cty.String:
enableLegacyEndpoints = disableLegacy.AsString() == "false"
}

config.EnableLegacyEndpoints = iacTypes.Bool(enableLegacyEndpoints, metadata.GetMetadata())
}
}

@@ -312,7 +334,7 @@ func adaptMasterAuth(resource *terraform.Block) gke.MasterAuth {
}
}

func adaptMasterAuthNetworksAsBlocks(parent *terraform.Block, blocks terraform.Blocks) gke.MasterAuthorizedNetworks {
func adaptMasterAuthNetworksAsBlocks(blocks terraform.Blocks) gke.MasterAuthorizedNetworks {
var cidrs []iacTypes.StringValue
for _, block := range blocks {
for _, cidrBlock := range block.GetBlocks("cidr_blocks") {

@@ -77,6 +77,18 @@ resource "google_container_cluster" "example" {
enable_autopilot = true

datapath_provider = "ADVANCED_DATAPATH"

cluster_autoscaling {
enabled = true
auto_provisioning_defaults {
service_account = "test"
image_type = "COS_CONTAINERD"
management {
auto_repair = true
auto_upgrade = true
}
}
}
}

resource "google_container_node_pool" "primary_preemptible_nodes" {
@@ -102,9 +114,7 @@ resource "google_container_node_pool" "primary_preemptible_nodes" {
expected: gke.GKE{
Clusters: []gke.Cluster{
{
Metadata: iacTypes.NewTestMetadata(),
NodeConfig: gke.NodeConfig{
Metadata: iacTypes.NewTestMetadata(),
ImageType: iacTypes.String("COS_CONTAINERD", iacTypes.NewTestMetadata()),
WorkloadMetadataConfig: gke.WorkloadMetadataConfig{
Metadata: iacTypes.NewTestMetadata(),
@@ -113,9 +123,19 @@ resource "google_container_node_pool" "primary_preemptible_nodes" {
ServiceAccount: iacTypes.String("", iacTypes.NewTestMetadata()),
EnableLegacyEndpoints: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
},
AutoScaling: gke.AutoScaling{
Enabled: iacTypes.BoolTest(true),
AutoProvisioningDefaults: gke.AutoProvisioningDefaults{
ImageType: iacTypes.StringTest("COS_CONTAINERD"),
ServiceAccount: iacTypes.StringTest("test"),
Management: gke.Management{
EnableAutoRepair: iacTypes.BoolTest(true),
EnableAutoUpgrade: iacTypes.BoolTest(true),
},
},
},
NodePools: []gke.NodePool{
{
Metadata: iacTypes.NewTestMetadata(),
Management: gke.Management{
Metadata: iacTypes.NewTestMetadata(),
EnableAutoRepair: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
@@ -134,19 +154,16 @@ resource "google_container_node_pool" "primary_preemptible_nodes" {
},
},
IPAllocationPolicy: gke.IPAllocationPolicy{
Metadata: iacTypes.NewTestMetadata(),
Enabled: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
Enabled: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
},
MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{
Metadata: iacTypes.NewTestMetadata(),
Enabled: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
Enabled: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
CIDRs: []iacTypes.StringValue{
iacTypes.String("10.10.128.0/24", iacTypes.NewTestMetadata()),
},
},
NetworkPolicy: gke.NetworkPolicy{
Metadata: iacTypes.NewTestMetadata(),
Enabled: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
Enabled: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
},
DatapathProvider: iacTypes.String("ADVANCED_DATAPATH", iacTypes.NewTestMetadata()),
PrivateCluster: gke.PrivateCluster{
@@ -156,7 +173,6 @@ resource "google_container_node_pool" "primary_preemptible_nodes" {
LoggingService: iacTypes.String("logging.googleapis.com/kubernetes", iacTypes.NewTestMetadata()),
MonitoringService: iacTypes.String("monitoring.googleapis.com/kubernetes", iacTypes.NewTestMetadata()),
|
||||
MasterAuth: gke.MasterAuth{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
ClientCertificate: gke.ClientCertificate{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
IssueCertificate: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
|
||||
@@ -182,7 +198,7 @@ resource "google_container_cluster" "example" {
|
||||
node_config {
|
||||
service_account = "service-account"
|
||||
metadata = {
|
||||
disable-legacy-endpoints = true
|
||||
disable-legacy-endpoints = "true"
|
||||
}
|
||||
image_type = "COS"
|
||||
workload_metadata_config {
|
||||
@@ -194,7 +210,6 @@ resource "google_container_cluster" "example" {
|
||||
expected: gke.GKE{
|
||||
Clusters: []gke.Cluster{
|
||||
{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
NodeConfig: gke.NodeConfig{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
ImageType: iacTypes.String("COS", iacTypes.NewTestMetadata()),
|
||||
@@ -207,17 +222,14 @@ resource "google_container_cluster" "example" {
|
||||
},
|
||||
|
||||
IPAllocationPolicy: gke.IPAllocationPolicy{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
|
||||
Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
|
||||
},
|
||||
MasterAuthorizedNetworks: gke.MasterAuthorizedNetworks{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
|
||||
CIDRs: []iacTypes.StringValue{},
|
||||
Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
|
||||
CIDRs: []iacTypes.StringValue{},
|
||||
},
|
||||
NetworkPolicy: gke.NetworkPolicy{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
|
||||
Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
|
||||
},
|
||||
DatapathProvider: iacTypes.StringDefault("DATAPATH_PROVIDER_UNSPECIFIED", iacTypes.NewTestMetadata()),
|
||||
PrivateCluster: gke.PrivateCluster{
|
||||
@@ -227,7 +239,6 @@ resource "google_container_cluster" "example" {
|
||||
LoggingService: iacTypes.String("logging.googleapis.com/kubernetes", iacTypes.NewTestMetadata()),
|
||||
MonitoringService: iacTypes.String("monitoring.googleapis.com/kubernetes", iacTypes.NewTestMetadata()),
|
||||
MasterAuth: gke.MasterAuth{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
ClientCertificate: gke.ClientCertificate{
|
||||
Metadata: iacTypes.NewTestMetadata(),
|
||||
IssueCertificate: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
|
||||
|
||||
@@ -19,6 +19,7 @@ type Cluster struct {
|
||||
MonitoringService iacTypes.StringValue
|
||||
MasterAuth MasterAuth
|
||||
NodeConfig NodeConfig
|
||||
AutoScaling AutoScaling
|
||||
EnableShieldedNodes iacTypes.BoolValue
|
||||
EnableLegacyABAC iacTypes.BoolValue
|
||||
ResourceLabels iacTypes.MapValue
|
||||
@@ -35,6 +36,19 @@ type NodeConfig struct {
|
||||
EnableLegacyEndpoints iacTypes.BoolValue
|
||||
}
|
||||
|
||||
type AutoScaling struct {
|
||||
Metadata iacTypes.Metadata
|
||||
Enabled iacTypes.BoolValue
|
||||
AutoProvisioningDefaults AutoProvisioningDefaults
|
||||
}
|
||||
|
||||
type AutoProvisioningDefaults struct {
|
||||
Metadata iacTypes.Metadata
|
||||
ImageType iacTypes.StringValue
|
||||
ServiceAccount iacTypes.StringValue
|
||||
Management Management
|
||||
}
|
||||
|
||||
type WorkloadMetadataConfig struct {
|
||||
Metadata iacTypes.Metadata
|
||||
NodeMetadata iacTypes.StringValue
|
||||
|
||||
43
pkg/iac/rego/provider.go
Normal file
@@ -0,0 +1,43 @@
|
||||
package rego
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"sync"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/iac/scanners/options"
|
||||
)
|
||||
|
||||
func WithRegoScanner(rs *Scanner) options.ScannerOption {
|
||||
return func(s options.ConfigurableScanner) {
|
||||
if ss, ok := s.(*RegoScannerProvider); ok {
|
||||
ss.regoScanner = rs
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type RegoScannerProvider struct {
|
||||
mu sync.Mutex
|
||||
regoScanner *Scanner
|
||||
}
|
||||
|
||||
func NewRegoScannerProvider(opts ...options.ScannerOption) *RegoScannerProvider {
|
||||
s := &RegoScannerProvider{}
|
||||
for _, o := range opts {
|
||||
o(s)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *RegoScannerProvider) InitRegoScanner(fsys fs.FS, opts []options.ScannerOption) (*Scanner, error) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.regoScanner != nil {
|
||||
return s.regoScanner, nil
|
||||
}
|
||||
s.regoScanner = NewScanner(opts...)
|
||||
if err := s.regoScanner.LoadPolicies(fsys); err != nil {
|
||||
return nil, fmt.Errorf("load checks: %w", err)
|
||||
}
|
||||
return s.regoScanner, nil
|
||||
}
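The new provider centralizes the lazy rego-scanner initialization that each scanner previously duplicated: WithRegoScanner injects a pre-built scanner, while InitRegoScanner builds one on first use, loads the checks, and caches it behind the mutex. A hedged sketch of how a scanner can embed it, following the pattern the ARM, CloudFormation, Helm and Terraform scanners adopt below; myScanner and its scan method are placeholders, not part of this change:

package myscanner

import (
	"fmt"
	"io/fs"

	"github.com/aquasecurity/trivy/pkg/iac/rego"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/options"
)

// myScanner is a hypothetical scanner embedding the shared provider.
type myScanner struct {
	*rego.RegoScannerProvider
	opts []options.ScannerOption
}

func newMyScanner(opts ...options.ScannerOption) *myScanner {
	return &myScanner{
		RegoScannerProvider: rego.NewRegoScannerProvider(opts...),
		opts:                opts,
	}
}

func (s *myScanner) scan(fsys fs.FS) error {
	// The first call loads the checks; later calls reuse the cached scanner.
	rs, err := s.InitRegoScanner(fsys, s.opts)
	if err != nil {
		return fmt.Errorf("init rego scanner: %w", err)
	}
	_ = rs // rs.ScanInput(...) would be called here
	return nil
}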
@@ -6450,6 +6450,44 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"github.com.aquasecurity.trivy.pkg.iac.providers.google.gke.AutoProvisioningDefaults": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"__defsec_metadata": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.types.Metadata"
|
||||
},
|
||||
"imagetype": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.types.StringValue"
|
||||
},
|
||||
"management": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.providers.google.gke.Management"
|
||||
},
|
||||
"serviceaccount": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.types.StringValue"
|
||||
}
|
||||
}
|
||||
},
|
||||
"github.com.aquasecurity.trivy.pkg.iac.providers.google.gke.AutoScaling": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"__defsec_metadata": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.types.Metadata"
|
||||
},
|
||||
"autoprovisioningdefaults": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.providers.google.gke.AutoProvisioningDefaults"
|
||||
},
|
||||
"enabled": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.types.BoolValue"
|
||||
}
|
||||
}
|
||||
},
|
||||
"github.com.aquasecurity.trivy.pkg.iac.providers.google.gke.ClientCertificate": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -6470,6 +6508,10 @@
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.types.Metadata"
|
||||
},
|
||||
"autoscaling": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.providers.google.gke.AutoScaling"
|
||||
},
|
||||
"datapathprovider": {
|
||||
"type": "object",
|
||||
"$ref": "#/definitions/github.com.aquasecurity.trivy.pkg.iac.types.StringValue"
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"sync"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/iac/adapters/arm"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/rego"
|
||||
@@ -13,7 +12,6 @@ import (
|
||||
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm/parser"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/scanners/options"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/state"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/types"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
)
|
||||
@@ -22,16 +20,16 @@ var _ scanners.FSScanner = (*Scanner)(nil)
|
||||
var _ options.ConfigurableScanner = (*Scanner)(nil)
|
||||
|
||||
type Scanner struct {
|
||||
mu sync.Mutex
|
||||
scannerOptions []options.ScannerOption
|
||||
logger *log.Logger
|
||||
regoScanner *rego.Scanner
|
||||
*rego.RegoScannerProvider
|
||||
opts []options.ScannerOption
|
||||
logger *log.Logger
|
||||
}
|
||||
|
||||
func New(opts ...options.ScannerOption) *Scanner {
|
||||
scanner := &Scanner{
|
||||
scannerOptions: opts,
|
||||
logger: log.WithPrefix("azure-arm"),
|
||||
RegoScannerProvider: rego.NewRegoScannerProvider(opts...),
|
||||
opts: opts,
|
||||
logger: log.WithPrefix("azure-arm"),
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt(scanner)
|
||||
@@ -43,29 +41,12 @@ func (s *Scanner) Name() string {
|
||||
return "Azure ARM"
|
||||
}
|
||||
|
||||
func (s *Scanner) initRegoScanner(srcFS fs.FS) error {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.regoScanner != nil {
|
||||
return nil
|
||||
}
|
||||
regoScanner := rego.NewScanner(s.scannerOptions...)
|
||||
if err := regoScanner.LoadPolicies(srcFS); err != nil {
|
||||
return err
|
||||
}
|
||||
s.regoScanner = regoScanner
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (scan.Results, error) {
|
||||
p := parser.New(fsys)
|
||||
deployments, err := p.ParseFS(ctx, dir)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := s.initRegoScanner(fsys); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return s.scanDeployments(ctx, deployments, fsys)
|
||||
}
|
||||
@@ -87,12 +68,17 @@ func (s *Scanner) scanDeployments(ctx context.Context, deployments []azure.Deplo
|
||||
}
|
||||
|
||||
func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deployment, fsys fs.FS) (scan.Results, error) {
|
||||
deploymentState := s.adaptDeployment(ctx, deployment)
|
||||
state := arm.Adapt(ctx, deployment)
|
||||
|
||||
results, err := s.regoScanner.ScanInput(ctx, types.SourceCloud, rego.Input{
|
||||
rs, err := s.InitRegoScanner(fsys, s.opts)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init rego scanner: %w", err)
|
||||
}
|
||||
|
||||
results, err := rs.ScanInput(ctx, types.SourceCloud, rego.Input{
|
||||
Path: deployment.Metadata.Range().GetFilename(),
|
||||
FS: fsys,
|
||||
Contents: deploymentState.ToRego(),
|
||||
Contents: state.ToRego(),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("rego scan error: %w", err)
|
||||
@@ -100,7 +86,3 @@ func (s *Scanner) scanDeployment(ctx context.Context, deployment azure.Deploymen
|
||||
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (s *Scanner) adaptDeployment(ctx context.Context, deployment azure.Deployment) *state.State {
|
||||
return arm.Adapt(ctx, deployment)
|
||||
}
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"sort"
|
||||
"sync"
|
||||
|
||||
adapter "github.com/aquasecurity/trivy/pkg/iac/adapters/cloudformation"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/rego"
|
||||
@@ -45,10 +44,9 @@ var _ scanners.FSScanner = (*Scanner)(nil)
|
||||
var _ options.ConfigurableScanner = (*Scanner)(nil)
|
||||
|
||||
type Scanner struct {
|
||||
mu sync.Mutex
|
||||
*rego.RegoScannerProvider
|
||||
logger *log.Logger
|
||||
parser *parser.Parser
|
||||
regoScanner *rego.Scanner
|
||||
options []options.ScannerOption
|
||||
parserOptions []parser.Option
|
||||
}
|
||||
@@ -64,8 +62,9 @@ func (s *Scanner) Name() string {
|
||||
// New creates a new Scanner
|
||||
func New(opts ...options.ScannerOption) *Scanner {
|
||||
s := &Scanner{
|
||||
options: opts,
|
||||
logger: log.WithPrefix("cloudformation scanner"),
|
||||
RegoScannerProvider: rego.NewRegoScannerProvider(opts...),
|
||||
options: opts,
|
||||
logger: log.WithPrefix("cloudformation scanner"),
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt(s)
|
||||
@@ -74,20 +73,6 @@ func New(opts ...options.ScannerOption) *Scanner {
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.regoScanner != nil {
|
||||
return s.regoScanner, nil
|
||||
}
|
||||
regoScanner := rego.NewScanner(s.options...)
|
||||
if err := regoScanner.LoadPolicies(srcFS); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.regoScanner = regoScanner
|
||||
return regoScanner, nil
|
||||
}
|
||||
|
||||
func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (results scan.Results, err error) {
|
||||
|
||||
contexts, err := s.parser.ParseFS(ctx, fsys, dir)
|
||||
@@ -99,16 +84,16 @@ func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (results s
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
regoScanner, err := s.initRegoScanner(fsys)
|
||||
rs, err := s.InitRegoScanner(fsys, s.options)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("init rego scanner: %w", err)
|
||||
}
|
||||
|
||||
for _, cfCtx := range contexts {
|
||||
if cfCtx == nil {
|
||||
continue
|
||||
}
|
||||
fileResults, err := s.scanFileContext(ctx, regoScanner, cfCtx, fsys)
|
||||
fileResults, err := s.scanFileContext(ctx, rs, cfCtx, fsys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -127,12 +112,12 @@ func (s *Scanner) ScanFile(ctx context.Context, fsys fs.FS, path string) (scan.R
|
||||
return nil, err
|
||||
}
|
||||
|
||||
regoScanner, err := s.initRegoScanner(fsys)
|
||||
rs, err := s.InitRegoScanner(fsys, s.options)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("init rego scanner: %w", err)
|
||||
}
|
||||
|
||||
results, err := s.scanFileContext(ctx, regoScanner, cfCtx, fsys)
|
||||
results, err := s.scanFileContext(ctx, rs, cfCtx, fsys)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -572,7 +572,7 @@ COPY --from=dep /binary /`
|
||||
|
||||
results, err := scanner.ScanFS(t.Context(), fsys, "code")
|
||||
if tc.expectedError != "" && err != nil {
|
||||
require.Equal(t, tc.expectedError, err.Error(), tc.name)
|
||||
require.ErrorContainsf(t, err, tc.expectedError, tc.name)
|
||||
} else {
|
||||
require.NoError(t, err)
|
||||
require.Len(t, results.GetFailed(), 1)
|
||||
|
||||
@@ -9,7 +9,6 @@ import (
|
||||
"io/fs"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/BurntSushi/toml"
|
||||
"github.com/samber/lo"
|
||||
@@ -41,12 +40,11 @@ type configParser interface {
|
||||
|
||||
// GenericScanner is a scanner that scans a file as is without processing it
|
||||
type GenericScanner struct {
|
||||
mu sync.Mutex
|
||||
name string
|
||||
source types.Source
|
||||
logger *log.Logger
|
||||
options []options.ScannerOption
|
||||
regoScanner *rego.Scanner
|
||||
*rego.RegoScannerProvider
|
||||
name string
|
||||
source types.Source
|
||||
logger *log.Logger
|
||||
options []options.ScannerOption
|
||||
|
||||
parser configParser
|
||||
}
|
||||
@@ -59,11 +57,12 @@ func (f ParseFunc) Parse(ctx context.Context, r io.Reader, path string) (any, er
|
||||
|
||||
func NewScanner(name string, source types.Source, parser configParser, opts ...options.ScannerOption) *GenericScanner {
|
||||
s := &GenericScanner{
|
||||
name: name,
|
||||
options: opts,
|
||||
source: source,
|
||||
logger: log.WithPrefix(fmt.Sprintf("%s scanner", source)),
|
||||
parser: parser,
|
||||
RegoScannerProvider: rego.NewRegoScannerProvider(opts...),
|
||||
name: name,
|
||||
options: opts,
|
||||
source: source,
|
||||
logger: log.WithPrefix(fmt.Sprintf("%s scanner", source)),
|
||||
parser: parser,
|
||||
}
|
||||
|
||||
for _, opt := range opts {
|
||||
@@ -113,13 +112,13 @@ func (s *GenericScanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (sc
|
||||
}
|
||||
}
|
||||
|
||||
regoScanner, err := s.initRegoScanner(fsys)
|
||||
rs, err := s.InitRegoScanner(fsys, s.options)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("init rego scanner: %w", err)
|
||||
}
|
||||
|
||||
s.logger.Debug("Scanning files...", log.Int("count", len(inputs)))
|
||||
results, err := regoScanner.ScanInput(ctx, s.source, inputs...)
|
||||
results, err := rs.ScanInput(ctx, s.source, inputs...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -170,20 +169,6 @@ func (s *GenericScanner) parseFS(ctx context.Context, fsys fs.FS, path string) (
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (s *GenericScanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.regoScanner != nil {
|
||||
return s.regoScanner, nil
|
||||
}
|
||||
regoScanner := rego.NewScanner(s.options...)
|
||||
if err := regoScanner.LoadPolicies(srcFS); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.regoScanner = regoScanner
|
||||
return regoScanner, nil
|
||||
}
|
||||
|
||||
func (s *GenericScanner) applyIgnoreRules(fsys fs.FS, results scan.Results) error {
|
||||
if !s.supportsIgnoreRules() {
|
||||
return nil
|
||||
|
||||
@@ -8,7 +8,6 @@ import (
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/liamg/memoryfs"
|
||||
"helm.sh/helm/v3/pkg/chartutil"
|
||||
@@ -29,18 +28,18 @@ var _ scanners.FSScanner = (*Scanner)(nil)
|
||||
var _ options.ConfigurableScanner = (*Scanner)(nil)
|
||||
|
||||
type Scanner struct {
|
||||
mu sync.Mutex
|
||||
*rego.RegoScannerProvider
|
||||
logger *log.Logger
|
||||
options []options.ScannerOption
|
||||
parserOptions []parser.Option
|
||||
regoScanner *rego.Scanner
|
||||
}
|
||||
|
||||
// New creates a new Scanner
|
||||
func New(opts ...options.ScannerOption) *Scanner {
|
||||
s := &Scanner{
|
||||
options: opts,
|
||||
logger: log.WithPrefix("helm scanner"),
|
||||
RegoScannerProvider: rego.NewRegoScannerProvider(opts...),
|
||||
options: opts,
|
||||
logger: log.WithPrefix("helm scanner"),
|
||||
}
|
||||
|
||||
for _, option := range opts {
|
||||
@@ -58,11 +57,6 @@ func (s *Scanner) Name() string {
|
||||
}
|
||||
|
||||
func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (scan.Results, error) {
|
||||
|
||||
if err := s.initRegoScanner(fsys); err != nil {
|
||||
return nil, fmt.Errorf("failed to init rego scanner: %w", err)
|
||||
}
|
||||
|
||||
var results []scan.Result
|
||||
if err := fs.WalkDir(fsys, dir, func(filePath string, d fs.DirEntry, err error) error {
|
||||
select {
|
||||
@@ -122,6 +116,11 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS)
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
rs, err := s.InitRegoScanner(target, s.options)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("init rego scanner: %w", err)
|
||||
}
|
||||
|
||||
for _, file := range chartFiles {
|
||||
file := file
|
||||
s.logger.Debug("Processing rendered chart file", log.FilePath(file.TemplateFilePath))
|
||||
@@ -132,7 +131,7 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS)
|
||||
return nil, fmt.Errorf("unmarshal yaml: %w", err)
|
||||
}
|
||||
for _, manifest := range manifests {
|
||||
fileResults, err := s.regoScanner.ScanInput(ctx, types.SourceKubernetes, rego.Input{
|
||||
fileResults, err := rs.ScanInput(ctx, types.SourceKubernetes, rego.Input{
|
||||
Path: file.TemplateFilePath,
|
||||
Contents: manifest,
|
||||
FS: target,
|
||||
@@ -161,17 +160,3 @@ func (s *Scanner) getScanResults(path string, ctx context.Context, target fs.FS)
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func (s *Scanner) initRegoScanner(srcFS fs.FS) error {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.regoScanner != nil {
|
||||
return nil
|
||||
}
|
||||
regoScanner := rego.NewScanner(s.options...)
|
||||
if err := regoScanner.LoadPolicies(srcFS); err != nil {
|
||||
return err
|
||||
}
|
||||
s.regoScanner = regoScanner
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -44,7 +44,7 @@ func ManifestFromJSON(path string, data []byte) (*Manifest, error) {
|
||||
}
|
||||
|
||||
if err := json.Unmarshal(data, root, json.WithUnmarshalers(
|
||||
json.UnmarshalFromFunc(func(dec *jsontext.Decoder, node *ManifestNode, opts json.Options) error {
|
||||
json.UnmarshalFromFunc(func(dec *jsontext.Decoder, node *ManifestNode) error {
|
||||
startOffset := dec.InputOffset()
|
||||
if err := unmarshalManifestNode(dec, node); err != nil {
|
||||
return err
|
||||
|
||||
@@ -3,15 +3,10 @@ package scanners
|
||||
import (
|
||||
"context"
|
||||
"io/fs"
|
||||
"os"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/iac/scan"
|
||||
)
|
||||
|
||||
type WriteFileFS interface {
|
||||
WriteFile(name string, data []byte, perm os.FileMode) error
|
||||
}
|
||||
|
||||
type FSScanner interface {
|
||||
// Name provides the human-readable name of the scanner e.g. "CloudFormation"
|
||||
Name() string
|
||||
|
||||
@@ -9,126 +9,6 @@ import (
|
||||
"github.com/aquasecurity/trivy/pkg/iac/terraform"
|
||||
)
|
||||
|
||||
func Test_AttributeStartsWith(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
source string
|
||||
checkAttribute string
|
||||
checkValue string
|
||||
expectedResult bool
|
||||
}{
|
||||
{
|
||||
name: "bucket name starts with bucket",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
}`,
|
||||
checkAttribute: "bucket_name",
|
||||
checkValue: "bucket",
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
name: "bucket acl starts with public",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
acl = "public-read"
|
||||
}`,
|
||||
checkAttribute: "acl",
|
||||
checkValue: "public",
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
name: "bucket name doesn't start with secret",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
acl = "public-read"
|
||||
logging {
|
||||
target_bucket = aws_s3_bucket.log_bucket.id
|
||||
target_prefix = "log/"
|
||||
}
|
||||
}`,
|
||||
checkAttribute: "bucket_name",
|
||||
checkValue: "secret_",
|
||||
expectedResult: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
modules := createModulesFromSource(t, test.source, ".tf")
|
||||
for _, module := range modules {
|
||||
for _, block := range module.GetBlocks() {
|
||||
attr := block.GetAttribute(test.checkAttribute)
|
||||
require.NotNil(t, attr)
|
||||
assert.Equal(t, test.expectedResult, attr.StartsWith(test.checkValue))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_AttributeEndsWith(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
source string
|
||||
checkAttribute string
|
||||
checkValue string
|
||||
expectedResult bool
|
||||
}{
|
||||
{
|
||||
name: "bucket name ends with Name",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
}`,
|
||||
checkAttribute: "bucket_name",
|
||||
checkValue: "Name",
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
name: "bucket acl ends with read not Read",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
acl = "public-read"
|
||||
}`,
|
||||
checkAttribute: "acl",
|
||||
checkValue: "Read",
|
||||
expectedResult: false,
|
||||
},
|
||||
{
|
||||
name: "bucket name doesn't end with bucket",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
acl = "public-read"
|
||||
logging {
|
||||
target_bucket = aws_s3_bucket.log_bucket.id
|
||||
target_prefix = "log/"
|
||||
}
|
||||
}`,
|
||||
checkAttribute: "bucket_name",
|
||||
checkValue: "_bucket",
|
||||
expectedResult: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
modules := createModulesFromSource(t, test.source, ".tf")
|
||||
for _, module := range modules {
|
||||
for _, block := range module.GetBlocks() {
|
||||
attr := block.GetAttribute(test.checkAttribute)
|
||||
require.NotNil(t, attr)
|
||||
assert.Equal(t, test.expectedResult, attr.EndsWith(test.checkValue))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_AttributeContains(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
@@ -287,118 +167,6 @@ resource "aws_security_group" "my-security_group" {
|
||||
}
|
||||
}
|
||||
|
||||
func Test_AttributeIsAny(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
source string
|
||||
checkAttribute string
|
||||
checkValue []any
|
||||
expectedResult bool
|
||||
}{
|
||||
{
|
||||
name: "bucket acl is not one of the specified acls",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
acl = "public-read"
|
||||
}`,
|
||||
checkAttribute: "acl",
|
||||
checkValue: []any{"private", "authenticated-read"},
|
||||
expectedResult: false,
|
||||
},
|
||||
{
|
||||
name: "bucket acl is one of the specified acls",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
acl = "private"
|
||||
}`,
|
||||
checkAttribute: "acl",
|
||||
checkValue: []any{"private", "authenticated-read"},
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
name:           "count is one of the provided values",
|
||||
source: `
|
||||
resource "aws_security_group" "my-security_group" {
|
||||
count = 1
|
||||
}`,
|
||||
checkAttribute: "count",
|
||||
checkValue: []any{1, 2},
|
||||
expectedResult: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
modules := createModulesFromSource(t, test.source, ".tf")
|
||||
for _, module := range modules {
|
||||
for _, block := range module.GetBlocks() {
|
||||
attr := block.GetAttribute(test.checkAttribute)
|
||||
require.NotNil(t, attr)
|
||||
assert.Equal(t, test.expectedResult, attr.IsAny(test.checkValue...))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_AttributeIsNone(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
source string
|
||||
checkAttribute string
|
||||
checkValue []any
|
||||
expectedResult bool
|
||||
}{
|
||||
{
|
||||
name: "bucket acl is not one of the specified acls",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
acl = "public-read"
|
||||
}`,
|
||||
checkAttribute: "acl",
|
||||
checkValue: []any{"private", "authenticated-read"},
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
name: "bucket acl is one of the specified acls",
|
||||
source: `
|
||||
resource "aws_s3_bucket" "my-bucket" {
|
||||
bucket_name = "bucketName"
|
||||
acl = "private"
|
||||
}`,
|
||||
checkAttribute: "acl",
|
||||
checkValue: []any{"private", "authenticated-read"},
|
||||
expectedResult: false,
|
||||
},
|
||||
{
|
||||
name:           "count is none of the provided values",
|
||||
source: `
|
||||
resource "aws_security_group" "my-security_group" {
|
||||
count = 0
|
||||
}`,
|
||||
checkAttribute: "count",
|
||||
checkValue: []any{1, 2},
|
||||
expectedResult: true,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
modules := createModulesFromSource(t, test.source, ".tf")
|
||||
for _, module := range modules {
|
||||
for _, block := range module.GetBlocks() {
|
||||
attr := block.GetAttribute(test.checkAttribute)
|
||||
require.NotNil(t, attr)
|
||||
assert.Equal(t, test.expectedResult, attr.IsNone(test.checkValue...))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_AttributeIsEmpty(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
@@ -509,92 +277,6 @@ resource "aws_security_group_rule" "example" {
|
||||
}
|
||||
}
|
||||
|
||||
func Test_AttributeIsLessThan(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
source string
|
||||
checkAttribute string
|
||||
checkValue int
|
||||
expectedResult bool
|
||||
}{
|
||||
{
|
||||
name: "check attribute is less than check value",
|
||||
source: `
|
||||
resource "numerical_something" "my-bucket" {
|
||||
value = 100
|
||||
}`,
|
||||
checkAttribute: "value",
|
||||
checkValue: 200,
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
name: "check attribute is not less than check value",
|
||||
source: `
|
||||
resource "numerical_something" "my-bucket" {
|
||||
value = 100
|
||||
}`,
|
||||
checkAttribute: "value",
|
||||
checkValue: 50,
|
||||
expectedResult: false,
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
modules := createModulesFromSource(t, test.source, ".tf")
|
||||
for _, module := range modules {
|
||||
for _, block := range module.GetBlocks() {
|
||||
attr := block.GetAttribute(test.checkAttribute)
|
||||
require.NotNil(t, attr)
|
||||
assert.Equal(t, test.expectedResult, attr.LessThan(test.checkValue))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_AttributeIsLessThanOrEqual(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
source string
|
||||
checkAttribute string
|
||||
checkValue int
|
||||
expectedResult bool
|
||||
}{
|
||||
{
|
||||
name: "check attribute is less than or equal check value",
|
||||
source: `
|
||||
resource "numerical_something" "my-bucket" {
|
||||
value = 100
|
||||
}`,
|
||||
checkAttribute: "value",
|
||||
checkValue: 100,
|
||||
expectedResult: true,
|
||||
},
|
||||
{
|
||||
name: "check attribute is not less than check value",
|
||||
source: `
|
||||
resource "numerical_something" "my-bucket" {
|
||||
value = 100
|
||||
}`,
|
||||
checkAttribute: "value",
|
||||
checkValue: 50,
|
||||
expectedResult: false,
|
||||
},
|
||||
}
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
modules := createModulesFromSource(t, test.source, ".tf")
|
||||
for _, module := range modules {
|
||||
for _, block := range module.GetBlocks() {
|
||||
attr := block.GetAttribute(test.checkAttribute)
|
||||
require.NotNil(t, attr)
|
||||
assert.Equal(t, test.expectedResult, attr.LessThanOrEqualTo(test.checkValue))
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_AttributeIsTrue(t *testing.T) {
|
||||
var tests = []struct {
|
||||
name string
|
||||
|
||||
34
pkg/iac/scanners/terraform/parser/ctylist.go
Normal file
@@ -0,0 +1,34 @@
|
||||
package parser
|
||||
|
||||
import "github.com/zclconf/go-cty/cty"
|
||||
|
||||
// insertTupleElement inserts a value into a tuple at the specified index.
|
||||
// If the idx is outside the bounds of the list, it grows the tuple to
|
||||
// the new size, and fills in `cty.NilVal` for the missing elements.
|
||||
//
|
||||
// This function will not panic. If the list value is not a list, it will
|
||||
// be replaced with an empty list.
|
||||
func insertTupleElement(list cty.Value, idx int, val cty.Value) cty.Value {
|
||||
if list.IsNull() || !list.Type().IsTupleType() {
|
||||
// better than a panic
|
||||
list = cty.EmptyTupleVal
|
||||
}
|
||||
|
||||
if idx < 0 {
|
||||
// Nothing to do?
|
||||
return list
|
||||
}
|
||||
|
||||
// Create a new list of the correct length, copying in the old list
|
||||
// values for matching indices.
|
||||
newList := make([]cty.Value, max(idx+1, list.LengthInt()))
|
||||
for it := list.ElementIterator(); it.Next(); {
|
||||
key, elem := it.Element()
|
||||
elemIdx, _ := key.AsBigFloat().Int64()
|
||||
newList[elemIdx] = elem
|
||||
}
|
||||
// Insert the new value.
|
||||
newList[idx] = val
|
||||
|
||||
return cty.TupleVal(newList)
|
||||
}
|
||||
103
pkg/iac/scanners/terraform/parser/ctylist_test.go
Normal file
@@ -0,0 +1,103 @@
|
||||
package parser
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/zclconf/go-cty/cty"
|
||||
)
|
||||
|
||||
func Test_insertTupleElement(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
start cty.Value
|
||||
index int
|
||||
value cty.Value
|
||||
want cty.Value
|
||||
}{
|
||||
{
|
||||
name: "empty",
|
||||
start: cty.Value{},
|
||||
index: 0,
|
||||
value: cty.NilVal,
|
||||
want: cty.TupleVal([]cty.Value{cty.NilVal}),
|
||||
},
|
||||
{
|
||||
name: "empty to length",
|
||||
start: cty.Value{},
|
||||
index: 2,
|
||||
value: cty.NilVal,
|
||||
want: cty.TupleVal([]cty.Value{cty.NilVal, cty.NilVal, cty.NilVal}),
|
||||
},
|
||||
{
|
||||
name: "insert to empty",
|
||||
start: cty.EmptyTupleVal,
|
||||
index: 1,
|
||||
value: cty.NumberIntVal(5),
|
||||
want: cty.TupleVal([]cty.Value{cty.NilVal, cty.NumberIntVal(5)}),
|
||||
},
|
||||
{
|
||||
name: "insert to existing",
|
||||
start: cty.TupleVal([]cty.Value{cty.NumberIntVal(1), cty.NumberIntVal(2), cty.NumberIntVal(3)}),
|
||||
index: 1,
|
||||
value: cty.NumberIntVal(5),
|
||||
want: cty.TupleVal([]cty.Value{cty.NumberIntVal(1), cty.NumberIntVal(5), cty.NumberIntVal(3)}),
|
||||
},
|
||||
{
|
||||
name: "insert to existing, extends",
|
||||
start: cty.TupleVal([]cty.Value{cty.NumberIntVal(1), cty.NumberIntVal(2), cty.NumberIntVal(3)}),
|
||||
index: 4,
|
||||
value: cty.NumberIntVal(5),
|
||||
want: cty.TupleVal([]cty.Value{
|
||||
cty.NumberIntVal(1), cty.NumberIntVal(2),
|
||||
cty.NumberIntVal(3), cty.NilVal,
|
||||
cty.NumberIntVal(5),
|
||||
}),
|
||||
},
|
||||
{
|
||||
name: "mixed list",
|
||||
start: cty.TupleVal([]cty.Value{cty.StringVal("a"), cty.NumberIntVal(2), cty.NumberIntVal(3)}),
|
||||
index: 1,
|
||||
value: cty.BoolVal(true),
|
||||
want: cty.TupleVal([]cty.Value{
|
||||
cty.StringVal("a"), cty.BoolVal(true), cty.NumberIntVal(3),
|
||||
}),
|
||||
},
|
||||
{
|
||||
name: "replace end",
|
||||
start: cty.TupleVal([]cty.Value{cty.StringVal("a"), cty.NumberIntVal(2), cty.NumberIntVal(3)}),
|
||||
index: 2,
|
||||
value: cty.StringVal("end"),
|
||||
want: cty.TupleVal([]cty.Value{
|
||||
cty.StringVal("a"), cty.NumberIntVal(2), cty.StringVal("end"),
|
||||
}),
|
||||
},
|
||||
|
||||
// Some bad arguments
|
||||
{
|
||||
name: "negative index",
|
||||
start: cty.TupleVal([]cty.Value{cty.StringVal("a"), cty.NumberIntVal(2), cty.NumberIntVal(3)}),
|
||||
index: -1,
|
||||
value: cty.BoolVal(true),
|
||||
want: cty.TupleVal([]cty.Value{cty.StringVal("a"), cty.NumberIntVal(2), cty.NumberIntVal(3)}),
|
||||
},
|
||||
{
|
||||
name: "non-list",
|
||||
start: cty.BoolVal(true),
|
||||
index: 1,
|
||||
value: cty.BoolVal(true),
|
||||
want: cty.TupleVal([]cty.Value{cty.NilVal, cty.BoolVal(true)}),
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
tt := tt
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
require.Equal(t, tt.want, insertTupleElement(tt.start, tt.index, tt.value))
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"io/fs"
|
||||
"maps"
|
||||
"reflect"
|
||||
"slices"
|
||||
|
||||
@@ -464,9 +465,6 @@ func (e *evaluator) evaluateVariable(b *terraform.Block) (cty.Value, error) {
|
||||
}
|
||||
|
||||
attributes := b.Attributes()
|
||||
if attributes == nil {
|
||||
return cty.NilVal, errors.New("cannot resolve variable with no attributes")
|
||||
}
|
||||
|
||||
var valType cty.Type
|
||||
var defaults *typeexpr.Defaults
|
||||
@@ -524,7 +522,6 @@ func (e *evaluator) getValuesByBlockType(blockType string) cty.Value {
|
||||
values := make(map[string]cty.Value)
|
||||
|
||||
for _, b := range blocksOfType {
|
||||
|
||||
switch b.Type() {
|
||||
case "variable": // variables are special in that their value comes from the "default" attribute
|
||||
val, err := e.evaluateVariable(b)
|
||||
@@ -539,9 +536,7 @@ func (e *evaluator) getValuesByBlockType(blockType string) cty.Value {
|
||||
}
|
||||
values[b.Label()] = val
|
||||
case "locals", "moved", "import":
|
||||
for key, val := range b.Values().AsValueMap() {
|
||||
values[key] = val
|
||||
}
|
||||
maps.Copy(values, b.Values().AsValueMap())
|
||||
case "provider", "module", "check":
|
||||
if b.Label() == "" {
|
||||
continue
|
||||
@@ -552,19 +547,27 @@ func (e *evaluator) getValuesByBlockType(blockType string) cty.Value {
|
||||
continue
|
||||
}
|
||||
|
||||
blockMap, ok := values[b.Labels()[0]]
|
||||
// Data blocks should all be loaded into the top level 'values'
|
||||
// object. The hierarchy of the map is:
|
||||
// values = map[<type>]map[<name>] =
|
||||
// Block -> Block's attributes as a cty.Object
|
||||
// Tuple(Block) -> Instances of the block
|
||||
// Object(Block) -> Field values are instances of the block
|
||||
ref := b.Reference()
|
||||
typeValues, ok := values[ref.TypeLabel()]
|
||||
if !ok {
|
||||
values[b.Labels()[0]] = cty.ObjectVal(make(map[string]cty.Value))
|
||||
blockMap = values[b.Labels()[0]]
|
||||
typeValues = cty.ObjectVal(make(map[string]cty.Value))
|
||||
values[ref.TypeLabel()] = typeValues
|
||||
}
|
||||
|
||||
valueMap := blockMap.AsValueMap()
|
||||
valueMap := typeValues.AsValueMap()
|
||||
if valueMap == nil {
|
||||
valueMap = make(map[string]cty.Value)
|
||||
}
|
||||
valueMap[ref.NameLabel()] = blockInstanceValues(b, valueMap)
|
||||
|
||||
valueMap[b.Labels()[1]] = b.Values()
|
||||
values[b.Labels()[0]] = cty.ObjectVal(valueMap)
|
||||
// Update the map of all blocks with the same type.
|
||||
values[ref.TypeLabel()] = cty.ObjectVal(valueMap)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -575,23 +578,57 @@ func (e *evaluator) getResources() map[string]cty.Value {
|
||||
values := make(map[string]map[string]cty.Value)
|
||||
|
||||
for _, b := range e.blocks {
|
||||
if b.Type() != "resource" {
|
||||
if b.Type() != "resource" || len(b.Labels()) < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
if len(b.Labels()) < 2 {
|
||||
continue
|
||||
}
|
||||
|
||||
val, exists := values[b.Labels()[0]]
|
||||
ref := b.Reference()
|
||||
typeValues, exists := values[ref.TypeLabel()]
|
||||
if !exists {
|
||||
val = make(map[string]cty.Value)
|
||||
values[b.Labels()[0]] = val
|
||||
typeValues = make(map[string]cty.Value)
|
||||
values[ref.TypeLabel()] = typeValues
|
||||
}
|
||||
val[b.Labels()[1]] = b.Values()
|
||||
typeValues[ref.NameLabel()] = blockInstanceValues(b, typeValues)
|
||||
}
|
||||
|
||||
return lo.MapValues(values, func(v map[string]cty.Value, _ string) cty.Value {
|
||||
return cty.ObjectVal(v)
|
||||
})
|
||||
}
|
||||
|
||||
// blockInstanceValues returns a cty.Value containing the values of the block instances.
|
||||
// If the count argument is used, a tuple is returned where the index corresponds to the argument index.
|
||||
// If the for_each argument is used, an object is returned where the key corresponds to the argument key.
|
||||
// In other cases, the values of the block itself are returned.
|
||||
func blockInstanceValues(b *terraform.Block, typeValues map[string]cty.Value) cty.Value {
|
||||
ref := b.Reference()
|
||||
key := ref.RawKey()
|
||||
|
||||
switch {
|
||||
case key.Type().Equals(cty.Number) && b.GetAttribute("count") != nil:
|
||||
idx, _ := key.AsBigFloat().Int64()
|
||||
return insertTupleElement(typeValues[ref.NameLabel()], int(idx), b.Values())
|
||||
case isForEachKey(key) && b.GetAttribute("for_each") != nil:
|
||||
keyStr := ref.Key()
|
||||
|
||||
instancesVal, exists := typeValues[ref.NameLabel()]
|
||||
if !exists || !instancesVal.CanIterateElements() {
|
||||
instancesVal = cty.EmptyObjectVal
|
||||
}
|
||||
|
||||
instances := instancesVal.AsValueMap()
|
||||
if instances == nil {
|
||||
instances = make(map[string]cty.Value)
|
||||
}
|
||||
|
||||
instances[keyStr] = b.Values()
|
||||
return cty.ObjectVal(instances)
|
||||
|
||||
default:
|
||||
return b.Values()
|
||||
}
|
||||
}
|
||||
|
||||
func isForEachKey(key cty.Value) bool {
|
||||
return key.Type().Equals(cty.Number) || key.Type().Equals(cty.String)
|
||||
}
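As the doc comment above states, blockInstanceValues shapes instances by how the block is expanded: count instances land in a tuple indexed by count.index (built with insertTupleElement), for_each instances in an object keyed by each.key, and plain blocks keep their own values. That is what lets references such as data.d.foo[0].value and each.value.value in the tests further down resolve. A rough go-cty sketch of the resulting shapes, with illustrative values only:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// count = 2  ->  the block becomes a tuple of instance objects.
	countInstances := cty.TupleVal([]cty.Value{
		cty.ObjectVal(map[string]cty.Value{"value": cty.StringVal("Index 0")}),
		cty.ObjectVal(map[string]cty.Value{"value": cty.StringVal("Index 1")}),
	})
	fmt.Println(countInstances.Index(cty.NumberIntVal(0)).GetAttr("value").AsString()) // Index 0

	// for_each = toset(["a"])  ->  the block becomes an object keyed by each.key.
	forEachInstances := cty.ObjectVal(map[string]cty.Value{
		"a": cty.ObjectVal(map[string]cty.Value{"value": cty.StringVal("Index a")}),
	})
	fmt.Println(forEachInstances.GetAttr("a").GetAttr("value").AsString()) // Index a
}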
@@ -269,14 +269,7 @@ func (p *Parser) Load(ctx context.Context) (*evaluator, error) {
|
||||
return nil, err
|
||||
}
|
||||
p.logger.Debug("Added input variables from tfvars", log.Int("count", len(inputVars)))
|
||||
|
||||
if missingVars := missingVariableValues(blocks, inputVars); len(missingVars) > 0 {
|
||||
p.logger.Warn(
|
||||
"Variable values was not found in the environment or variable files. Evaluating may not work correctly.",
|
||||
log.String("variables", strings.Join(missingVars, ", ")),
|
||||
)
|
||||
setNullMissingVariableValues(inputVars, missingVars)
|
||||
}
|
||||
p.setFallbackValuesForMissingVars(inputVars, blocks)
|
||||
}
|
||||
|
||||
modulesMetadata, metadataPath, err := loadModuleMetadata(p.moduleFS, p.projectRoot)
|
||||
@@ -314,12 +307,12 @@ func (p *Parser) Load(ctx context.Context) (*evaluator, error) {
|
||||
), nil
|
||||
}
|
||||
|
||||
func missingVariableValues(blocks terraform.Blocks, inputVars map[string]cty.Value) []string {
|
||||
var missing []string
|
||||
func missingVariableValues(blocks terraform.Blocks, inputVars map[string]cty.Value) []*terraform.Block {
|
||||
var missing []*terraform.Block
|
||||
for _, varBlock := range blocks.OfType("variable") {
|
||||
if varBlock.GetAttribute("default") == nil {
|
||||
if _, ok := inputVars[varBlock.TypeLabel()]; !ok {
|
||||
missing = append(missing, varBlock.TypeLabel())
|
||||
missing = append(missing, varBlock)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -327,12 +320,41 @@ func missingVariableValues(blocks terraform.Blocks, inputVars map[string]cty.Val
|
||||
return missing
|
||||
}
|
||||
|
||||
// Set null values for missing variables, to allow expressions using them to be
|
||||
// Set fallback values for missing variables, to allow expressions using them to
|
||||
// still be possibly evaluated to a value different than null.
|
||||
func setNullMissingVariableValues(inputVars map[string]cty.Value, missingVars []string) {
|
||||
for _, missingVar := range missingVars {
|
||||
inputVars[missingVar] = cty.NullVal(cty.DynamicPseudoType)
|
||||
func (p *Parser) setFallbackValuesForMissingVars(inputVars map[string]cty.Value, blocks []*terraform.Block) {
|
||||
varBlocks := missingVariableValues(blocks, inputVars)
|
||||
if len(varBlocks) == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
missingVars := make([]string, 0, len(varBlocks))
|
||||
for _, block := range varBlocks {
|
||||
varType := inputVariableType(block)
|
||||
if varType != cty.NilType {
|
||||
inputVars[block.TypeLabel()] = cty.UnknownVal(varType)
|
||||
} else {
|
||||
inputVars[block.TypeLabel()] = cty.DynamicVal
|
||||
}
|
||||
missingVars = append(missingVars, block.TypeLabel())
|
||||
}
|
||||
|
||||
p.logger.Warn(
|
||||
"Variable values were not found in the environment or variable files. Evaluating may not work correctly.",
|
||||
log.String("variables", strings.Join(missingVars, ", ")),
|
||||
)
|
||||
}
|
||||
|
||||
func inputVariableType(b *terraform.Block) cty.Type {
|
||||
typeAttr, exists := b.Attributes()["type"]
|
||||
if !exists {
|
||||
return cty.NilType
|
||||
}
|
||||
ty, _, err := typeAttr.DecodeVarType()
|
||||
if err != nil {
|
||||
return cty.NilType
|
||||
}
|
||||
return ty
|
||||
}
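The fallback above means a declared-but-unset variable now becomes cty.UnknownVal of its declared type (or cty.DynamicVal when untyped) instead of null, so expressions that use it stay unknown rather than collapsing to null; TestAttributeWithMissingVarIsUnresolvable below asserts exactly that. A small standalone illustration of the difference, not parser code:

package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
)

func main() {
	// Previous behaviour: a missing variable was set to null.
	old := cty.NullVal(cty.DynamicPseudoType)
	fmt.Println(old.IsNull(), old.IsKnown()) // true true

	// New behaviour: a typed variable falls back to an unknown of its type...
	typed := cty.UnknownVal(cty.String)
	fmt.Println(typed.IsNull(), typed.IsKnown()) // false false

	// ...and an untyped variable to a fully dynamic unknown value.
	untyped := cty.DynamicVal
	fmt.Println(untyped.IsNull(), untyped.IsKnown()) // false false
}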
func (p *Parser) EvaluateAll(ctx context.Context) (terraform.Modules, cty.Value, error) {
|
||||
|
||||
@@ -1704,6 +1704,102 @@ resource "test_resource" "this" {
|
||||
assert.Equal(t, "test_value", attr.GetRawValue())
|
||||
}
|
||||
|
||||
func TestPopulateContextWithBlockInstances(t *testing.T) {
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
files map[string]string
|
||||
}{
|
||||
{
|
||||
name: "data blocks with count",
|
||||
files: map[string]string{
|
||||
"main.tf": `data "d" "foo" {
|
||||
count = 1
|
||||
value = "Index ${count.index}"
|
||||
}
|
||||
|
||||
data "b" "foo" {
|
||||
count = 1
|
||||
value = data.d.foo[0].value
|
||||
}
|
||||
|
||||
data "c" "foo" {
|
||||
count = 1
|
||||
value = data.b.foo[0].value
|
||||
}`,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "resource blocks with count",
|
||||
files: map[string]string{
|
||||
"main.tf": `resource "d" "foo" {
|
||||
count = 1
|
||||
value = "Index ${count.index}"
|
||||
}
|
||||
|
||||
resource "b" "foo" {
|
||||
count = 1
|
||||
value = d.foo[0].value
|
||||
}
|
||||
|
||||
resource "c" "foo" {
|
||||
count = 1
|
||||
value = b.foo[0].value
|
||||
}`,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "data blocks with for_each",
|
||||
files: map[string]string{
|
||||
"main.tf": `data "d" "foo" {
|
||||
for_each = toset([0])
|
||||
value = "Index ${each.key}"
|
||||
}
|
||||
|
||||
data "b" "foo" {
|
||||
for_each = data.d.foo
|
||||
value = each.value.value
|
||||
}
|
||||
|
||||
data "c" "foo" {
|
||||
for_each = data.b.foo
|
||||
value = each.value.value
|
||||
}`,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "resource blocks with for_each",
|
||||
files: map[string]string{
|
||||
"main.tf": `resource "d" "foo" {
|
||||
for_each = toset([0])
|
||||
value = "Index ${each.key}"
|
||||
}
|
||||
|
||||
resource "b" "foo" {
|
||||
for_each = d.foo
|
||||
value = each.value.value
|
||||
}
|
||||
|
||||
resource "c" "foo" {
|
||||
for_each = b.foo
|
||||
value = each.value.value
|
||||
}`,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
modules := parse(t, tt.files)
|
||||
require.Len(t, modules, 1)
|
||||
for _, b := range modules.GetBlocks() {
|
||||
attr := b.GetAttribute("value")
|
||||
assert.Equal(t, "Index 0", attr.Value().AsString())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// TestNestedModulesOptions ensures parser options are carried to the nested
|
||||
// submodule evaluators.
|
||||
// The test will include an invalid module that will fail to download
|
||||
@@ -2042,7 +2138,7 @@ resource "test" "values" {
|
||||
|
||||
s_attr := resources[0].GetAttribute("s")
|
||||
require.NotNil(t, s_attr)
|
||||
assert.Equal(t, "foo-", s_attr.GetRawValue())
|
||||
assert.Equal(t, "foo-", s_attr.Value().Range().StringPrefix())
|
||||
|
||||
for _, name := range []string{"l1", "l2", "d1", "d2"} {
|
||||
attr := resources[0].GetAttribute(name)
|
||||
@@ -2224,7 +2320,7 @@ variable "baz" {}
|
||||
_, err := parser.Load(t.Context())
|
||||
require.NoError(t, err)
|
||||
|
||||
assert.Contains(t, buf.String(), "Variable values was not found in the environment or variable files.")
|
||||
assert.Contains(t, buf.String(), "Variable values were not found in the environment or variable files.")
|
||||
assert.Contains(t, buf.String(), "variables=\"foo\"")
|
||||
}
|
||||
|
||||
@@ -2454,3 +2550,29 @@ module "bar" {
|
||||
_, _, err = parser.EvaluateAll(t.Context())
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
func TestAttributeWithMissingVarIsUnresolvable(t *testing.T) {
|
||||
fsys := fstest.MapFS{
|
||||
"main.tf": &fstest.MapFile{Data: []byte(`variable "inp" {
|
||||
type = string
|
||||
}
|
||||
|
||||
resource "foo" "bar" {
|
||||
attr = "${var.inp}-test"
|
||||
}
|
||||
`)},
|
||||
}
|
||||
|
||||
parser := New(fsys, "", OptionStopOnHCLError(true))
|
||||
require.NoError(t, parser.ParseFS(t.Context(), "."))
|
||||
|
||||
_, err := parser.Load(t.Context())
|
||||
require.NoError(t, err)
|
||||
|
||||
modules, _, err := parser.EvaluateAll(t.Context())
|
||||
require.NoError(t, err)
|
||||
require.Len(t, modules, 1)
|
||||
foo := modules[0].GetResourcesByType("foo")[0]
|
||||
attr := foo.GetAttribute("attr")
|
||||
assert.False(t, attr.IsResolvable())
|
||||
}
|
||||
|
||||
@@ -26,14 +26,13 @@ var _ options.ConfigurableScanner = (*Scanner)(nil)
|
||||
var _ ConfigurableTerraformScanner = (*Scanner)(nil)
|
||||
|
||||
type Scanner struct {
|
||||
mu sync.Mutex
|
||||
*rego.RegoScannerProvider
|
||||
logger *log.Logger
|
||||
options []options.ScannerOption
|
||||
parserOpt []parser.Option
|
||||
executorOpt []executor.Option
|
||||
dirs set.Set[string]
|
||||
forceAllDirs bool
|
||||
regoScanner *rego.Scanner
|
||||
execLock sync.RWMutex
|
||||
}
|
||||
|
||||
@@ -55,9 +54,10 @@ func (s *Scanner) AddExecutorOptions(opts ...executor.Option) {
|
||||
|
||||
func New(opts ...options.ScannerOption) *Scanner {
|
||||
s := &Scanner{
|
||||
dirs: set.New[string](),
|
||||
options: opts,
|
||||
logger: log.WithPrefix("terraform scanner"),
|
||||
RegoScannerProvider: rego.NewRegoScannerProvider(opts...),
|
||||
dirs: set.New[string](),
|
||||
options: opts,
|
||||
logger: log.WithPrefix("terraform scanner"),
|
||||
}
|
||||
for _, opt := range opts {
|
||||
opt(s)
|
||||
@@ -65,20 +65,6 @@ func New(opts ...options.ScannerOption) *Scanner {
|
||||
return s
|
||||
}
|
||||
|
||||
func (s *Scanner) initRegoScanner(srcFS fs.FS) (*rego.Scanner, error) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
if s.regoScanner != nil {
|
||||
return s.regoScanner, nil
|
||||
}
|
||||
regoScanner := rego.NewScanner(s.options...)
|
||||
if err := regoScanner.LoadPolicies(srcFS); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s.regoScanner = regoScanner
|
||||
return regoScanner, nil
|
||||
}
|
||||
|
||||
// terraformRootModule represents the module to be used as the root module for Terraform deployment.
|
||||
type terraformRootModule struct {
|
||||
rootPath string
|
||||
@@ -99,13 +85,13 @@ func (s *Scanner) ScanFS(ctx context.Context, target fs.FS, dir string) (scan.Re
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
regoScanner, err := s.initRegoScanner(target)
|
||||
rs, err := s.InitRegoScanner(target, s.options)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, fmt.Errorf("init rego scanner: %w", err)
|
||||
}
|
||||
|
||||
s.execLock.Lock()
|
||||
s.executorOpt = append(s.executorOpt, executor.OptionWithRegoScanner(regoScanner))
|
||||
s.executorOpt = append(s.executorOpt, executor.OptionWithRegoScanner(rs))
|
||||
s.execLock.Unlock()
|
||||
|
||||
var allResults scan.Results
|
||||
|
||||
@@ -9,12 +9,12 @@ import (
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/iac/scan"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/scanners/options"
|
||||
terraformScanner "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform"
|
||||
tfscanner "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform"
|
||||
tfparser "github.com/aquasecurity/trivy/pkg/iac/scanners/terraform/parser"
|
||||
)
|
||||
|
||||
type Scanner struct {
|
||||
inner *terraformScanner.Scanner
|
||||
inner *tfscanner.Scanner
|
||||
}
|
||||
|
||||
func (s *Scanner) Name() string {
|
||||
@@ -23,7 +23,7 @@ func (s *Scanner) Name() string {
|
||||
|
||||
func New(opts ...options.ScannerOption) *Scanner {
|
||||
scanner := &Scanner{
|
||||
inner: terraformScanner.New(opts...),
|
||||
inner: tfscanner.New(opts...),
|
||||
}
|
||||
return scanner
|
||||
}
|
||||
|
||||
@@ -14,10 +14,10 @@ import (
|
||||
)
|
||||
|
||||
type Scanner struct {
|
||||
parser *parser.Parser
|
||||
logger *log.Logger
|
||||
options []options.ScannerOption
|
||||
tfScanner *terraform.Scanner
|
||||
inner *terraform.Scanner
|
||||
parser *parser.Parser
|
||||
logger *log.Logger
|
||||
options []options.ScannerOption
|
||||
}
|
||||
|
||||
func (s *Scanner) Name() string {
|
||||
@@ -55,10 +55,10 @@ func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (scan.Resu
|
||||
|
||||
func New(opts ...options.ScannerOption) *Scanner {
|
||||
scanner := &Scanner{
|
||||
options: opts,
|
||||
logger: log.WithPrefix("tfjson scanner"),
|
||||
parser: parser.New(),
|
||||
tfScanner: terraform.New(opts...),
|
||||
inner: terraform.New(opts...),
|
||||
parser: parser.New(),
|
||||
logger: log.WithPrefix("tfjson scanner"),
|
||||
options: opts,
|
||||
}
|
||||
|
||||
return scanner
|
||||
@@ -87,5 +87,5 @@ func (s *Scanner) Scan(reader io.Reader) (scan.Results, error) {
|
||||
return nil, fmt.Errorf("failed to convert plan to FS: %w", err)
|
||||
}
|
||||
|
||||
return s.tfScanner.ScanFS(context.TODO(), planFS, ".")
|
||||
return s.inner.ScanFS(context.TODO(), planFS, ".")
|
||||
}
|
||||
|
||||
@@ -4,7 +4,6 @@ import (
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
@@ -287,7 +286,7 @@ func (a *Attribute) Value() (ctyVal cty.Value) {
|
||||
}
|
||||
}()
|
||||
ctyVal, _ = a.hclAttribute.Expr.Value(a.ctx.Inner())
|
||||
if !ctyVal.IsKnown() || ctyVal.IsNull() {
|
||||
if ctyVal.IsNull() {
|
||||
return cty.DynamicVal
|
||||
}
|
||||
return ctyVal
|
||||
@@ -510,10 +509,6 @@ func (a *Attribute) mapContains(checkValue any, val cty.Value) bool {
|
||||
}
|
||||
}
|
||||
|
||||
func (a *Attribute) NotContains(checkValue any, equalityOptions ...EqualityOption) bool {
|
||||
return !a.Contains(checkValue, equalityOptions...)
|
||||
}
|
||||
|
||||
func (a *Attribute) Contains(checkValue any, equalityOptions ...EqualityOption) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
@@ -604,26 +599,6 @@ func containsIgnoreCase(left, substring string) bool {
|
||||
return strings.Contains(strings.ToLower(left), strings.ToLower(substring))
|
||||
}
|
||||
|
||||
func (a *Attribute) StartsWith(prefix any) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.String {
|
||||
return strings.HasPrefix(a.Value().AsString(), fmt.Sprintf("%v", prefix))
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *Attribute) EndsWith(suffix any) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.String {
|
||||
return strings.HasSuffix(a.Value().AsString(), fmt.Sprintf("%v", suffix))
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
type EqualityOption int
|
||||
|
||||
const (
|
||||
@@ -661,74 +636,6 @@ func (a *Attribute) NotEqual(checkValue any, equalityOptions ...EqualityOption)
|
||||
return !a.Equals(checkValue, equalityOptions...)
|
||||
}
|
||||
|
||||
func (a *Attribute) RegexMatches(re regexp.Regexp) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.String {
|
||||
match := re.MatchString(a.Value().AsString())
|
||||
return match
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *Attribute) IsNotAny(options ...any) bool {
|
||||
return !a.IsAny(options...)
|
||||
}
|
||||
|
||||
func (a *Attribute) IsAny(options ...any) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.String {
|
||||
value := a.Value().AsString()
|
||||
for _, option := range options {
|
||||
if option == value {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
if a.Value().Type() == cty.Number {
|
||||
for _, option := range options {
|
||||
checkValue, err := gocty.ToCtyValue(option, cty.Number)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().RawEquals(checkValue) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *Attribute) IsNone(options ...any) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.String {
|
||||
for _, option := range options {
|
||||
if option == a.Value().AsString() {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
if a.Value().Type() == cty.Number {
|
||||
for _, option := range options {
|
||||
checkValue, err := gocty.ToCtyValue(option, cty.Number)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().RawEquals(checkValue) {
|
||||
return false
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
|
||||
func (a *Attribute) IsTrue() bool {
|
||||
if a == nil {
|
||||
return false
|
||||
@@ -849,77 +756,6 @@ func (a *Attribute) AsMapValue() iacTypes.MapValue {
|
||||
return iacTypes.Map(values, a.GetMetadata())
|
||||
}
|
||||
|
||||
func (a *Attribute) LessThan(checkValue any) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.Number {
|
||||
checkNumber, err := gocty.ToCtyValue(checkValue, cty.Number)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return a.Value().LessThan(checkNumber).True()
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *Attribute) LessThanOrEqualTo(checkValue any) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.Number {
|
||||
checkNumber, err := gocty.ToCtyValue(checkValue, cty.Number)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return a.Value().LessThanOrEqualTo(checkNumber).True()
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *Attribute) GreaterThan(checkValue any) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.Number {
|
||||
checkNumber, err := gocty.ToCtyValue(checkValue, cty.Number)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return a.Value().GreaterThan(checkNumber).True()
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *Attribute) GreaterThanOrEqualTo(checkValue any) bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if a.Value().Type() == cty.Number {
|
||||
checkNumber, err := gocty.ToCtyValue(checkValue, cty.Number)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
return a.Value().GreaterThanOrEqualTo(checkNumber).True()
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (a *Attribute) IsDataBlockReference() bool {
|
||||
if a == nil {
|
||||
return false
|
||||
}
|
||||
if t, ok := a.hclAttribute.Expr.(*hclsyntax.ScopeTraversalExpr); ok {
|
||||
split := t.Traversal.SimpleSplit()
|
||||
return split.Abs.RootName() == "data"
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func createDotReferenceFromTraversal(parentRef string, traversals ...hcl.Traversal) (*Reference, error) {
|
||||
var refParts []string
|
||||
var key cty.Value
|
||||
|
||||
@@ -64,7 +64,7 @@ func postProcessValues(b *Block, input map[string]cty.Value) map[string]cty.Valu
|
||||
|
||||
if b.TypeLabel() == "aws_s3_bucket" {
|
||||
var bucketName string
|
||||
if bucket := input["bucket"]; bucket.Type().Equals(cty.String) {
|
||||
if bucket := input["bucket"]; bucket.Type().Equals(cty.String) && bucket.IsKnown() {
|
||||
bucketName = bucket.AsString()
|
||||
}
|
||||
input["arn"] = cty.StringVal(fmt.Sprintf("arn:aws:s3:::%s", bucketName))
|
||||
|
||||
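The IsKnown() guard added above matters because a bucket name that is only resolved at apply time is represented as an unknown cty value, and calling AsString on an unknown value panics. A minimal, self-contained sketch of that behaviour using the go-cty library directly (the variable names are illustrative, not taken from the diff):

package main

import (
    "fmt"

    "github.com/zclconf/go-cty/cty"
)

func main() {
    // A value Terraform cannot resolve at plan time is modelled as unknown.
    bucket := cty.UnknownVal(cty.String)

    var bucketName string
    // Without the IsKnown check, bucket.AsString() would panic here.
    if bucket.Type().Equals(cty.String) && bucket.IsKnown() {
        bucketName = bucket.AsString()
    }

    // Falls back to an ARN with an empty bucket name instead of panicking.
    fmt.Printf("arn:aws:s3:::%s\n", bucketName)
}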
@@ -170,6 +170,9 @@ func accumulateSeverityCounts(finding Resource) (map[string]int, map[string]int,
|
||||
vCount[rv.Severity]++
|
||||
}
|
||||
for _, rv := range r.Misconfigurations {
|
||||
if rv.Status == types.MisconfStatusPassed {
|
||||
continue
|
||||
}
|
||||
mCount[rv.Severity]++
|
||||
}
|
||||
for _, rv := range r.Secrets {
|
||||
|
||||
@@ -53,6 +53,11 @@ var (
|
||||
Status: types.MisconfStatusFailure,
|
||||
Severity: "HIGH",
|
||||
},
|
||||
{
|
||||
ID: "KSV-ID103",
|
||||
Status: types.MisconfStatusPassed,
|
||||
Severity: "HIGH",
|
||||
},
|
||||
|
||||
{
|
||||
ID: "KCV-ID100",
|
||||
@@ -265,7 +270,7 @@ func TestReportWrite_Table(t *testing.T) {
|
||||
expectedOutput string
|
||||
}{
|
||||
{
|
||||
name: "Only config, all serverities",
|
||||
name: "Only config, all severities",
|
||||
report: report.Report{
|
||||
ClusterName: "test",
|
||||
Resources: []report.Resource{deployOrionWithMisconfigs},
|
||||
@@ -319,7 +324,7 @@ See https://google.com/search?q=bad%20config
|
||||
────────────────────────────────────────`,
|
||||
},
|
||||
{
|
||||
name: "Only vuln, all serverities",
|
||||
name: "Only vuln, all severities",
|
||||
report: report.Report{
|
||||
ClusterName: "test",
|
||||
Resources: []report.Resource{deployOrionWithVulns},
|
||||
@@ -371,7 +376,7 @@ Total: 1 (LOW: 1)
|
||||
└─────────┴───────────────┴──────────┴─────────┴───────────────────┴───────────────┴───────────────────────────────────────────┘`,
|
||||
},
|
||||
{
|
||||
name: "Only rbac, all serverities",
|
||||
name: "Only rbac, all severities",
|
||||
report: report.Report{
|
||||
ClusterName: "test",
|
||||
Resources: []report.Resource{roleWithMisconfig},
|
||||
@@ -393,7 +398,7 @@ RBAC Assessment
|
||||
Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
|
||||
},
|
||||
{
|
||||
name: "Only secret, all serverities",
|
||||
name: "Only secret, all severities",
|
||||
report: report.Report{
|
||||
ClusterName: "test",
|
||||
Resources: []report.Resource{deployLuaWithSecrets},
|
||||
@@ -424,7 +429,7 @@ Infra Assessment
|
||||
Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
|
||||
},
|
||||
{
|
||||
name: "apiserver, only infra and serverities",
|
||||
name: "apiserver, only infra and severities",
|
||||
report: report.Report{
|
||||
ClusterName: "test",
|
||||
Resources: []report.Resource{apiseverPodWithMisconfigAndInfra},
|
||||
@@ -455,7 +460,7 @@ Infra Assessment
|
||||
Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
|
||||
},
|
||||
{
|
||||
name: "apiserver, vuln,config,secret and serverities",
|
||||
name: "apiserver, vuln,config,secret and severities",
|
||||
report: report.Report{
|
||||
ClusterName: "test",
|
||||
Resources: []report.Resource{apiseverPodWithMisconfigAndInfra},
|
||||
@@ -490,7 +495,7 @@ Infra Assessment
|
||||
Severities: C=CRITICAL H=HIGH M=MEDIUM L=LOW U=UNKNOWN`,
|
||||
},
|
||||
{
|
||||
name: "apiserver, all misconfig and vuln scanners and serverities",
|
||||
name: "apiserver, all misconfig and vuln scanners and severities",
|
||||
report: report.Report{
|
||||
ClusterName: "test",
|
||||
Resources: []report.Resource{apiseverPodWithMisconfigAndInfra},
|
||||
|
||||
@@ -82,6 +82,8 @@ type ScannerOption struct {
|
||||
DisabledChecks []DisabledCheck
|
||||
SkipFiles []string
|
||||
SkipDirs []string
|
||||
|
||||
RegoScanner *rego.Scanner
|
||||
}
|
||||
|
||||
func (o *ScannerOption) Sort() {
|
||||
@@ -216,7 +218,21 @@ func (s *Scanner) filterFS(fsys fs.FS) (fs.FS, error) {
|
||||
return newfs, nil
|
||||
}
|
||||
|
||||
func scannerOptions(t detection.FileType, opt ScannerOption) ([]options.ScannerOption, error) {
|
||||
func InitRegoScanner(opt ScannerOption) (*rego.Scanner, error) {
|
||||
regoOpts, err := initRegoOptions(opt)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("init rego options: %w", err)
|
||||
}
|
||||
regoScanner := rego.NewScanner(regoOpts...)
|
||||
// note: it is safe to pass nil as fsys, since checks and data files will be loaded
|
||||
// from the filesystems passed through the options.
|
||||
if err := regoScanner.LoadPolicies(nil); err != nil {
|
||||
return nil, xerrors.Errorf("load checks: %w", err)
|
||||
}
|
||||
return regoScanner, nil
|
||||
}
|
||||
|
||||
func initRegoOptions(opt ScannerOption) ([]options.ScannerOption, error) {
|
||||
disabledCheckIDs := lo.Map(opt.DisabledChecks, func(check DisabledCheck, _ int) string {
|
||||
log.Info("Check disabled", log.Prefix(log.PrefixMisconfiguration), log.String("ID", check.ID),
|
||||
log.String("scanner", check.Scanner), log.String("reason", check.Reason))
|
||||
@@ -268,6 +284,23 @@ func scannerOptions(t detection.FileType, opt ScannerOption) ([]options.ScannerO
|
||||
if len(opt.Namespaces) > 0 {
|
||||
opts = append(opts, rego.WithPolicyNamespaces(opt.Namespaces...))
|
||||
}
|
||||
return opts, nil
|
||||
}
|
||||
|
||||
func scannerOptions(t detection.FileType, opt ScannerOption) ([]options.ScannerOption, error) {
|
||||
var opts []options.ScannerOption
|
||||
|
||||
if opt.RegoScanner != nil {
|
||||
opts = append(opts, rego.WithRegoScanner(opt.RegoScanner))
|
||||
} else {
|
||||
// If RegoScanner is not provided, pass the Rego options to IaC scanners
|
||||
// so that they can initialize the Rego scanner themselves
|
||||
regoOpts, err := initRegoOptions(opt)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("init rego opts: %w", err)
|
||||
}
|
||||
opts = append(opts, regoOpts...)
|
||||
}
|
||||
|
||||
switch t {
|
||||
case detection.FileTypeHelm:
|
||||
|
||||
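With the hunk above, a caller can build the Rego scanner once via InitRegoScanner and hand it to every IaC scanner through the new RegoScanner field, so checks and data files are loaded a single time instead of once per scanner. A rough sketch of that wiring, assuming ScannerOption and InitRegoScanner are exported from github.com/aquasecurity/trivy/pkg/misconf as the hunk suggests (the surrounding main function is illustrative only):

package main

import (
    "log"

    "github.com/aquasecurity/trivy/pkg/misconf"
)

func main() {
    opt := misconf.ScannerOption{
        Namespaces: []string{"user"},
    }

    // Load checks and data files once.
    rs, err := misconf.InitRegoScanner(opt)
    if err != nil {
        log.Fatal(err)
    }

    // Every scanner built from opt now reuses the pre-initialized Rego scanner
    // instead of calling LoadPolicies itself.
    opt.RegoScanner = rs
}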
@@ -17,13 +17,12 @@ import (
|
||||
wasi "github.com/tetratelabs/wazero/imports/wasi_snapshot_preview1"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/extension"
|
||||
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
tapi "github.com/aquasecurity/trivy/pkg/module/api"
|
||||
"github.com/aquasecurity/trivy/pkg/module/serialize"
|
||||
"github.com/aquasecurity/trivy/pkg/scan/post"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
"github.com/aquasecurity/trivy/pkg/utils/fsutils"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -33,10 +32,6 @@ var (
|
||||
"warn": logWarn,
|
||||
"error": logError,
|
||||
}
|
||||
|
||||
RelativeDir = filepath.Join(".trivy", "modules")
|
||||
|
||||
DefaultDir = dir()
|
||||
)
|
||||
|
||||
// logDebug is defined as an api.GoModuleFunc for lower overhead vs reflection.
|
||||
@@ -172,7 +167,7 @@ func (m *Manager) Register() {
|
||||
func (m *Manager) Deregister() {
|
||||
for _, mod := range m.modules {
|
||||
analyzer.DeregisterAnalyzer(analyzer.Type(mod.Name()))
|
||||
post.DeregisterPostScanner(mod.Name())
|
||||
extension.DeregisterHook(mod.Name())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -262,6 +257,8 @@ func marshal(ctx context.Context, m api.Module, malloc api.Function, v any) (uin
|
||||
return ptr, size, nil
|
||||
}
|
||||
|
||||
var _ extension.ScanHook = (*wasmModule)(nil)
|
||||
|
||||
type wasmModule struct {
|
||||
mod api.Module
|
||||
memFS *memFS
|
||||
@@ -416,7 +413,7 @@ func (m *wasmModule) Register() {
|
||||
}
|
||||
if m.isPostScanner {
|
||||
logger.Debug("Registering custom post scanner")
|
||||
post.RegisterPostScanner(m)
|
||||
extension.RegisterHook(m)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -486,8 +483,11 @@ func (m *wasmModule) Analyze(ctx context.Context, input analyzer.AnalysisInput)
|
||||
return &result, nil
|
||||
}
|
||||
|
||||
// PostScan performs post scanning
|
||||
// e.g. Remove a vulnerability, change severity, etc.
|
||||
func (m *wasmModule) PreScan(ctx context.Context, target *types.ScanTarget, options types.ScanOptions) error {
|
||||
// TODO: Implement
|
||||
return nil
|
||||
}
|
||||
|
||||
func (m *wasmModule) PostScan(ctx context.Context, results types.Results) (types.Results, error) {
|
||||
// Find custom resources
|
||||
var custom types.Result
|
||||
@@ -746,10 +746,6 @@ func isType(ctx context.Context, mod api.Module, name string) (bool, error) {
|
||||
return isRes[0] > 0, nil
|
||||
}
|
||||
|
||||
func dir() string {
|
||||
return filepath.Join(fsutils.HomeDir(), RelativeDir)
|
||||
}
|
||||
|
||||
func modulePostScanSpec(ctx context.Context, mod api.Module, freeFn api.Function) (serialize.PostScanSpec, error) {
|
||||
postScanSpecFunc := mod.ExportedFunction("post_scan_spec")
|
||||
if postScanSpecFunc == nil {
|
||||
|
||||
@@ -6,12 +6,13 @@ import (
|
||||
"runtime"
|
||||
"testing"
|
||||
|
||||
"github.com/samber/lo"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/extension"
|
||||
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
||||
"github.com/aquasecurity/trivy/pkg/module"
|
||||
"github.com/aquasecurity/trivy/pkg/scan/post"
|
||||
)
|
||||
|
||||
func TestManager_Register(t *testing.T) {
|
||||
@@ -20,12 +21,12 @@ func TestManager_Register(t *testing.T) {
|
||||
t.Skip("Test satisfied adequately by Linux tests")
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
moduleDir string
|
||||
enabledModules []string
|
||||
wantAnalyzerVersions analyzer.Versions
|
||||
wantPostScannerVersions map[string]int
|
||||
wantErr bool
|
||||
name string
|
||||
moduleDir string
|
||||
enabledModules []string
|
||||
wantAnalyzerVersions analyzer.Versions
|
||||
wantExtentions []string
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "happy path",
|
||||
@@ -36,8 +37,8 @@ func TestManager_Register(t *testing.T) {
|
||||
},
|
||||
PostAnalyzers: make(map[string]int),
|
||||
},
|
||||
wantPostScannerVersions: map[string]int{
|
||||
"happy": 1,
|
||||
wantExtentions: []string{
|
||||
"happy",
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -49,7 +50,7 @@ func TestManager_Register(t *testing.T) {
|
||||
},
|
||||
PostAnalyzers: make(map[string]int),
|
||||
},
|
||||
wantPostScannerVersions: make(map[string]int),
|
||||
wantExtentions: []string{},
|
||||
},
|
||||
{
|
||||
name: "only post scanner",
|
||||
@@ -58,8 +59,8 @@ func TestManager_Register(t *testing.T) {
|
||||
Analyzers: make(map[string]int),
|
||||
PostAnalyzers: make(map[string]int),
|
||||
},
|
||||
wantPostScannerVersions: map[string]int{
|
||||
"scanner": 2,
|
||||
wantExtentions: []string{
|
||||
"scanner",
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -69,7 +70,7 @@ func TestManager_Register(t *testing.T) {
|
||||
Analyzers: make(map[string]int),
|
||||
PostAnalyzers: make(map[string]int),
|
||||
},
|
||||
wantPostScannerVersions: make(map[string]int),
|
||||
wantExtentions: []string{},
|
||||
},
|
||||
{
|
||||
name: "pass enabled modules",
|
||||
@@ -85,8 +86,8 @@ func TestManager_Register(t *testing.T) {
|
||||
},
|
||||
PostAnalyzers: make(map[string]int),
|
||||
},
|
||||
wantPostScannerVersions: map[string]int{
|
||||
"happy": 1,
|
||||
wantExtentions: []string{
|
||||
"happy",
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -124,9 +125,10 @@ func TestManager_Register(t *testing.T) {
|
||||
got := a.AnalyzerVersions()
|
||||
assert.Equal(t, tt.wantAnalyzerVersions, got)
|
||||
|
||||
// Confirm the post scanner is registered
|
||||
gotScannerVersions := post.ScannerVersions()
|
||||
assert.Equal(t, tt.wantPostScannerVersions, gotScannerVersions)
|
||||
hookNames := lo.Map(extension.Hooks(), func(hook extension.Hook, _ int) string {
|
||||
return hook.Name()
|
||||
})
|
||||
assert.Equal(t, tt.wantExtentions, hookNames)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
cr "github.com/aquasecurity/trivy/pkg/compliance/report"
|
||||
"github.com/aquasecurity/trivy/pkg/extension"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
"github.com/aquasecurity/trivy/pkg/flag"
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
@@ -26,6 +27,11 @@ const (
|
||||
|
||||
// Write writes the result to output, format as passed in argument
|
||||
func Write(ctx context.Context, report types.Report, option flag.Options) (err error) {
|
||||
// Call pre-report hooks
|
||||
if err := extension.PreReport(ctx, &report, option); err != nil {
|
||||
return xerrors.Errorf("pre report error: %w", err)
|
||||
}
|
||||
|
||||
output, cleanup, err := option.OutputWriter(ctx)
|
||||
if err != nil {
|
||||
return xerrors.Errorf("failed to create a file: %w", err)
|
||||
@@ -106,6 +112,11 @@ func Write(ctx context.Context, report types.Report, option flag.Options) (err e
|
||||
return xerrors.Errorf("failed to write results: %w", err)
|
||||
}
|
||||
|
||||
// Call post-report hooks
|
||||
if err := extension.PostReport(ctx, &report, option); err != nil {
|
||||
return xerrors.Errorf("post report error: %w", err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
package report_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
dbTypes "github.com/aquasecurity/trivy-db/pkg/types"
|
||||
"github.com/aquasecurity/trivy/internal/hooktest"
|
||||
"github.com/aquasecurity/trivy/pkg/flag"
|
||||
"github.com/aquasecurity/trivy/pkg/report"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
)
|
||||
|
||||
@@ -82,3 +88,93 @@ func TestResults_Failed(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestWrite(t *testing.T) {
|
||||
testReport := types.Report{
|
||||
SchemaVersion: report.SchemaVersion,
|
||||
ArtifactName: "test-artifact",
|
||||
Results: types.Results{
|
||||
{
|
||||
Target: "test-target",
|
||||
Vulnerabilities: []types.DetectedVulnerability{
|
||||
{
|
||||
VulnerabilityID: "CVE-2021-0001",
|
||||
PkgName: "test-pkg",
|
||||
Vulnerability: dbTypes.Vulnerability{
|
||||
Title: "Test Vulnerability Title",
|
||||
Description: "This is a test description of a vulnerability",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
testTemplate := "{{ range . }}{{ range .Vulnerabilities }}- {{ .VulnerabilityID }}: {{ .Title }}\n {{ .Description }}\n{{ end }}{{ end }}"
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
setUpHook bool
|
||||
report types.Report
|
||||
options flag.Options
|
||||
wantOutput string
|
||||
wantTitle string // Expected title after function call
|
||||
wantDesc string // Expected description after function call
|
||||
}{
|
||||
{
|
||||
name: "template with title and description",
|
||||
report: testReport,
|
||||
options: flag.Options{
|
||||
ReportOptions: flag.ReportOptions{
|
||||
Format: types.FormatTemplate,
|
||||
Template: testTemplate,
|
||||
},
|
||||
},
|
||||
wantOutput: "- CVE-2021-0001: Test Vulnerability Title\n This is a test description of a vulnerability\n",
|
||||
wantTitle: "Test Vulnerability Title", // Should remain unchanged
|
||||
wantDesc: "This is a test description of a vulnerability", // Should remain unchanged
|
||||
},
|
||||
{
|
||||
name: "report modified by hooks",
|
||||
setUpHook: true,
|
||||
report: testReport,
|
||||
options: flag.Options{
|
||||
ReportOptions: flag.ReportOptions{
|
||||
Format: types.FormatTemplate,
|
||||
Template: testTemplate,
|
||||
},
|
||||
},
|
||||
// The template output only reflects the pre-report hook changes because
|
||||
// the post-report hook runs AFTER the output is written.
|
||||
// However, the report object itself is modified by both pre and post hooks.
|
||||
wantOutput: "- CVE-2021-0001: Modified by pre-report hook\n This is a test description of a vulnerability\n",
|
||||
wantTitle: "Modified by pre-report hook",
|
||||
wantDesc: "Modified by post-report hook",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
if tt.setUpHook {
|
||||
hooktest.Init(t)
|
||||
}
|
||||
|
||||
// Create a buffer to capture the output
|
||||
output := new(bytes.Buffer)
|
||||
tt.options.SetOutputWriter(output)
|
||||
|
||||
// Execute the Write function
|
||||
err := report.Write(t.Context(), tt.report, tt.options)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify the output matches the expected template rendering
|
||||
got := output.String()
|
||||
assert.Equal(t, tt.wantOutput, got, "Template output does not match wanted value")
|
||||
|
||||
// Verify that the title and description in the report match the expected values
|
||||
require.Len(t, tt.report.Results, 1)
|
||||
require.Len(t, tt.report.Results[0].Vulnerabilities, 1)
|
||||
assert.Equal(t, tt.wantTitle, tt.report.Results[0].Vulnerabilities[0].Title)
|
||||
assert.Equal(t, tt.wantDesc, tt.report.Results[0].Vulnerabilities[0].Description)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ import (
|
||||
|
||||
dbTypes "github.com/aquasecurity/trivy-db/pkg/types"
|
||||
ospkgDetector "github.com/aquasecurity/trivy/pkg/detector/ospkg"
|
||||
"github.com/aquasecurity/trivy/pkg/extension"
|
||||
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
||||
"github.com/aquasecurity/trivy/pkg/fanal/applier"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
@@ -23,7 +24,6 @@ import (
|
||||
"github.com/aquasecurity/trivy/pkg/log"
|
||||
"github.com/aquasecurity/trivy/pkg/scan/langpkg"
|
||||
"github.com/aquasecurity/trivy/pkg/scan/ospkg"
|
||||
"github.com/aquasecurity/trivy/pkg/scan/post"
|
||||
"github.com/aquasecurity/trivy/pkg/set"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
"github.com/aquasecurity/trivy/pkg/vulnerability"
|
||||
@@ -49,7 +49,7 @@ type Service struct {
|
||||
vulnClient vulnerability.Client
|
||||
}
|
||||
|
||||
// NewService is the factory method for Scanner
|
||||
// NewService is the factory method for scan service
|
||||
func NewService(a applier.Applier, osPkgScanner ospkg.Scanner, langPkgScanner langpkg.Scanner,
|
||||
vulnClient vulnerability.Client) Service {
|
||||
return Service{
|
||||
@@ -113,6 +113,11 @@ func (s Service) Scan(ctx context.Context, targetName, artifactKey string, blobK
|
||||
}
|
||||
|
||||
func (s Service) ScanTarget(ctx context.Context, target types.ScanTarget, options types.ScanOptions) (types.Results, ftypes.OS, error) {
|
||||
// Call pre-scan hooks
|
||||
if err := extension.PreScan(ctx, &target, options); err != nil {
|
||||
return nil, ftypes.OS{}, xerrors.Errorf("pre scan error: %w", err)
|
||||
}
|
||||
|
||||
var results types.Results
|
||||
|
||||
// Filter packages according to the options
|
||||
@@ -148,9 +153,8 @@ func (s Service) ScanTarget(ctx context.Context, target types.ScanTarget, option
|
||||
s.vulnClient.FillInfo(results[i].Vulnerabilities, options.VulnSeveritySources)
|
||||
}
|
||||
|
||||
// Post scanning
|
||||
results, err = post.Scan(ctx, results)
|
||||
if err != nil {
|
||||
// Call post-scan hooks
|
||||
if results, err = extension.PostScan(ctx, results); err != nil {
|
||||
return nil, ftypes.OS{}, xerrors.Errorf("post scan error: %w", err)
|
||||
}
|
||||
|
||||
|
||||
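The scan service now runs pre- and post-scan extension hooks instead of the removed pkg/scan/post registry. A rough sketch of a custom hook wired through these extension points, assuming extension.ScanHook asks for the same Name/PreScan/PostScan methods the wasm module implements earlier in this diff (the exact interface lives in pkg/extension, so treat the signatures below as an approximation):

package main

import (
    "context"

    "github.com/aquasecurity/trivy/pkg/extension"
    "github.com/aquasecurity/trivy/pkg/types"
)

type auditHook struct{}

func (auditHook) Name() string { return "audit" }

// PreScan may adjust the target before scanning; the hooktest fixture used by the
// test below appends a "(pre-scan)" marker to the target name in the same way.
func (auditHook) PreScan(_ context.Context, target *types.ScanTarget, _ types.ScanOptions) error {
    return nil
}

// PostScan may rewrite the results after scanning, e.g. drop findings or add references.
func (auditHook) PostScan(_ context.Context, results types.Results) (types.Results, error) {
    return results, nil
}

func main() {
    extension.RegisterHook(auditHook{})
    defer extension.DeregisterHook("audit")
}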
@@ -12,6 +12,7 @@ import (
|
||||
"github.com/aquasecurity/trivy-db/pkg/db"
|
||||
dbTypes "github.com/aquasecurity/trivy-db/pkg/types"
|
||||
"github.com/aquasecurity/trivy/internal/dbtest"
|
||||
"github.com/aquasecurity/trivy/internal/hooktest"
|
||||
"github.com/aquasecurity/trivy/pkg/cache"
|
||||
"github.com/aquasecurity/trivy/pkg/fanal/applier"
|
||||
ftypes "github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
@@ -151,6 +152,7 @@ func TestScanner_Scan(t *testing.T) {
|
||||
name string
|
||||
args args
|
||||
fixtures []string
|
||||
setUpHook bool
|
||||
setupCache func(t *testing.T) cache.Cache
|
||||
wantResults types.Results
|
||||
wantOS ftypes.OS
|
||||
@@ -909,6 +911,75 @@ func TestScanner_Scan(t *testing.T) {
|
||||
Name: "3.11",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "happy path with hooks",
|
||||
args: args{
|
||||
target: "alpine:latest",
|
||||
layerIDs: []string{"sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10"},
|
||||
options: types.ScanOptions{
|
||||
PkgTypes: []string{types.PkgTypeOS},
|
||||
PkgRelationships: ftypes.Relationships,
|
||||
Scanners: types.Scanners{types.VulnerabilityScanner},
|
||||
VulnSeveritySources: []dbTypes.SourceID{"auto"},
|
||||
},
|
||||
},
|
||||
fixtures: []string{"testdata/fixtures/happy.yaml"},
|
||||
setUpHook: true,
|
||||
setupCache: func(t *testing.T) cache.Cache {
|
||||
c := cache.NewMemoryCache()
|
||||
require.NoError(t, c.PutBlob("sha256:5216338b40a7b96416b8b9858974bbe4acc3096ee60acbc4dfb1ee02aecceb10", ftypes.BlobInfo{
|
||||
SchemaVersion: ftypes.BlobJSONSchemaVersion,
|
||||
OS: ftypes.OS{
|
||||
Family: ftypes.Alpine,
|
||||
Name: "3.11",
|
||||
},
|
||||
PackageInfos: []ftypes.PackageInfo{
|
||||
{
|
||||
FilePath: "lib/apk/db/installed",
|
||||
Packages: []ftypes.Package{muslPkg},
|
||||
},
|
||||
},
|
||||
}))
|
||||
return c
|
||||
},
|
||||
wantResults: types.Results{
|
||||
{
|
||||
Target: "alpine:latest (pre-scan) (alpine 3.11)",
|
||||
Class: types.ClassOSPkg,
|
||||
Type: ftypes.Alpine,
|
||||
Packages: ftypes.Packages{
|
||||
muslPkg,
|
||||
},
|
||||
Vulnerabilities: []types.DetectedVulnerability{
|
||||
{
|
||||
VulnerabilityID: "CVE-2020-9999",
|
||||
PkgName: muslPkg.Name,
|
||||
PkgIdentifier: muslPkg.Identifier,
|
||||
InstalledVersion: muslPkg.Version,
|
||||
FixedVersion: "1.2.4",
|
||||
Status: dbTypes.StatusFixed,
|
||||
Layer: ftypes.Layer{
|
||||
DiffID: "sha256:ebf12965380b39889c99a9c02e82ba465f887b45975b6e389d42e9e6a3857888",
|
||||
},
|
||||
PrimaryURL: "https://avd.aquasec.com/nvd/cve-2020-9999",
|
||||
Vulnerability: dbTypes.Vulnerability{
|
||||
Title: "dos",
|
||||
Description: "dos vulnerability",
|
||||
Severity: "HIGH",
|
||||
References: []string{
|
||||
"https://example.com/post-scan", // modified by post-scan hook
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
wantOS: ftypes.OS{
|
||||
Family: "alpine",
|
||||
Name: "3.11",
|
||||
Eosl: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "happy path with misconfigurations",
|
||||
args: args{
|
||||
@@ -1242,6 +1313,10 @@ func TestScanner_Scan(t *testing.T) {
|
||||
_ = dbtest.InitDB(t, tt.fixtures)
|
||||
defer db.Close()
|
||||
|
||||
if tt.setUpHook {
|
||||
hooktest.Init(t)
|
||||
}
|
||||
|
||||
c := tt.setupCache(t)
|
||||
a := applier.NewApplier(c)
|
||||
s := NewService(a, ospkg.NewScanner(), langpkg.NewScanner(), vulnerability.NewClient(db.Config{}))
|
||||
|
||||
@@ -1,45 +0,0 @@
|
||||
package post
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
)
|
||||
|
||||
type Scanner interface {
|
||||
Name() string
|
||||
Version() int
|
||||
PostScan(ctx context.Context, results types.Results) (types.Results, error)
|
||||
}
|
||||
|
||||
func RegisterPostScanner(s Scanner) {
|
||||
// Avoid duplication
|
||||
postScanners[s.Name()] = s
|
||||
}
|
||||
|
||||
func DeregisterPostScanner(name string) {
|
||||
delete(postScanners, name)
|
||||
}
|
||||
|
||||
func ScannerVersions() map[string]int {
|
||||
versions := make(map[string]int)
|
||||
for _, s := range postScanners {
|
||||
versions[s.Name()] = s.Version()
|
||||
}
|
||||
return versions
|
||||
}
|
||||
|
||||
var postScanners = make(map[string]Scanner)
|
||||
|
||||
func Scan(ctx context.Context, results types.Results) (types.Results, error) {
|
||||
var err error
|
||||
for _, s := range postScanners {
|
||||
results, err = s.PostScan(ctx, results)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("%s post scan error: %w", s.Name(), err)
|
||||
}
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
@@ -1,103 +0,0 @@
|
||||
package post_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
dbTypes "github.com/aquasecurity/trivy-db/pkg/types"
|
||||
"github.com/aquasecurity/trivy/pkg/scan/post"
|
||||
"github.com/aquasecurity/trivy/pkg/types"
|
||||
)
|
||||
|
||||
type testPostScanner struct{}
|
||||
|
||||
func (testPostScanner) Name() string {
|
||||
return "test"
|
||||
}
|
||||
|
||||
func (testPostScanner) Version() int {
|
||||
return 1
|
||||
}
|
||||
|
||||
func (testPostScanner) PostScan(ctx context.Context, results types.Results) (types.Results, error) {
|
||||
for i, r := range results {
|
||||
if r.Target == "bad" {
|
||||
return nil, errors.New("bad")
|
||||
}
|
||||
for j := range r.Vulnerabilities {
|
||||
results[i].Vulnerabilities[j].Severity = "LOW"
|
||||
}
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
|
||||
func TestScan(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
results types.Results
|
||||
want types.Results
|
||||
wantErr bool
|
||||
}{
|
||||
{
|
||||
name: "happy path",
|
||||
results: types.Results{
|
||||
{
|
||||
Target: "test",
|
||||
Vulnerabilities: []types.DetectedVulnerability{
|
||||
{
|
||||
VulnerabilityID: "CVE-2022-0001",
|
||||
PkgName: "musl",
|
||||
InstalledVersion: "1.2.3",
|
||||
FixedVersion: "1.2.4",
|
||||
Vulnerability: dbTypes.Vulnerability{
|
||||
Severity: "CRITICAL",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
want: types.Results{
|
||||
{
|
||||
Target: "test",
|
||||
Vulnerabilities: []types.DetectedVulnerability{
|
||||
{
|
||||
VulnerabilityID: "CVE-2022-0001",
|
||||
PkgName: "musl",
|
||||
InstalledVersion: "1.2.3",
|
||||
FixedVersion: "1.2.4",
|
||||
Vulnerability: dbTypes.Vulnerability{
|
||||
Severity: "LOW",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "sad path",
|
||||
results: types.Results{
|
||||
{
|
||||
Target: "bad",
|
||||
},
|
||||
},
|
||||
wantErr: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
s := testPostScanner{}
|
||||
post.RegisterPostScanner(s)
|
||||
defer func() {
|
||||
post.DeregisterPostScanner(s.Name())
|
||||
}()
|
||||
|
||||
results, err := post.Scan(t.Context(), tt.results)
|
||||
require.Equal(t, tt.wantErr, err != nil)
|
||||
assert.Equal(t, tt.want, results)
|
||||
})
|
||||
}
|
||||
}
|
||||
pkg/x/json/json.go (new file, 107 lines)
@@ -0,0 +1,107 @@
|
||||
package json
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
|
||||
"github.com/go-json-experiment/json"
|
||||
"github.com/go-json-experiment/json/jsontext"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||
"github.com/aquasecurity/trivy/pkg/set"
|
||||
)
|
||||
|
||||
// lineReader is a custom reader that tracks line numbers.
|
||||
type lineReader struct {
|
||||
r io.Reader
|
||||
line int
|
||||
}
|
||||
|
||||
// newLineReader creates a new line reader.
|
||||
func newLineReader(r io.Reader) *lineReader {
|
||||
return &lineReader{
|
||||
r: r,
|
||||
line: 1,
|
||||
}
|
||||
}
|
||||
|
||||
func (lr *lineReader) Read(p []byte) (n int, err error) {
|
||||
n, err = lr.r.Read(p)
|
||||
if n > 0 {
|
||||
// Count the number of newlines in the read buffer
|
||||
lr.line += bytes.Count(p[:n], []byte("\n"))
|
||||
}
|
||||
return n, err
|
||||
}
|
||||
|
||||
func (lr *lineReader) Line() int {
|
||||
return lr.line
|
||||
}
|
||||
|
||||
func Unmarshal(data []byte, v any) error {
|
||||
return UnmarshalRead(bytes.NewBuffer(data), v)
|
||||
}
|
||||
|
||||
func UnmarshalRead(r io.Reader, v any) error {
|
||||
lr := newLineReader(r)
|
||||
unmarshalers := unmarshalerWithObjectLocation(lr)
|
||||
return json.UnmarshalRead(lr, v, json.WithUnmarshalers(unmarshalers))
|
||||
}
|
||||
|
||||
// Location is wrap of types.Location.
|
||||
// This struct is required when you need to detect location of your object from json file.
|
||||
type Location types.Location
|
||||
|
||||
func (l *Location) SetLocation(location types.Location) {
|
||||
*l = Location(location)
|
||||
}
|
||||
|
||||
// ObjectLocation is required when you need to save Location for your struct.
|
||||
type ObjectLocation interface {
|
||||
SetLocation(location types.Location)
|
||||
}
|
||||
|
||||
// unmarshalerWithObjectLocation creates json.Unmarshaler for ObjectLocation to save object location into xjson.Location
|
||||
// To use UnmarshalerWithObjectLocation for primitive types, you must implement the UnmarshalerFrom interface for those objects.
|
||||
// cf. https://pkg.go.dev/github.com/go-json-experiment/json#UnmarshalerFrom
|
||||
func unmarshalerWithObjectLocation(r *lineReader) *json.Unmarshalers {
|
||||
visited := set.New[int]()
|
||||
return unmarshaler(r, visited)
|
||||
}
|
||||
|
||||
func unmarshaler(r *lineReader, visited set.Set[int]) *json.Unmarshalers {
|
||||
return json.UnmarshalFromFunc(func(dec *jsontext.Decoder, loc ObjectLocation) error {
|
||||
// Decoder.InputOffset reports the offset after the last token,
|
||||
// but we want to record the offset before the next token.
|
||||
//
|
||||
// Call Decoder.PeekKind to buffer enough to reach the next token.
|
||||
// Add the number of leading whitespace, commas, and colons
|
||||
// to locate the start of the next token.
|
||||
// cf. https://pkg.go.dev/github.com/go-json-experiment/json@v0.0.0-20250223041408-d3c622f1b874#example-WithUnmarshalers-RecordOffsets
|
||||
kind := dec.PeekKind()
|
||||
|
||||
unread := bytes.TrimLeft(dec.UnreadBuffer(), " \n\r\t,:")
|
||||
start := r.Line() - bytes.Count(unread, []byte("\n")) // The decoder buffer may have read more lines.
|
||||
|
||||
// Check visited set to avoid infinity loops
|
||||
if visited.Contains(start) {
|
||||
return json.SkipFunc
|
||||
}
|
||||
visited.Append(start)
|
||||
|
||||
// Return more detailed error for cases when UnmarshalJSONFrom is not implemented for primitive type.
|
||||
if _, ok := loc.(json.UnmarshalerFrom); !ok && kind != '[' && kind != '{' {
|
||||
return xerrors.Errorf("structures with single primitive type should implement UnmarshalJSONFrom: %T", loc)
|
||||
}
|
||||
|
||||
if err := json.UnmarshalDecode(dec, loc, json.WithUnmarshalers(unmarshaler(r, visited))); err != nil {
|
||||
return err
|
||||
}
|
||||
loc.SetLocation(types.Location{
|
||||
StartLine: start,
|
||||
EndLine: r.Line() - bytes.Count(dec.UnreadBuffer(), []byte("\n")),
|
||||
})
|
||||
return nil
|
||||
})
|
||||
}
|
||||
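A quick worked example of the start-line arithmetic in unmarshaler above, with hypothetical numbers:

// Suppose the lineReader has counted 7 newlines so far, so r.Line() == 8,
// but the decoder buffered ahead and, after trimming the leading whitespace,
// commas and colons, its UnreadBuffer still contains 3 newlines.
// The next token therefore starts on line 8 - 3 = 5, which is recorded as StartLine.
// EndLine is computed the same way from the remaining UnreadBuffer once the value
// has been decoded.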
pkg/x/json/json_test.go (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
package json_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/go-json-experiment/json"
|
||||
"github.com/go-json-experiment/json/jsontext"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
xjson "github.com/aquasecurity/trivy/pkg/x/json"
|
||||
)
|
||||
|
||||
// See npm.LockFile
|
||||
type nestedStruct struct {
|
||||
Dependencies map[string]Dependency `json:"dependencies"`
|
||||
}
|
||||
|
||||
type Dependency struct {
|
||||
Version string `json:"version"`
|
||||
Dependencies map[string]Dependency `json:"dependencies"`
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
type stringWithLocation struct {
|
||||
Requires Requires `json:"requires"`
|
||||
}
|
||||
|
||||
type Requires []Require
|
||||
|
||||
type Require struct {
|
||||
Dependency string
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
func (r *Require) UnmarshalJSONFrom(dec *jsontext.Decoder) error {
|
||||
return json.UnmarshalDecode(dec, &r.Dependency)
|
||||
}
|
||||
|
||||
type stringsWithoutUnmarshalerFrom struct {
|
||||
Strings []StringWithoutUnmarshalerFrom `json:"strings"`
|
||||
}
|
||||
|
||||
type StringWithoutUnmarshalerFrom struct {
|
||||
String string
|
||||
xjson.Location
|
||||
}
|
||||
|
||||
func TestUnmarshal(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
in []byte
|
||||
out any
|
||||
want any
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "nested LocationObjects",
|
||||
in: []byte(`{
|
||||
"dependencies": {
|
||||
"body-parser": {
|
||||
"version": "1.18.3",
|
||||
"dependencies": {
|
||||
"debug": {
|
||||
"version": "2.6.9"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}`),
|
||||
out: nestedStruct{},
|
||||
want: nestedStruct{
|
||||
Dependencies: map[string]Dependency{
|
||||
"body-parser": {
|
||||
Version: "1.18.3",
|
||||
Location: xjson.Location{
|
||||
StartLine: 3,
|
||||
EndLine: 10,
|
||||
},
|
||||
Dependencies: map[string]Dependency{
|
||||
// UnmarshalerWithObjectLocation doesn't support Location for nested objects
|
||||
"debug": {
|
||||
Version: "2.6.9",
|
||||
Location: xjson.Location{
|
||||
StartLine: 6,
|
||||
EndLine: 8,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Location for only string",
|
||||
in: []byte(`{
|
||||
"version": "0.5",
|
||||
"requires": [
|
||||
"sound32/1.0#83d4b7bf607b3b60a6546f8b58b5cdd7%1675278904.0791488",
|
||||
"matrix/1.3#905c3f0babc520684c84127378fefdd0%1675278900.0103245"
|
||||
]
|
||||
}`),
|
||||
out: stringWithLocation{},
|
||||
want: stringWithLocation{
|
||||
Requires: []Require{
|
||||
{
|
||||
Dependency: "sound32/1.0#83d4b7bf607b3b60a6546f8b58b5cdd7%1675278904.0791488",
|
||||
Location: xjson.Location{
|
||||
StartLine: 4,
|
||||
EndLine: 4,
|
||||
},
|
||||
},
|
||||
{
|
||||
Dependency: "matrix/1.3#905c3f0babc520684c84127378fefdd0%1675278900.0103245",
|
||||
Location: xjson.Location{
|
||||
StartLine: 5,
|
||||
EndLine: 5,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "String object without UnmarshalerFrom implementation",
|
||||
in: []byte(`{
|
||||
"strings": [
|
||||
"sound32/1.0#83d4b7bf607b3b60a6546f8b58b5cdd7%1675278904.0791488",
|
||||
"matrix/1.3#905c3f0babc520684c84127378fefdd0%1675278900.0103245"
|
||||
]
|
||||
}`),
|
||||
out: stringsWithoutUnmarshalerFrom{},
|
||||
wantErr: "structures with single primitive type should implement UnmarshalJSONFrom",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
err := xjson.Unmarshal(tt.in, &tt.out)
|
||||
|
||||
if tt.wantErr != "" {
|
||||
require.ErrorContains(t, err, tt.wantErr)
|
||||
return
|
||||
}
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, tt.want, tt.out)
|
||||
})
|
||||
}
|
||||
|
||||
}
|
||||