feat: Adding support for Windows testing (#3037)
Signed-off-by: Owen Rumney <owen.rumney@aquasec.com>
Signed-off-by: knqyf263 <knqyf263@gmail.com>
Co-authored-by: knqyf263 <knqyf263@gmail.com>
.gitattributes (new file, 1 line)
@@ -0,0 +1 @@
+* text=auto eol=lf
.github/workflows/semantic-pr.yaml (1 line changed)
@@ -58,6 +58,7 @@ jobs:
             suse
             photon
             distroless
+            windows
             ruby
             php
.github/workflows/test.yaml (26 lines changed)
@@ -9,12 +9,13 @@ on:
       - 'mkdocs.yml'
       - 'LICENSE'
   pull_request:
-env:
-  TINYGO_VERSION: "0.25.0"
 jobs:
   test:
     name: Test
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.operating-system }}
+    strategy:
+      matrix:
+        operating-system: [ubuntu-latest, windows-latest, macos-latest]
     steps:
       - uses: actions/checkout@v3
 
@@ -30,6 +31,7 @@ jobs:
             echo "Run 'go mod tidy' and push it"
             exit 1
           fi
+        if: matrix.operating-system == 'ubuntu-latest'
 
       - name: Lint
         uses: golangci/golangci-lint-action@v3.3.0
@@ -37,11 +39,12 @@ jobs:
           version: v1.49
           args: --deadline=30m
           skip-cache: true # https://github.com/golangci/golangci-lint-action/issues/244#issuecomment-1052197778
+        if: matrix.operating-system == 'ubuntu-latest'
 
-      - name: Install TinyGo
-        run: |
-          wget https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VERSION}/tinygo_${TINYGO_VERSION}_amd64.deb
-          sudo dpkg -i tinygo_${TINYGO_VERSION}_amd64.deb
+      # Install tools
+      - uses: aquaproj/aqua-installer@v1.2.0
+        with:
+          aqua_version: v1.25.0
 
       - name: Run unit tests
         run: make test
@@ -73,12 +76,13 @@ jobs:
         with:
           go-version-file: go.mod
 
-      - name: Install TinyGo
-        run: |
-          wget https://github.com/tinygo-org/tinygo/releases/download/v${TINYGO_VERSION}/tinygo_${TINYGO_VERSION}_amd64.deb
-          sudo dpkg -i tinygo_${TINYGO_VERSION}_amd64.deb
+      # Install tools
+      - uses: aquaproj/aqua-installer@v1.1.2
+        with:
+          aqua_version: v1.25.0
 
       - name: Run module integration tests
+        shell: bash
       run: |
           make test-module-integration
 
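The matrix above fans the Test job out over ubuntu-latest, windows-latest and macos-latest, while the go mod tidy check and the lint step stay gated behind `if: matrix.operating-system == 'ubuntu-latest'`. Individual Go tests sometimes need the same guard at the code level. A minimal sketch of that pattern (not part of this commit; the test name is hypothetical):

    package example_test

    import (
        "runtime"
        "testing"
    )

    // TestLinuxOnlyBehaviour mirrors at the Go level what the workflow does at
    // the step level with `if: matrix.operating-system == 'ubuntu-latest'`.
    func TestLinuxOnlyBehaviour(t *testing.T) {
        if runtime.GOOS != "linux" {
            t.Skipf("skipping on %s: behaviour under test is Linux-specific", runtime.GOOS)
        }
        // ... Linux-specific assertions go here ...
    }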
aqua.yaml (new file, 8 lines)
@@ -0,0 +1,8 @@
+---
+# aqua - Declarative CLI Version Manager
+# https://aquaproj.github.io/
+registries:
+  - type: standard
+    ref: v3.106.0 # renovate: depName=aquaproj/aqua-registry
+packages:
+  - name: tinygo-org/tinygo@v0.26.0
go.mod (115 lines changed)
@@ -39,7 +39,7 @@ require (
     github.com/golang-jwt/jwt v3.2.2+incompatible
     github.com/golang/protobuf v1.5.2
     github.com/google/go-containerregistry v0.12.0
-    github.com/google/licenseclassifier/v2 v2.0.0-pre6
+    github.com/google/licenseclassifier/v2 v2.0.0
     github.com/google/uuid v1.3.0
     github.com/google/wire v0.5.0
     github.com/hashicorp/go-getter v1.6.2
@@ -82,7 +82,40 @@ require (
 )
 
 require (
+    cloud.google.com/go v0.104.0 // indirect
+    cloud.google.com/go/compute v1.12.1 // indirect
     cloud.google.com/go/compute/metadata v0.2.1 // indirect
+    cloud.google.com/go/iam v0.5.0 // indirect
+    cloud.google.com/go/storage v1.23.0 // indirect
+    github.com/Azure/azure-sdk-for-go v67.1.0+incompatible
+    github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
+    github.com/Azure/go-autorest v14.2.0+incompatible // indirect
+    github.com/Azure/go-autorest/autorest v0.11.28
+    github.com/Azure/go-autorest/autorest/adal v0.9.21
+    github.com/Azure/go-autorest/autorest/azure/auth v0.5.11
+    github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 // indirect
+    github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
+    github.com/Azure/go-autorest/logger v0.2.1 // indirect
+    github.com/Azure/go-autorest/tracing v0.6.0 // indirect
+    github.com/BurntSushi/toml v1.2.1 // indirect
+    github.com/GoogleCloudPlatform/docker-credential-gcr v2.0.5+incompatible
+    github.com/MakeNowJust/heredoc v1.0.0 // indirect
+    github.com/Masterminds/goutils v1.1.1 // indirect
+    github.com/Masterminds/semver v1.5.0 // indirect
+    github.com/Masterminds/semver/v3 v3.2.0 // indirect
+    github.com/Masterminds/squirrel v1.5.3 // indirect
+    github.com/Microsoft/go-winio v0.6.0 // indirect
+    github.com/Microsoft/hcsshim v0.9.4 // indirect
+    github.com/OneOfOne/xxhash v1.2.8 // indirect
+    github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 // indirect
+    github.com/VividCortex/ewma v1.1.1 // indirect
+    github.com/acomagu/bufpipe v1.0.3 // indirect
+    github.com/agext/levenshtein v1.2.3 // indirect
+    github.com/agnivade/levenshtein v1.1.1 // indirect
+    github.com/alecthomas/chroma v0.10.0 // indirect
+    github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect
+    github.com/apparentlymart/go-cidr v1.1.0 // indirect
+    github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
     github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
     github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.8 // indirect
     github.com/aws/aws-sdk-go-v2/credentials v1.13.3 // indirect
@@ -133,64 +166,6 @@ require (
     github.com/aws/aws-sdk-go-v2/service/ssooidc v1.13.8 // indirect
     github.com/aws/aws-sdk-go-v2/service/workspaces v1.23.0 // indirect
     github.com/aws/smithy-go v1.13.4 // indirect
-    github.com/emicklei/go-restful/v3 v3.8.0 // indirect
-    github.com/go-openapi/analysis v0.21.4 // indirect
-    github.com/go-openapi/errors v0.20.3 // indirect
-    github.com/go-openapi/loads v0.21.2 // indirect
-    github.com/go-openapi/spec v0.20.7 // indirect
-    github.com/go-openapi/validate v0.22.0 // indirect
-    github.com/googleapis/enterprise-certificate-proxy v0.2.0 // indirect
-    github.com/googleapis/go-type-adapters v1.0.0 // indirect
-    github.com/hashicorp/go-retryablehttp v0.7.1 // indirect
-    github.com/hashicorp/hcl v1.0.0 // indirect
-    github.com/liamg/iamgo v0.0.9 // indirect
-    github.com/liamg/jfather v0.0.7 // indirect
-    github.com/liamg/memoryfs v1.4.3 // indirect
-    github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 // indirect
-    github.com/microsoft/go-rustaudit v0.0.0-20220808201409-204dfee52032 // indirect
-    github.com/oklog/ulid v1.3.1 // indirect
-    github.com/opentracing/opentracing-go v1.2.0 // indirect
-    github.com/pelletier/go-toml/v2 v2.0.5 // indirect
-    github.com/shibumi/go-pathspec v1.3.0 // indirect
-    github.com/tchap/go-patricia/v2 v2.3.1 // indirect
-    go.etcd.io/etcd/api/v3 v3.5.5 // indirect
-    go.mongodb.org/mongo-driver v1.10.0 // indirect
-)
-
-require (
-    cloud.google.com/go v0.104.0 // indirect
-    cloud.google.com/go/compute v1.12.1 // indirect
-    cloud.google.com/go/iam v0.5.0 // indirect
-    cloud.google.com/go/storage v1.23.0 // indirect
-    github.com/Azure/azure-sdk-for-go v67.1.0+incompatible
-    github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
-    github.com/Azure/go-autorest v14.2.0+incompatible // indirect
-    github.com/Azure/go-autorest/autorest v0.11.28
-    github.com/Azure/go-autorest/autorest/adal v0.9.21
-    github.com/Azure/go-autorest/autorest/azure/auth v0.5.11
-    github.com/Azure/go-autorest/autorest/azure/cli v0.4.5 // indirect
-    github.com/Azure/go-autorest/autorest/date v0.3.0 // indirect
-    github.com/Azure/go-autorest/logger v0.2.1 // indirect
-    github.com/Azure/go-autorest/tracing v0.6.0 // indirect
-    github.com/BurntSushi/toml v1.2.1 // indirect
-    github.com/GoogleCloudPlatform/docker-credential-gcr v2.0.5+incompatible
-    github.com/MakeNowJust/heredoc v1.0.0 // indirect
-    github.com/Masterminds/goutils v1.1.1 // indirect
-    github.com/Masterminds/semver v1.5.0 // indirect
-    github.com/Masterminds/semver/v3 v3.2.0 // indirect
-    github.com/Masterminds/squirrel v1.5.3 // indirect
-    github.com/Microsoft/go-winio v0.6.0 // indirect
-    github.com/Microsoft/hcsshim v0.9.4 // indirect
-    github.com/OneOfOne/xxhash v1.2.8 // indirect
-    github.com/ProtonMail/go-crypto v0.0.0-20210428141323-04723f9f07d7 // indirect
-    github.com/VividCortex/ewma v1.1.1 // indirect
-    github.com/acomagu/bufpipe v1.0.3 // indirect
-    github.com/agext/levenshtein v1.2.3 // indirect
-    github.com/agnivade/levenshtein v1.1.1 // indirect
-    github.com/alecthomas/chroma v0.10.0 // indirect
-    github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect
-    github.com/apparentlymart/go-cidr v1.1.0 // indirect
-    github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
     github.com/beorn7/perks v1.0.1 // indirect
     github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect
     github.com/bmatcuk/doublestar v1.3.4 // indirect
@@ -215,6 +190,7 @@ require (
     github.com/docker/go-events v0.0.0-20190806004212-e31b211e4f1c // indirect
     github.com/docker/go-metrics v0.0.1 // indirect
     github.com/docker/go-units v0.5.0 // indirect
+    github.com/emicklei/go-restful/v3 v3.8.0 // indirect
     github.com/emirpasic/gods v1.12.0 // indirect
     github.com/evanphx/json-patch v5.6.0+incompatible // indirect
     github.com/exponent-io/jsonpath v0.0.0-20151013193312-d6023ce2651d // indirect
@@ -226,9 +202,14 @@ require (
     github.com/go-git/go-git/v5 v5.4.2
     github.com/go-gorp/gorp/v3 v3.0.2 // indirect
     github.com/go-logr/logr v1.2.3 // indirect
+    github.com/go-openapi/analysis v0.21.4 // indirect
+    github.com/go-openapi/errors v0.20.3 // indirect
     github.com/go-openapi/jsonpointer v0.19.5 // indirect
     github.com/go-openapi/jsonreference v0.20.0 // indirect
+    github.com/go-openapi/loads v0.21.2 // indirect
+    github.com/go-openapi/spec v0.20.7 // indirect
     github.com/go-openapi/swag v0.22.3 // indirect
+    github.com/go-openapi/validate v0.22.0 // indirect
     github.com/gobwas/glob v0.2.3 // indirect
     github.com/goccy/go-yaml v1.8.2 // indirect
     github.com/gofrs/uuid v4.0.0+incompatible // indirect
@@ -241,16 +222,20 @@ require (
     github.com/google/go-cmp v0.5.9 // indirect
     github.com/google/gofuzz v1.2.0 // indirect
     github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 // indirect
+    github.com/googleapis/enterprise-certificate-proxy v0.2.0 // indirect
     github.com/googleapis/gax-go/v2 v2.6.0 // indirect
+    github.com/googleapis/go-type-adapters v1.0.0 // indirect
     github.com/gorilla/mux v1.8.0 // indirect
     github.com/gosuri/uitable v0.0.4 // indirect
     github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 // indirect
     github.com/hashicorp/errwrap v1.1.0 // indirect
     github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
     github.com/hashicorp/go-multierror v1.1.1
+    github.com/hashicorp/go-retryablehttp v0.7.1 // indirect
     github.com/hashicorp/go-safetemp v1.0.0 // indirect
     github.com/hashicorp/go-uuid v1.0.3 // indirect
     github.com/hashicorp/go-version v1.6.0 // indirect
+    github.com/hashicorp/hcl v1.0.0 // indirect
     github.com/hashicorp/hcl/v2 v2.14.1 // indirect
     github.com/huandu/xstrings v1.3.3 // indirect
     github.com/imdario/mergo v0.3.13 // indirect
@@ -267,13 +252,18 @@ require (
     github.com/knqyf263/nested v0.0.1
     github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 // indirect
     github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 // indirect
+    github.com/liamg/iamgo v0.0.9 // indirect
+    github.com/liamg/jfather v0.0.7 // indirect
+    github.com/liamg/memoryfs v1.4.3 // indirect
     github.com/lib/pq v1.10.6 // indirect
     github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect
+    github.com/lunixbochs/struc v0.0.0-20200707160740-784aaebc1d40 // indirect
     github.com/magiconair/properties v1.8.6 // indirect
     github.com/mattn/go-colorable v0.1.12 // indirect
     github.com/mattn/go-isatty v0.0.14 // indirect
     github.com/mattn/go-runewidth v0.0.13 // indirect
     github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect
+    github.com/microsoft/go-rustaudit v0.0.0-20220808201409-204dfee52032 // indirect
     github.com/mitchellh/copystructure v1.2.0 // indirect
     github.com/mitchellh/go-homedir v1.1.0 // indirect
     github.com/mitchellh/go-testing-interface v1.0.0 // indirect
@@ -292,14 +282,17 @@ require (
     github.com/monochromegane/go-gitignore v0.0.0-20200626010858-205db1a8cc00 // indirect
     github.com/morikuni/aec v1.0.0 // indirect
     github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
+    github.com/oklog/ulid v1.3.1 // indirect
     github.com/olekukonko/tablewriter v0.0.5 // indirect
     github.com/opencontainers/go-digest v1.0.0
     github.com/opencontainers/image-spec v1.1.0-rc2
     github.com/opencontainers/runc v1.1.3 // indirect
     github.com/opencontainers/runtime-spec v1.0.3-0.20220311020903-6969a0a09ab1 // indirect
     github.com/opencontainers/selinux v1.10.1 // indirect
+    github.com/opentracing/opentracing-go v1.2.0 // indirect
     github.com/owenrumney/squealer v1.0.1-0.20220510063705-c0be93f0edea // indirect
     github.com/pelletier/go-toml v1.9.5 // indirect
+    github.com/pelletier/go-toml/v2 v2.0.5 // indirect
     github.com/peterbourgon/diskv v2.0.1+incompatible // indirect
     github.com/pkg/errors v0.9.1 // indirect
     github.com/pmezard/go-difflib v1.0.0 // indirect
@@ -314,6 +307,7 @@ require (
     github.com/russross/blackfriday v1.6.0 // indirect
     github.com/saracen/walker v0.0.0-20191201085201-324a081bae7e
     github.com/sergi/go-diff v1.1.0 // indirect
+    github.com/shibumi/go-pathspec v1.3.0 // indirect
     github.com/shopspring/decimal v1.2.0 // indirect
     github.com/sirupsen/logrus v1.9.0 // indirect
     github.com/spdx/tools-golang v0.3.0
@@ -322,6 +316,7 @@ require (
     github.com/spf13/jwalterweatherman v1.1.0 // indirect
     github.com/stretchr/objx v0.5.0 // indirect
     github.com/subosito/gotenv v1.4.1 // indirect
+    github.com/tchap/go-patricia/v2 v2.3.1 // indirect
     github.com/ulikunitz/xz v0.5.10 // indirect
     github.com/vbatts/tar-split v0.11.2 // indirect
     github.com/xanzy/ssh-agent v0.3.0 // indirect
@@ -332,6 +327,8 @@ require (
     github.com/yuin/gopher-lua v0.0.0-20210529063254-f4c35e4016d9 // indirect
     github.com/zclconf/go-cty v1.10.0 // indirect
     github.com/zclconf/go-cty-yaml v1.0.2 // indirect
+    go.etcd.io/etcd/api/v3 v3.5.5 // indirect
+    go.mongodb.org/mongo-driver v1.10.0 // indirect
     go.opencensus.io v0.23.0 // indirect
     go.starlark.net v0.0.0-20200306205701-8dd3e2ee1dd5 // indirect
     go.uber.org/atomic v1.10.0 // indirect
go.sum (4 lines changed)
@@ -869,8 +869,8 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/
 github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0=
 github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
-github.com/google/licenseclassifier/v2 v2.0.0-pre6 h1:ytJvfOEiKcN1m5vkAJXkK2olICdrXqwNKFkBpKQ5Q+I=
-github.com/google/licenseclassifier/v2 v2.0.0-pre6/go.mod h1:cOjbdH0kyC9R22sdQbYsFkto4NGCAc+ZSwbeThazEtM=
+github.com/google/licenseclassifier/v2 v2.0.0 h1:1Y57HHILNf4m0ABuMVb6xk4vAJYEUO0gDxNpog0pyeA=
+github.com/google/licenseclassifier/v2 v2.0.0/go.mod h1:cOjbdH0kyC9R22sdQbYsFkto4NGCAc+ZSwbeThazEtM=
 github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
 github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0=
[file name not captured; goreleaser configuration]
@@ -12,9 +12,13 @@ builds:
     goos:
       - darwin
       - linux
+      - windows
     goarch:
       - amd64
       - arm64
+    ignore:
+      - goos: windows
+        goarch: arm64
 
 archives:
   -
@@ -25,7 +29,11 @@ archives:
       arm64: ARM64
       darwin: macOS
       linux: Linux
+      windows: Windows
     files:
       - README.md
       - LICENSE
       - contrib/*.tpl
+    format_overrides:
+      - goos: windows
+        format: zip
[file name not captured; goreleaser configuration]
@@ -13,6 +13,7 @@ builds:
       - darwin
       - linux
       - freebsd
+      - windows
     goarch:
       - amd64
       - 386
@@ -30,6 +31,16 @@ builds:
         goarch: arm
       - goos: freebsd
        goarch: arm64
+      - goos: windows
+        goarch: 386
+      - goos: windows
+        goarch: arm
+      - goos: windows
+        goarch: arm64
+      - goos: windows
+        goarch: s390x
+      - goos: windows
+        goarch: ppc64le
 
 release:
   extra_files:
@@ -59,6 +70,7 @@ nfpms:
       netbsd: NetBSD
       freebsd: FreeBSD
       dragonfly: DragonFlyBSD
+      windows: Windows
     contents:
       - src: contrib/*.tpl
         dst: /usr/local/share/trivy/templates
@@ -83,6 +95,9 @@ archives:
       - README.md
       - LICENSE
       - contrib/*.tpl
+    format_overrides:
+      - goos: windows
+        format: zip
 
 
 brews:
pkg/cloud/aws/cache/cache.go (2 lines added)
@@ -52,6 +52,7 @@ func (c *Cache) load() (*CacheData, error) {
     if err != nil {
         return nil, ErrCacheNotFound
     }
+    defer func() { _ = m.Close() }()
 
     var data CacheData
     if err := json.NewDecoder(m).Decode(&data); err != nil {
@@ -127,5 +128,6 @@ func (c *Cache) AddServices(state *state.State, includedServices []string) error
     if err != nil {
         return err
     }
+    defer func() { _ = f.Close() }()
     return json.NewEncoder(f).Encode(data)
 }
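Both hunks add a deferred Close on the file handle the cache opens. Releasing handles promptly matters once the suite also runs on Windows, where a file that is still open generally cannot be deleted or renamed. A minimal sketch of the same pattern (illustrative only; writeCache and its path are made up, this is not the cache code itself):

    package main

    import (
        "encoding/json"
        "log"
        "os"
    )

    func writeCache(path string, data any) error {
        f, err := os.Create(path)
        if err != nil {
            return err
        }
        // Same shape as the hunks above: release the handle even on early
        // return, and deliberately discard the Close error.
        defer func() { _ = f.Close() }()
        return json.NewEncoder(f).Encode(data)
    }

    func main() {
        if err := writeCache("example-cache.json", map[string]string{"k": "v"}); err != nil {
            log.Fatal(err)
        }
    }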
[file name not captured]
@@ -6,11 +6,11 @@ import (
     "path/filepath"
     "testing"
 
+    "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
 
     "github.com/aquasecurity/trivy/pkg/compliance/report"
     "github.com/aquasecurity/trivy/pkg/types"
-    "github.com/stretchr/testify/assert"
 )
 
 func TestJSONWriter_Write(t *testing.T) {
[file name not captured]
@@ -3,10 +3,11 @@ package report_test
 import (
     "testing"
 
+    "github.com/samber/lo"
+    "github.com/stretchr/testify/assert"
+
     "github.com/aquasecurity/trivy/pkg/compliance/report"
     "github.com/aquasecurity/trivy/pkg/types"
-    "github.com/stretchr/testify/assert"
-    "k8s.io/utils/pointer"
 )
 
 func TestBuildSummary(t *testing.T) {
@@ -59,13 +60,13 @@ func TestBuildSummary(t *testing.T) {
                 ID: "1.0",
                 Name: "Non-root containers",
                 Severity: "MEDIUM",
-                TotalFail: pointer.Int(1),
+                TotalFail: lo.ToPtr(1),
             },
             {
                 ID: "1.1",
                 Name: "Immutable container file systems",
                 Severity: "LOW",
-                TotalFail: pointer.Int(1),
+                TotalFail: lo.ToPtr(1),
             },
         },
     },
@@ -126,19 +127,19 @@ func TestBuildSummary(t *testing.T) {
                 ID: "1.0",
                 Name: "Non-root containers",
                 Severity: "MEDIUM",
-                TotalFail: pointer.Int(1),
+                TotalFail: lo.ToPtr(1),
             },
             {
                 ID: "1.1",
                 Name: "Immutable container file systems",
                 Severity: "LOW",
-                TotalFail: pointer.Int(1),
+                TotalFail: lo.ToPtr(1),
             },
             {
                 ID: "1.2",
                 Name: "tzdata - new upstream version",
                 Severity: "LOW",
-                TotalFail: pointer.Int(1),
+                TotalFail: lo.ToPtr(1),
             },
         },
     },
[file name not captured]
@@ -67,6 +67,8 @@ func TestTableWriter_Write(t *testing.T) {
             require.NoError(t, err)
 
             want, err := os.ReadFile(tt.want)
+            want = bytes.ReplaceAll(want, []byte("\r"), []byte(""))
+
             require.NoError(t, err)
 
             assert.Equal(t, string(want), buf.String())
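The added ReplaceAll call strips carriage returns from the golden file before comparing, so a CRLF checkout on a Windows runner still matches the LF output the writer produces. A self-contained sketch of the same normalisation (the golden file name and expected output are made up):

    package example_test

    import (
        "bytes"
        "os"
        "testing"
    )

    func TestGoldenFile(t *testing.T) {
        got := []byte("id\tname\n1\tfoo\n") // output produced by the code under test

        want, err := os.ReadFile("testdata/table.golden") // hypothetical golden file
        if err != nil {
            t.Fatal(err)
        }
        // Normalise CRLF so the comparison is line-ending agnostic.
        want = bytes.ReplaceAll(want, []byte("\r"), []byte(""))

        if !bytes.Equal(want, got) {
            t.Fatalf("golden mismatch:\nwant %q\ngot  %q", want, got)
        }
    }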
[file name not captured]
@@ -32,3 +32,7 @@ func InitDB(t *testing.T, fixtureFiles []string) string {
 
     return dir
 }
+
+func Close() error {
+    return db.Close()
+}
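The new Close helper lets a test release the database handle that InitDB opened; the Red Hat scanner test in the next section defers it right after initialisation. A usage sketch mirroring that hunk (the import path and fixture path are assumptions, not taken from this commit):

    package scanner_test

    import (
        "testing"

        "github.com/aquasecurity/trivy/pkg/dbtest" // assumed import path for the helper above
    )

    func TestDetect(t *testing.T) {
        dbtest.InitDB(t, []string{"testdata/fixtures/example.yaml"}) // placeholder fixture
        // Close the DB before the temp dir is cleaned up; Windows cannot
        // delete files that still have open handles.
        defer func() { _ = dbtest.Close() }()

        // ... exercise the scanner against the fixture DB ...
    }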
[file name not captured]
@@ -363,6 +363,7 @@ func TestScanner_Detect(t *testing.T) {
     for _, tt := range tests {
         t.Run(tt.name, func(t *testing.T) {
             dbtest.InitDB(t, tt.fixtures)
+            defer func() { _ = dbtest.Close() }()
 
             s := redhat.NewScanner()
             got, err := s.Detect(tt.args.osVer, nil, tt.args.pkgs)
[file name not captured]
@@ -2,7 +2,6 @@ package apk
 
 import (
     "encoding/json"
-    "io/ioutil"
     "net/http"
     "net/http/httptest"
     "os"
@@ -11,16 +10,15 @@ import (
     "testing"
     "time"
 
-    "github.com/stretchr/testify/assert"
-
     "github.com/kylelemons/godebug/pretty"
+    "github.com/stretchr/testify/assert"
 
     "github.com/aquasecurity/trivy/pkg/fanal/types"
 )
 
 func TestAnalyze(t *testing.T) {
     testServer := httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
-        content, err := ioutil.ReadFile("testdata/history_v3.9.json")
+        content, err := os.ReadFile("testdata/history_v3.9.json")
         if err != nil {
             http.Error(res, err.Error(), http.StatusInternalServerError)
             return
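The functional change here is replacing the deprecated ioutil.ReadFile with os.ReadFile, its drop-in successor since Go 1.16; the rest is import reordering. A minimal sketch of the replacement (the fixture path is reused purely for illustration):

    package main

    import (
        "fmt"
        "os"
    )

    func main() {
        // os.ReadFile supersedes io/ioutil.ReadFile (deprecated since Go 1.16);
        // the behaviour is the same: read the whole file into memory.
        b, err := os.ReadFile("testdata/history_v3.9.json")
        if err != nil {
            fmt.Fprintln(os.Stderr, err)
            os.Exit(1)
        }
        fmt.Printf("read %d bytes\n", len(b))
    }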
[file name not captured]
@@ -2,7 +2,7 @@ package dockerfile
 
 import (
     "context"
-    "os"
+    "strings"
     "testing"
 
     "github.com/stretchr/testify/assert"
@@ -26,33 +26,9 @@ func Test_dockerConfigAnalyzer_Analyze(t *testing.T) {
             Files: map[types.HandlerType][]types.File{
                 types.MisconfPostHandler: {
                     {
                         Type: types.Dockerfile,
                         Path: "testdata/Dockerfile.deployment",
-                        Content: []byte(`FROM foo
-COPY . /
-RUN echo hello
-`),
-                    },
-                },
-            },
-        },
-    },
-    {
-        name: "happy path with multi-stage",
-        inputFile: "testdata/Dockerfile.multistage",
-        want: &analyzer.AnalysisResult{
-            Files: map[types.HandlerType][]types.File{
-                types.MisconfPostHandler: {
-                    {
-                        Type: types.Dockerfile,
-                        Path: "testdata/Dockerfile.multistage",
-                        Content: []byte(`FROM foo AS build
-COPY . /
-RUN echo hello
-
-FROM scratch
-COPY --from=build /bar /bar
-`),
+                        Content: []byte(`FROM scratch`),
                     },
                 },
             },
@@ -62,15 +38,12 @@ COPY --from=build /bar /bar
 
     for _, tt := range tests {
         t.Run(tt.name, func(t *testing.T) {
-            f, err := os.Open(tt.inputFile)
-            require.NoError(t, err)
-            defer f.Close()
-
+            r := strings.NewReader("FROM scratch")
             a := dockerConfigAnalyzer{}
             ctx := context.Background()
             got, err := a.Analyze(ctx, analyzer.AnalysisInput{
                 FilePath: tt.inputFile,
-                Content: f,
+                Content: r,
             })
 
             if tt.wantErr != "" {
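Rather than opening fixture files from disk, the test now hands the analyzer an in-memory strings.Reader, which sidesteps per-OS line-ending and file-handle differences. A stripped-down sketch of the idea (the analyze function here is a hypothetical stand-in, not the real analyzer interface):

    package main

    import (
        "fmt"
        "io"
        "strings"
    )

    // analyze is a stand-in for an analyzer that takes its input as an io.Reader.
    func analyze(r io.Reader) (string, error) {
        b, err := io.ReadAll(r)
        if err != nil {
            return "", err
        }
        return strings.TrimSpace(string(b)), nil
    }

    func main() {
        // In-memory input: identical bytes on every OS, no file handle to leak.
        got, err := analyze(strings.NewReader("FROM scratch"))
        if err != nil {
            panic(err)
        }
        fmt.Println(got) // FROM scratch
    }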
[file name not captured]
@@ -2,6 +2,7 @@ package helm
 
 import (
     "archive/tar"
+    "bytes"
     "compress/gzip"
     "context"
     "errors"
@@ -28,7 +29,9 @@ const maxTarSize = 209_715_200 // 200MB
 type helmConfigAnalyzer struct{}
 
 func (a helmConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput) (*analyzer.AnalysisResult, error) {
+    isAnArchive := false
     if isArchive(input.FilePath) {
+        isAnArchive = true
         if !isHelmChart(input.FilePath, input.Content) {
             return nil, nil
         }
@@ -42,6 +45,10 @@ func (a helmConfigAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisIn
     if err != nil {
         return nil, xerrors.Errorf("failed to read %s: %w", input.FilePath, err)
     }
+    if !isAnArchive {
+        // if it's not an archive we need to remove the carriage returns
+        b = bytes.ReplaceAll(b, []byte("\r"), []byte(""))
+    }
 
     return &analyzer.AnalysisResult{
         Files: map[types.HandlerType][]types.File{
[file name not captured]
@@ -3,6 +3,7 @@ package helm
 import (
     "context"
     "os"
+    "path/filepath"
     "testing"
 
     "github.com/stretchr/testify/assert"
@@ -30,13 +31,13 @@ func Test_helmConfigAnalyzer_Analyze(t *testing.T) {
                 namespaces: []string{"main"},
                 policyPaths: []string{"../testdata/kubernetes.rego"},
             },
-            inputFile: "testdata/Chart.yaml",
+            inputFile: filepath.Join("testdata", "Chart.yaml"),
             want: &analyzer.AnalysisResult{
                 Files: map[types.HandlerType][]types.File{
                     types.MisconfPostHandler: {
                         {
                             Type: "helm",
-                            Path: "testdata/Chart.yaml",
+                            Path: filepath.Join("testdata", "Chart.yaml"),
                             Content: []byte(`apiVersion: v2
 name: testchart
 description: A Helm chart for Kubernetes
@@ -73,13 +74,13 @@ appVersion: "1.16.0"
                 namespaces: []string{"main"},
                 policyPaths: []string{"../testdata/kubernetes.rego"},
             },
-            inputFile: "testdata/values.yaml",
+            inputFile: filepath.Join("testdata", "values.yaml"),
             want: &analyzer.AnalysisResult{
                 Files: map[types.HandlerType][]types.File{
                     types.MisconfPostHandler: {
                         {
                             Type: "helm",
-                            Path: "testdata/values.yaml",
+                            Path: filepath.Join("testdata", "values.yaml"),
                             Content: []byte(`# Default values for testchart.
 # This is a YAML-formatted file.
 # Declare variables to be passed into your templates.
@@ -178,140 +179,137 @@ affinity: {}
                 namespaces: []string{"main"},
                 policyPaths: []string{"../testdata/kubernetes.rego"},
             },
-            inputFile: "testdata/testchart.tgz",
+            inputFile: filepath.Join("testdata", "testchart.tgz"),
             want: &analyzer.AnalysisResult{
                 Files: map[types.HandlerType][]types.File{
                     types.MisconfPostHandler: {
                         {
                             Type: "helm",
-                            Path: "testdata/testchart.tgz",
+                            Path: filepath.Join("testdata", "testchart.tgz"),
                             Content: []uint8{
                                 [raw bytes of testdata/testchart.tgz follow; the hunk only reflows the []uint8 literal into fewer, longer lines without changing the byte data, and the capture is truncated partway through]
|
0x4a, 0xb5, 0xdd, 0x4e, 0x85, 0x3f, 0xbe, 0x78, 0xf3, 0x86, 0xfe, 0x51, 0x79, 0x1f,
|
||||||
0xd0, 0xe1, 0x83, 0xe9, 0x53, 0x4a, 0xfd, 0x2a, 0xb6, 0x11, 0x47, 0x2d, 0x4, 0x7, 0xfc,
|
0x42, 0xf3, 0x42, 0x40, 0x9, 0x84, 0x29, 0xfa, 0xd, 0xa5, 0x28, 0xf1, 0x8e, 0x68, 0xe7,
|
||||||
0x66, 0xed, 0x0, 0x49, 0x4a, 0xb5, 0xdd, 0x4e, 0x85, 0x3f, 0xbe, 0x78, 0xf3, 0x86, 0xfe,
|
0x81, 0xff, 0xfd, 0x98, 0x89, 0xc6, 0x58, 0x77, 0x29, 0x68, 0x4b, 0x36, 0x36, 0x96,
|
||||||
0x51, 0x79, 0x1f, 0x42, 0xf3, 0x42, 0x40, 0x9, 0x84, 0x29, 0xfa, 0xd, 0xa5, 0x28, 0xf1,
|
0x2f, 0x6f, 0x14, 0x15, 0xc1, 0x58, 0xcf, 0x10, 0xe3, 0x9, 0x9f, 0x4c, 0xe0, 0x35, 0xbf,
|
||||||
0x8e, 0x68, 0xe7, 0x81, 0xff, 0xfd, 0x98, 0x89, 0xc6, 0x58, 0x77, 0x29, 0x68, 0x4b,
|
0x8, 0xaf, 0xbf, 0x75, 0x77, 0xef, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a,
|
||||||
0x36, 0x36, 0x96, 0x2f, 0x6f, 0x14, 0x15, 0xc1, 0x58, 0xcf, 0x10, 0xe3, 0x9, 0x9f, 0x4c,
|
0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xd8, 0xda, 0xfa, 0x17, 0xe2,
|
||||||
0xe0, 0x35, 0xbf, 0x8, 0xaf, 0xbf, 0x75, 0x77, 0xef, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0,
|
0x8a, 0xf9, 0x39, 0x0, 0x28, 0x0, 0x0,
|
||||||
0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xe8, 0xa0, 0x83, 0xe, 0x3a, 0xd8, 0xda,
|
|
||||||
0xfa, 0x17, 0xe2, 0x8a, 0xf9, 0x39, 0x0, 0x28, 0x0, 0x0,
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -324,7 +322,7 @@ affinity: {}
|
|||||||
namespaces: []string{"main"},
|
namespaces: []string{"main"},
|
||||||
policyPaths: []string{"../testdata/kubernetes.rego"},
|
policyPaths: []string{"../testdata/kubernetes.rego"},
|
||||||
},
|
},
|
||||||
inputFile: "testdata/nope.tgz",
|
inputFile: filepath.Join("testdata", "nope.tgz"),
|
||||||
want: nil,
|
want: nil,
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -396,17 +394,17 @@ func Test_helmConfigAnalyzer_Required(t *testing.T) {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "testchart.tgz",
|
name: "testchart.tgz",
|
||||||
filePath: "testdata/testchart.tgz",
|
filePath: filepath.Join("testdata", "testchart.tgz"),
|
||||||
want: true,
|
want: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "testchart.tar.gz",
|
name: "testchart.tar.gz",
|
||||||
filePath: "testdata/testchart.tar.gz",
|
filePath: filepath.Join("testdata", "testchart.tar.gz"),
|
||||||
want: true,
|
want: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "nope.tgz",
|
name: "nope.tgz",
|
||||||
filePath: "testdata/nope.tgz",
|
filePath: filepath.Join("testdata", "nope.tgz"),
|
||||||
want:     true, // it's a tarball after all
|
want:     true, // it's a tarball after all
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
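The hunks above swap hard-coded "testdata/..." strings for filepath.Join, so the expected fixture paths match the separator the analyzer reports on Windows. A minimal sketch of the idea (the test name below is illustrative, not from this diff):

package example

import (
	"path/filepath"
	"runtime"
	"testing"
)

// TestFixturePath shows why filepath.Join is preferred over a literal
// "testdata/testchart.tgz": on Windows the joined path uses backslashes,
// which is what os.Stat and the analyzers will hand back.
func TestFixturePath(t *testing.T) {
	p := filepath.Join("testdata", "testchart.tgz")
	if runtime.GOOS == "windows" && p != `testdata\testchart.tgz` {
		t.Fatalf("unexpected path: %s", p)
	}
}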
|
|||||||
@@ -2,7 +2,7 @@ package json
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
@@ -13,111 +13,22 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
|
func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
|
||||||
type args struct {
|
|
||||||
namespaces []string
|
|
||||||
policyPaths []string
|
|
||||||
}
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
args args
|
|
||||||
inputFile string
|
inputFile string
|
||||||
want *analyzer.AnalysisResult
|
want *analyzer.AnalysisResult
|
||||||
wantErr string
|
wantErr string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "happy path",
|
name: "happy path",
|
||||||
args: args{
|
inputFile: "test.json",
|
||||||
namespaces: []string{"main"},
|
|
||||||
policyPaths: []string{"../testdata/kubernetes.rego"},
|
|
||||||
},
|
|
||||||
inputFile: "testdata/deployment.json",
|
|
||||||
want: &analyzer.AnalysisResult{
|
want: &analyzer.AnalysisResult{
|
||||||
Files: map[types.HandlerType][]types.File{
|
Files: map[types.HandlerType][]types.File{
|
||||||
types.MisconfPostHandler: {
|
types.MisconfPostHandler: {
|
||||||
{
|
{
|
||||||
Type: "json",
|
Type: "json",
|
||||||
Path: "testdata/deployment.json",
|
Path: "test.json",
|
||||||
Content: []byte(`{
|
Content: []byte(`{}`),
|
||||||
"apiVersion": "apps/v1",
|
|
||||||
"kind": "Deployment",
|
|
||||||
"metadata": {
|
|
||||||
"name": "hello-kubernetes"
|
|
||||||
},
|
|
||||||
"spec": {
|
|
||||||
"replicas": 3
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "deny",
|
|
||||||
args: args{
|
|
||||||
namespaces: []string{"main"},
|
|
||||||
policyPaths: []string{"../testdata/kubernetes.rego"},
|
|
||||||
},
|
|
||||||
inputFile: "testdata/deployment_deny.json",
|
|
||||||
want: &analyzer.AnalysisResult{
|
|
||||||
Files: map[types.HandlerType][]types.File{
|
|
||||||
types.MisconfPostHandler: {
|
|
||||||
{
|
|
||||||
Type: "json",
|
|
||||||
Path: "testdata/deployment_deny.json",
|
|
||||||
Content: []byte(`{
|
|
||||||
"apiVersion": "apps/v1",
|
|
||||||
"kind": "Deployment",
|
|
||||||
"metadata": {
|
|
||||||
"name": "hello-kubernetes"
|
|
||||||
},
|
|
||||||
"spec": {
|
|
||||||
"replicas": 4
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "json array",
|
|
||||||
args: args{
|
|
||||||
namespaces: []string{"main"},
|
|
||||||
policyPaths: []string{"../testdata/kubernetes.rego"},
|
|
||||||
},
|
|
||||||
inputFile: "testdata/array.json",
|
|
||||||
want: &analyzer.AnalysisResult{
|
|
||||||
Files: map[types.HandlerType][]types.File{
|
|
||||||
types.MisconfPostHandler: {
|
|
||||||
{
|
|
||||||
Type: "json",
|
|
||||||
Path: "testdata/array.json",
|
|
||||||
Content: []byte(`[
|
|
||||||
{
|
|
||||||
"apiVersion": "apps/v1",
|
|
||||||
"kind": "Deployment",
|
|
||||||
"metadata": {
|
|
||||||
"name": "hello-kubernetes"
|
|
||||||
},
|
|
||||||
"spec": {
|
|
||||||
"replicas": 4
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"apiVersion": "apps/v2",
|
|
||||||
"kind": "Deployment",
|
|
||||||
"metadata": {
|
|
||||||
"name": "hello-kubernetes"
|
|
||||||
},
|
|
||||||
"spec": {
|
|
||||||
"replicas": 5
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
`),
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -127,16 +38,12 @@ func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
|
|||||||
|
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
f, err := os.Open(tt.inputFile)
|
r := strings.NewReader("{}")
|
||||||
require.NoError(t, err)
|
|
||||||
defer f.Close()
|
|
||||||
|
|
||||||
s := jsonConfigAnalyzer{}
|
s := jsonConfigAnalyzer{}
|
||||||
|
got, err := s.Analyze(context.Background(), analyzer.AnalysisInput{
|
||||||
ctx := context.Background()
|
|
||||||
got, err := s.Analyze(ctx, analyzer.AnalysisInput{
|
|
||||||
FilePath: tt.inputFile,
|
FilePath: tt.inputFile,
|
||||||
Content: f,
|
Content: r,
|
||||||
})
|
})
|
||||||
|
|
||||||
if tt.wantErr != "" {
|
if tt.wantErr != "" {
|
||||||
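Replacing os.Open of a fixture with strings.NewReader feeds the analyzer fixed, in-memory content, so the test no longer depends on how the file was checked out (CRLF vs LF) or on per-OS file handling. A rough, self-contained sketch with hypothetical content:

package example

import (
	"strings"
	"testing"
)

func TestAnalyzeFromMemory(t *testing.T) {
	// The content is defined inline, so line endings are identical on
	// every platform; nothing is read from disk.
	r := strings.NewReader("{}")

	buf := make([]byte, 2)
	if _, err := r.Read(buf); err != nil {
		t.Fatal(err)
	}
	if string(buf) != "{}" {
		t.Fatalf("unexpected content: %q", buf)
	}
}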
|
|||||||
@@ -2,7 +2,7 @@ package yaml
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
@@ -13,142 +13,22 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
|
func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
|
||||||
type args struct {
|
|
||||||
namespaces []string
|
|
||||||
policyPaths []string
|
|
||||||
}
|
|
||||||
tests := []struct {
|
tests := []struct {
|
||||||
name string
|
name string
|
||||||
args args
|
|
||||||
inputFile string
|
inputFile string
|
||||||
want *analyzer.AnalysisResult
|
want *analyzer.AnalysisResult
|
||||||
wantErr string
|
wantErr string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "happy path",
|
name: "happy path",
|
||||||
args: args{
|
inputFile: "test.yaml",
|
||||||
namespaces: []string{"main"},
|
|
||||||
policyPaths: []string{"../testdata/kubernetes.rego"},
|
|
||||||
},
|
|
||||||
inputFile: "testdata/deployment.yaml",
|
|
||||||
want: &analyzer.AnalysisResult{
|
want: &analyzer.AnalysisResult{
|
||||||
Files: map[types.HandlerType][]types.File{
|
Files: map[types.HandlerType][]types.File{
|
||||||
types.MisconfPostHandler: {
|
types.MisconfPostHandler: {
|
||||||
{
|
{
|
||||||
Type: "yaml",
|
Type: "yaml",
|
||||||
Path: "testdata/deployment.yaml",
|
Path: "test.yaml",
|
||||||
Content: []byte(`apiVersion: apps/v1
|
Content: []byte(`- abc`),
|
||||||
kind: Deployment
|
|
||||||
metadata:
|
|
||||||
name: hello-kubernetes
|
|
||||||
spec:
|
|
||||||
replicas: 3
|
|
||||||
`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "deny",
|
|
||||||
args: args{
|
|
||||||
namespaces: []string{"main"},
|
|
||||||
policyPaths: []string{"../testdata/kubernetes.rego"},
|
|
||||||
},
|
|
||||||
inputFile: "testdata/deployment_deny.yaml",
|
|
||||||
want: &analyzer.AnalysisResult{
|
|
||||||
OS: (*types.OS)(nil),
|
|
||||||
PackageInfos: []types.PackageInfo(nil),
|
|
||||||
Applications: []types.Application(nil),
|
|
||||||
Files: map[types.HandlerType][]types.File{
|
|
||||||
types.MisconfPostHandler: {
|
|
||||||
{
|
|
||||||
Type: "yaml",
|
|
||||||
Path: "testdata/deployment_deny.yaml",
|
|
||||||
Content: []byte(`apiVersion: apps/v1
|
|
||||||
kind: Deployment
|
|
||||||
metadata:
|
|
||||||
name: hello-kubernetes
|
|
||||||
spec:
|
|
||||||
replicas: 4
|
|
||||||
`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "happy path using anchors",
|
|
||||||
args: args{
|
|
||||||
namespaces: []string{"main"},
|
|
||||||
policyPaths: []string{"testdata/deny.rego"},
|
|
||||||
},
|
|
||||||
inputFile: "testdata/anchor.yaml",
|
|
||||||
want: &analyzer.AnalysisResult{
|
|
||||||
OS: (*types.OS)(nil),
|
|
||||||
PackageInfos: []types.PackageInfo(nil),
|
|
||||||
Applications: []types.Application(nil),
|
|
||||||
Files: map[types.HandlerType][]types.File{
|
|
||||||
types.MisconfPostHandler: {
|
|
||||||
{
|
|
||||||
Type: "yaml",
|
|
||||||
Path: "testdata/anchor.yaml",
|
|
||||||
Content: []byte(`default: &default
|
|
||||||
line: single line
|
|
||||||
|
|
||||||
john: &J
|
|
||||||
john_name: john
|
|
||||||
fred: &F
|
|
||||||
fred_name: fred
|
|
||||||
|
|
||||||
main:
|
|
||||||
<<: *default
|
|
||||||
name:
|
|
||||||
<<: [*J, *F]
|
|
||||||
comment: |
|
|
||||||
multi
|
|
||||||
line
|
|
||||||
`),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "multiple yaml",
|
|
||||||
args: args{
|
|
||||||
namespaces: []string{"main"},
|
|
||||||
policyPaths: []string{"../testdata/kubernetes.rego"},
|
|
||||||
},
|
|
||||||
inputFile: "testdata/multiple.yaml",
|
|
||||||
want: &analyzer.AnalysisResult{
|
|
||||||
OS: (*types.OS)(nil),
|
|
||||||
PackageInfos: []types.PackageInfo(nil),
|
|
||||||
Applications: []types.Application(nil),
|
|
||||||
Files: map[types.HandlerType][]types.File{
|
|
||||||
types.MisconfPostHandler: {
|
|
||||||
{
|
|
||||||
Type: "yaml",
|
|
||||||
Path: "testdata/multiple.yaml",
|
|
||||||
Content: []byte(`apiVersion: apps/v1
|
|
||||||
kind: Deployment
|
|
||||||
metadata:
|
|
||||||
name: hello-kubernetes
|
|
||||||
spec:
|
|
||||||
replicas: 4
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
apiVersion: v1
|
|
||||||
kind: Service
|
|
||||||
metadata:
|
|
||||||
name: hello-kubernetes
|
|
||||||
spec:
|
|
||||||
ports:
|
|
||||||
- protocol: TCP
|
|
||||||
port: 80
|
|
||||||
targetPort: 8080
|
|
||||||
`),
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -157,15 +37,12 @@ spec:
|
|||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
f, err := os.Open(tt.inputFile)
|
r := strings.NewReader("- abc")
|
||||||
require.NoError(t, err)
|
|
||||||
defer f.Close()
|
|
||||||
|
|
||||||
a := yamlConfigAnalyzer{}
|
a := yamlConfigAnalyzer{}
|
||||||
ctx := context.Background()
|
ctx := context.Background()
|
||||||
got, err := a.Analyze(ctx, analyzer.AnalysisInput{
|
got, err := a.Analyze(ctx, analyzer.AnalysisInput{
|
||||||
FilePath: tt.inputFile,
|
FilePath: tt.inputFile,
|
||||||
Content: f,
|
Content: r,
|
||||||
})
|
})
|
||||||
|
|
||||||
if tt.wantErr != "" {
|
if tt.wantErr != "" {
|
||||||
|
|||||||
@@ -3,8 +3,11 @@ package binary
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
"os"
|
||||||
|
"runtime"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/samber/lo"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
@@ -73,8 +76,8 @@ func Test_gobinaryLibraryAnalyzer_Required(t *testing.T) {
|
|||||||
want bool
|
want bool
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "file perm 0755",
|
name: "executable file",
|
||||||
filePath: "testdata/0755",
|
filePath: lo.Ternary(runtime.GOOS == "windows", "testdata/binary.exe", "testdata/0755"),
|
||||||
want: true,
|
want: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
1
pkg/fanal/analyzer/language/golang/binary/testdata/binary.exe
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
exe
|
||||||
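Executable detection differs per OS, so the Go binary analyzer test now picks its fixture with lo.Ternary on runtime.GOOS, and a small binary.exe file is vendored for Windows. A sketch of the selection (fixture names taken from the diff, wrapper function hypothetical):

package main

import (
	"fmt"
	"runtime"

	"github.com/samber/lo"
)

func fixturePath() string {
	// On Windows the analyzer looks for a .exe fixture, elsewhere for a
	// file carrying the 0755 permission bits; pick whichever applies.
	return lo.Ternary(runtime.GOOS == "windows", "testdata/binary.exe", "testdata/0755")
}

func main() {
	fmt.Println(fixturePath())
}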
@@ -34,8 +34,8 @@ func (a gradleLockAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisIn
|
|||||||
return res, nil
|
return res, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a gradleLockAnalyzer) Required(_ string, fileInfo os.FileInfo) bool {
|
func (a gradleLockAnalyzer) Required(filePath string, _ os.FileInfo) bool {
|
||||||
return strings.HasSuffix(fileInfo.Name(), fileNameSuffix)
|
return strings.HasSuffix(filePath, fileNameSuffix)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (a gradleLockAnalyzer) Type() analyzer.Type {
|
func (a gradleLockAnalyzer) Type() analyzer.Type {
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ package gradle
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
@@ -87,20 +86,8 @@ func Test_nugetLibraryAnalyzer_Required(t *testing.T) {
|
|||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
err := os.MkdirAll(filepath.Dir(tt.filePath), 0700)
|
|
||||||
assert.NoError(t, err)
|
|
||||||
_, err = os.Create(tt.filePath)
|
|
||||||
assert.NoError(t, err)
|
|
||||||
defer func() {
|
|
||||||
err = os.RemoveAll(filepath.Dir(tt.filePath))
|
|
||||||
assert.NoError(t, err)
|
|
||||||
}()
|
|
||||||
|
|
||||||
fileInfo, err := os.Stat(tt.filePath)
|
|
||||||
assert.NoError(t, err)
|
|
||||||
|
|
||||||
a := gradleLockAnalyzer{}
|
a := gradleLockAnalyzer{}
|
||||||
got := a.Required("", fileInfo)
|
got := a.Required(tt.filePath, nil)
|
||||||
assert.Equal(t, tt.want, got)
|
assert.Equal(t, tt.want, got)
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
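Matching on the passed file path instead of os.FileInfo.Name() means Required no longer needs a real file, so the Gradle test above drops the MkdirAll/Create/Stat scaffolding and simply calls it with the path and a nil FileInfo. Roughly (the suffix constant value here is illustrative):

package main

import (
	"fmt"
	"os"
	"strings"
)

const fileNameSuffix = "gradle.lockfile"

// required reports whether the analyzer should handle the file, judging
// only by its path; the os.FileInfo argument is intentionally unused.
func required(filePath string, _ os.FileInfo) bool {
	return strings.HasSuffix(filePath, fileNameSuffix)
}

func main() {
	// No file has to exist on disk for this check.
	fmt.Println(required("gradle.lockfile", nil)) // true
	fmt.Println(required("build.gradle", nil))    // false
}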
|
|||||||
@@ -28,23 +28,46 @@ func Test_javaLibraryAnalyzer_Analyze(t *testing.T) {
|
|||||||
Type: types.Jar,
|
Type: types.Jar,
|
||||||
FilePath: "testdata/test.war",
|
FilePath: "testdata/test.war",
|
||||||
Libraries: []types.Package{
|
Libraries: []types.Package{
|
||||||
{Name: "org.glassfish:javax.el", FilePath: "testdata/test.war", Version: "3.0.0"},
|
|
||||||
{
|
{
|
||||||
Name: "com.fasterxml.jackson.core:jackson-databind", FilePath: "testdata/test.war",
|
Name: "org.glassfish:javax.el",
|
||||||
Version: "2.9.10.6",
|
FilePath: "testdata/test.war",
|
||||||
|
Version: "3.0.0",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
Name: "com.fasterxml.jackson.core:jackson-annotations", FilePath: "testdata/test.war",
|
Name: "com.fasterxml.jackson.core:jackson-databind",
|
||||||
Version: "2.9.10",
|
FilePath: "testdata/test.war",
|
||||||
|
Version: "2.9.10.6",
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
Name: "com.fasterxml.jackson.core:jackson-core", FilePath: "testdata/test.war",
|
Name: "com.fasterxml.jackson.core:jackson-annotations",
|
||||||
Version: "2.9.10",
|
FilePath: "testdata/test.war",
|
||||||
|
Version: "2.9.10",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "com.fasterxml.jackson.core:jackson-core",
|
||||||
|
FilePath: "testdata/test.war",
|
||||||
|
Version: "2.9.10",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "org.slf4j:slf4j-api",
|
||||||
|
FilePath: "testdata/test.war",
|
||||||
|
Version: "1.7.30",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "com.cronutils:cron-utils",
|
||||||
|
FilePath: "testdata/test.war",
|
||||||
|
Version: "9.1.2",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "org.apache.commons:commons-lang3",
|
||||||
|
FilePath: "testdata/test.war",
|
||||||
|
Version: "3.11",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "com.example:web-app",
|
||||||
|
FilePath: "testdata/test.war",
|
||||||
|
Version: "1.0-SNAPSHOT",
|
||||||
},
|
},
|
||||||
{Name: "org.slf4j:slf4j-api", FilePath: "testdata/test.war", Version: "1.7.30"},
|
|
||||||
{Name: "com.cronutils:cron-utils", FilePath: "testdata/test.war", Version: "9.1.2"},
|
|
||||||
{Name: "org.apache.commons:commons-lang3", FilePath: "testdata/test.war", Version: "3.11"},
|
|
||||||
{Name: "com.example:web-app", FilePath: "testdata/test.war", Version: "1.0-SNAPSHOT"},
|
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -60,8 +83,9 @@ func Test_javaLibraryAnalyzer_Analyze(t *testing.T) {
|
|||||||
FilePath: "testdata/test.par",
|
FilePath: "testdata/test.par",
|
||||||
Libraries: []types.Package{
|
Libraries: []types.Package{
|
||||||
{
|
{
|
||||||
Name: "com.fasterxml.jackson.core:jackson-core", FilePath: "testdata/test.par",
|
Name: "com.fasterxml.jackson.core:jackson-core",
|
||||||
Version: "2.9.10",
|
FilePath: "testdata/test.par",
|
||||||
|
Version: "2.9.10",
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
|
|||||||
@@ -121,14 +121,14 @@ func Test_npmLibraryAnalyzer_Analyze(t *testing.T) {
|
|||||||
{
|
{
|
||||||
name: "sad path",
|
name: "sad path",
|
||||||
inputFile: "testdata/wrong.json",
|
inputFile: "testdata/wrong.json",
|
||||||
wantErr: "unable to parse testdata/wrong.json",
|
wantErr: "unable to parse",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
t.Run(tt.name, func(t *testing.T) {
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
f, err := os.Open(tt.inputFile)
|
f, err := os.Open(tt.inputFile)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
defer f.Close()
|
defer func() { _ = f.Close() }()
|
||||||
|
|
||||||
a := npmLibraryAnalyzer{}
|
a := npmLibraryAnalyzer{}
|
||||||
got, err := a.Analyze(context.Background(), analyzer.AnalysisInput{
|
got, err := a.Analyze(context.Background(), analyzer.AnalysisInput{
|
||||||
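Shortening the expected error to just "unable to parse" (here and in the node package analyzer test below) keeps the assertion from embedding an OS-specific path separator, and wrapping f.Close() in a closure ignores its error explicitly. A hedged sketch of the assertion style, with a fabricated error value:

package example

import (
	"fmt"
	"strings"
	"testing"
)

func TestWantErrSubstring(t *testing.T) {
	// Simulate an analyzer error that embeds an OS-dependent path.
	err := fmt.Errorf("unable to parse testdata%cwrong.json", '\\')

	// Asserting only on the stable prefix works on every platform.
	if !strings.Contains(err.Error(), "unable to parse") {
		t.Fatalf("unexpected error: %v", err)
	}
}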
|
|||||||
@@ -42,7 +42,7 @@ func Test_nodePkgLibraryAnalyzer_Analyze(t *testing.T) {
|
|||||||
{
|
{
|
||||||
name: "sad path",
|
name: "sad path",
|
||||||
inputFile: "testdata/noname.json",
|
inputFile: "testdata/noname.json",
|
||||||
wantErr: "unable to parse testdata/noname.json",
|
wantErr: "unable to parse",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
|
|||||||
@@ -5,10 +5,11 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
|
||||||
"github.com/aquasecurity/trivy/pkg/fanal/types"
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
|
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
||||||
|
"github.com/aquasecurity/trivy/pkg/fanal/types"
|
||||||
)
|
)
|
||||||
|
|
||||||
func Test_yarnLibraryAnalyzer_Analyze(t *testing.T) {
|
func Test_yarnLibraryAnalyzer_Analyze(t *testing.T) {
|
||||||
|
|||||||
@@ -3,8 +3,10 @@ package binary
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"os"
|
"os"
|
||||||
|
"runtime"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
|
"github.com/samber/lo"
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
"github.com/stretchr/testify/require"
|
"github.com/stretchr/testify/require"
|
||||||
|
|
||||||
@@ -33,7 +35,12 @@ func Test_rustBinaryLibraryAnalyzer_Analyze(t *testing.T) {
|
|||||||
Version: "0.1.0",
|
Version: "0.1.0",
|
||||||
DependsOn: []string{"library_crate@0.1.0"},
|
DependsOn: []string{"library_crate@0.1.0"},
|
||||||
},
|
},
|
||||||
{ID: "library_crate@0.1.0", Name: "library_crate", Version: "0.1.0", Indirect: true},
|
{
|
||||||
|
ID: "library_crate@0.1.0",
|
||||||
|
Name: "library_crate",
|
||||||
|
Version: "0.1.0",
|
||||||
|
Indirect: true,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -74,8 +81,8 @@ func Test_rustBinaryLibraryAnalyzer_Required(t *testing.T) {
|
|||||||
want bool
|
want bool
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "file perm 0755",
|
name: "executable file",
|
||||||
filePath: "testdata/0755",
|
filePath: lo.Ternary(runtime.GOOS == "windows", "testdata/binary.exe", "testdata/0755"),
|
||||||
want: true,
|
want: true,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
|
|||||||
1
pkg/fanal/analyzer/language/rust/binary/testdata/binary.exe
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
exe
|
||||||
@@ -2,7 +2,6 @@ package licensing
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
|
||||||
"io"
|
"io"
|
||||||
"math"
|
"math"
|
||||||
"os"
|
"os"
|
||||||
@@ -64,15 +63,7 @@ func (a licenseFileAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisI
|
|||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
filePath := input.FilePath
|
lf, err := licensing.Classify(input.FilePath, input.Content)
|
||||||
// Files extracted from the image have an empty input.Dir.
|
|
||||||
// Also, paths to these files do not have "/" prefix.
|
|
||||||
// We need to add a "/" prefix to properly filter paths from the config file.
|
|
||||||
if input.Dir == "" { // add leading `/` for files extracted from image
|
|
||||||
filePath = fmt.Sprintf("/%s", filePath)
|
|
||||||
}
|
|
||||||
|
|
||||||
lf, err := licensing.Classify(filePath, input.Content)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, xerrors.Errorf("license classification error: %w", err)
|
return nil, xerrors.Errorf("license classification error: %w", err)
|
||||||
} else if len(lf.Findings) == 0 {
|
} else if len(lf.Findings) == 0 {
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ func Test_licenseAnalyzer_Analyze(t *testing.T) {
|
|||||||
Licenses: []types.LicenseFile{
|
Licenses: []types.LicenseFile{
|
||||||
{
|
{
|
||||||
Type: types.LicenseTypeHeader,
|
Type: types.LicenseTypeHeader,
|
||||||
FilePath: "/testdata/licensed.c",
|
FilePath: "testdata/licensed.c",
|
||||||
Findings: []types.LicenseFinding{
|
Findings: []types.LicenseFinding{
|
||||||
{
|
{
|
||||||
Name: "AGPL-3.0",
|
Name: "AGPL-3.0",
|
||||||
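With the leading-slash adjustment removed, the license analyzer classifies input.FilePath exactly as given, and the test expectation changes from "/testdata/licensed.c" to "testdata/licensed.c" to match. A toy illustration of that pass-through behaviour, using a stand-in for the classifier:

package main

import (
	"fmt"
	"io"
	"strings"
)

// classify stands in for licensing.Classify here: it only echoes the path
// it received, to show that the analyzer no longer rewrites it.
func classify(filePath string, _ io.Reader) (string, error) {
	return filePath, nil
}

func main() {
	// Paths of files extracted from an image come without a leading "/",
	// and they are now classified (and reported) exactly as given.
	got, _ := classify("testdata/licensed.c", strings.NewReader("..."))
	fmt.Println(got) // testdata/licensed.c
}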
|
|||||||
@@ -5,7 +5,7 @@ import (
|
|||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path"
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
@@ -86,7 +86,7 @@ func (a alpinePkgAnalyzer) parseApkInfo(scanner *bufio.Scanner) ([]types.Package
|
|||||||
case "F:":
|
case "F:":
|
||||||
dir = line[2:]
|
dir = line[2:]
|
||||||
case "R:":
|
case "R:":
|
||||||
installedFiles = append(installedFiles, filepath.Join(dir, line[2:]))
|
installedFiles = append(installedFiles, path.Join(dir, line[2:]))
|
||||||
case "p:": // provides (corresponds to provides in PKGINFO, concatenated by spaces into a single line)
|
case "p:": // provides (corresponds to provides in PKGINFO, concatenated by spaces into a single line)
|
||||||
a.parseProvides(line, pkg.ID, provides)
|
a.parseProvides(line, pkg.ID, provides)
|
||||||
case "D:": // dependencies (corresponds to depend in PKGINFO, concatenated by spaces into a single line)
|
case "D:": // dependencies (corresponds to depend in PKGINFO, concatenated by spaces into a single line)
|
||||||
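Paths recorded from the apk database are always slash-separated, so path.Join (which always emits "/") replaces filepath.Join (which would insert "\" on Windows). A small demonstration:

package main

import (
	"fmt"
	"path"
	"path/filepath"
)

func main() {
	// path.Join always produces forward slashes, regardless of the host OS.
	fmt.Println(path.Join("usr/bin", "busybox")) // usr/bin/busybox

	// filepath.Join uses the host separator; on Windows this prints
	// usr\bin\busybox, which is wrong for an in-image path.
	fmt.Println(filepath.Join("usr/bin", "busybox"))
}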
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
package secret
|
package secret
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"bytes"
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
@@ -87,6 +88,8 @@ func (a *SecretAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput
|
|||||||
return nil, xerrors.Errorf("read error %s: %w", input.FilePath, err)
|
return nil, xerrors.Errorf("read error %s: %w", input.FilePath, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
content = bytes.ReplaceAll(content, []byte("\r"), []byte(""))
|
||||||
|
|
||||||
filePath := input.FilePath
|
filePath := input.FilePath
|
||||||
// Files extracted from the image have an empty input.Dir.
|
// Files extracted from the image have an empty input.Dir.
|
||||||
// Also, paths to these files do not have "/" prefix.
|
// Also, paths to these files do not have "/" prefix.
|
||||||
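Stripping carriage returns before scanning means the secret analyzer sees the same bytes and line contents whether the input was written with LF or CRLF line endings. A sketch of the normalization step, assuming the rest of the scan then works on the cleaned content:

package main

import (
	"bytes"
	"fmt"
)

func main() {
	content := []byte("export AWS_SECRET=abc\r\npassword=hunter2\r\n")

	// Normalize CRLF to LF so regex matches and reported line numbers are
	// stable across platforms.
	content = bytes.ReplaceAll(content, []byte("\r"), []byte(""))

	fmt.Printf("%q\n", content)
}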
|
|||||||
@@ -201,9 +201,30 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
Packages: alpinePkgs,
|
Packages: alpinePkgs,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Applications: []types.Application(nil),
|
Licenses: []types.LicenseFile{
|
||||||
OpaqueDirs: []string(nil),
|
{
|
||||||
WhiteoutFiles: []string(nil),
|
Type: "header",
|
||||||
|
FilePath: "etc/ssl/misc/CA.pl",
|
||||||
|
Findings: []types.LicenseFinding{
|
||||||
|
{
|
||||||
|
Name: "OpenSSL",
|
||||||
|
Confidence: 1,
|
||||||
|
Link: "https://spdx.org/licenses/OpenSSL.html",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "header",
|
||||||
|
FilePath: "etc/ssl/misc/tsget.pl",
|
||||||
|
Findings: []types.LicenseFinding{
|
||||||
|
{
|
||||||
|
Name: "OpenSSL",
|
||||||
|
Confidence: 1,
|
||||||
|
Link: "https://spdx.org/licenses/OpenSSL.html",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Returns: cache.ArtifactCachePutBlobReturns{},
|
Returns: cache.ArtifactCachePutBlobReturns{},
|
||||||
@@ -856,9 +877,30 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
Packages: alpinePkgs,
|
Packages: alpinePkgs,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Applications: []types.Application(nil),
|
Licenses: []types.LicenseFile{
|
||||||
OpaqueDirs: []string(nil),
|
{
|
||||||
WhiteoutFiles: []string(nil),
|
Type: "header",
|
||||||
|
FilePath: "etc/ssl/misc/CA.pl",
|
||||||
|
Findings: []types.LicenseFinding{
|
||||||
|
{
|
||||||
|
Name: "OpenSSL",
|
||||||
|
Confidence: 1,
|
||||||
|
Link: "https://spdx.org/licenses/OpenSSL.html",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "header",
|
||||||
|
FilePath: "etc/ssl/misc/tsget.pl",
|
||||||
|
Findings: []types.LicenseFinding{
|
||||||
|
{
|
||||||
|
Name: "OpenSSL",
|
||||||
|
Confidence: 1,
|
||||||
|
Link: "https://spdx.org/licenses/OpenSSL.html",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Returns: cache.ArtifactCachePutBlobReturns{
|
Returns: cache.ArtifactCachePutBlobReturns{
|
||||||
@@ -904,9 +946,30 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
Packages: alpinePkgs,
|
Packages: alpinePkgs,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Applications: []types.Application(nil),
|
Licenses: []types.LicenseFile{
|
||||||
OpaqueDirs: []string(nil),
|
{
|
||||||
WhiteoutFiles: []string(nil),
|
Type: "header",
|
||||||
|
FilePath: "etc/ssl/misc/CA.pl",
|
||||||
|
Findings: []types.LicenseFinding{
|
||||||
|
{
|
||||||
|
Name: "OpenSSL",
|
||||||
|
Confidence: 1,
|
||||||
|
Link: "https://spdx.org/licenses/OpenSSL.html",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Type: "header",
|
||||||
|
FilePath: "etc/ssl/misc/tsget.pl",
|
||||||
|
Findings: []types.LicenseFinding{
|
||||||
|
{
|
||||||
|
Name: "OpenSSL",
|
||||||
|
Confidence: 1,
|
||||||
|
Link: "https://spdx.org/licenses/OpenSSL.html",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
Returns: cache.ArtifactCachePutBlobReturns{},
|
Returns: cache.ArtifactCachePutBlobReturns{},
|
||||||
|
|||||||
@@ -91,6 +91,9 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error)
|
|||||||
return xerrors.Errorf("filepath rel (%s): %w", filePath, err)
|
return xerrors.Errorf("filepath rel (%s): %w", filePath, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// For Windows
|
||||||
|
filePath = filepath.ToSlash(filePath)
|
||||||
|
|
||||||
opts := analyzer.AnalysisOptions{Offline: a.artifactOption.Offline}
|
opts := analyzer.AnalysisOptions{Offline: a.artifactOption.Offline}
|
||||||
if err = a.analyzer.AnalyzeFile(ctx, &wg, limit, result, directory, filePath, info, opener, nil, opts); err != nil {
|
if err = a.analyzer.AnalyzeFile(ctx, &wg, limit, result, directory, filePath, info, opener, nil, opts); err != nil {
|
||||||
return xerrors.Errorf("analyze file (%s): %w", filePath, err)
|
return xerrors.Errorf("analyze file (%s): %w", filePath, err)
|
||||||
@@ -137,7 +140,8 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error)
|
|||||||
if err == nil && string(b) != "" {
|
if err == nil && string(b) != "" {
|
||||||
hostName = strings.TrimSpace(string(b))
|
hostName = strings.TrimSpace(string(b))
|
||||||
} else {
|
} else {
|
||||||
hostName = a.rootPath
|
// Convert to slashes for Windows
|
||||||
|
hostName = filepath.ToSlash(a.rootPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
return types.ArtifactReference{
|
return types.ArtifactReference{
|
||||||
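Converting walked paths with filepath.ToSlash before handing them to the analyzers, and when falling back to the root path as the artifact name, keeps reported paths and cache keys slash-separated on Windows too. Roughly:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// filepath.Join yields the host separator (app\go.mod on Windows);
	// ToSlash turns it back into the canonical form used everywhere else.
	rel := filepath.Join("app", "go.mod")
	fmt.Println(filepath.ToSlash(rel)) // app/go.mod on every OS
}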
|
|||||||
File diff suppressed because it is too large
@@ -1,57 +1,59 @@
|
|||||||
{
|
{
|
||||||
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
|
"$schema": "https://schema.management.azure.com/schemas/2015-01-01/deploymentTemplate.json#",
|
||||||
"contentVersion": "1.0.0.0",
|
"contentVersion": "1.0.0.0",
|
||||||
"parameters": {
|
"parameters": {
|
||||||
"storageAccountType": {
|
"storageAccountType": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
"defaultValue": "Standard_LRS",
|
"defaultValue": "Standard_LRS",
|
||||||
"allowedValues": [
|
"allowedValues": [
|
||||||
"Standard_LRS",
|
"Standard_LRS",
|
||||||
"Standard_GRS",
|
"Standard_GRS",
|
||||||
"Standard_ZRS",
|
"Standard_ZRS",
|
||||||
"Premium_LRS"
|
"Premium_LRS"
|
||||||
],
|
],
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"description": "Storage Account type"
|
"description": "Storage Account type"
|
||||||
}
|
}
|
||||||
},
|
|
||||||
"location": {
|
|
||||||
"type": "string",
|
|
||||||
"defaultValue": "[resourceGroup().location]",
|
|
||||||
"metadata": {
|
|
||||||
"description": "Location for all resources."
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
"variables": {
|
"location": {
|
||||||
"storageAccountName": "[concat('store', uniquestring(resourceGroup().id))]"
|
"type": "string",
|
||||||
},
|
"defaultValue": "[resourceGroup().location]",
|
||||||
"resources": [{
|
"metadata": {
|
||||||
"type": "Microsoft.Storage/storageAccounts",
|
"description": "Location for all resources."
|
||||||
"name": "[variables('storageAccountName')]",
|
}
|
||||||
"location": "[parameters('location')]",
|
|
||||||
"apiVersion": "2018-07-01",
|
|
||||||
"sku": {
|
|
||||||
"name": "[parameters('storageAccountType')]"
|
|
||||||
},
|
|
||||||
"kind": "StorageV2",
|
|
||||||
"properties": {}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "Microsoft.Storage/storageAccounts",
|
|
||||||
"name": "bucket2",
|
|
||||||
"location": "[parameters('location')]",
|
|
||||||
"apiVersion": "2018-07-01",
|
|
||||||
"sku": {
|
|
||||||
"name": "[parameters('storageAccountType')]"
|
|
||||||
},
|
|
||||||
"kind": "StorageV2",
|
|
||||||
"properties": {}
|
|
||||||
}],
|
|
||||||
"outputs": {
|
|
||||||
"storageAccountName": {
|
|
||||||
"type": "string",
|
|
||||||
"value": "[variables('storageAccountName')]"
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"variables": {
|
||||||
|
"storageAccountName": "[concat('store', uniquestring(resourceGroup().id))]"
|
||||||
|
},
|
||||||
|
"resources": [
|
||||||
|
{
|
||||||
|
"type": "Microsoft.Storage/storageAccounts",
|
||||||
|
"name": "[variables('storageAccountName')]",
|
||||||
|
"location": "[parameters('location')]",
|
||||||
|
"apiVersion": "2018-07-01",
|
||||||
|
"sku": {
|
||||||
|
"name": "[parameters('storageAccountType')]"
|
||||||
|
},
|
||||||
|
"kind": "StorageV2",
|
||||||
|
"properties": {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "Microsoft.Storage/storageAccounts",
|
||||||
|
"name": "bucket2",
|
||||||
|
"location": "[parameters('location')]",
|
||||||
|
"apiVersion": "2018-07-01",
|
||||||
|
"sku": {
|
||||||
|
"name": "[parameters('storageAccountType')]"
|
||||||
|
},
|
||||||
|
"kind": "StorageV2",
|
||||||
|
"properties": {}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"outputs": {
|
||||||
|
"storageAccountName": {
|
||||||
|
"type": "string",
|
||||||
|
"value": "[variables('storageAccountName')]"
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
@@ -26,17 +26,19 @@
|
|||||||
"variables": {
|
"variables": {
|
||||||
"storageAccountName": "[concat('store', uniquestring(resourceGroup().id))]"
|
"storageAccountName": "[concat('store', uniquestring(resourceGroup().id))]"
|
||||||
},
|
},
|
||||||
"resources": [{
|
"resources": [
|
||||||
"type": "Microsoft.Storage/storageAccounts",
|
{
|
||||||
"name": "[variables('storageAccountName')]",
|
"type": "Microsoft.Storage/storageAccounts",
|
||||||
"location": "[parameters('location')]",
|
"name": "[variables('storageAccountName')]",
|
||||||
"apiVersion": "2018-07-01",
|
"location": "[parameters('location')]",
|
||||||
"sku": {
|
"apiVersion": "2018-07-01",
|
||||||
"name": "[parameters('storageAccountType')]"
|
"sku": {
|
||||||
},
|
"name": "[parameters('storageAccountType')]"
|
||||||
"kind": "StorageV2",
|
},
|
||||||
"properties": {}
|
"kind": "StorageV2",
|
||||||
}],
|
"properties": {}
|
||||||
|
}
|
||||||
|
],
|
||||||
"outputs": {
|
"outputs": {
|
||||||
"storageAccountName": {
|
"storageAccountName": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
|
|||||||
@@ -26,17 +26,19 @@
|
|||||||
"variables": {
|
"variables": {
|
||||||
"storageAccountName": "[concat('store', uniquestring(resourceGroup().id))]"
|
"storageAccountName": "[concat('store', uniquestring(resourceGroup().id))]"
|
||||||
},
|
},
|
||||||
"resources": [{
|
"resources": [
|
||||||
"type": "Microsoft.Storage/storageAccounts",
|
{
|
||||||
"name": "[variables('storageAccountName')]",
|
"type": "Microsoft.Storage/storageAccounts",
|
||||||
"location": "[parameters('location')]",
|
"name": "[variables('storageAccountName')]",
|
||||||
"apiVersion": "2018-07-01",
|
"location": "[parameters('location')]",
|
||||||
"sku": {
|
"apiVersion": "2018-07-01",
|
||||||
"name": "[parameters('storageAccountType')]"
|
"sku": {
|
||||||
},
|
"name": "[parameters('storageAccountType')]"
|
||||||
"kind": "StorageV2",
|
},
|
||||||
"properties": {}
|
"kind": "StorageV2",
|
||||||
}],
|
"properties": {}
|
||||||
|
}
|
||||||
|
],
|
||||||
"outputs": {
|
"outputs": {
|
||||||
"storageAccountName": {
|
"storageAccountName": {
|
||||||
"type": "string",
|
"type": "string",
|
||||||
|
|||||||
@@ -4,9 +4,9 @@ metadata:
|
|||||||
name: evil-pod
|
name: evil-pod
|
||||||
spec:
|
spec:
|
||||||
containers:
|
containers:
|
||||||
- command: ["sh", "-c", "echo 'Hello' && sleep 1h"]
|
- command: [ "sh", "-c", "echo 'Hello' && sleep 1h" ]
|
||||||
image: evil
|
image: evil
|
||||||
name: evil1
|
name: evil1
|
||||||
- command: ["sh", "-c", "echo 'Hello' && sleep 1h"]
|
- command: [ "sh", "-c", "echo 'Hello' && sleep 1h" ]
|
||||||
image: evil
|
image: evil
|
||||||
name: evil2
|
name: evil2
|
||||||
|
|||||||
@@ -4,6 +4,6 @@ metadata:
|
|||||||
name: good-pod
|
name: good-pod
|
||||||
spec:
|
spec:
|
||||||
containers:
|
containers:
|
||||||
- command: ["sh", "-c", "echo 'Hello' && sleep 1h"]
|
- command: [ "sh", "-c", "echo 'Hello' && sleep 1h" ]
|
||||||
image: good
|
image: good
|
||||||
name: good
|
name: good
|
||||||
|
|||||||
@@ -4,6 +4,6 @@ metadata:
|
|||||||
name: evil-pod
|
name: evil-pod
|
||||||
spec:
|
spec:
|
||||||
containers:
|
containers:
|
||||||
- command: ["sh", "-c", "echo 'Hello' && sleep 1h"]
|
- command: [ "sh", "-c", "echo 'Hello' && sleep 1h" ]
|
||||||
image: evil
|
image: evil
|
||||||
name: evil
|
name: evil
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
|
|
||||||
resource "aws_s3_bucket" "three" {
|
resource "aws_s3_bucket" "three" {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ import (
|
|||||||
"net/url"
|
"net/url"
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
git "github.com/go-git/go-git/v5"
|
"github.com/go-git/go-git/v5"
|
||||||
"github.com/go-git/go-git/v5/plumbing"
|
"github.com/go-git/go-git/v5/plumbing"
|
||||||
"github.com/go-git/go-git/v5/plumbing/transport/http"
|
"github.com/go-git/go-git/v5/plumbing/transport/http"
|
||||||
"golang.org/x/xerrors"
|
"golang.org/x/xerrors"
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
//go:build unix
|
||||||
|
|
||||||
package remote
|
package remote
|
||||||
|
|
||||||
import (
|
import (
|
||||||
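The //go:build unix constraint keeps this file out of Windows builds entirely, which is simpler than guarding individual tests; the unix tag matches the Unix-like GOOS values. A minimal illustration, with a hypothetical file and test name:

//go:build unix

// This file (say, remote_unix_test.go) only compiles on Unix-like
// platforms; on Windows it is skipped entirely.
package example

import "testing"

func TestUnixOnly(t *testing.T) {
	t.Log("runs only where GOOS is a Unix-like value")
}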
|
|||||||
@@ -7,7 +7,7 @@ import (
|
|||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
|
|
||||||
digest "github.com/opencontainers/go-digest"
|
"github.com/opencontainers/go-digest"
|
||||||
"golang.org/x/xerrors"
|
"golang.org/x/xerrors"
|
||||||
|
|
||||||
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
|
||||||
|
|||||||
@@ -3,6 +3,8 @@ package sbom_test
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"errors"
|
"errors"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
@@ -20,11 +22,11 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
filePath string
|
filePath string
|
||||||
putBlobExpectation cache.ArtifactCachePutBlobExpectation
|
putBlobExpectation cache.ArtifactCachePutBlobExpectation
|
||||||
want types.ArtifactReference
|
want types.ArtifactReference
|
||||||
wantErr string
|
wantErr []string
|
||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "happy path",
|
name: "happy path",
|
||||||
filePath: "testdata/bom.json",
|
filePath: filepath.Join("testdata", "bom.json"),
|
||||||
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
|
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
|
||||||
Args: cache.ArtifactCachePutBlobArgs{
|
Args: cache.ArtifactCachePutBlobArgs{
|
||||||
BlobID: "sha256:21f10e5ab97c37f6c4d6a45815cd5db10e9539d5db8614d3b1d8890111d7a2b8",
|
BlobID: "sha256:21f10e5ab97c37f6c4d6a45815cd5db10e9539d5db8614d3b1d8890111d7a2b8",
|
||||||
@@ -38,8 +40,9 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
{
|
{
|
||||||
Packages: []types.Package{
|
Packages: []types.Package{
|
||||||
{
|
{
|
||||||
Name: "musl", Version: "1.2.3-r0", SrcName: "musl", SrcVersion: "1.2.3-r0", Licenses: []string{"MIT"},
|
Name: "musl", Version: "1.2.3-r0", SrcName: "musl", SrcVersion: "1.2.3-r0",
|
||||||
Ref: "pkg:apk/alpine/musl@1.2.3-r0?distro=3.16.0",
|
Licenses: []string{"MIT"},
|
||||||
|
Ref: "pkg:apk/alpine/musl@1.2.3-r0?distro=3.16.0",
|
||||||
Layer: types.Layer{
|
Layer: types.Layer{
|
||||||
DiffID: "sha256:dd565ff850e7003356e2b252758f9bdc1ff2803f61e995e24c7844f6297f8fc3",
|
DiffID: "sha256:dd565ff850e7003356e2b252758f9bdc1ff2803f61e995e24c7844f6297f8fc3",
|
||||||
},
|
},
|
||||||
@@ -120,7 +123,7 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
Returns: cache.ArtifactCachePutBlobReturns{},
|
Returns: cache.ArtifactCachePutBlobReturns{},
|
||||||
},
|
},
|
||||||
want: types.ArtifactReference{
|
want: types.ArtifactReference{
|
||||||
Name: "testdata/bom.json",
|
Name: filepath.Join("testdata", "bom.json"),
|
||||||
Type: types.ArtifactCycloneDX,
|
Type: types.ArtifactCycloneDX,
|
||||||
ID: "sha256:21f10e5ab97c37f6c4d6a45815cd5db10e9539d5db8614d3b1d8890111d7a2b8",
|
ID: "sha256:21f10e5ab97c37f6c4d6a45815cd5db10e9539d5db8614d3b1d8890111d7a2b8",
|
||||||
BlobIDs: []string{
|
BlobIDs: []string{
|
||||||
@@ -130,7 +133,7 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "happy path for sbom attestation",
|
name: "happy path for sbom attestation",
|
||||||
filePath: "testdata/sbom.cdx.intoto.jsonl",
|
filePath: filepath.Join("testdata", "sbom.cdx.intoto.jsonl"),
|
||||||
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
|
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
|
||||||
Args: cache.ArtifactCachePutBlobArgs{
|
Args: cache.ArtifactCachePutBlobArgs{
|
||||||
BlobID: "sha256:21f10e5ab97c37f6c4d6a45815cd5db10e9539d5db8614d3b1d8890111d7a2b8",
|
BlobID: "sha256:21f10e5ab97c37f6c4d6a45815cd5db10e9539d5db8614d3b1d8890111d7a2b8",
|
||||||
@@ -144,8 +147,9 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
{
|
{
|
||||||
Packages: []types.Package{
|
Packages: []types.Package{
|
||||||
{
|
{
|
||||||
Name: "musl", Version: "1.2.3-r0", SrcName: "musl", SrcVersion: "1.2.3-r0", Licenses: []string{"MIT"},
|
Name: "musl", Version: "1.2.3-r0", SrcName: "musl", SrcVersion: "1.2.3-r0",
|
||||||
Ref: "pkg:apk/alpine/musl@1.2.3-r0?distro=3.16.0",
|
Licenses: []string{"MIT"},
|
||||||
|
Ref: "pkg:apk/alpine/musl@1.2.3-r0?distro=3.16.0",
|
||||||
Layer: types.Layer{
|
Layer: types.Layer{
|
||||||
DiffID: "sha256:dd565ff850e7003356e2b252758f9bdc1ff2803f61e995e24c7844f6297f8fc3",
|
DiffID: "sha256:dd565ff850e7003356e2b252758f9bdc1ff2803f61e995e24c7844f6297f8fc3",
|
||||||
},
|
},
|
||||||
@@ -226,7 +230,7 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
Returns: cache.ArtifactCachePutBlobReturns{},
|
Returns: cache.ArtifactCachePutBlobReturns{},
|
||||||
},
|
},
|
||||||
want: types.ArtifactReference{
|
want: types.ArtifactReference{
|
||||||
Name: "testdata/sbom.cdx.intoto.jsonl",
|
Name: filepath.Join("testdata", "sbom.cdx.intoto.jsonl"),
|
||||||
Type: types.ArtifactCycloneDX,
|
Type: types.ArtifactCycloneDX,
|
||||||
ID: "sha256:21f10e5ab97c37f6c4d6a45815cd5db10e9539d5db8614d3b1d8890111d7a2b8",
|
ID: "sha256:21f10e5ab97c37f6c4d6a45815cd5db10e9539d5db8614d3b1d8890111d7a2b8",
|
||||||
BlobIDs: []string{
|
BlobIDs: []string{
|
||||||
@@ -236,12 +240,12 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "sad path with no such directory",
|
name: "sad path with no such directory",
|
||||||
filePath: "./testdata/unknown.json",
|
filePath: filepath.Join("testdata", "unknown.json"),
|
||||||
wantErr: "no such file or directory",
|
wantErr: []string{"no such file or directory", "The system cannot find the file specified"},
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "sad path PutBlob returns an error",
|
name: "sad path PutBlob returns an error",
|
||||||
filePath: "testdata/os-only-bom.json",
|
filePath: filepath.Join("testdata", "os-only-bom.json"),
|
||||||
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
|
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
|
||||||
Args: cache.ArtifactCachePutBlobArgs{
|
Args: cache.ArtifactCachePutBlobArgs{
|
||||||
BlobID: "sha256:05a4e94bb5503e437108210c90849a977ea0b9b83e4e8606aabc9647b2a5256c",
|
BlobID: "sha256:05a4e94bb5503e437108210c90849a977ea0b9b83e4e8606aabc9647b2a5256c",
|
||||||
@@ -260,7 +264,7 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
Err: errors.New("error"),
|
Err: errors.New("error"),
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
wantErr: "failed to store blob",
|
wantErr: []string{"failed to store blob"},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
@@ -272,9 +276,16 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
got, err := a.Inspect(context.Background())
|
got, err := a.Inspect(context.Background())
|
||||||
if tt.wantErr != "" {
|
if len(tt.wantErr) > 0 {
|
||||||
require.NotNil(t, err)
|
require.NotNil(t, err)
|
||||||
assert.Contains(t, err.Error(), tt.wantErr)
|
found := false
|
||||||
|
for _, wantErr := range tt.wantErr {
|
||||||
|
if strings.Contains(err.Error(), wantErr) {
|
||||||
|
found = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assert.True(t, found)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
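Because the not-found message differs per OS ("no such file or directory" on Unix, "The system cannot find the file specified" on Windows), wantErr becomes a slice and the test passes if any candidate appears in the error. A self-contained sketch of the same technique, with a fabricated error:

package example

import (
	"errors"
	"strings"
	"testing"
)

func TestAnyOfWantErr(t *testing.T) {
	err := errors.New("open testdata/unknown.json: no such file or directory")
	wantErr := []string{
		"no such file or directory",                  // Unix
		"The system cannot find the file specified",  // Windows
	}

	// Accept the error if it contains any of the OS-specific messages.
	var found bool
	for _, w := range wantErr {
		if strings.Contains(err.Error(), w) {
			found = true
			break
		}
	}
	if !found {
		t.Fatalf("unexpected error: %v", err)
	}
}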
|
|
||||||
|
|||||||
File diff suppressed because one or more lines are too long
@@ -36,7 +36,7 @@ func TestNewArtifact(t *testing.T) {
|
|||||||
}{
|
}{
|
||||||
{
|
{
|
||||||
name: "happy path for file",
|
name: "happy path for file",
|
||||||
target: filepath.Join("testdata", "rawdata.img"),
|
target: "testdata/rawdata.img",
|
||||||
wantErr: assert.NoError,
|
wantErr: assert.NoError,
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -46,7 +46,7 @@ func TestNewArtifact(t *testing.T) {
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: "sad path unsupported vm format",
|
name: "sad path unsupported vm format",
|
||||||
target: filepath.Join("testdata", "monolithicSparse.vmdk"),
|
target: "testdata/monolithicSparse.vmdk",
|
||||||
wantErr: func(t assert.TestingT, err error, args ...interface{}) bool {
|
wantErr: func(t assert.TestingT, err error, args ...interface{}) bool {
|
||||||
return assert.ErrorContains(t, err, "unsupported type error")
|
return assert.ErrorContains(t, err, "unsupported type error")
|
||||||
},
|
},
|
||||||
@@ -55,7 +55,7 @@ func TestNewArtifact(t *testing.T) {
|
|||||||
name: "sad path file not found",
|
name: "sad path file not found",
|
||||||
target: "testdata/no-file",
|
target: "testdata/no-file",
|
||||||
wantErr: func(t assert.TestingT, err error, args ...interface{}) bool {
|
wantErr: func(t assert.TestingT, err error, args ...interface{}) bool {
|
||||||
return assert.ErrorContains(t, err, "no such file or directory")
|
return assert.ErrorContains(t, err, "file open error")
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@@ -177,6 +177,9 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
c.ApplyPutBlobExpectation(tt.putBlobExpectation)
|
c.ApplyPutBlobExpectation(tt.putBlobExpectation)
|
||||||
c.ApplyMissingBlobsExpectation(tt.missingBlobsExpectation)
|
c.ApplyMissingBlobsExpectation(tt.missingBlobsExpectation)
|
||||||
c.ApplyPutArtifactExpectations(tt.putArtifactExpectations)
|
c.ApplyPutArtifactExpectations(tt.putArtifactExpectations)
|
||||||
|
c.ApplyDeleteBlobsExpectation(cache.ArtifactCacheDeleteBlobsExpectation{
|
||||||
|
Args: cache.ArtifactCacheDeleteBlobsArgs{BlobIDsAnything: true},
|
||||||
|
})
|
||||||
|
|
||||||
filePath := tt.filePath
|
filePath := tt.filePath
|
||||||
if !strings.HasPrefix(tt.filePath, ebsPrefix) {
|
if !strings.HasPrefix(tt.filePath, ebsPrefix) {
|
||||||
@@ -189,11 +192,12 @@ func TestArtifact_Inspect(t *testing.T) {
|
|||||||
|
|
||||||
if aa, ok := a.(*vm.EBS); ok {
|
if aa, ok := a.(*vm.EBS); ok {
|
||||||
// blockSize: 512 KB, volumeSize: 40MB
|
// blockSize: 512 KB, volumeSize: 40MB
|
||||||
ebs := ebsfile.NewMockEBS(filepath.Join("testdata", "AmazonLinux2.img.gz"), 512<<10, 40<<20)
|
ebs := ebsfile.NewMockEBS("testdata/AmazonLinux2.img.gz", 512<<10, 40<<20)
|
||||||
aa.SetEBS(ebs)
|
aa.SetEBS(ebs)
|
||||||
}
|
}
|
||||||
|
|
||||||
got, err := a.Inspect(context.Background())
|
got, err := a.Inspect(context.Background())
|
||||||
|
defer a.Clean(got)
|
||||||
if tt.wantErr != "" {
|
if tt.wantErr != "" {
|
||||||
require.Error(t, err)
|
require.Error(t, err)
|
||||||
assert.ErrorContains(t, err, tt.wantErr)
|
assert.ErrorContains(t, err, tt.wantErr)
|
||||||
|
|||||||
20
pkg/fanal/cache/fs_test.go
vendored
@@ -99,7 +99,10 @@ func TestFSCache_GetBlob(t *testing.T) {
|
|||||||
|
|
||||||
fs, err := NewFSCache(tmpDir)
|
fs, err := NewFSCache(tmpDir)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
defer fs.Clear()
|
defer func() {
|
||||||
|
_ = fs.Clear()
|
||||||
|
_ = fs.Close()
|
||||||
|
}()
|
||||||
|
|
||||||
got, err := fs.GetBlob(tt.args.layerID)
|
got, err := fs.GetBlob(tt.args.layerID)
|
||||||
assert.Equal(t, tt.wantErr, err != nil, err)
|
assert.Equal(t, tt.wantErr, err != nil, err)
|
||||||
@@ -269,7 +272,10 @@ func TestFSCache_PutBlob(t *testing.T) {
|
|||||||
|
|
||||||
fs, err := NewFSCache(tmpDir)
|
fs, err := NewFSCache(tmpDir)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
defer fs.Clear()
|
defer func() {
|
||||||
|
_ = fs.Clear()
|
||||||
|
_ = fs.Close()
|
||||||
|
}()
|
||||||
|
|
||||||
if strings.HasPrefix(tt.name, "sad") {
|
if strings.HasPrefix(tt.name, "sad") {
|
||||||
require.NoError(t, fs.Close())
|
require.NoError(t, fs.Close())
|
||||||
@@ -349,7 +355,10 @@ func TestFSCache_PutArtifact(t *testing.T) {
|
|||||||
|
|
||||||
fs, err := NewFSCache(tmpDir)
|
fs, err := NewFSCache(tmpDir)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
//defer fs.Clear()
|
defer func() {
|
||||||
|
_ = fs.Clear()
|
||||||
|
_ = fs.Close()
|
||||||
|
}()
|
||||||
|
|
||||||
err = fs.PutArtifact(tt.args.imageID, tt.args.imageConfig)
|
err = fs.PutArtifact(tt.args.imageID, tt.args.imageConfig)
|
||||||
if tt.wantErr != "" {
|
if tt.wantErr != "" {
|
||||||
@@ -466,7 +475,10 @@ func TestFSCache_MissingBlobs(t *testing.T) {
|
|||||||
|
|
||||||
fs, err := NewFSCache(tmpDir)
|
fs, err := NewFSCache(tmpDir)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
defer fs.Clear()
|
defer func() {
|
||||||
|
_ = fs.Clear()
|
||||||
|
_ = fs.Close()
|
||||||
|
}()
|
||||||
|
|
||||||
gotMissingImage, gotMissingLayerIDs, err := fs.MissingBlobs(tt.args.imageID, tt.args.layerIDs)
|
gotMissingImage, gotMissingLayerIDs, err := fs.MissingBlobs(tt.args.imageID, tt.args.layerIDs)
|
||||||
if tt.wantErr != "" {
|
if tt.wantErr != "" {
|
||||||
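The deferred cleanup in these cache tests now also closes the cache. That matters mostly on Windows, where an open database file handle would keep the temporary directory from being removed; on Unix the extra Close is harmless (that file-locking rationale is my reading, not stated in the diff). The pattern, with a stand-in cache type:

package example

import "testing"

// cache is a stand-in for the fanal FSCache used in the diff.
type cache struct{}

func (c *cache) Clear() error { return nil }
func (c *cache) Close() error { return nil }

func TestCleanup(t *testing.T) {
	fs := &cache{}
	// Clear and Close both return errors we deliberately ignore in cleanup;
	// closing releases the underlying file so the temp dir can be removed.
	defer func() {
		_ = fs.Clear()
		_ = fs.Close()
	}()
}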
|
|||||||
2
pkg/fanal/cache/key.go
vendored
@@ -37,7 +37,7 @@ func CalcKey(id string, analyzerVersions, hookVersions map[string]int, artifactO
|
|||||||
for _, p := range paths {
|
for _, p := range paths {
|
||||||
s, err := dirhash.HashDir(p, "", dirhash.DefaultHash)
|
s, err := dirhash.HashDir(p, "", dirhash.DefaultHash)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", xerrors.Errorf("hash dir (%s): %w", p, err)
|
return "", xerrors.Errorf("hash dir error (%s): %w", p, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, err = h.Write([]byte(s)); err != nil {
|
if _, err = h.Write([]byte(s)); err != nil {
|
||||||
|
|||||||
4
pkg/fanal/cache/key_test.go
vendored
@@ -152,7 +152,7 @@ func TestCalcKey(t *testing.T) {
|
|||||||
},
|
},
|
||||||
policy: []string{"policydir"},
|
policy: []string{"policydir"},
|
||||||
},
|
},
|
||||||
wantErr: "no such file or directory",
|
wantErr: "hash dir error",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
for _, tt := range tests {
|
for _, tt := range tests {
|
||||||
@@ -170,7 +170,7 @@ func TestCalcKey(t *testing.T) {
|
|||||||
got, err := CalcKey(tt.args.key, tt.args.analyzerVersions, tt.args.hookVersions, artifactOpt)
|
got, err := CalcKey(tt.args.key, tt.args.analyzerVersions, tt.args.hookVersions, artifactOpt)
|
||||||
if tt.wantErr != "" {
|
if tt.wantErr != "" {
|
||||||
require.Error(t, err)
|
require.Error(t, err)
|
||||||
assert.Contains(t, err.Error(), tt.wantErr)
|
assert.ErrorContains(t, err, tt.wantErr)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
assert.NoError(t, err)
|
assert.NoError(t, err)
|
||||||
|
|||||||
4
pkg/fanal/external/config_scan.go
vendored
@@ -72,3 +72,7 @@ func (s ConfigScanner) Scan(dir string) ([]types.Misconfiguration, error) {
|
|||||||
|
|
||||||
return mergedLayer.Misconfigurations, nil
|
return mergedLayer.Misconfigurations, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s ConfigScanner) Close() error {
|
||||||
|
return s.cache.Close()
|
||||||
|
}
|
||||||
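ConfigScanner gains a Close method that simply forwards to its cache, and the test below defers it so the cache is released after scanning. A sketch of the shape, with the cache interface trimmed to what the method needs:

package main

import "fmt"

type artifactCache interface {
	Close() error
}

type memCache struct{}

func (memCache) Close() error { return nil }

// ConfigScanner mirrors the exported type in the diff: Close just
// releases the underlying cache.
type ConfigScanner struct {
	cache artifactCache
}

func (s ConfigScanner) Close() error {
	return s.cache.Close()
}

func main() {
	s := ConfigScanner{cache: memCache{}}
	defer func() { _ = s.Close() }()
	fmt.Println("scanner ready")
}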
|
|||||||
11
pkg/fanal/external/config_scan_test.go
vendored
@@ -1,6 +1,7 @@
|
|||||||
package external_test
|
package external_test
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"path/filepath"
|
||||||
"testing"
|
"testing"
|
||||||
|
|
||||||
"github.com/stretchr/testify/assert"
|
"github.com/stretchr/testify/assert"
|
||||||
@@ -26,10 +27,10 @@ func TestConfigScanner_Scan(t *testing.T) {
|
|||||||
{
|
{
|
||||||
name: "deny",
|
name: "deny",
|
||||||
fields: fields{
|
fields: fields{
|
||||||
policyPaths: []string{"testdata/deny"},
|
policyPaths: []string{filepath.Join("testdata", "deny")},
|
||||||
namespaces: []string{"testdata"},
|
namespaces: []string{"testdata"},
|
||||||
},
|
},
|
||||||
inputDir: "testdata/deny",
|
inputDir: filepath.Join("testdata", "deny"),
|
||||||
want: []types.Misconfiguration{
|
want: []types.Misconfiguration{
|
||||||
{
|
{
|
||||||
FileType: "dockerfile",
|
FileType: "dockerfile",
|
||||||
@@ -93,10 +94,10 @@ func TestConfigScanner_Scan(t *testing.T) {
|
|||||||
{
|
{
|
||||||
name: "allow",
|
name: "allow",
|
||||||
fields: fields{
|
fields: fields{
|
||||||
policyPaths: []string{"testdata/allow"},
|
policyPaths: []string{filepath.Join("testdata", "allow")},
|
||||||
namespaces: []string{"testdata"},
|
namespaces: []string{"testdata"},
|
||||||
},
|
},
|
||||||
inputDir: "testdata/allow",
|
inputDir: filepath.Join("testdata", "allow"),
|
||||||
want: []types.Misconfiguration{
|
want: []types.Misconfiguration{
|
||||||
{
|
{
|
||||||
FileType: "dockerfile",
|
FileType: "dockerfile",
|
||||||
@@ -131,6 +132,8 @@ func TestConfigScanner_Scan(t *testing.T) {
|
|||||||
tt.fields.policyPaths, tt.fields.dataPaths, tt.fields.namespaces, false)
|
tt.fields.policyPaths, tt.fields.dataPaths, tt.fields.namespaces, false)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
defer func() { _ = s.Close() }()
|
||||||
|
|
||||||
got, err := s.Scan(tt.inputDir)
|
got, err := s.Scan(tt.inputDir)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.Equal(t, tt.want, got)
|
assert.Equal(t, tt.want, got)
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ package gomod
 
 import (
 	"context"
-	"path/filepath"
+	"path"
 
 	"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
 	"github.com/aquasecurity/trivy/pkg/fanal/artifact"
@@ -30,7 +30,8 @@ func (h gomodMergeHook) Handle(_ context.Context, _ *analyzer.AnalysisResult, bl
 	var apps []types.Application
 	for _, app := range blob.Applications {
 		if app.Type == types.GoModule {
-			dir, file := filepath.Split(app.FilePath)
+			// The file path is supposed to be slash-separated regardless of OS.
+			dir, file := path.Split(app.FilePath)
 
 			// go.sum should be merged to go.mod.
 			if file == types.GoSum {
@@ -39,7 +40,7 @@ func (h gomodMergeHook) Handle(_ context.Context, _ *analyzer.AnalysisResult, bl
 
 			if file == types.GoMod && lessThanGo117(app) {
 				// e.g. /app/go.mod => /app/go.sum
-				gosumFile := filepath.Join(dir, types.GoSum)
+				gosumFile := path.Join(dir, types.GoSum)
 				if gosum := findGoSum(gosumFile, blob.Applications); gosum != nil {
 					mergeGoSum(&app, gosum) // nolint
 				}
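The switch from filepath to path matters because the file paths recorded in analysis results are slash-separated on every OS, while filepath works in terms of the host separator. A small illustration (not from the commit) of the difference on Windows:

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
)

func main() {
	// Paths stored in blob info are slash-separated regardless of OS.
	dir, _ := path.Split("app/go.mod") // "app/"

	// path.Join keeps the forward slash, so the derived go.sum path matches
	// the stored "app/go.sum" entry on Linux and Windows alike.
	fmt.Println(path.Join(dir, "go.sum"))

	// filepath.Join normalizes to the host separator; on Windows this prints
	// `app\go.sum`, which would never match the slash-separated key.
	fmt.Println(filepath.Join(dir, "go.sum"))
}
```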
@@ -103,7 +103,7 @@ func Test_gomodMergeHook_Hook(t *testing.T) {
 				Applications: []types.Application{
 					{
 						Type:     types.GoModule,
-						FilePath: "/app/go.mod",
+						FilePath: "app/go.mod",
 						Libraries: []types.Package{
 							{
 								Name: "github.com/aquasecurity/go-dep-parser",
@@ -117,7 +117,7 @@ func Test_gomodMergeHook_Hook(t *testing.T) {
 					},
 					{
 						Type:     types.GoModule,
-						FilePath: "/app/go.sum",
+						FilePath: "app/go.sum",
 						Libraries: []types.Package{
 							{
 								Name: "modernc.org/libc",
@@ -135,7 +135,7 @@ func Test_gomodMergeHook_Hook(t *testing.T) {
 				Applications: []types.Application{
 					{
 						Type:     types.GoModule,
-						FilePath: "/app/go.mod",
+						FilePath: "app/go.mod",
 						Libraries: []types.Package{
 							{
 								Name: "github.com/aquasecurity/go-dep-parser",
@@ -161,7 +161,7 @@ func Test_gomodMergeHook_Hook(t *testing.T) {
 				Applications: []types.Application{
 					{
 						Type:     types.GoModule,
-						FilePath: "/app/go.mod",
+						FilePath: "app/go.mod",
 						Libraries: []types.Package{
 							{
 								Name: "github.com/aquasecurity/go-dep-parser",
@@ -179,7 +179,7 @@ func Test_gomodMergeHook_Hook(t *testing.T) {
 				Applications: []types.Application{
 					{
 						Type:     types.GoModule,
-						FilePath: "/app/go.mod",
+						FilePath: "app/go.mod",
 						Libraries: []types.Package{
 							{
 								Name: "github.com/aquasecurity/go-dep-parser",
@@ -97,7 +97,7 @@ func findFSTarget(paths []string) (string, []string, error) {
 	for _, path := range absPaths {
 		path := filepath.ToSlash(path)
 		path = strings.TrimPrefix(path, slashTarget)
-		path = strings.TrimPrefix(path, string(filepath.Separator))
+		path = strings.TrimPrefix(path, "/")
 		if path == "" {
 			path = "."
 		}
@@ -106,8 +106,8 @@ func findFSTarget(paths []string) (string, []string, error) {
 
 	// we don't use filepath.Join here as we need to maintain the root "/"
 	target := strings.Join(outputSegments, string(filepath.Separator))
-	if target == "" {
-		target = string(filepath.Separator)
+	if target == "" || filepath.VolumeName(target) == target {
+		target += string(filepath.Separator)
 	}
 	return target, cleanPaths, nil
 }
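The added VolumeName check appears to handle the Windows case where the common prefix of the scanned paths collapses to a bare drive letter such as `C:`, which is a drive-relative path rather than the drive root. A hedged sketch of what that guard does:

```go
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// On Windows the shared prefix of C:\foo and C:\bar is just the volume "C:".
	target := "C:"

	// A bare volume name needs the separator appended to become the root "C:\".
	// On Unix, VolumeName returns "" for any path, so the value is left alone.
	if target == "" || filepath.VolumeName(target) == target {
		target += string(filepath.Separator)
	}
	fmt.Printf("%q\n", target)
}
```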
@@ -3,6 +3,9 @@ package misconf
 import (
 	"context"
 	"fmt"
+	"os"
+	"path/filepath"
+	"runtime"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -81,44 +84,62 @@ func Test_FindingFSTarget(t *testing.T) {
 			wantErr: true,
 		},
 		{
-			input:      []string{"/"},
-			wantTarget: "/",
+			input:      []string{string(os.PathSeparator)},
+			wantTarget: string(os.PathSeparator),
 			wantPaths:  []string{"."},
 		},
 		{
-			input:      []string{"/home/user"},
-			wantTarget: "/home/user",
+			input:      []string{filepath.Join(string(os.PathSeparator), "home", "user")},
+			wantTarget: filepath.Join(string(os.PathSeparator), "home", "user"),
 			wantPaths:  []string{"."},
 		},
 		{
-			input:      []string{"/home/user", "/home/user/something"},
-			wantTarget: "/home/user",
+			input: []string{
+				filepath.Join(string(os.PathSeparator), "home", "user"),
+				filepath.Join(string(os.PathSeparator), "home", "user", "something"),
+			},
+			wantTarget: filepath.Join(string(os.PathSeparator), "home", "user"),
 			wantPaths:  []string{".", "something"},
 		},
 		{
-			input:      []string{"/home/user", "/home/user/something/else"},
-			wantTarget: "/home/user",
+			input: []string{
+				filepath.Join(string(os.PathSeparator), "home", "user"),
+				filepath.Join(string(os.PathSeparator), "home", "user", "something", "else"),
+			},
+			wantTarget: filepath.Join(string(os.PathSeparator), "home", "user"),
 			wantPaths:  []string{".", "something/else"},
 		},
 		{
-			input:      []string{"/home/user", "/home/user2/something/else"},
-			wantTarget: "/home",
+			input: []string{
+				filepath.Join(string(os.PathSeparator), "home", "user"),
+				filepath.Join(string(os.PathSeparator), "home", "user2", "something", "else"),
+			},
+			wantTarget: filepath.Join(string(os.PathSeparator), "home"),
 			wantPaths:  []string{"user", "user2/something/else"},
 		},
 		{
-			input:      []string{"/foo", "/bar"},
-			wantTarget: "/",
+			input: []string{
+				filepath.Join(string(os.PathSeparator), "foo"),
+				filepath.Join(string(os.PathSeparator), "bar"),
+			},
+			wantTarget: string(os.PathSeparator),
 			wantPaths:  []string{"foo", "bar"},
 		},
 		{
-			input:      []string{"/", "/bar"},
-			wantTarget: "/",
+			input:      []string{string(os.PathSeparator), filepath.Join(string(os.PathSeparator), "bar")},
+			wantTarget: string(os.PathSeparator),
 			wantPaths:  []string{".", "bar"},
 		},
 	}
 
 	for _, test := range tests {
 		t.Run(fmt.Sprintf("%#v", test.input), func(t *testing.T) {
+			if runtime.GOOS == "windows" {
+				wantTarget, err := filepath.Abs(test.wantTarget)
+				require.NoError(t, err)
+				test.wantTarget = filepath.Clean(wantTarget)
+			}
+
 			target, paths, err := findFSTarget(test.input)
 			if test.wantErr {
 				require.Error(t, err)
@@ -128,5 +149,4 @@ func Test_FindingFSTarget(t *testing.T) {
 			}
 		})
 	}
-
 }
@@ -5,6 +5,7 @@ import (
 	"net/http/httptest"
 	"os"
 	"path/filepath"
+	"runtime"
 	"testing"
 
 	"github.com/aquasecurity/testdocker/engine"
@@ -43,6 +44,10 @@ func setupPodmanSock(t *testing.T) *httptest.Server {
 }
 
 func TestPodmanImage(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		t.Skip("podman.sock is not available for Windows CI")
+	}
+
 	type fields struct {
 		Image  v1.Image
 		opener opener
@@ -1,6 +1,7 @@
 package image
 
 import (
+	"path/filepath"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -14,32 +15,32 @@ func TestTryOCI(t *testing.T) {
 	}{
 		{
 			name:         "correct path to index without tag",
-			ociImagePath: "testdata/multi",
+			ociImagePath: filepath.Join("testdata", "multi"),
 			wantErr:      "",
 		},
 		{
 			name:         "correct path to index with correct tag",
-			ociImagePath: "testdata/multi:tg11",
+			ociImagePath: filepath.Join("testdata", "multi:tg11"),
 			wantErr:      "",
 		},
 		{
 			name:         "correct path to index with incorrect tag",
-			ociImagePath: "testdata/multi:tg12",
+			ociImagePath: filepath.Join("testdata", "multi:tg12"),
 			wantErr:      "invalid OCI image tag",
 		},
 		{
 			name:         "correct path to manifest without tag",
-			ociImagePath: "testdata/single",
+			ociImagePath: filepath.Join("testdata", "single"),
 			wantErr:      "",
 		},
 		{
 			name:         "correct path to manifest with correct tag",
-			ociImagePath: "testdata/single:3.14",
+			ociImagePath: filepath.Join("testdata", "single:3.14"),
 			wantErr:      "",
 		},
 		{
 			name:         "correct path to manifest with incorrect tag",
-			ociImagePath: "testdata/single:3.11",
+			ociImagePath: filepath.Join("testdata", "single:3.11"),
 			wantErr:      "invalid OCI image tag",
 		},
 	}
@@ -1,7 +1,9 @@
 package secret_test
 
 import (
+	"bytes"
 	"os"
+	"path/filepath"
 	"testing"
 
 	"go.uber.org/zap"
@@ -534,27 +536,27 @@ func TestSecretScanner(t *testing.T) {
 	}{
 		{
 			name:          "find match",
-			configPath:    "testdata/config.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "config.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: []types.SecretFinding{wantFinding1, wantFinding2},
 			},
 		},
 		{
 			name:          "find aws secrets",
-			configPath:    "testdata/config.yaml",
-			inputFilePath: "testdata/aws-secrets.txt",
+			configPath:    filepath.Join("testdata", "config.yaml"),
+			inputFilePath: filepath.Join("testdata", "aws-secrets.txt"),
 			want: types.Secret{
-				FilePath: "testdata/aws-secrets.txt",
+				FilePath: filepath.Join("testdata", "aws-secrets.txt"),
 				Findings: []types.SecretFinding{wantFinding5, wantFinding10, wantFinding9},
 			},
 		},
 		{
 			name:          "find Asymmetric Private Key secrets",
-			inputFilePath: "testdata/asymmetric-private-secret.txt",
+			inputFilePath: filepath.Join("testdata", "asymmetric-private-secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/asymmetric-private-secret.txt",
+				FilePath: filepath.Join("testdata", "asymmetric-private-secret.txt"),
 				Findings: []types.SecretFinding{wantFindingAsymmetricPrivateKey},
 			},
 		},
@@ -568,170 +570,170 @@ func TestSecretScanner(t *testing.T) {
 		},
 		{
 			name:          "find Asymmetric Private Key secrets json",
-			inputFilePath: "testdata/asymmetric-private-secret.json",
+			inputFilePath: filepath.Join("testdata", "asymmetric-private-secret.json"),
 			want: types.Secret{
-				FilePath: "testdata/asymmetric-private-secret.json",
+				FilePath: filepath.Join("testdata", "asymmetric-private-secret.json"),
 				Findings: []types.SecretFinding{wantFindingAsymmetricPrivateKeyJson},
 			},
 		},
 		{
 			name:          "include when keyword found",
-			configPath:    "testdata/config-happy-keywords.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "config-happy-keywords.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: []types.SecretFinding{wantFinding1, wantFinding2},
 			},
 		},
 		{
 			name:          "exclude when no keyword found",
-			configPath:    "testdata/config-sad-keywords.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "config-sad-keywords.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want:          types.Secret{},
 		},
 		{
 			name:          "should ignore .md files by default",
-			configPath:    "testdata/config.yaml",
-			inputFilePath: "testdata/secret.md",
+			configPath:    filepath.Join("testdata", "config.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.md"),
 			want: types.Secret{
-				FilePath: "testdata/secret.md",
+				FilePath: filepath.Join("testdata", "secret.md"),
 			},
 		},
 		{
 			name:          "should disable .md allow rule",
-			configPath:    "testdata/config-disable-allow-rule-md.yaml",
-			inputFilePath: "testdata/secret.md",
+			configPath:    filepath.Join("testdata", "config-disable-allow-rule-md.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.md"),
 			want: types.Secret{
-				FilePath: "testdata/secret.md",
+				FilePath: filepath.Join("testdata", "secret.md"),
 				Findings: []types.SecretFinding{wantFinding1, wantFinding2},
 			},
 		},
 		{
 			name:          "should find ghp builtin secret",
 			configPath:    "",
-			inputFilePath: "testdata/builtin-rule-secret.txt",
+			inputFilePath: filepath.Join("testdata", "builtin-rule-secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/builtin-rule-secret.txt",
+				FilePath: filepath.Join("testdata", "builtin-rule-secret.txt"),
 				Findings: []types.SecretFinding{wantFinding5a, wantFinding6},
 			},
 		},
 		{
 			name:          "should enable github-pat builtin rule, but disable aws-access-key-id rule",
-			configPath:    "testdata/config-enable-ghp.yaml",
-			inputFilePath: "testdata/builtin-rule-secret.txt",
+			configPath:    filepath.Join("testdata", "config-enable-ghp.yaml"),
+			inputFilePath: filepath.Join("testdata", "builtin-rule-secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/builtin-rule-secret.txt",
+				FilePath: filepath.Join("testdata", "builtin-rule-secret.txt"),
 				Findings: []types.SecretFinding{wantFindingGHButDisableAWS},
 			},
 		},
 		{
 			name:          "should disable github-pat builtin rule",
-			configPath:    "testdata/config-disable-ghp.yaml",
-			inputFilePath: "testdata/builtin-rule-secret.txt",
+			configPath:    filepath.Join("testdata", "config-disable-ghp.yaml"),
+			inputFilePath: filepath.Join("testdata", "builtin-rule-secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/builtin-rule-secret.txt",
+				FilePath: filepath.Join("testdata", "builtin-rule-secret.txt"),
 				Findings: []types.SecretFinding{wantFindingPATDisabled},
 			},
 		},
 		{
 			name:          "should disable custom rule",
-			configPath:    "testdata/config-disable-rule1.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "config-disable-rule1.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want:          types.Secret{},
 		},
 		{
 			name:          "allow-rule path",
-			configPath:    "testdata/allow-path.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "allow-path.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want:          types.Secret{},
 		},
 		{
 			name:          "allow-rule regex inside group",
-			configPath:    "testdata/allow-regex.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "allow-regex.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: []types.SecretFinding{wantFinding1},
 			},
 		},
 		{
 			name:          "allow-rule regex outside group",
-			configPath:    "testdata/allow-regex-outside-group.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "allow-regex-outside-group.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want:          types.Secret{},
 		},
 		{
 			name:          "exclude-block regexes",
-			configPath:    "testdata/exclude-block.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "exclude-block.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: []types.SecretFinding{wantFindingRegexDisabled},
 			},
 		},
 		{
 			name:          "skip examples file",
-			inputFilePath: "testdata/example-secret.txt",
+			inputFilePath: filepath.Join("testdata", "example-secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/example-secret.txt",
+				FilePath: filepath.Join("testdata", "example-secret.txt"),
 			},
 		},
 		{
 			name:          "global allow-rule path",
-			configPath:    "testdata/global-allow-path.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "global-allow-path.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: nil,
 			},
 		},
 		{
 			name:          "global allow-rule regex",
-			configPath:    "testdata/global-allow-regex.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "global-allow-regex.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: []types.SecretFinding{wantFinding1},
 			},
 		},
 		{
 			name:          "global exclude-block regexes",
-			configPath:    "testdata/global-exclude-block.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "global-exclude-block.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: []types.SecretFinding{wantFindingRegexDisabled},
 			},
 		},
 		{
 			name:          "multiple secret groups",
-			configPath:    "testdata/multiple-secret-groups.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "multiple-secret-groups.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: []types.SecretFinding{wantFinding3, wantFinding4},
 			},
 		},
 		{
 			name:          "truncate long line",
-			inputFilePath: "testdata/long-line-secret.txt",
+			inputFilePath: filepath.Join("testdata", "long-line-secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/long-line-secret.txt",
+				FilePath: filepath.Join("testdata", "long-line-secret.txt"),
 				Findings: []types.SecretFinding{wantFinding7},
 			},
 		},
 		{
 			name:          "add unknown severity when rule has no severity",
-			configPath:    "testdata/config-without-severity.yaml",
-			inputFilePath: "testdata/secret.txt",
+			configPath:    filepath.Join("testdata", "config-without-severity.yaml"),
+			inputFilePath: filepath.Join("testdata", "secret.txt"),
 			want: types.Secret{
-				FilePath: "testdata/secret.txt",
+				FilePath: filepath.Join("testdata", "secret.txt"),
 				Findings: []types.SecretFinding{wantFinding8},
 			},
 		},
 		{
 			name:          "invalid aws secrets",
-			inputFilePath: "testdata/invalid-aws-secrets.txt",
+			inputFilePath: filepath.Join("testdata", "invalid-aws-secrets.txt"),
 			want:          types.Secret{},
 		},
 		{
@@ -750,6 +752,8 @@ func TestSecretScanner(t *testing.T) {
 			content, err := os.ReadFile(tt.inputFilePath)
 			require.NoError(t, err)
 
+			content = bytes.ReplaceAll(content, []byte("\r"), []byte(""))
+
 			c, err := secret.ParseConfig(tt.configPath)
 			require.NoError(t, err)
 
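Stripping carriage returns from the fixture before scanning looks like a guard against CRLF checkouts: every extra `\r` shifts the byte offsets that the expected findings encode. A short illustration of the offset drift:

```go
package main

import (
	"bytes"
	"fmt"
)

func main() {
	// The same fixture is two bytes longer per line when checked out with CRLF,
	// which moves every finding's start/end offset.
	crlf := []byte("token=abc\r\nnext\r\n")

	// Normalizing to LF keeps the offsets identical across platforms.
	lf := bytes.ReplaceAll(crlf, []byte("\r"), []byte(""))

	fmt.Println(len(crlf), len(lf)) // 17 15
}
```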
@@ -24,10 +24,24 @@ type LicenseFile struct {
 	Type     LicenseType
 	FilePath string
 	PkgName  string
-	Findings []LicenseFinding
+	Findings LicenseFindings
 	Layer    Layer `json:",omitempty"`
 }
 
+type LicenseFindings []LicenseFinding
+
+func (findings LicenseFindings) Len() int {
+	return len(findings)
+}
+
+func (findings LicenseFindings) Swap(i, j int) {
+	findings[i], findings[j] = findings[j], findings[i]
+}
+
+func (findings LicenseFindings) Less(i, j int) bool {
+	return findings[i].Name < findings[j].Name
+}
+
 type LicenseFinding struct {
 	Category LicenseCategory // such as "forbidden"
 	Name     string
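Turning the findings slice into a named type that satisfies sort.Interface is what lets the classifier call sort.Sort on it and return results in a stable order. A self-contained sketch of the same pattern with a stand-in struct:

```go
package main

import (
	"fmt"
	"sort"
)

// Finding stands in for types.LicenseFinding; only Name matters for ordering.
type Finding struct{ Name string }

// Findings mirrors LicenseFindings: a named slice implementing sort.Interface.
type Findings []Finding

func (f Findings) Len() int           { return len(f) }
func (f Findings) Swap(i, j int)      { f[i], f[j] = f[j], f[i] }
func (f Findings) Less(i, j int) bool { return f[i].Name < f[j].Name }

func main() {
	fs := Findings{{"MIT"}, {"Apache-2.0"}, {"BSD-3-Clause"}}
	sort.Sort(fs)
	fmt.Println(fs) // [{Apache-2.0} {BSD-3-Clause} {MIT}]
}
```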
@@ -7,6 +7,7 @@ import (
 	"math"
 	"os"
 	"os/exec"
+	"path/filepath"
 
 	dio "github.com/aquasecurity/go-dep-parser/pkg/io"
 )
@@ -56,6 +57,11 @@ func Keys(m map[string]struct{}) []string {
 }
 
 func IsExecutable(fileInfo os.FileInfo) bool {
+	// For Windows
+	if filepath.Ext(fileInfo.Name()) == ".exe" {
+		return true
+	}
+
 	mode := fileInfo.Mode()
 	if !mode.IsRegular() {
 		return false
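Unix execute bits carry no meaning for files on Windows, so the extension check is the only practical signal there. A rough sketch of the combined check; the `0o111` mask is an assumption about how the rest of the function decides executability, not a quote of it:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// isExecutable: Windows binaries are recognized by extension, everything else
// by regular-file status plus any execute permission bit.
func isExecutable(fi os.FileInfo) bool {
	if filepath.Ext(fi.Name()) == ".exe" {
		return true
	}
	mode := fi.Mode()
	return mode.IsRegular() && mode.Perm()&0o111 != 0
}

func main() {
	fi, err := os.Stat(os.Args[0]) // the currently running binary
	if err != nil {
		panic(err)
	}
	fmt.Println(isExecutable(fi))
}
```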
@@ -3,6 +3,7 @@ package utils
 import (
 	"bufio"
 	"os"
+	"path/filepath"
 	"testing"
 )
 
@@ -11,11 +12,11 @@ func TestIsGzip(t *testing.T) {
 		in   string
 		want bool
 	}{
-		{"testdata/test.txt.gz", true},
-		{"testdata/test.tar.gz", true},
-		{"testdata/test.txt", false},
-		{"testdata/test.txt.zst", false},
-		{"testdata/aqua.png", false},
+		{filepath.Join("testdata", "test.txt.gz"), true},
+		{filepath.Join("testdata", "test.tar.gz"), true},
+		{filepath.Join("testdata", "test.txt"), false},
+		{filepath.Join("testdata", "test.txt.zst"), false},
+		{filepath.Join("testdata", "aqua.png"), false},
 	}
 	for _, tt := range tests {
 		t.Run(tt.in, func(t *testing.T) {
@@ -4,6 +4,7 @@ import (
 	"errors"
 	"io"
 	"os"
+	"path/filepath"
 	"strings"
 	"testing"
 
@@ -28,7 +29,7 @@ func TestDir_Walk(t *testing.T) {
 	}{
 		{
 			name:    "happy path",
-			rootDir: "testdata/fs",
+			rootDir: filepath.Join("testdata", "fs"),
 			analyzeFn: func(filePath string, info os.FileInfo, opener analyzer.Opener) error {
 				if filePath == "testdata/fs/bar" {
 					got, err := opener()
@@ -44,7 +45,7 @@ func TestDir_Walk(t *testing.T) {
 		},
 		{
 			name:    "skip file",
-			rootDir: "testdata/fs",
+			rootDir: filepath.Join("testdata", "fs"),
 			fields: fields{
 				skipFiles: []string{"testdata/fs/bar"},
 			},
@@ -59,7 +60,7 @@ func TestDir_Walk(t *testing.T) {
 			name:    "skip dir",
 			rootDir: "testdata/fs/",
 			fields: fields{
-				skipDirs: []string{"/testdata/fs/app/"},
+				skipDirs: []string{"/testdata/fs/app"},
 			},
 			analyzeFn: func(filePath string, info os.FileInfo, opener analyzer.Opener) error {
 				if strings.HasPrefix(filePath, "testdata/fs/app") {
@@ -70,7 +71,7 @@ func TestDir_Walk(t *testing.T) {
 		},
 		{
 			name:    "sad path",
-			rootDir: "testdata/fs",
+			rootDir: filepath.Join("testdata", "fs"),
 			analyzeFn: func(filePath string, info os.FileInfo, opener analyzer.Opener) error {
 				return errors.New("error")
 			},
@@ -4,10 +4,13 @@ import (
 	"archive/tar"
 	"io"
 	"io/fs"
+	"path"
 	"path/filepath"
 	"strings"
 
 	"golang.org/x/xerrors"
+
+	"github.com/aquasecurity/trivy/pkg/fanal/utils"
 )
 
 const (
@@ -15,6 +18,8 @@ const (
 	wh  string = ".wh."
 )
 
+var parentDir = ".." + utils.PathSeparator
+
 type LayerTar struct {
 	walker
 	threshold int64
@@ -33,7 +38,6 @@ func NewLayerTar(skipFiles, skipDirs []string, slow bool) LayerTar {
 }
 
 func (w LayerTar) Walk(layer io.Reader, analyzeFn WalkFunc) ([]string, []string, error) {
-
 	var opqDirs, whFiles, skipDirs []string
 	tr := tar.NewReader(layer)
 	for {
@@ -44,9 +48,10 @@ func (w LayerTar) Walk(layer io.Reader, analyzeFn WalkFunc) ([]string, []string,
 			return nil, nil, xerrors.Errorf("failed to extract the archive: %w", err)
 		}
 
-		filePath := hdr.Name
-		filePath = strings.TrimLeft(filepath.Clean(filePath), "/")
-		fileDir, fileName := filepath.Split(filePath)
+		// filepath.Clean cannot be used since tar file paths should be OS-agnostic.
+		filePath := path.Clean(hdr.Name)
+		filePath = strings.TrimLeft(filePath, "/")
+		fileDir, fileName := path.Split(filePath)
 
 		// e.g. etc/.wh..wh..opq
 		if opq == fileName {
@@ -56,7 +61,7 @@ func (w LayerTar) Walk(layer io.Reader, analyzeFn WalkFunc) ([]string, []string,
 		// etc/.wh.hostname
 		if strings.HasPrefix(fileName, wh) {
 			name := strings.TrimPrefix(fileName, wh)
-			fpath := filepath.Join(fileDir, name)
+			fpath := path.Join(fileDir, name)
 			whFiles = append(whFiles, fpath)
 			continue
 		}
@@ -108,7 +113,7 @@ func underSkippedDir(filePath string, skipDirs []string) bool {
 		if err != nil {
 			return false
 		}
-		if !strings.HasPrefix(rel, "../") {
+		if !strings.HasPrefix(rel, parentDir) {
 			return true
 		}
 	}
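Tar entry names always use forward slashes, so cleaning them with filepath would rewrite the separator on Windows and break every later comparison against slash-separated layer paths; path.Clean is OS-agnostic. A quick illustration, not taken from the commit:

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
	"strings"
)

func main() {
	entry := "./etc/.wh.hostname" // a typical tar header name

	// path.Clean keeps forward slashes on every platform.
	cleaned := strings.TrimLeft(path.Clean(entry), "/")
	fmt.Println(cleaned) // etc/.wh.hostname

	// filepath.Clean is OS-dependent: on Windows it yields `etc\.wh.hostname`,
	// which no longer matches slash-separated skip lists or whiteout handling.
	fmt.Println(filepath.Clean(entry))
}
```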
@@ -3,6 +3,7 @@ package walker_test
 import (
 	"errors"
 	"os"
+	"path/filepath"
 	"strings"
 	"testing"
 
@@ -31,7 +32,7 @@ func TestLayerTar_Walk(t *testing.T) {
 	}{
 		{
 			name:      "happy path",
-			inputFile: "testdata/test.tar",
+			inputFile: filepath.Join("testdata", "test.tar"),
 			analyzeFn: func(filePath string, info os.FileInfo, opener analyzer.Opener) error {
 				return nil
 			},
@@ -40,7 +41,7 @@ func TestLayerTar_Walk(t *testing.T) {
 		},
 		{
 			name:      "skip file",
-			inputFile: "testdata/test.tar",
+			inputFile: filepath.Join("testdata", "test.tar"),
 			fields: fields{
 				skipFiles: []string{"/app/myweb/index.html"},
 			},
@@ -55,9 +56,9 @@ func TestLayerTar_Walk(t *testing.T) {
 		},
 		{
 			name:      "skip dir",
-			inputFile: "testdata/test.tar",
+			inputFile: filepath.Join("testdata", "test.tar"),
 			fields: fields{
-				skipDirs: []string{"/app/"},
+				skipDirs: []string{"/app"},
 			},
 			analyzeFn: func(filePath string, info os.FileInfo, opener analyzer.Opener) error {
 				if strings.HasPrefix(filePath, "app") {
@@ -3,6 +3,7 @@ package licensing
 import (
 	"fmt"
 	"io"
+	"sort"
 	"sync"
 
 	classifier "github.com/google/licenseclassifier/v2"
@@ -13,8 +14,11 @@ import (
 	"github.com/aquasecurity/trivy/pkg/log"
 )
 
-var cf *classifier.Classifier
-var classifierOnce sync.Once
+var (
+	cf             *classifier.Classifier
+	classifierOnce sync.Once
+	m              sync.Mutex
+)
 
 func initGoogleClassifier() error {
 	// Initialize the default classifier once.
@@ -37,13 +41,18 @@ func Classify(filePath string, r io.Reader) (*types.LicenseFile, error) {
 		return nil, err
 	}
 
-	var findings []types.LicenseFinding
+	var findings types.LicenseFindings
 	var matchType types.LicenseType
 	seen := map[string]struct{}{}
 
+	// cf.Match is not thread safe
+	m.Lock()
+
 	// Use 'github.com/google/licenseclassifier' to find licenses
 	result := cf.Match(cf.Normalize(content))
 
+	m.Unlock()
+
 	for _, match := range result.Matches {
 		if match.Confidence <= 0.9 {
 			continue
@@ -68,6 +77,7 @@ func Classify(filePath string, r io.Reader) (*types.LicenseFile, error) {
 			Link:     licenseLink,
 		})
 	}
+	sort.Sort(findings)
 	return &types.LicenseFile{
 		Type:     matchType,
 		FilePath: filePath,
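The new package-level mutex serializes access to the classifier because, as the added comment notes, cf.Match is not thread safe. A self-contained sketch of the same guard pattern with a stand-in for the shared resource:

```go
package main

import (
	"fmt"
	"strings"
	"sync"
)

var (
	mu     sync.Mutex
	shared strings.Builder // stands in for the non-thread-safe classifier
)

// classify serializes every call into the shared resource, mirroring the
// m.Lock()/m.Unlock() pair placed around cf.Match.
func classify(s string) {
	mu.Lock()
	defer mu.Unlock()
	shared.WriteString(s)
}

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 8; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			classify("license text")
		}()
	}
	wg.Wait()
	fmt.Println(shared.Len()) // 8 * len("license text") = 96
}
```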
@@ -2,6 +2,7 @@ package log
 
 import (
 	"os"
+	"runtime"
 
 	xlog "github.com/masahiro331/go-xfs-filesystem/log"
 	"go.uber.org/zap"
@@ -58,6 +59,12 @@ func NewLogger(debug, disable bool) (*zap.SugaredLogger, error) {
 		return zapcore.DebugLevel < lvl && lvl < zapcore.ErrorLevel
 	})
 
+	encoderLevel := zapcore.CapitalColorLevelEncoder
+	// when running on Windows, don't log with color
+	if runtime.GOOS == "windows" {
+		encoderLevel = zapcore.CapitalLevelEncoder
+	}
+
 	encoderConfig := zapcore.EncoderConfig{
 		TimeKey:        "Time",
 		LevelKey:       "Level",
@@ -65,7 +72,7 @@ func NewLogger(debug, disable bool) (*zap.SugaredLogger, error) {
 		CallerKey:      "Caller",
 		MessageKey:     "Msg",
 		StacktraceKey:  "St",
-		EncodeLevel:    zapcore.CapitalColorLevelEncoder,
+		EncodeLevel:    encoderLevel,
 		EncodeTime:     zapcore.ISO8601TimeEncoder,
 		EncodeDuration: zapcore.StringDurationEncoder,
 		EncodeCaller:   zapcore.ShortCallerEncoder,
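Switching to the non-colored level encoder on Windows avoids writing ANSI escape sequences to consoles that may not interpret them. An isolated sketch of the selection and of what the two encoders emit; the config keys here are arbitrary, chosen just for the demo:

```go
package main

import (
	"fmt"
	"runtime"

	"go.uber.org/zap/zapcore"
)

func main() {
	// Same selection as the commit: plain capital levels on Windows,
	// ANSI-colored levels elsewhere.
	encodeLevel := zapcore.CapitalColorLevelEncoder
	if runtime.GOOS == "windows" {
		encodeLevel = zapcore.CapitalLevelEncoder
	}

	enc := zapcore.NewConsoleEncoder(zapcore.EncoderConfig{
		MessageKey:  "Msg",
		LevelKey:    "Level",
		EncodeLevel: encodeLevel,
	})
	buf, err := enc.EncodeEntry(zapcore.Entry{Level: zapcore.InfoLevel, Message: "hello"}, nil)
	if err != nil {
		panic(err)
	}
	// With the colored encoder the raw output contains "\x1b[...m" escapes;
	// with the plain encoder it is just "INFO".
	fmt.Printf("%q\n", buf.String())
}
```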
@@ -4,6 +4,7 @@ import (
 	"context"
 	"os"
 	"path/filepath"
+	"runtime"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -16,6 +17,10 @@ import (
 )
 
 func TestManager_Register(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		// WASM tests difficult on Windows
+		t.Skip("Test satisfied adequately by Linux tests")
+	}
 	tests := []struct {
 		name        string
 		noModuleDir bool
@@ -4,6 +4,7 @@ import (
 	"context"
 	"os"
 	"path/filepath"
+	"runtime"
 	"testing"
 
 	"github.com/stretchr/testify/assert"
@@ -14,6 +15,10 @@ import (
 )
 
 func TestPlugin_Run(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		// the test.sh script can't be run on windows so skipping
+		t.Skip("Test satisfied adequately by Linux tests")
+	}
 	type fields struct {
 		Name       string
 		Repository string
@@ -169,6 +174,10 @@ func TestPlugin_Run(t *testing.T) {
 }
 
 func TestInstall(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		// the test.sh script can't be run on windows so skipping
+		t.Skip("Test satisfied adequately by Linux tests")
+	}
 	tests := []struct {
 		name string
 		url  string
@@ -272,6 +281,10 @@ func TestInstall(t *testing.T) {
 }
 
 func TestUninstall(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		// the test.sh script can't be run on windows so skipping
+		t.Skip("Test satisfied adequately by Linux tests")
+	}
 	pluginName := "test_plugin"
 
 	tempDir := t.TempDir()
@@ -292,6 +305,10 @@ func TestUninstall(t *testing.T) {
 }
 
 func TestInformation(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		// the test.sh script can't be run on windows so skipping
+		t.Skip("Test satisfied adequately by Linux tests")
+	}
 	pluginName := "test_plugin"
 
 	tempDir := t.TempDir()
@@ -325,6 +342,10 @@ description: A simple test plugin`
 }
 
 func TestLoadAll1(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		// the test.sh script can't be run on windows so skipping
+		t.Skip("Test satisfied adequately by Linux tests")
+	}
 	tests := []struct {
 		name string
 		dir  string
@@ -380,6 +401,10 @@ func TestLoadAll1(t *testing.T) {
 }
 
 func TestUpdate(t *testing.T) {
+	if runtime.GOOS == "windows" {
+		// the test.sh script can't be run on windows so skipping
+		t.Skip("Test satisfied adequately by Linux tests")
+	}
 	pluginName := "test_plugin"
 
 	tempDir := t.TempDir()
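All of the plugin tests gain the same guard; it is the idiomatic way to keep an OS-specific test out of one job in a matrix while still compiling it everywhere. A minimal stand-alone example of the pattern (the test name is made up):

```go
package plugin_test

import (
	"runtime"
	"testing"
)

// TestShellScript illustrates the guard used above: tests that shell out to
// test.sh cannot run on Windows, so they skip there and the Linux job keeps
// the coverage.
func TestShellScript(t *testing.T) {
	if runtime.GOOS == "windows" {
		t.Skip("Test satisfied adequately by Linux tests")
	}
	// ... Unix-only assertions go here ...
}
```

An alternative would be a `//go:build !windows` constraint on the whole file, but an in-test skip keeps the test visible, and reported as skipped, on every platform.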
@@ -4,6 +4,7 @@ import (
 	"fmt"
 	"io"
 	"os"
+	"runtime"
 	"strings"
 	"sync"
 
@@ -131,6 +132,11 @@ func summarize(specifiedSeverities []dbTypes.Severity, severityCount map[string]
 }
 
 func IsOutputToTerminal(output io.Writer) bool {
+	if runtime.GOOS == "windows" {
+		// if its windows, we don't support formatting
+		return false
+	}
+
 	if output != os.Stdout {
 		return false
 	}
@@ -114,6 +114,8 @@ func Test_dbWorker_update(t *testing.T) {
 				tt.needsUpdate.input.appVersion, tt.needsUpdate.input.skip).Return(
 				tt.needsUpdate.output.needsUpdate, tt.needsUpdate.output.err)
 
+			defer func() { _ = db.Close() }()
+
 			if tt.download.call {
 				mockDBClient.On("Download", mock.Anything, mock.Anything).Run(
 					func(args mock.Arguments) {
@@ -222,6 +224,7 @@ func Test_newServeMux(t *testing.T) {
 
 			c, err := cache.NewFSCache(t.TempDir())
 			require.NoError(t, err)
+			defer func() { _ = c.Close() }()
 
 			ts := httptest.NewServer(newServeMux(
 				c, dbUpdateWg, requestWg, tt.args.token, tt.args.tokenHeader),