feat: support config (fanal#166)
Co-authored-by: aprp <doelaudi@gmail.com>
Co-authored-by: rahul2393 <rahulyadavsep92@gmail.com>
.gitignore: 3 changes (vendored)
@@ -15,8 +15,7 @@ main
 vendor
 test/integration/testdata/fixtures
 cmd/fanal/fanal
 *.tar
 *.gz

-fanal
+/fanal
analyzer/all/import.go: 26 additions (new file)
@@ -0,0 +1,26 @@
+package all
+
+import (
+	_ "github.com/aquasecurity/fanal/analyzer/command/apk"
+	_ "github.com/aquasecurity/fanal/analyzer/library/bundler"
+	_ "github.com/aquasecurity/fanal/analyzer/library/cargo"
+	_ "github.com/aquasecurity/fanal/analyzer/library/composer"
+	_ "github.com/aquasecurity/fanal/analyzer/library/gobinary"
+	_ "github.com/aquasecurity/fanal/analyzer/library/gomod"
+	_ "github.com/aquasecurity/fanal/analyzer/library/jar"
+	_ "github.com/aquasecurity/fanal/analyzer/library/npm"
+	_ "github.com/aquasecurity/fanal/analyzer/library/nuget"
+	_ "github.com/aquasecurity/fanal/analyzer/library/pipenv"
+	_ "github.com/aquasecurity/fanal/analyzer/library/poetry"
+	_ "github.com/aquasecurity/fanal/analyzer/library/yarn"
+	_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
+	_ "github.com/aquasecurity/fanal/analyzer/os/amazonlinux"
+	_ "github.com/aquasecurity/fanal/analyzer/os/debian"
+	_ "github.com/aquasecurity/fanal/analyzer/os/photon"
+	_ "github.com/aquasecurity/fanal/analyzer/os/redhatbase"
+	_ "github.com/aquasecurity/fanal/analyzer/os/suse"
+	_ "github.com/aquasecurity/fanal/analyzer/os/ubuntu"
+	_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
+	_ "github.com/aquasecurity/fanal/analyzer/pkg/dpkg"
+	_ "github.com/aquasecurity/fanal/analyzer/pkg/rpm"
+)
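The new `analyzer/all` package exists purely for its side effects: each blank import runs that analyzer package's `init()`, which calls `analyzer.RegisterAnalyzer`. A minimal sketch of how a consumer would pull in every built-in analyzer (the `main` package below is hypothetical, not part of this commit):

```go
package main

import (
	"fmt"

	"github.com/aquasecurity/fanal/analyzer"
	// Blank import for side effects: registers all OS, package, and library
	// analyzers via their init() functions.
	_ "github.com/aquasecurity/fanal/analyzer/all"
)

func main() {
	// With no analyzers disabled, every registered analyzer reports its version.
	a := analyzer.NewAnalyzer(nil)
	fmt.Println(a.AnalyzerVersions())
}
```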
analyzer/analyzer.go:

@@ -2,7 +2,6 @@ package analyzer

 import (
 	"context"
-	"fmt"
 	"os"
 	"sort"
 	"strings"
@@ -12,6 +11,7 @@ import (
 	"golang.org/x/xerrors"

+	aos "github.com/aquasecurity/fanal/analyzer/os"
 	"github.com/aquasecurity/fanal/log"
 	"github.com/aquasecurity/fanal/types"
 )
@@ -73,14 +73,25 @@ func (r *AnalysisResult) Sort() {
 		return r.PackageInfos[i].FilePath < r.PackageInfos[j].FilePath
 	})

+	for _, pi := range r.PackageInfos {
+		sort.Slice(pi.Packages, func(i, j int) bool {
+			return pi.Packages[i].Name < pi.Packages[j].Name
+		})
+	}
+
 	sort.Slice(r.Applications, func(i, j int) bool {
 		return r.Applications[i].FilePath < r.Applications[j].FilePath
 	})

+	sort.Slice(r.Configs, func(i, j int) bool {
+		return r.Configs[i].FilePath < r.Configs[j].FilePath
+	})
+
 	for _, app := range r.Applications {
 		sort.Slice(app.Libraries, func(i, j int) bool {
 			if app.Libraries[i].Library.Name != app.Libraries[j].Library.Name {
 				return app.Libraries[i].Library.Name < app.Libraries[j].Library.Name
 			}
 			return app.Libraries[i].Library.Version < app.Libraries[j].Library.Version
 		})
 	}
 }

 func (r *AnalysisResult) Merge(new *AnalysisResult) {
 	if new == nil || new.isEmpty() {
@@ -108,15 +119,15 @@ func (r *AnalysisResult) Merge(new *AnalysisResult) {
 		r.Applications = append(r.Applications, new.Applications...)
 	}

 	if len(new.Configs) > 0 {
-		r.Configs = append(r.Configs, new.Configs...)
+		for _, m := range new.Configs {
+			r.Configs = append(r.Configs, m)
+		}
 	}
 }

 type Analyzer struct {
 	drivers           []analyzer
+	configDrivers     []configAnalyzer
-	disabled          []Type
+	disabledAnalyzers []Type
 }

 func NewAnalyzer(disabledAnalyzers []Type) Analyzer {
@@ -139,54 +150,41 @@ func NewAnalyzer(disabledAnalyzers []Type) Analyzer {
 	return Analyzer{
 		drivers:           drivers,
+		configDrivers:     configDrivers,
-		disabled:          disabledAnalyzers,
+		disabledAnalyzers: disabledAnalyzers,
 	}
 }

-// AnalyzerVersions returns analyzer version identifier used for cache suffixes.
-// e.g. alpine: 1, amazon: 3, debian: 2 => 132
-// When the amazon analyzer is disabled => 102
-func (a Analyzer) AnalyzerVersions() string {
-	// Sort analyzers for the consistent version identifier
-	sorted := make([]analyzer, len(analyzers))
-	copy(sorted, analyzers)
-	sort.Slice(sorted, func(i, j int) bool {
-		return sorted[i].Type() < sorted[j].Type()
-	})
-
-	var versions string
-	for _, s := range sorted {
-		if isDisabled(s.Type(), a.disabled) {
-			versions += "0"
+// AnalyzerVersions returns analyzer version identifier used for cache keys.
+func (a Analyzer) AnalyzerVersions() map[string]int {
+	versions := map[string]int{}
+	for _, aa := range analyzers {
+		if isDisabled(aa.Type(), a.disabledAnalyzers) {
+			versions[string(aa.Type())] = 0
 			continue
 		}
-		versions += fmt.Sprint(s.Version())
+		versions[string(aa.Type())] = aa.Version()
 	}
 	return versions
 }
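Returning a map instead of a positional digit string makes the version identifier robust to analyzers being added, removed, or reordered. A consumer still needs a deterministic serialization to use it as a cache-key suffix; one plausible approach (a hypothetical helper, not part of this commit) is to marshal the map as JSON, which sorts map keys, and hash the result:

```go
package main

import (
	"crypto/sha256"
	"encoding/json"
	"fmt"
)

// cacheKeySuffix derives a stable suffix from an analyzer-version map.
// encoding/json sorts map keys, so equal maps always yield equal suffixes.
func cacheKeySuffix(versions map[string]int) (string, error) {
	b, err := json.Marshal(versions)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%x", sha256.Sum256(b)), nil
}

func main() {
	s, _ := cacheKeySuffix(map[string]int{"alpine": 1, "amazon": 1, "ubuntu": 0})
	fmt.Println(s) // same input map -> same suffix, regardless of iteration order
}
```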

-// ImageConfigAnalyzerVersions returns analyzer version identifier used for cache suffixes.
-func (a Analyzer) ImageConfigAnalyzerVersions() string {
-	// Sort image config analyzers for the consistent version identifier.
-	sorted := make([]configAnalyzer, len(configAnalyzers))
-	copy(sorted, configAnalyzers)
-	sort.Slice(sorted, func(i, j int) bool {
-		return sorted[i].Type() < sorted[j].Type()
-	})
-
-	var versions string
-	for _, s := range sorted {
-		if isDisabled(s.Type(), a.disabled) {
-			versions += "0"
+// ImageConfigAnalyzerVersions returns analyzer version identifier used for cache keys.
+func (a Analyzer) ImageConfigAnalyzerVersions() map[string]int {
+	versions := map[string]int{}
+	for _, ca := range configAnalyzers {
+		if isDisabled(ca.Type(), a.disabledAnalyzers) {
+			versions[string(ca.Type())] = 0
 			continue
 		}
-		versions += fmt.Sprint(s.Version())
+		versions[string(ca.Type())] = ca.Version()
 	}
 	return versions
 }
 func (a Analyzer) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, limit *semaphore.Weighted, result *AnalysisResult,
 	filePath string, info os.FileInfo, opener Opener) error {
+	if info.IsDir() {
+		return nil
+	}
 	for _, d := range a.drivers {
 		// filepath extracted from tar file doesn't have the prefix "/"
 		if !d.Required(strings.TrimLeft(filePath, "/"), info) {
@@ -207,7 +205,8 @@ func (a Analyzer) AnalyzeFile(ctx context.Context, wg *sync.WaitGroup, limit *se
 			defer wg.Done()

 			ret, err := a.Analyze(target)
-			if err != nil {
+			if err != nil && !xerrors.Is(err, aos.AnalyzeOSError) {
 				log.Logger.Debugf("Analysis error: %s", err)
 				return
 			}
 			result.Merge(ret)
analyzer/analyzer_test.go:

@@ -3,7 +3,7 @@ package analyzer_test

 import (
 	"context"
 	"errors"
-	"io/ioutil"
+	"fmt"
 	"os"
 	"sync"
 	"testing"
@@ -14,12 +14,8 @@ import (
 	"golang.org/x/xerrors"

 	"github.com/aquasecurity/fanal/analyzer"
-	_ "github.com/aquasecurity/fanal/analyzer/command/apk"
-	_ "github.com/aquasecurity/fanal/analyzer/library/bundler"
+	_ "github.com/aquasecurity/fanal/analyzer/all"
 	aos "github.com/aquasecurity/fanal/analyzer/os"
-	_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
-	_ "github.com/aquasecurity/fanal/analyzer/os/ubuntu"
-	_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
 	"github.com/aquasecurity/fanal/types"
 	godeptypes "github.com/aquasecurity/go-dep-parser/pkg/types"
 )
@@ -40,7 +36,7 @@ func (mockConfigAnalyzer) Analyze(targetOS types.OS, configBlob []byte) ([]types
 }

 func (mockConfigAnalyzer) Type() analyzer.Type {
-	return analyzer.Type(999)
+	return analyzer.Type("test")
 }

 func (mockConfigAnalyzer) Version() int {
@@ -253,8 +249,7 @@ func TestAnalysisResult_Merge(t *testing.T) {
 func TestAnalyzeFile(t *testing.T) {
 	type args struct {
 		filePath          string
-		info              os.FileInfo
-		opener            analyzer.Opener
+		testFilePath      string
 		disabledAnalyzers []analyzer.Type
 	}
 	tests := []struct {
@@ -267,9 +262,7 @@ func TestAnalyzeFile(t *testing.T) {
 			name: "happy path with os analyzer",
 			args: args{
 				filePath: "/etc/alpine-release",
-				opener: func() ([]byte, error) {
-					return ioutil.ReadFile("testdata/etc/alpine-release")
-				},
+				testFilePath: "testdata/etc/alpine-release",
 			},
 			want: &analyzer.AnalysisResult{
 				OS: &types.OS{
@@ -282,9 +275,7 @@ func TestAnalyzeFile(t *testing.T) {
 			name: "happy path with disabled os analyzer",
 			args: args{
 				filePath: "/etc/alpine-release",
-				opener: func() ([]byte, error) {
-					return ioutil.ReadFile("testdata/etc/alpine-release")
-				},
+				testFilePath:      "testdata/etc/alpine-release",
 				disabledAnalyzers: []analyzer.Type{analyzer.TypeAlpine},
 			},
 			want: &analyzer.AnalysisResult{},
@@ -293,9 +284,7 @@ func TestAnalyzeFile(t *testing.T) {
 			name: "happy path with package analyzer",
 			args: args{
 				filePath: "/lib/apk/db/installed",
-				opener: func() ([]byte, error) {
-					return ioutil.ReadFile("testdata/lib/apk/db/installed")
-				},
+				testFilePath: "testdata/lib/apk/db/installed",
 			},
 			want: &analyzer.AnalysisResult{
 				PackageInfos: []types.PackageInfo{
@@ -312,9 +301,7 @@ func TestAnalyzeFile(t *testing.T) {
 			name: "happy path with disabled package analyzer",
 			args: args{
 				filePath: "/lib/apk/db/installed",
-				opener: func() ([]byte, error) {
-					return ioutil.ReadFile("testdata/lib/apk/db/installed")
-				},
+				testFilePath:      "testdata/lib/apk/db/installed",
 				disabledAnalyzers: []analyzer.Type{analyzer.TypeApk},
 			},
 			want: &analyzer.AnalysisResult{},
@@ -323,9 +310,7 @@ func TestAnalyzeFile(t *testing.T) {
 			name: "happy path with library analyzer",
 			args: args{
 				filePath: "/app/Gemfile.lock",
-				opener: func() ([]byte, error) {
-					return ioutil.ReadFile("testdata/app/Gemfile.lock")
-				},
+				testFilePath: "testdata/app/Gemfile.lock",
 			},
 			want: &analyzer.AnalysisResult{
 				Applications: []types.Application{
@@ -348,9 +333,15 @@ func TestAnalyzeFile(t *testing.T) {
 			name: "happy path with invalid os information",
 			args: args{
 				filePath: "/etc/lsb-release",
-				opener: func() ([]byte, error) {
-					return []byte(`foo`), nil
-				},
+				testFilePath: "testdata/etc/hostname",
 			},
 			want: &analyzer.AnalysisResult{},
 		},
+		{
+			name: "happy path with a directory",
+			args: args{
+				filePath:     "/etc/lsb-release",
+				testFilePath: "testdata/etc",
+			},
+			want: &analyzer.AnalysisResult{},
+		},
@@ -358,9 +349,7 @@ func TestAnalyzeFile(t *testing.T) {
 			name: "sad path with opener error",
 			args: args{
 				filePath: "/lib/apk/db/installed",
-				opener: func() ([]byte, error) {
-					return nil, xerrors.New("error")
-				},
+				testFilePath: "testdata/error",
 			},
 			wantErr: "unable to open a file (/lib/apk/db/installed)",
 		},
@@ -372,7 +361,17 @@ func TestAnalyzeFile(t *testing.T) {

 			got := new(analyzer.AnalysisResult)
 			a := analyzer.NewAnalyzer(tt.args.disabledAnalyzers)
-			err := a.AnalyzeFile(context.Background(), &wg, limit, got, tt.args.filePath, tt.args.info, tt.args.opener)
+
+			info, err := os.Stat(tt.args.testFilePath)
+			require.NoError(t, err)
+
+			ctx := context.Background()
+			err = a.AnalyzeFile(ctx, &wg, limit, got, tt.args.filePath, info, func() ([]byte, error) {
+				if tt.args.testFilePath == "testdata/error" {
+					return nil, xerrors.New("error")
+				}
+				return os.ReadFile(tt.args.testFilePath)
+			})

 			wg.Wait()
 			if tt.wantErr != "" {
@@ -485,23 +484,74 @@ func TestAnalyzer_AnalyzerVersions(t *testing.T) {
 	tests := []struct {
 		name     string
 		disabled []analyzer.Type
-		want     string
+		want     map[string]int
 	}{
 		{
 			name:     "happy path",
 			disabled: []analyzer.Type{},
-			want:     "1111",
+			want: map[string]int{
+				"alpine":   1,
+				"amazon":   1,
+				"apk":      1,
+				"bundler":  1,
+				"cargo":    1,
+				"centos":   1,
+				"composer": 1,
+				"debian":   1,
+				"dpkg":     1,
+				"fedora":   1,
+				"gobinary": 1,
+				"gomod":    1,
+				"jar":      1,
+				"npm":      1,
+				"nuget":    1,
+				"oracle":   1,
+				"photon":   1,
+				"pipenv":   1,
+				"poetry":   1,
+				"redhat":   1,
+				"rpm":      1,
+				"suse":     1,
+				"ubuntu":   1,
+				"yarn":     1,
+			},
 		},
 		{
 			name:     "disable analyzers",
 			disabled: []analyzer.Type{analyzer.TypeAlpine, analyzer.TypeUbuntu},
-			want:     "0011",
+			want: map[string]int{
+				"alpine":   0,
+				"amazon":   1,
+				"apk":      1,
+				"bundler":  1,
+				"cargo":    1,
+				"centos":   1,
+				"composer": 1,
+				"debian":   1,
+				"dpkg":     1,
+				"fedora":   1,
+				"gobinary": 1,
+				"gomod":    1,
+				"jar":      1,
+				"npm":      1,
+				"nuget":    1,
+				"oracle":   1,
+				"photon":   1,
+				"pipenv":   1,
+				"poetry":   1,
+				"redhat":   1,
+				"rpm":      1,
+				"suse":     1,
+				"ubuntu":   0,
+				"yarn":     1,
+			},
 		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			a := analyzer.NewAnalyzer(tt.disabled)
 			got := a.AnalyzerVersions()
+			fmt.Printf("%v\n", got)
 			assert.Equal(t, tt.want, got)
 		})
 	}
@@ -511,17 +561,23 @@ func TestAnalyzer_ImageConfigAnalyzerVersions(t *testing.T) {
 	tests := []struct {
 		name     string
 		disabled []analyzer.Type
-		want     string
+		want     map[string]int
 	}{
 		{
 			name:     "happy path",
 			disabled: []analyzer.Type{},
-			want:     "11", // mockConfigAnalyzer is added
+			want: map[string]int{
+				"apk-command": 1,
+				"test":        1,
+			},
 		},
 		{
 			name:     "disable analyzers",
 			disabled: []analyzer.Type{analyzer.TypeAlpine, analyzer.TypeApkCommand},
-			want:     "01", // mockConfigAnalyzer is added
+			want: map[string]int{
+				"apk-command": 0,
+				"test":        1,
+			},
 		},
 	}
 	for _, tt := range tests {
analyzer/config/config.go: 77 additions (new file)
@@ -0,0 +1,77 @@
+package config
+
+import (
+	"regexp"
+	"sort"
+	"strings"
+
+	"golang.org/x/xerrors"
+
+	"github.com/aquasecurity/fanal/analyzer"
+	"github.com/aquasecurity/fanal/analyzer/config/docker"
+	"github.com/aquasecurity/fanal/analyzer/config/hcl"
+	"github.com/aquasecurity/fanal/analyzer/config/json"
+	"github.com/aquasecurity/fanal/analyzer/config/toml"
+	"github.com/aquasecurity/fanal/analyzer/config/yaml"
+	"github.com/aquasecurity/fanal/types"
+)
+
+const separator = ":"
+
+type ScannerOption struct {
+	Namespaces   []string
+	FilePatterns []string
+	PolicyPaths  []string
+	DataPaths    []string
+}
+
+func (o *ScannerOption) Sort() {
+	sort.Slice(o.FilePatterns, func(i, j int) bool {
+		return o.FilePatterns[i] < o.FilePatterns[j]
+	})
+	sort.Slice(o.PolicyPaths, func(i, j int) bool {
+		return o.PolicyPaths[i] < o.PolicyPaths[j]
+	})
+	sort.Slice(o.DataPaths, func(i, j int) bool {
+		return o.DataPaths[i] < o.DataPaths[j]
+	})
+}
+func RegisterConfigAnalyzers(filePatterns []string) error {
+	var dockerRegexp, hclRegexp, jsonRegexp, tomlRegexp, yamlRegexp *regexp.Regexp
+	for _, p := range filePatterns {
+		// e.g. "dockerfile:my_dockerfile_*"
+		s := strings.SplitN(p, separator, 2)
+		if len(s) != 2 {
+			return xerrors.Errorf("invalid file pattern (%s)", p)
+		}
+		fileType, pattern := s[0], s[1]
+		r, err := regexp.Compile(pattern)
+		if err != nil {
+			return xerrors.Errorf("invalid file regexp (%s): %w", p, err)
+		}
+
+		switch fileType {
+		case types.Dockerfile:
+			dockerRegexp = r
+		case types.HCL:
+			hclRegexp = r
+		case types.JSON:
+			jsonRegexp = r
+		case types.TOML:
+			tomlRegexp = r
+		case types.YAML:
+			yamlRegexp = r
+		default:
+			return xerrors.Errorf("unknown file type: %s, pattern: %s", fileType, pattern)
+		}
+	}
+
+	analyzer.RegisterAnalyzer(docker.NewConfigAnalyzer(dockerRegexp))
+	analyzer.RegisterAnalyzer(hcl.NewConfigAnalyzer(hclRegexp))
+	analyzer.RegisterAnalyzer(json.NewConfigAnalyzer(jsonRegexp))
+	analyzer.RegisterAnalyzer(toml.NewConfigAnalyzer(tomlRegexp))
+	analyzer.RegisterAnalyzer(yaml.NewConfigAnalyzer(yamlRegexp))
+
+	return nil
+}
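A short usage sketch for the new entry point (the caller code here is hypothetical): each pattern has the form `<type>:<regexp>`, and the pattern overrides which file paths the analyzer of that type accepts.

```go
package main

import (
	"log"

	"github.com/aquasecurity/fanal/analyzer/config"
)

func main() {
	// Registers the Dockerfile/HCL/JSON/TOML/YAML config analyzers; files whose
	// paths match "my_dockerfile_.*" are treated as Dockerfiles even when they
	// lack the usual name. The pattern must compile as a Go regexp.
	err := config.RegisterConfigAnalyzers([]string{"dockerfile:my_dockerfile_.*"})
	if err != nil {
		log.Fatal(err) // e.g. "invalid file pattern (...)" or "unknown file type: ..."
	}
}
```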
analyzer/config (old type constants, file deleted):

@@ -1,10 +0,0 @@
-package config
-
-const (
-	YAML       = "yaml"
-	JSON       = "json"
-	TOML       = "toml"
-	Dockerfile = "dockerfile"
-	HCL1       = "hcl1"
-	HCL2       = "hcl2"
-)
analyzer/config/docker/docker.go:

@@ -3,47 +3,56 @@ package docker

 import (
 	"os"
 	"path/filepath"
+	"regexp"
 	"strings"

-	"github.com/open-policy-agent/conftest/parser/docker"
 	"golang.org/x/xerrors"

 	"github.com/aquasecurity/fanal/analyzer"
-	"github.com/aquasecurity/fanal/analyzer/config"
+	"github.com/aquasecurity/fanal/config/parser/dockerfile"
 	"github.com/aquasecurity/fanal/types"
 )

-func init() {
-	analyzer.RegisterAnalyzer(&dockerConfigAnalyzer{
-		parser: &docker.Parser{},
-	})
-}
-
 const version = 1

 var requiredFile = "Dockerfile"

-type dockerConfigAnalyzer struct {
-	parser *docker.Parser
+type ConfigAnalyzer struct {
+	parser      *dockerfile.Parser
+	filePattern *regexp.Regexp
 }

-func (a dockerConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
-	var parsed interface{}
-	if err := a.parser.Unmarshal(target.Content, &parsed); err != nil {
+func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
+	return ConfigAnalyzer{
+		parser:      &dockerfile.Parser{},
+		filePattern: filePattern,
+	}
+}
+
+func (s ConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
+	parsed, err := s.parser.Parse(target.Content)
+	if err != nil {
 		return nil, xerrors.Errorf("unable to parse Dockerfile (%s): %w", target.FilePath, err)
 	}

 	return &analyzer.AnalysisResult{
-		Configs: []types.Config{{
-			Type:     config.Dockerfile,
-			FilePath: target.FilePath,
-			Content:  parsed,
-		}},
+		Configs: []types.Config{
+			{
+				Type:     types.Dockerfile,
+				FilePath: target.FilePath,
+				Content:  parsed,
+			},
+		},
 	}, nil
 }

 // Required does a case-insensitive check for filePath and returns true if
 // filePath equals/startsWith/hasExtension requiredFile
-func (a dockerConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
+func (s ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
+	if s.filePattern != nil && s.filePattern.MatchString(filePath) {
+		return true
+	}
+
 	base := filepath.Base(filePath)
 	ext := filepath.Ext(base)
 	if strings.EqualFold(base, requiredFile+ext) {
@@ -56,10 +65,10 @@ func (a dockerConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
 	return false
 }

-func (a dockerConfigAnalyzer) Type() analyzer.Type {
+func (s ConfigAnalyzer) Type() analyzer.Type {
 	return analyzer.TypeDockerfile
 }

-func (a dockerConfigAnalyzer) Version() int {
+func (s ConfigAnalyzer) Version() int {
 	return version
 }
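The rewritten Required keeps the old case-insensitive Dockerfile name matching and adds the optional filePattern escape hatch. A hedged sketch of the resulting behavior; the expectations follow the comment above and the test cases further down:

```go
package main

import (
	"fmt"
	"regexp"

	"github.com/aquasecurity/fanal/analyzer/config/docker"
)

func main() {
	a := docker.NewConfigAnalyzer(nil)
	fmt.Println(a.Required("Dockerfile", nil))          // true: exact name
	fmt.Println(a.Required("app/Dockerfile.prod", nil)) // true: base "Dockerfile" plus an extension
	fmt.Println(a.Required("deployment.json", nil))     // false

	// With a file pattern, arbitrary names can opt in.
	p := docker.NewConfigAnalyzer(regexp.MustCompile(`foo*`))
	fmt.Println(p.Required("foo_file", nil)) // true: pattern match
}
```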
analyzer/config/docker/docker_test.go:

@@ -1,15 +1,15 @@
-package docker
+package docker_test

 import (
 	"io/ioutil"
+	"regexp"
 	"testing"

-	"github.com/open-policy-agent/conftest/parser/docker"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

 	"github.com/aquasecurity/fanal/analyzer"
-	"github.com/aquasecurity/fanal/analyzer/config"
+	"github.com/aquasecurity/fanal/analyzer/config/docker"
 	"github.com/aquasecurity/fanal/types"
 )
@@ -26,30 +26,46 @@ func Test_dockerConfigAnalyzer_Analyze(t *testing.T) {
 			want: &analyzer.AnalysisResult{
 				Configs: []types.Config{
 					{
-						Type:     config.Dockerfile,
+						Type:     types.Dockerfile,
 						FilePath: "testdata/Dockerfile.deployment",
-						Content: []interface{}{
-							[]interface{}{
+						Content: map[string]interface{}{
+							"command": map[string]interface{}{
+								"foo": []interface{}{
 									map[string]interface{}{
 										"Cmd":       "from",
-										"SubCmd":    "",
-										"JSON":      false,
 										"Flags":     []interface{}{},
+										"JSON":      false,
+										"Original":  "FROM foo",
+										"Stage":     float64(0),
+										"StartLine": float64(1),
+										"EndLine":   float64(1),
+										"SubCmd":    "",
 										"Value":     []interface{}{"foo"},
 									},
 									map[string]interface{}{
 										"Cmd":       "copy",
-										"SubCmd":    "",
-										"JSON":      false,
 										"Flags":     []interface{}{},
+										"JSON":      false,
+										"Original":  "COPY . /",
+										"Stage":     float64(0),
+										"StartLine": float64(2),
+										"EndLine":   float64(2),
+										"SubCmd":    "",
 										"Value":     []interface{}{".", "/"},
 									},
 									map[string]interface{}{
 										"Cmd":       "run",
-										"SubCmd":    "",
-										"JSON":      false,
 										"Flags":     []interface{}{},
-										"Value":     []interface{}{"echo hello"},
+										"JSON":      false,
+										"Original":  "RUN echo hello",
+										"Stage":     float64(0),
+										"StartLine": float64(3),
+										"EndLine":   float64(3),
+										"SubCmd":    "",
+										"Value": []interface{}{
+											"echo hello",
+										},
 									},
 								},
 							},
@@ -63,43 +79,66 @@ func Test_dockerConfigAnalyzer_Analyze(t *testing.T) {
 			want: &analyzer.AnalysisResult{
 				Configs: []types.Config{
 					{
-						Type:     config.Dockerfile,
+						Type:     types.Dockerfile,
 						FilePath: "testdata/Dockerfile.multistage",
-						Content: []interface{}{
-							[]interface{}{
+						Content: map[string]interface{}{
+							"command": map[string]interface{}{
+								"foo AS build": []interface{}{
 									map[string]interface{}{
 										"Cmd":       "from",
-										"SubCmd":    "",
-										"JSON":      false,
 										"Flags":     []interface{}{},
+										"JSON":      false,
+										"Original":  "FROM foo AS build",
+										"Stage":     float64(0),
+										"StartLine": float64(1),
+										"EndLine":   float64(1),
+										"SubCmd":    "",
 										"Value":     []interface{}{"foo", "AS", "build"},
 									},
 									map[string]interface{}{
 										"Cmd":       "copy",
-										"SubCmd":    "",
-										"JSON":      false,
 										"Flags":     []interface{}{},
+										"JSON":      false,
+										"Original":  "COPY . /",
+										"Stage":     float64(0),
+										"StartLine": float64(2),
+										"EndLine":   float64(2),
+										"SubCmd":    "",
 										"Value":     []interface{}{".", "/"},
 									},
 									map[string]interface{}{
 										"Cmd":       "run",
-										"SubCmd":    "",
-										"JSON":      false,
 										"Flags":     []interface{}{},
+										"JSON":      false,
+										"Original":  "RUN echo hello",
+										"Stage":     float64(0),
+										"StartLine": float64(3),
+										"EndLine":   float64(3),
+										"SubCmd":    "",
 										"Value":     []interface{}{"echo hello"},
 									},
 								},
+								"scratch ": []interface{}{
 									map[string]interface{}{
 										"Cmd":       "from",
-										"SubCmd":    "",
-										"JSON":      false,
 										"Flags":     []interface{}{},
+										"JSON":      false,
+										"Original":  "FROM scratch ",
+										"Stage":     float64(1),
+										"StartLine": float64(5),
+										"EndLine":   float64(5),
+										"SubCmd":    "",
 										"Value":     []interface{}{"scratch"},
 									},
 									map[string]interface{}{
 										"Cmd":       "copy",
-										"SubCmd":    "",
-										"JSON":      false,
 										"Flags":     []interface{}{"--from=build"},
+										"JSON":      false,
+										"Original":  "COPY --from=build /bar /bar",
+										"Stage":     float64(1),
+										"StartLine": float64(6),
+										"EndLine":   float64(6),
+										"SubCmd":    "",
 										"Value":     []interface{}{"/bar", "/bar"},
 									},
 								},
@@ -108,21 +147,20 @@ func Test_dockerConfigAnalyzer_Analyze(t *testing.T) {
 							},
 						},
 					},
 				},
 			},
 		},
 		{
 			name:      "broken Docker: env no value",
 			inputFile: "testdata/Dockerfile.broken",
-			wantErr:   "parse dockerfile: ENV must have two arguments",
+			wantErr:   "ENV must have two arguments",
 		},
 	}

 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			b, err := ioutil.ReadFile(tt.inputFile)
 			require.NoError(t, err)

-			a := dockerConfigAnalyzer{
-				parser: &docker.Parser{},
-			}
-
+			a := docker.NewConfigAnalyzer(nil)
 			got, err := a.Analyze(analyzer.AnalysisTarget{
 				FilePath: tt.inputFile,
 				Content:  b,
@@ -142,6 +180,7 @@ func Test_dockerConfigAnalyzer_Analyze(t *testing.T) {
 func Test_dockerConfigAnalyzer_Required(t *testing.T) {
 	tests := []struct {
 		name        string
+		filePattern *regexp.Regexp
 		filePath    string
 		want        bool
 	}{
@@ -195,24 +234,25 @@ func Test_dockerConfigAnalyzer_Required(t *testing.T) {
 			filePath: "deployment.json",
 			want:     false,
 		},
+		{
+			name:        "file pattern",
+			filePattern: regexp.MustCompile(`foo*`),
+			filePath:    "foo_file",
+			want:        true,
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			a := dockerConfigAnalyzer{
-				parser: &docker.Parser{},
-			}
-
-			got := a.Required(tt.filePath, nil)
+			s := docker.NewConfigAnalyzer(tt.filePattern)
+			got := s.Required(tt.filePath, nil)
 			assert.Equal(t, tt.want, got)
 		})
 	}
 }

 func Test_dockerConfigAnalyzer_Type(t *testing.T) {
+	s := docker.NewConfigAnalyzer(nil)
 	want := analyzer.TypeDockerfile
-	a := dockerConfigAnalyzer{
-		parser: &docker.Parser{},
-	}
-	got := a.Type()
+	got := s.Type()
 	assert.Equal(t, want, got)
 }
analyzer/config/hcl/hcl.go:

@@ -3,67 +3,78 @@ package hcl

 import (
 	"os"
 	"path/filepath"
+	"regexp"

-	multierror "github.com/hashicorp/go-multierror"
+	"github.com/hashicorp/go-multierror"
 	"github.com/open-policy-agent/conftest/parser/hcl1"
 	"github.com/open-policy-agent/conftest/parser/hcl2"
 	"golang.org/x/xerrors"

 	"github.com/aquasecurity/fanal/analyzer"
-	"github.com/aquasecurity/fanal/analyzer/config"
 	"github.com/aquasecurity/fanal/types"
 )

-func init() {
-	analyzer.RegisterAnalyzer(&hclConfigAnalyzer{
-		hcl1Parser: &hcl1.Parser{},
-		hcl2Parser: &hcl2.Parser{},
-	})
-}
-
 const version = 1

 var requiredExts = []string{".hcl", ".hcl1", ".hcl2", ".tf"}

-type hclConfigAnalyzer struct {
+type ConfigAnalyzer struct {
 	hcl1Parser  *hcl1.Parser
 	hcl2Parser  *hcl2.Parser
+	filePattern *regexp.Regexp
 }

+func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
+	return ConfigAnalyzer{
+		hcl1Parser:  &hcl1.Parser{},
+		hcl2Parser:  &hcl2.Parser{},
+		filePattern: filePattern,
+	}
+}
+
 // Analyze analyzes HCL-based config files, defaulting to HCL2.0 spec
 // it returns error only if content does not comply to both HCL2.0 and HCL1.0 spec
-func (a hclConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
+func (a ConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
+	parsed, err := a.analyze(target)
+	if err != nil {
+		return nil, xerrors.Errorf("unable to parse HCL (%s): %w", target.FilePath, err)
+	}
+
+	return &analyzer.AnalysisResult{
+		Configs: []types.Config{
+			{
+				Type:     types.HCL,
+				FilePath: target.FilePath,
+				Content:  parsed,
+			},
+		},
+	}, nil
+}
+
+func (a ConfigAnalyzer) analyze(target analyzer.AnalysisTarget) (interface{}, error) {
 	var errs error
 	var parsed interface{}

 	if err := a.hcl2Parser.Unmarshal(target.Content, &parsed); err != nil {
 		errs = multierror.Append(errs, xerrors.Errorf("unable to parse HCL2 (%s): %w", target.FilePath, err))
 	} else {
-		return &analyzer.AnalysisResult{
-			Configs: []types.Config{{
-				Type:     config.HCL2,
-				FilePath: target.FilePath,
-				Content:  parsed,
-			}},
-		}, nil
+		return parsed, nil
 	}

 	if err := a.hcl1Parser.Unmarshal(target.Content, &parsed); err != nil {
 		errs = multierror.Append(errs, xerrors.Errorf("unable to parse HCL1 (%s): %w", target.FilePath, err))
 	} else {
-		return &analyzer.AnalysisResult{
-			Configs: []types.Config{{
-				Type:     config.HCL1,
-				FilePath: target.FilePath,
-				Content:  parsed,
-			}},
-		}, nil
+		return parsed, nil
 	}

 	return nil, errs
 }

-func (a hclConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
+func (a ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
+	if a.filePattern != nil && a.filePattern.MatchString(filePath) {
+		return true
+	}
+
 	ext := filepath.Ext(filePath)
 	for _, required := range requiredExts {
 		if ext == required {
@@ -73,10 +84,10 @@ func (a hclConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
 	return false
 }

-func (a hclConfigAnalyzer) Type() analyzer.Type {
+func (ConfigAnalyzer) Type() analyzer.Type {
 	return analyzer.TypeHCL
 }

-func (a hclConfigAnalyzer) Version() int {
+func (ConfigAnalyzer) Version() int {
 	return version
 }
analyzer/config/hcl/hcl_test.go:

@@ -1,16 +1,15 @@
-package hcl
+package hcl_test

 import (
 	"io/ioutil"
+	"regexp"
 	"testing"

-	"github.com/open-policy-agent/conftest/parser/hcl1"
-	"github.com/open-policy-agent/conftest/parser/hcl2"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

 	"github.com/aquasecurity/fanal/analyzer"
-	"github.com/aquasecurity/fanal/analyzer/config"
+	"github.com/aquasecurity/fanal/analyzer/config/hcl"
 	"github.com/aquasecurity/fanal/types"
 )
@@ -27,19 +26,19 @@ func Test_hclConfigAnalyzer_Analyze(t *testing.T) {
 			want: &analyzer.AnalysisResult{
 				Configs: []types.Config{
 					{
-						Type:     config.HCL1,
+						Type:     types.HCL,
 						FilePath: "testdata/deployment.hcl1",
 						Content: map[string]interface{}{
 							"apiVersion": "apps/v1",
 							"kind":       "Deployment",
 							"metadata": []map[string]interface{}{
-								map[string]interface{}{
+								{
 									"name": "hello-kubernetes",
 								},
 							},
 							"spec": []map[string]interface{}{
-								map[string]interface{}{
-									"replicas": int(3),
+								{
+									"replicas": 3,
 								},
 							},
 						},
@@ -58,7 +57,7 @@ func Test_hclConfigAnalyzer_Analyze(t *testing.T) {
 			want: &analyzer.AnalysisResult{
 				Configs: []types.Config{
 					{
-						Type:     config.HCL2,
+						Type:     types.HCL,
 						FilePath: "testdata/deployment.hcl2",
 						Content: map[string]interface{}{
 							"apiVersion": "apps/v1",
@@ -67,7 +66,7 @@ func Test_hclConfigAnalyzer_Analyze(t *testing.T) {
 								"name": "hello-kubernetes",
 							},
 							"spec": map[string]interface{}{
-								"replicas": float64(3),
+								"replicas": float64(4),
 							},
 						},
 					},
@@ -85,18 +84,18 @@ func Test_hclConfigAnalyzer_Analyze(t *testing.T) {
 			want: &analyzer.AnalysisResult{
 				Configs: []types.Config{
 					{
-						Type:     config.HCL1,
+						Type:     types.HCL,
 						FilePath: "testdata/deprecated.hcl",
 						Content: map[string]interface{}{
 							"apiVersion": "apps/v1",
 							"kind":       "Deployment",
 							"metadata": []map[string]interface{}{
-								map[string]interface{}{
+								{
 									"name": "hello-kubernetes",
 								},
 							},
 							"spec": []map[string]interface{}{
-								map[string]interface{}{
+								{
 									"replicas": int(3),
 								},
 							},
@@ -106,15 +105,14 @@ func Test_hclConfigAnalyzer_Analyze(t *testing.T) {
 				},
 			},
 		},
 	}

 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			b, err := ioutil.ReadFile(tt.inputFile)
-			require.NoError(t, err)
-
-			a := hclConfigAnalyzer{
-				hcl1Parser: &hcl1.Parser{},
-				hcl2Parser: &hcl2.Parser{},
-			}
+			a := hcl.NewConfigAnalyzer(nil)
+			require.NoError(t, err)

 			got, err := a.Analyze(analyzer.AnalysisTarget{
 				FilePath: tt.inputFile,
@@ -135,6 +133,7 @@ func Test_hclConfigAnalyzer_Required(t *testing.T) {
 func Test_hclConfigAnalyzer_Required(t *testing.T) {
 	tests := []struct {
 		name        string
+		filePattern *regexp.Regexp
 		filePath    string
 		want        bool
 	}{
@@ -163,26 +162,24 @@ func Test_hclConfigAnalyzer_Required(t *testing.T) {
 			filePath: "deployment.json",
 			want:     false,
 		},
+		{
+			name:        "file pattern",
+			filePattern: regexp.MustCompile(`foo*`),
+			filePath:    "foo_file",
+			want:        true,
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			a := hclConfigAnalyzer{
-				hcl1Parser: &hcl1.Parser{},
-				hcl2Parser: &hcl2.Parser{},
-			}
-
-			got := a.Required(tt.filePath, nil)
+			s := hcl.NewConfigAnalyzer(tt.filePattern)
+			got := s.Required(tt.filePath, nil)
 			assert.Equal(t, tt.want, got)
 		})
 	}
 }

 func Test_hclConfigAnalyzer_Type(t *testing.T) {
+	s := hcl.NewConfigAnalyzer(nil)
 	want := analyzer.TypeHCL
-	a := hclConfigAnalyzer{
-		hcl1Parser: &hcl1.Parser{},
-		hcl2Parser: &hcl2.Parser{},
-	}
-
-	got := a.Type()
+	got := s.Type()
 	assert.Equal(t, want, got)
 }
analyzer/config/hcl/testdata/deployment.hcl2: 2 changes (vendored)
@@ -4,5 +4,5 @@ metadata {
 	name = "hello-kubernetes"
 }
 spec {
-	replicas = 3
+	replicas = 4
 }
analyzer/config/json/json.go:

@@ -3,44 +3,53 @@ package json

 import (
 	"os"
 	"path/filepath"
+	"regexp"

 	"github.com/open-policy-agent/conftest/parser/json"
 	"golang.org/x/xerrors"

 	"github.com/aquasecurity/fanal/analyzer"
-	"github.com/aquasecurity/fanal/analyzer/config"
 	"github.com/aquasecurity/fanal/types"
 )

-func init() {
-	analyzer.RegisterAnalyzer(&jsonConfigAnalyzer{
-		parser: &json.Parser{},
-	})
-}
-
 const version = 1

 var requiredExts = []string{".json"}

-type jsonConfigAnalyzer struct {
+type ConfigAnalyzer struct {
 	parser      *json.Parser
+	filePattern *regexp.Regexp
 }

-func (a jsonConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
+func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
+	return ConfigAnalyzer{
+		parser:      &json.Parser{},
+		filePattern: filePattern,
+	}
+}
+
+func (a ConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
 	var parsed interface{}
 	if err := a.parser.Unmarshal(target.Content, &parsed); err != nil {
 		return nil, xerrors.Errorf("unable to parse JSON (%s): %w", target.FilePath, err)
 	}

 	return &analyzer.AnalysisResult{
-		Configs: []types.Config{{
-			Type:     config.JSON,
-			FilePath: target.FilePath,
-			Content:  parsed,
-		}},
+		Configs: []types.Config{
+			{
+				Type:     types.JSON,
+				FilePath: target.FilePath,
+				Content:  parsed,
+			},
+		},
 	}, nil
 }

-func (a jsonConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
+func (a ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
+	if a.filePattern != nil && a.filePattern.MatchString(filePath) {
+		return true
+	}
+
 	ext := filepath.Ext(filePath)
 	for _, required := range requiredExts {
 		if ext == required {
@@ -50,10 +59,10 @@ func (a jsonConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
 	return false
 }

-func (a jsonConfigAnalyzer) Type() analyzer.Type {
+func (ConfigAnalyzer) Type() analyzer.Type {
 	return analyzer.TypeJSON
 }

-func (a jsonConfigAnalyzer) Version() int {
+func (ConfigAnalyzer) Version() int {
 	return version
 }
analyzer/config/json/json_test.go:

@@ -1,32 +1,41 @@
-package json
+package json_test

 import (
 	"io/ioutil"
+	"regexp"
 	"testing"

-	"github.com/open-policy-agent/conftest/parser/json"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

 	"github.com/aquasecurity/fanal/analyzer"
-	"github.com/aquasecurity/fanal/analyzer/config"
+	"github.com/aquasecurity/fanal/analyzer/config/json"
 	"github.com/aquasecurity/fanal/types"
 )

 func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
+	type args struct {
+		namespaces  []string
+		policyPaths []string
+	}
 	tests := []struct {
 		name      string
+		args      args
 		inputFile string
 		want      *analyzer.AnalysisResult
 		wantErr   string
 	}{
 		{
 			name: "happy path",
+			args: args{
+				namespaces:  []string{"main"},
+				policyPaths: []string{"../testdata/kubernetes.rego"},
+			},
 			inputFile: "testdata/deployment.json",
 			want: &analyzer.AnalysisResult{
 				Configs: []types.Config{
 					{
-						Type:     config.JSON,
+						Type:     "json",
 						FilePath: "testdata/deployment.json",
 						Content: map[string]interface{}{
 							"apiVersion": "apps/v1",
@@ -43,22 +52,51 @@ func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
 				},
 			},
 		},
 		{
-			name:      "happy path: json array",
-			inputFile: "testdata/array.json",
+			name: "deny",
+			args: args{
+				namespaces:  []string{"main"},
+				policyPaths: []string{"../testdata/kubernetes.rego"},
+			},
+			inputFile: "testdata/deployment_deny.json",
 			want: &analyzer.AnalysisResult{
 				Configs: []types.Config{
 					{
-						Type:     config.JSON,
-						FilePath: "testdata/array.json",
-						Content: []interface{}{
-							map[string]interface{}{
+						Type:     "json",
+						FilePath: "testdata/deployment_deny.json",
+						Content: map[string]interface{}{
 							"apiVersion": "apps/v1",
 							"kind":       "Deployment",
 							"metadata": map[string]interface{}{
 								"name": "hello-kubernetes",
 							},
 							"spec": map[string]interface{}{
-								"replicas": float64(3),
+								"replicas": float64(4),
 							},
 						},
 					},
 				},
 			},
 		},
+		{
+			name: "json array",
+			args: args{
+				namespaces:  []string{"main"},
+				policyPaths: []string{"../testdata/kubernetes.rego"},
+			},
+			inputFile: "testdata/array.json",
+			want: &analyzer.AnalysisResult{
+				Configs: []types.Config{
+					{
+						Type:     "json",
+						FilePath: "testdata/array.json",
+						Content: []interface{}{map[string]interface{}{
+							"apiVersion": "apps/v1",
+							"kind":       "Deployment",
+							"metadata": map[string]interface{}{
+								"name": "hello-kubernetes",
+							},
+							"spec": map[string]interface{}{
+								"replicas": float64(4),
+							},
+						},
 							map[string]interface{}{
@@ -68,7 +106,7 @@ func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
 								"name": "hello-kubernetes",
 								},
 								"spec": map[string]interface{}{
-									"replicas": float64(3),
+									"replicas": float64(5),
 								},
 							},
 						},
@@ -78,20 +116,23 @@ func Test_jsonConfigAnalyzer_Analyze(t *testing.T) {
 		},
 		{
 			name: "broken JSON",
+			args: args{
+				namespaces:  []string{"main"},
+				policyPaths: []string{"../testdata/kubernetes.rego"},
+			},
 			inputFile: "testdata/broken.json",
 			wantErr:   "unmarshal json",
 		},
 	}

 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			b, err := ioutil.ReadFile(tt.inputFile)
 			require.NoError(t, err)

-			a := jsonConfigAnalyzer{
-				parser: &json.Parser{},
-			}
+			s := json.NewConfigAnalyzer(nil)

-			got, err := a.Analyze(analyzer.AnalysisTarget{
+			got, err := s.Analyze(analyzer.AnalysisTarget{
 				FilePath: tt.inputFile,
 				Content:  b,
 			})
@@ -110,6 +151,7 @@ func Test_jsonConfigAnalyzer_Required(t *testing.T) {
 func Test_jsonConfigAnalyzer_Required(t *testing.T) {
 	tests := []struct {
 		name        string
+		filePattern *regexp.Regexp
 		filePath    string
 		want        bool
 	}{
@@ -123,24 +165,27 @@ func Test_jsonConfigAnalyzer_Required(t *testing.T) {
 			filePath: "deployment.yaml",
 			want:     false,
 		},
+		{
+			name:        "file pattern",
+			filePattern: regexp.MustCompile(`foo*`),
+			filePath:    "foo_file",
+			want:        true,
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			a := jsonConfigAnalyzer{
-				parser: &json.Parser{},
-			}
+			s := json.NewConfigAnalyzer(tt.filePattern)

-			got := a.Required(tt.filePath, nil)
+			got := s.Required(tt.filePath, nil)
 			assert.Equal(t, tt.want, got)
 		})
 	}
 }

 func Test_jsonConfigAnalyzer_Type(t *testing.T) {
+	s := json.NewConfigAnalyzer(nil)
+
 	want := analyzer.TypeJSON
-	a := jsonConfigAnalyzer{
-		parser: &json.Parser{},
-	}
-	got := a.Type()
+	got := s.Type()
 	assert.Equal(t, want, got)
 }
analyzer/config/json/testdata/array.json: 4 changes (vendored)
@@ -6,7 +6,7 @@
 			"name": "hello-kubernetes"
 		},
 		"spec": {
-			"replicas": 3
+			"replicas": 4
 		}
 	},
 	{
@@ -16,7 +16,7 @@
 			"name": "hello-kubernetes"
 		},
 		"spec": {
-			"replicas": 3
+			"replicas": 5
 		}
 	}
 ]
analyzer/config/json/testdata/deployment_deny.json: 10 additions (new file, vendored)
@@ -0,0 +1,10 @@
+{
+  "apiVersion": "apps/v1",
+  "kind": "Deployment",
+  "metadata": {
+    "name": "hello-kubernetes"
+  },
+  "spec": {
+    "replicas": 4
+  }
+}
analyzer/config/testdata/docker_deny.rego: 21 additions (new file, vendored)
@@ -0,0 +1,21 @@
+package users.dockerfile.xyz_100
+
+__rego_metadata__ := {
+	"id": "XYZ-100",
+	"title": "Bad Dockerfile",
+	"version": "v1.0.0",
+	"severity": "HIGH",
+	"type": "Docker Security Check",
+}
+
+denylist = [
+	"foo"
+]
+
+deny[res] {
+	input[i].Cmd == "from"
+	val := input[i].Value
+	contains(val[i], denylist[_])
+
+	res = {"type": "Docker Security Check", "msg": sprintf("deny: image found %s", [val]), "severity": "HIGH", "id": "RULE-100"}
+}
analyzer/config/testdata/docker_multi.rego: 35 additions (new file, vendored)
@@ -0,0 +1,35 @@
+package main.dockerfile
+
+denylist = [
+	"foo"
+]
+
+deny[res] {
+	input[i].Cmd == "from"
+	val := input[i].Value
+	contains(val[i], denylist[_])
+
+	res = {
+		"type": "Docker Security Check",
+		"msg": sprintf("deny: image found %s", [val]),
+		"severity": "HIGH",
+		"id": "RULE-100"
+	}
+}
+
+warnlist = [
+	"echo"
+]
+
+warn[res] {
+	input[i].Cmd == "run"
+	val := input[i].Value
+	contains(val[_], warnlist[_])
+
+	res = {
+		"type": "Docker Security Check",
+		"msg": sprintf("warn: command %s contains banned: %s", [val, warnlist]),
+		"severity": "LOW",
+		"id": "RULE-10"
+	}
+}
analyzer/config/testdata/docker_non.rego: 20 additions (new file, vendored)
@@ -0,0 +1,20 @@
+package main.dockerfile
+
+__rego_metadata__ := {
+	"id": "XYZ-100",
+	"title": "Bad Dockerfile",
+	"version": "v1.0.0",
+	"severity": "HIGH",
+	"type": "Docker Security Check",
+}
+
+denylist = [
+]
+
+deny[msg] {
+	input[i].Cmd == "from"
+	val := input[i].Value
+	contains(val[i], denylist[_])
+
+	msg = sprintf("deny: image found %s", [val])
+}
analyzer/config/testdata/docker_violation.rego: 13 additions (new file, vendored)
@@ -0,0 +1,13 @@
+package main.dockerfile.id_100
+
+violationlist = [
+	"foo"
+]
+
+violation[{"msg": msg, "details": {}}] {
+	input[i].Cmd == "from"
+	val := input[i].Value
+	contains(val[i], violationlist[_])
+
+	msg = sprintf("violation: image found %s", [val])
+}
analyzer/config/testdata/docker_warn.rego: 19 additions (new file, vendored)
@@ -0,0 +1,19 @@
+package main.dockerfile.xyz_100
+
+__rego_metadata__ := {
+	"id": "XYZ-100",
+	"title": "Bad Dockerfile",
+	"version": "v1.0.0",
+}
+
+warnlist = [
+	"foo"
+]
+
+warn[msg] {
+	input[i].Cmd == "from"
+	val := input[i].Value
+	contains(val[i], warnlist[_])
+
+	msg = sprintf("warn: image found %s", [val])
+}
analyzer/config/testdata/kubernetes.rego: 15 additions (new file, vendored)
@@ -0,0 +1,15 @@
+package main.kubernetes.xyz_100
+
+__rego_metadata__ := {
+	"id": "XYZ-100",
+	"title": "Bad Kubernetes Replicas",
+	"version": "v1.0.0",
+	"severity": "HIGH",
+	"type": "Kubernetes Security Check",
+}
+
+deny[msg] {
+	rpl = input.spec.replicas
+	rpl > 3
+	msg = sprintf("too many replicas: %d", [rpl])
+}
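This policy denies any input whose spec.replicas exceeds 3, which is exactly what the *_deny.* fixtures in this commit trip (replicas: 4). A hedged sketch of evaluating it directly with OPA's Go API; the harness below is illustrative only, since the commit itself evaluates policies through conftest/fanal plumbing:

```go
package main

import (
	"context"
	"fmt"

	"github.com/open-policy-agent/opa/rego"
)

func main() {
	// Mirrors testdata/deployment_deny.json: replicas 4 > 3 triggers deny.
	input := map[string]interface{}{
		"spec": map[string]interface{}{"replicas": 4},
	}

	r := rego.New(
		rego.Query("data.main.kubernetes.xyz_100.deny"),
		rego.Load([]string{"analyzer/config/testdata/kubernetes.rego"}, nil),
		rego.Input(input),
	)

	rs, err := r.Eval(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println(rs) // deny set contains "too many replicas: 4"
}
```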
analyzer/config/toml/testdata/deployment_deny.toml: 8 additions (new file, vendored)
@@ -0,0 +1,8 @@
+apiVersion = "apps/v1"
+kind = "Deployment"
+
+[metadata]
+name = "hello-kubernetes"
+
+[spec]
+replicas = 4
analyzer/config/toml/toml.go:

@@ -3,44 +3,53 @@ package toml

 import (
 	"os"
 	"path/filepath"
+	"regexp"

 	"github.com/open-policy-agent/conftest/parser/toml"
 	"golang.org/x/xerrors"

 	"github.com/aquasecurity/fanal/analyzer"
-	"github.com/aquasecurity/fanal/analyzer/config"
 	"github.com/aquasecurity/fanal/types"
 )

-func init() {
-	analyzer.RegisterAnalyzer(&tomlConfigAnalyzer{
-		parser: &toml.Parser{},
-	})
-}
-
 const version = 1

 var requiredExts = []string{".toml"}

-type tomlConfigAnalyzer struct {
+type ConfigAnalyzer struct {
 	parser      *toml.Parser
+	filePattern *regexp.Regexp
 }

-func (a tomlConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
+func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
+	return ConfigAnalyzer{
+		parser:      &toml.Parser{},
+		filePattern: filePattern,
+	}
+}
+
+func (a ConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
 	var parsed interface{}
 	if err := a.parser.Unmarshal(target.Content, &parsed); err != nil {
 		return nil, xerrors.Errorf("unable to parse TOML (%s): %w", target.FilePath, err)
 	}

 	return &analyzer.AnalysisResult{
-		Configs: []types.Config{{
-			Type:     config.TOML,
-			FilePath: target.FilePath,
-			Content:  parsed,
-		}},
+		Configs: []types.Config{
+			{
+				Type:     types.TOML,
+				FilePath: target.FilePath,
+				Content:  parsed,
+			},
+		},
 	}, nil
 }

-func (a tomlConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
+func (a ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
+	if a.filePattern != nil && a.filePattern.MatchString(filePath) {
+		return true
+	}
+
 	ext := filepath.Ext(filePath)
 	for _, required := range requiredExts {
 		if ext == required {
@@ -50,10 +59,10 @@ func (a tomlConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
 	return false
 }

-func (a tomlConfigAnalyzer) Type() analyzer.Type {
+func (ConfigAnalyzer) Type() analyzer.Type {
 	return analyzer.TypeTOML
 }

-func (a tomlConfigAnalyzer) Version() int {
+func (ConfigAnalyzer) Version() int {
 	return version
 }
analyzer/config/toml/toml_test.go:

@@ -1,32 +1,41 @@
-package toml
+package toml_test

 import (
 	"io/ioutil"
+	"regexp"
 	"testing"

-	"github.com/open-policy-agent/conftest/parser/toml"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"

 	"github.com/aquasecurity/fanal/analyzer"
-	"github.com/aquasecurity/fanal/analyzer/config"
+	"github.com/aquasecurity/fanal/analyzer/config/toml"
 	"github.com/aquasecurity/fanal/types"
 )

 func Test_tomlConfigAnalyzer_Analyze(t *testing.T) {
+	type args struct {
+		namespaces  []string
+		policyPaths []string
+	}
 	tests := []struct {
 		name      string
+		args      args
 		inputFile string
 		want      *analyzer.AnalysisResult
 		wantErr   string
 	}{
 		{
 			name: "happy path",
+			args: args{
+				namespaces:  []string{"main"},
+				policyPaths: []string{"../testdata/kubernetes.rego"},
+			},
 			inputFile: "testdata/deployment.toml",
 			want: &analyzer.AnalysisResult{
 				Configs: []types.Config{
 					{
-						Type:     config.TOML,
+						Type:     "toml",
 						FilePath: "testdata/deployment.toml",
 						Content: map[string]interface{}{
 							"apiVersion": "apps/v1",
@@ -42,8 +51,38 @@ func Test_tomlConfigAnalyzer_Analyze(t *testing.T) {
 				},
 			},
 		},
+		{
+			name: "deny",
+			args: args{
+				namespaces:  []string{"main"},
+				policyPaths: []string{"../testdata/kubernetes.rego"},
+			},
+			inputFile: "testdata/deployment_deny.toml",
+			want: &analyzer.AnalysisResult{
+				Configs: []types.Config{
+					{
+						Type:     "toml",
+						FilePath: "testdata/deployment_deny.toml",
+						Content: map[string]interface{}{
+							"apiVersion": "apps/v1",
+							"kind":       "Deployment",
+							"metadata": map[string]interface{}{
+								"name": "hello-kubernetes",
+							},
+							"spec": map[string]interface{}{
+								"replicas": int64(4),
+							},
+						},
+					},
+				},
+			},
+		},
 		{
 			name: "broken TOML",
+			args: args{
+				namespaces:  []string{"main"},
+				policyPaths: []string{"../testdata/kubernetes.rego"},
+			},
 			inputFile: "testdata/broken.toml",
 			wantErr:   "unmarshal toml",
 		},
@@ -53,9 +92,7 @@ func Test_tomlConfigAnalyzer_Analyze(t *testing.T) {
 			b, err := ioutil.ReadFile(tt.inputFile)
 			require.NoError(t, err)

-			a := tomlConfigAnalyzer{
-				parser: &toml.Parser{},
-			}
+			a := toml.NewConfigAnalyzer(nil)

 			got, err := a.Analyze(analyzer.AnalysisTarget{
 				FilePath: tt.inputFile,
@@ -76,6 +113,7 @@ func Test_tomlConfigAnalyzer_Required(t *testing.T) {
 func Test_tomlConfigAnalyzer_Required(t *testing.T) {
 	tests := []struct {
 		name        string
+		filePattern *regexp.Regexp
 		filePath    string
 		want        bool
 	}{
@@ -89,15 +127,27 @@ func Test_tomlConfigAnalyzer_Required(t *testing.T) {
 			filePath: "deployment.json",
 			want:     false,
 		},
+		{
+			name:        "file pattern",
+			filePattern: regexp.MustCompile(`foo*`),
+			filePath:    "foo_file",
+			want:        true,
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			a := tomlConfigAnalyzer{
-				parser: &toml.Parser{},
-			}
+			s := toml.NewConfigAnalyzer(tt.filePattern)

-			got := a.Required(tt.filePath, nil)
+			got := s.Required(tt.filePath, nil)
 			assert.Equal(t, tt.want, got)
 		})
 	}
 }

 func Test_tomlConfigAnalyzer_Type(t *testing.T) {
+	s := toml.NewConfigAnalyzer(nil)
+
 	want := analyzer.TypeTOML
-	got := a.Type()
+	got := s.Type()
 	assert.Equal(t, want, got)
 }
analyzer/config/yaml/testdata/deny.rego: 13 additions (new file, vendored)
@@ -0,0 +1,13 @@
+package main.yaml.xyz_123
+
+__rego_metadata__ := {
+	"id": "XYZ-123",
+	"title": "Bad YAML",
+	"version": "v1.0.0",
+	"severity": "CRITICAL",
+	"type": "YAML Security Check",
+}
+
+deny[msg]{
+	msg := "bad"
+}
analyzer/config/yaml/testdata/deployment_deny.yaml: 6 additions (new file, vendored)
@@ -0,0 +1,6 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: hello-kubernetes
+spec:
+  replicas: 4
analyzer/config/yaml/testdata/multiple.yaml: 2 changes (vendored)
@@ -3,7 +3,7 @@ kind: Deployment
 metadata:
   name: hello-kubernetes
 spec:
-  replicas: 3
+  replicas: 4

 ---
@@ -3,44 +3,62 @@ package yaml
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
|
||||
"github.com/open-policy-agent/conftest/parser/yaml"
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/fanal/analyzer"
|
||||
"github.com/aquasecurity/fanal/analyzer/config"
|
||||
"github.com/aquasecurity/fanal/config/parser/yaml"
|
||||
"github.com/aquasecurity/fanal/types"
|
||||
)
|
||||
|
||||
func init() {
|
||||
analyzer.RegisterAnalyzer(&yamlConfigAnalyzer{
|
||||
parser: &yaml.Parser{},
|
||||
})
|
||||
}
|
||||
|
||||
const version = 1
|
||||
|
||||
var requiredExts = []string{".yaml", ".yml"}
|
||||
|
||||
type yamlConfigAnalyzer struct {
|
||||
type ConfigAnalyzer struct {
|
||||
parser *yaml.Parser
|
||||
filePattern *regexp.Regexp
|
||||
}
|
||||
|
||||
func (a yamlConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
|
||||
var parsed interface{}
|
||||
if err := a.parser.Unmarshal(target.Content, &parsed); err != nil {
|
||||
return nil, xerrors.Errorf("unable to parse YAML (%s): %w", target.FilePath, err)
|
||||
func NewConfigAnalyzer(filePattern *regexp.Regexp) ConfigAnalyzer {
|
||||
return ConfigAnalyzer{
|
||||
parser: &yaml.Parser{},
|
||||
filePattern: filePattern,
|
||||
}
|
||||
return &analyzer.AnalysisResult{
|
||||
Configs: []types.Config{{
|
||||
Type: config.YAML,
|
||||
}
|
||||
|
||||
func (a ConfigAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
|
||||
// YAML might have sub documents separated by "---"
|
||||
//
|
||||
// If the current configuration contains multiple configurations, evaluate each policy
|
||||
// independent from one another and aggregate the results under the same file name.
|
||||
docs := a.parser.SeparateSubDocuments(target.Content)
|
||||
|
||||
var configs []types.Config
|
||||
for _, doc := range docs {
|
||||
parsed, err := a.parser.Parse(doc)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("unable to parse YAML (%a): %w", target.FilePath, err)
|
||||
}
|
||||
|
||||
configs = append(configs, types.Config{
|
||||
Type: types.YAML,
|
||||
FilePath: target.FilePath,
|
||||
Content: parsed,
|
||||
}},
|
||||
})
|
||||
}
|
||||
|
||||
return &analyzer.AnalysisResult{
|
||||
Configs: configs,
|
||||
}, nil
|
||||
}
|
||||
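The comment in Analyze is the substance of the change: a single YAML file may hold several "---"-separated documents, and each one becomes its own types.Config entry under the same file path, so policies are evaluated per document. A standalone sketch of that behaviour, using gopkg.in/yaml.v3 purely for illustration; the real code goes through fanal's own parser package (SeparateSubDocuments/Parse):

package main

import (
	"bytes"
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	content := []byte("kind: Deployment\n---\nkind: Service\n")

	// Decode each "---"-separated sub-document independently; one file can
	// therefore yield several config entries under the same file path.
	dec := yaml.NewDecoder(bytes.NewReader(content))
	for {
		var doc interface{}
		if err := dec.Decode(&doc); err != nil {
			break // io.EOF ends the document stream
		}
		fmt.Printf("parsed document: %v\n", doc)
	}
}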
func (a yamlConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
func (a ConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	if a.filePattern != nil && a.filePattern.MatchString(filePath) {
		return true
	}

	ext := filepath.Ext(filePath)
	for _, required := range requiredExts {
		if ext == required {
@@ -50,10 +68,10 @@ func (a yamlConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	return false
}

func (a yamlConfigAnalyzer) Type() analyzer.Type {
func (ConfigAnalyzer) Type() analyzer.Type {
	return analyzer.TypeYaml
}

func (a yamlConfigAnalyzer) Version() int {
func (ConfigAnalyzer) Version() int {
	return version
}
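Required gives a user-supplied pattern precedence: a match returns true before the extension allow-list is consulted, which is what lets files without a .yaml/.yml suffix be scanned. A small sketch of that precedence, assuming only the constructor and method shown above:

package main

import (
	"fmt"
	"regexp"

	"github.com/aquasecurity/fanal/analyzer/config/yaml"
)

func main() {
	a := yaml.NewConfigAnalyzer(regexp.MustCompile(`^config/`))

	fmt.Println(a.Required("config/app", nil))  // true: pattern match, no extension needed
	fmt.Println(a.Required("deploy.yaml", nil)) // true: ".yaml" is in requiredExts
	fmt.Println(a.Required("deploy.json", nil)) // false: neither pattern nor extension matches
}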
@@ -1,32 +1,41 @@
package yaml
package yaml_test

import (
	"io/ioutil"
	"regexp"
	"testing"

	"github.com/open-policy-agent/conftest/parser/yaml"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/config"
	"github.com/aquasecurity/fanal/analyzer/config/yaml"
	"github.com/aquasecurity/fanal/types"
)

func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
	type args struct {
		namespaces  []string
		policyPaths []string
	}
	tests := []struct {
		name      string
		args      args
		inputFile string
		want      *analyzer.AnalysisResult
		wantErr   string
	}{
		{
			name: "happy path",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/deployment.yaml",
			want: &analyzer.AnalysisResult{
				Configs: []types.Config{
					{
						Type:     config.YAML,
						Type:     "yaml",
						FilePath: "testdata/deployment.yaml",
						Content: map[string]interface{}{
							"apiVersion": "apps/v1",
@@ -35,7 +44,35 @@ func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
								"name": "hello-kubernetes",
							},
							"spec": map[string]interface{}{
								"replicas": float64(3),
								"replicas": 3,
							},
						},
					},
				},
			},
		},
		{
			name: "deny",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/deployment_deny.yaml",
			want: &analyzer.AnalysisResult{
				OS:           (*types.OS)(nil),
				PackageInfos: []types.PackageInfo(nil),
				Applications: []types.Application(nil),
				Configs: []types.Config{
					{
						Type:     "yaml",
						FilePath: "testdata/deployment_deny.yaml",
						Content: map[string]interface{}{
							"apiVersion": "apps/v1",
							"kind":       "Deployment",
							"metadata": map[string]interface{}{
								"name": "hello-kubernetes",
							},
							"spec": map[string]interface{}{
								"replicas": 4,
							},
						},
					},
@@ -44,29 +81,36 @@ func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
		},
		{
			name: "happy path using anchors",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"testdata/deny.rego"},
			},
			inputFile: "testdata/anchor.yaml",
			want: &analyzer.AnalysisResult{
				OS:           (*types.OS)(nil),
				PackageInfos: []types.PackageInfo(nil),
				Applications: []types.Application(nil),
				Configs: []types.Config{
					{
						Type:     config.YAML,
						Type:     "yaml",
						FilePath: "testdata/anchor.yaml",
						Content: map[string]interface{}{
							"default": map[string]interface{}{
								"line": "single line",
							},
							"john": map[string]interface{}{
								"john_name": "john",
							},
							"fred": map[string]interface{}{
								"fred_name": "fred",
							},
							"main": map[string]interface{}{
								"line": "single line",
								"name": map[string]interface{}{
									"john": map[string]interface{}{
										"john_name": "john",
								"fred_name": "fred",
							},
							"main": map[interface{}]interface{}{
								"comment": "multi\nline\n",
								"line":    "single line",
								"name": map[interface{}]interface{}{
									"fred_name": "fred",
									"john_name": "john",
								},
							},
						},
					},
@@ -74,37 +118,45 @@ func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
			},
		},
		{
			name: "happy path using multiple yaml",
			name: "multiple yaml",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/multiple.yaml",
			want: &analyzer.AnalysisResult{
				OS:           (*types.OS)(nil),
				PackageInfos: []types.PackageInfo(nil),
				Applications: []types.Application(nil),
				Configs: []types.Config{
					{
						Type:     config.YAML,
						Type:     "yaml",
						FilePath: "testdata/multiple.yaml",
						Content: []interface{}{
							map[string]interface{}{
						Content: map[string]interface{}{
							"apiVersion": "apps/v1",
							"kind":       "Deployment",
							"metadata": map[string]interface{}{
								"name": "hello-kubernetes",
							},
							"spec": map[string]interface{}{
								"replicas": float64(3),
								"replicas": 4,
							},
						},
							map[string]interface{}{
					},
					{
						Type:     "yaml",
						FilePath: "testdata/multiple.yaml",
						Content: map[string]interface{}{
							"apiVersion": "v1",
							"kind":       "Service",
							"metadata": map[string]interface{}{
								"name": "hello-kubernetes",
							},
							"spec": map[string]interface{}{
								"ports": []interface{}{
									map[string]interface{}{
								"ports": []interface{}{map[string]interface{}{
									"port":     80,
									"protocol": "TCP",
									"port":       float64(80),
									"targetPort": float64(8080),
								},
									"targetPort": 8080,
								},
							},
						},
@@ -115,11 +167,19 @@ func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
			},
		},
		{
			name: "broken YAML",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/broken.yaml",
			wantErr:   "unmarshal yaml",
		},
		{
			name: "invalid circular references yaml",
			args: args{
				namespaces:  []string{"main"},
				policyPaths: []string{"../testdata/kubernetes.rego"},
			},
			inputFile: "testdata/circular_references.yaml",
			wantErr:   "yaml: anchor 'circular' value contains itself",
		},
@@ -129,9 +189,7 @@ func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
			b, err := ioutil.ReadFile(tt.inputFile)
			require.NoError(t, err)

			a := yamlConfigAnalyzer{
				parser: &yaml.Parser{},
			}
			a := yaml.NewConfigAnalyzer(nil)

			got, err := a.Analyze(analyzer.AnalysisTarget{
				FilePath: tt.inputFile,
@@ -152,6 +210,7 @@ func Test_yamlConfigAnalyzer_Analyze(t *testing.T) {
func Test_yamlConfigAnalyzer_Required(t *testing.T) {
	tests := []struct {
		name        string
		filePattern *regexp.Regexp
		filePath    string
		want        bool
	}{
@@ -170,15 +229,27 @@ func Test_yamlConfigAnalyzer_Required(t *testing.T) {
			filePath: "deployment.json",
			want:     false,
		},
		{
			name:        "file pattern",
			filePattern: regexp.MustCompile(`foo*`),
			filePath:    "foo_file",
			want:        true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			a := yamlConfigAnalyzer{
				parser: &yaml.Parser{},
			}
			s := yaml.NewConfigAnalyzer(tt.filePattern)

			got := a.Required(tt.filePath, nil)
			got := s.Required(tt.filePath, nil)
			assert.Equal(t, tt.want, got)
		})
	}
}

func Test_yamlConfigAnalyzer_Type(t *testing.T) {
	s := yaml.NewConfigAnalyzer(nil)

	want := analyzer.TypeYaml
	got := s.Type()
	assert.Equal(t, want, got)
}
@@ -1,46 +1,45 @@
package analyzer

type Type int
type Type string

// NOTE: Do not change the order of "Type" unnecessarily, as it will affect the cache.
const (
	// OS
	TypeAlpine Type = iota + 1
	TypeAmazon
	TypeDebian
	TypePhoton
	TypeCentOS
	TypeFedora
	TypeOracle
	TypeRedHatBase
	TypeSUSE
	TypeUbuntu
	TypeAlpine     Type = "alpine"
	TypeAmazon     Type = "amazon"
	TypeDebian     Type = "debian"
	TypePhoton     Type = "photon"
	TypeCentOS     Type = "centos"
	TypeFedora     Type = "fedora"
	TypeOracle     Type = "oracle"
	TypeRedHatBase Type = "redhat"
	TypeSUSE       Type = "suse"
	TypeUbuntu     Type = "ubuntu"

	// OS Package
	TypeApk
	TypeDpkg
	TypeRpm
	TypeApk  Type = "apk"
	TypeDpkg Type = "dpkg"
	TypeRpm  Type = "rpm"

	// Programming Language Package
	TypeBundler
	TypeCargo
	TypeComposer
	TypeJar
	TypeNpm
	TypeNuget
	TypePipenv
	TypePoetry
	TypeYarn
	TypeGoBinary
	TypeGoMod
	TypeBundler  Type = "bundler"
	TypeCargo    Type = "cargo"
	TypeComposer Type = "composer"
	TypeJar      Type = "jar"
	TypeNpm      Type = "npm"
	TypeNuget    Type = "nuget"
	TypePipenv   Type = "pipenv"
	TypePoetry   Type = "poetry"
	TypeYarn     Type = "yarn"
	TypeGoBinary Type = "gobinary"
	TypeGoMod    Type = "gomod"

	// Image Config
	TypeApkCommand
	TypeApkCommand Type = "apk-command"

	// Structured Config
	TypeYaml
	TypeTOML
	TypeJSON
	TypeDockerfile
	TypeHCL
	TypeYaml       Type = "yaml"
	TypeTOML       Type = "toml"
	TypeJSON       Type = "json"
	TypeDockerfile Type = "dockerfile"
	TypeHCL        Type = "hcl"
)
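The deleted NOTE explains why the iota enumeration had to go: the numeric values leaked into cache keys, so inserting a constant mid-list would silently shift every later value and corrupt cache lookups. String values are self-describing and independent of declaration order. A toy illustration of the hazard (not fanal code):

package main

import "fmt"

type OldType int

const (
	OldAlpine OldType = iota + 1 // 1
	OldDebian                    // 2; silently becomes 3 if a constant is inserted above it
)

type NewType string

const (
	NewAlpine NewType = "alpine" // stable regardless of declaration order
	NewDebian NewType = "debian"
)

func main() {
	fmt.Println(OldDebian, NewDebian) // 2 debian
}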
@@ -29,14 +29,14 @@ func TestAnalyze(t *testing.T) {
		{
			name: "happy path",
			args: args{
				analyzerType: library.GoBinary,
				analyzerType: types.GoBinary,
				filePath:     "app/myweb",
				content:      []byte("happy"),
			},
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     library.GoBinary,
						Type:     types.GoBinary,
						FilePath: "app/myweb",
						Libraries: []types.LibraryInfo{
							{
@@ -53,7 +53,7 @@ func TestAnalyze(t *testing.T) {
		{
			name: "empty",
			args: args{
				analyzerType: library.GoBinary,
				analyzerType: types.GoBinary,
				filePath:     "app/myweb",
				content:      []byte(""),
			},
@@ -62,7 +62,7 @@ func TestAnalyze(t *testing.T) {
		{
			name: "sad path",
			args: args{
				analyzerType: library.Jar,
				analyzerType: types.Jar,
				filePath:     "app/myweb",
				content:      []byte("sad"),
			},
@@ -4,12 +4,13 @@ import (
	"os"
	"path/filepath"

	"github.com/aquasecurity/fanal/analyzer/library"
	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"github.com/aquasecurity/go-dep-parser/pkg/bundler"
	"golang.org/x/xerrors"
)

func init() {
@@ -25,7 +26,7 @@ var (
type bundlerLibraryAnalyzer struct{}

func (a bundlerLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.Bundler, target.FilePath, target.Content, bundler.Parse)
	res, err := library.Analyze(types.Bundler, target.FilePath, target.Content, bundler.Parse)
	if err != nil {
		return nil, xerrors.Errorf("unable to parse Gemfile.lock: %w", err)
	}
@@ -4,11 +4,13 @@ import (
	"os"
	"path/filepath"

	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"github.com/aquasecurity/go-dep-parser/pkg/cargo"
	"golang.org/x/xerrors"
)

func init() {
@@ -22,7 +24,7 @@ var requiredFiles = []string{"Cargo.lock"}
type cargoLibraryAnalyzer struct{}

func (a cargoLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.Cargo, target.FilePath, target.Content, cargo.Parse)
	res, err := library.Analyze(types.Cargo, target.FilePath, target.Content, cargo.Parse)
	if err != nil {
		return nil, xerrors.Errorf("error with Cargo.lock: %w", err)
	}
@@ -4,13 +4,13 @@ import (
	"os"
	"path/filepath"

	"github.com/aquasecurity/go-dep-parser/pkg/composer"

	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"github.com/aquasecurity/go-dep-parser/pkg/composer"
)

func init() {
@@ -24,7 +24,7 @@ var requiredFiles = []string{"composer.lock"}
type composerLibraryAnalyzer struct{}

func (a composerLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.Composer, target.FilePath, target.Content, composer.Parse)
	res, err := library.Analyze(types.Composer, target.FilePath, target.Content, composer.Parse)
	if err != nil {
		return nil, xerrors.Errorf("error with composer.lock: %w", err)
	}
@@ -1,19 +0,0 @@
package library

const (
	Bundler  = "bundler"
	Cargo    = "cargo"
	Composer = "composer"
	Npm      = "npm"
	NuGet    = "nuget"
	Pipenv   = "pipenv"
	Poetry   = "poetry"
	Yarn     = "yarn"
	Jar      = "jar"
	GoBinary = "gobinary"
	GoMod    = "gomod"
)

var (
	IgnoreDirs = []string{"node_modules", "vendor"}
)
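With this file gone, the package-local constants (library.Bundler, library.NuGet, and so on) are replaced by the shared types constants, so analyzers and result types use one vocabulary. A sketch of what a lockfile analyzer looks like after the refactor, using only the signatures that appear in this diff; the type name and the Required heuristic are illustrative, and the Type/Version methods required by the interface are omitted for brevity:

package custom

import (
	"os"
	"path/filepath"

	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/go-dep-parser/pkg/npm"
)

type customNpmAnalyzer struct{}

func (a customNpmAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	// The analyzer type is the shared types.Npm constant, not a
	// package-local string in analyzer/library.
	res, err := library.Analyze(types.Npm, target.FilePath, target.Content, npm.Parse)
	if err != nil {
		return nil, xerrors.Errorf("unable to parse package-lock.json: %w", err)
	}
	return res, nil
}

func (a customNpmAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	return filepath.Base(filePath) == "package-lock.json"
}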
@@ -7,6 +7,7 @@ import (

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/go-dep-parser/pkg/gobinary"
)

@@ -19,7 +20,7 @@ const version = 1
type gobinaryLibraryAnalyzer struct{}

func (a gobinaryLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.GoBinary, target.FilePath, target.Content, gobinary.Parse)
	res, err := library.Analyze(types.GoBinary, target.FilePath, target.Content, gobinary.Parse)
	if err != nil {
		return nil, xerrors.Errorf("unable to parse %s: %w", target.FilePath, err)
	}
@@ -9,7 +9,6 @@ import (
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	godeptypes "github.com/aquasecurity/go-dep-parser/pkg/types"
)
@@ -27,7 +26,7 @@ func Test_gobinaryLibraryAnalyzer_Analyze(t *testing.T) {
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     library.GoBinary,
						Type:     types.GoBinary,
						FilePath: "testdata/executable_gobinary",
						Libraries: []types.LibraryInfo{
							{Library: godeptypes.Library{Name: "github.com/aquasecurity/go-pep440-version", Version: "v0.0.0-20210121094942-22b2f8951d46"}},
@@ -8,6 +8,7 @@ import (

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"github.com/aquasecurity/go-dep-parser/pkg/gomod"
)
@@ -23,7 +24,7 @@ var requiredFiles = []string{"go.sum"}
type gomodAnalyzer struct{}

func (a gomodAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.GoMod, target.FilePath, target.Content, gomod.Parse)
	res, err := library.Analyze(types.GoMod, target.FilePath, target.Content, gomod.Parse)
	if err != nil {
		return nil, xerrors.Errorf("failed to analyze %s: %w", target.FilePath, err)
	}
@@ -9,7 +9,6 @@ import (
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	godeptypes "github.com/aquasecurity/go-dep-parser/pkg/types"
)
@@ -27,7 +26,7 @@ func Test_gomodAnalyzer_Analyze(t *testing.T) {
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     library.GoMod,
						Type:     types.GoMod,
						FilePath: "testdata/gomod_many.sum",
						Libraries: []types.LibraryInfo{
							{Library: godeptypes.Library{Name: "github.com/BurntSushi/toml", Version: "0.3.1"}},
@@ -10,6 +10,7 @@ import (

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/go-dep-parser/pkg/jar"
)

@@ -31,7 +32,7 @@ func (a javaLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.
		return nil, xerrors.Errorf("jar/war/ear parse error: %w", err)
	}

	return library.ToAnalysisResult(library.Jar, target.FilePath, libs), nil
	return library.ToAnalysisResult(types.Jar, target.FilePath, libs), nil
}

func (a javaLibraryAnalyzer) Required(filePath string, _ os.FileInfo) bool {
@@ -8,7 +8,6 @@ import (
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	godeptypes "github.com/aquasecurity/go-dep-parser/pkg/types"
)
@@ -26,7 +25,7 @@ func Test_javaLibraryAnalyzer_Analyze(t *testing.T) {
			want: &analyzer.AnalysisResult{
				Applications: []types.Application{
					{
						Type:     library.Jar,
						Type:     types.Jar,
						FilePath: "testdata/test.war",
						Libraries: []types.LibraryInfo{
							{Library: godeptypes.Library{Name: "org.glassfish:javax.el", Version: "3.0.0"}},
@@ -4,13 +4,13 @@ import (
	"os"
	"path/filepath"

	"github.com/aquasecurity/go-dep-parser/pkg/npm"

	"github.com/aquasecurity/fanal/analyzer/library"
	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"golang.org/x/xerrors"
	"github.com/aquasecurity/go-dep-parser/pkg/npm"
)

func init() {
@@ -24,7 +24,7 @@ var requiredFiles = []string{"package-lock.json"}
type npmLibraryAnalyzer struct{}

func (a npmLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.Npm, target.FilePath, target.Content, npm.Parse)
	res, err := library.Analyze(types.Npm, target.FilePath, target.Content, npm.Parse)
	if err != nil {
		return nil, xerrors.Errorf("unable to parse package-lock.json: %w", err)
	}
@@ -8,6 +8,7 @@ import (

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"github.com/aquasecurity/go-dep-parser/pkg/nuget"
)
@@ -23,7 +24,7 @@ var requiredFiles = []string{"packages.lock.json"}
type nugetLibraryAnalyzer struct{}

func (a nugetLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.NuGet, target.FilePath, target.Content, nuget.Parse)
	res, err := library.Analyze(types.NuGet, target.FilePath, target.Content, nuget.Parse)
	if err != nil {
		return nil, xerrors.Errorf("unable to parse packages.lock.json: %w", err)
	}
@@ -4,13 +4,13 @@ import (
	"os"
	"path/filepath"

	"github.com/aquasecurity/go-dep-parser/pkg/pipenv"

	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"github.com/aquasecurity/go-dep-parser/pkg/pipenv"
)

func init() {
@@ -24,7 +24,7 @@ var requiredFiles = []string{"Pipfile.lock"}
type pipenvLibraryAnalyzer struct{}

func (a pipenvLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.Pipenv, target.FilePath, target.Content, pipenv.Parse)
	res, err := library.Analyze(types.Pipenv, target.FilePath, target.Content, pipenv.Parse)
	if err != nil {
		return nil, xerrors.Errorf("unable to parse Pipfile.lock: %w", err)
	}
@@ -4,12 +4,13 @@ import (
	"os"
	"path/filepath"

	"github.com/aquasecurity/fanal/analyzer/library"
	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"github.com/aquasecurity/go-dep-parser/pkg/poetry"
	"golang.org/x/xerrors"
)

func init() {
@@ -23,7 +24,7 @@ var requiredFiles = []string{"poetry.lock"}
type poetryLibraryAnalyzer struct{}

func (a poetryLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.Poetry, target.FilePath, target.Content, poetry.Parse)
	res, err := library.Analyze(types.Poetry, target.FilePath, target.Content, poetry.Parse)
	if err != nil {
		return nil, xerrors.Errorf("unable to parse poetry.lock: %w", err)
	}
@@ -4,14 +4,13 @@ import (
	"os"
	"path/filepath"

	"github.com/aquasecurity/go-dep-parser/pkg/yarn"

	"github.com/aquasecurity/fanal/utils"

	"github.com/aquasecurity/fanal/analyzer/library"
	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"golang.org/x/xerrors"
	"github.com/aquasecurity/fanal/analyzer/library"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/utils"
	"github.com/aquasecurity/go-dep-parser/pkg/yarn"
)

func init() {
@@ -25,7 +24,7 @@ var requiredFiles = []string{"yarn.lock"}
type yarnLibraryAnalyzer struct{}

func (a yarnLibraryAnalyzer) Analyze(target analyzer.AnalysisTarget) (*analyzer.AnalysisResult, error) {
	res, err := library.Analyze(library.Yarn, target.FilePath, target.Content, yarn.Parse)
	res, err := library.Analyze(types.Yarn, target.FilePath, target.Content, yarn.Parse)
	if err != nil {
		return nil, xerrors.Errorf("unable to parse yarn.lock: %w", err)
	}
0
analyzer/testdata/error
vendored
Normal file
@@ -88,7 +88,7 @@ func ApplyLayers(layers []types.BlobInfo) types.ArtifactDetail {
		for _, app := range layer.Applications {
			nestedMap.SetByString(app.FilePath, sep, app)
		}
		for _, config := range layer.Configs {
		for _, config := range layer.Misconfigurations {
			config.Layer = types.Layer{
				Digest: layer.Digest,
				DiffID: layer.DiffID,
@@ -103,8 +103,8 @@ func ApplyLayers(layers []types.BlobInfo) types.ArtifactDetail {
			mergedLayer.Packages = append(mergedLayer.Packages, v.Packages...)
		case types.Application:
			mergedLayer.Applications = append(mergedLayer.Applications, v)
		case types.Config:
			mergedLayer.Configs = append(mergedLayer.Configs, v)
		case types.Misconfiguration:
			mergedLayer.Misconfigurations = append(mergedLayer.Misconfigurations, v)
		}
		return nil
	})
@@ -13,9 +13,12 @@ import (
	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	"github.com/aquasecurity/fanal/analyzer/config"
	"github.com/aquasecurity/fanal/artifact"
	"github.com/aquasecurity/fanal/cache"
	"github.com/aquasecurity/fanal/config/scanner"
	"github.com/aquasecurity/fanal/image"
	"github.com/aquasecurity/fanal/log"
	"github.com/aquasecurity/fanal/types"
	"github.com/aquasecurity/fanal/walker"
)
@@ -28,14 +31,28 @@ type Artifact struct {
	image               image.Image
	cache               cache.ArtifactCache
	analyzer            analyzer.Analyzer
	scanner             scanner.Scanner
	configScannerOption config.ScannerOption
}

func NewArtifact(img image.Image, c cache.ArtifactCache, disabled []analyzer.Type, opt config.ScannerOption) (artifact.Artifact, error) {
	// Register config analyzers
	if err := config.RegisterConfigAnalyzers(opt.FilePatterns); err != nil {
		return nil, xerrors.Errorf("config scanner error: %w", err)
	}

	s, err := scanner.New(opt.Namespaces, opt.PolicyPaths, opt.DataPaths)
	if err != nil {
		return nil, xerrors.Errorf("scanner error: %w", err)
	}

func NewArtifact(img image.Image, c cache.ArtifactCache, disabled []analyzer.Type) artifact.Artifact {
	return Artifact{
		image:    img,
		cache:    c,
		analyzer: analyzer.NewAnalyzer(disabled),
	}
		scanner:             s,
		configScannerOption: opt,
	}, nil
}
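NewArtifact now takes a config.ScannerOption and can fail, since it registers the config analyzers and builds the policy scanner up front instead of at scan time. A hedged sketch of a call site adapting to the new signature, assuming the image2 import alias used in the tests below; the option values are placeholders, not defaults from this repo:

func newImageArtifact(img image.Image, c cache.ArtifactCache) (artifact.Artifact, error) {
	// nil: no analyzers disabled. FilePatterns, Namespaces, PolicyPaths,
	// and DataPaths are the fields NewArtifact reads from the option.
	return image2.NewArtifact(img, c, nil, config.ScannerOption{
		Namespaces:  []string{"main"},
		PolicyPaths: []string{"policies/"},
	})
}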
func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) {
@@ -49,67 +66,87 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error)
		return types.ArtifactReference{}, xerrors.Errorf("unable to get layer IDs: %w", err)
	}

	versionedImageID, versionedDiffIDs := a.withVersionSuffix(imageID, diffIDs)
	// Debug
	log.Logger.Debugf("Image ID: %s", imageID)
	log.Logger.Debugf("Diff IDs: %v", diffIDs)

	missingImage, missingLayers, err := a.cache.MissingBlobs(versionedImageID, versionedDiffIDs)
	// Convert image ID and layer IDs to cache keys
	imageKey, layerKeys, layerKeyMap, err := a.calcCacheKeys(imageID, diffIDs)
	if err != nil {
		return types.ArtifactReference{}, err
	}

	missingImage, missingLayers, err := a.cache.MissingBlobs(imageKey, layerKeys)
	if err != nil {
		return types.ArtifactReference{}, xerrors.Errorf("unable to get missing layers: %w", err)
	}

	if err := a.inspect(ctx, versionedImageID, missingImage, missingLayers); err != nil {
	missingImageKey := imageKey
	if missingImage {
		log.Logger.Debugf("Missing image ID: %s", imageID)
	} else {
		missingImageKey = ""
	}

	if err = a.inspect(ctx, missingImageKey, missingLayers, layerKeyMap); err != nil {
		return types.ArtifactReference{}, xerrors.Errorf("analyze error: %w", err)
	}

	return types.ArtifactReference{
		Name:        a.image.Name(),
		ID:          versionedImageID,
		BlobIDs:     versionedDiffIDs,
		ID:          imageKey,
		BlobIDs:     layerKeys,
		RepoTags:    a.image.RepoTags(),
		RepoDigests: a.image.RepoDigests(),
	}, nil

}

func (a Artifact) withVersionSuffix(imageID string, diffIDs []string) (string, []string) {
	// e.g. sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e
	// => sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e/1
	imageID = cache.WithVersionSuffix(imageID, a.analyzer.ImageConfigAnalyzerVersions())
func (a Artifact) calcCacheKeys(imageID string, diffIDs []string) (string, []string, map[string]string, error) {
	// Pass an empty config scanner option so that the cache key can be the same, even when policies are updated.
	imageKey, err := cache.CalcKey(imageID, a.analyzer.ImageConfigAnalyzerVersions(), &config.ScannerOption{})
	if err != nil {
		return "", nil, nil, err
	}

	var blobIDs []string
	layerKeyMap := map[string]string{}
	var layerKeys []string
	for _, diffID := range diffIDs {
		// e.g. sha256:0fcbbeeeb0d7fc5c06362d7a6717b999e605574c7210eff4f7418f6e9be9fbfe
		// => sha256:0fcbbeeeb0d7fc5c06362d7a6717b999e605574c7210eff4f7418f6e9be9fbfe/121110111321
		blobID := cache.WithVersionSuffix(diffID, a.analyzer.AnalyzerVersions())
		blobIDs = append(blobIDs, blobID)
		blobKey, err := cache.CalcKey(diffID, a.analyzer.AnalyzerVersions(), &a.configScannerOption)
		if err != nil {
			return "", nil, nil, err
		}
	return imageID, blobIDs
		layerKeys = append(layerKeys, blobKey)
		layerKeyMap[blobKey] = diffID
	}
	return imageKey, layerKeys, layerKeyMap, nil
}
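calcCacheKeys replaces the old readable version suffixes (".../1", ".../111111") with opaque hashes that fold in the blob ID, the enabled analyzer versions, and the config scanner options, so a change to any of them invalidates the cache entry. A conceptual sketch of such a key derivation, not fanal's actual CalcKey implementation:

package main

import (
	"crypto/sha256"
	"encoding/json"
	"fmt"
)

// calcKey hashes everything that must invalidate a cached blob when it
// changes: the blob/image ID, the analyzer versions, and the scanner options.
func calcKey(id string, analyzerVersions map[string]int, opt interface{}) (string, error) {
	h := sha256.New()
	for _, v := range []interface{}{id, analyzerVersions, opt} {
		b, err := json.Marshal(v) // map keys marshal in sorted order, so this is deterministic
		if err != nil {
			return "", err
		}
		h.Write(b)
	}
	return fmt.Sprintf("sha256:%x", h.Sum(nil)), nil
}

func main() {
	// Illustrative inputs only; real callers pass the image ID or layer diff ID.
	key, _ := calcKey("sha256:example-blob-id", map[string]int{"alpine": 1, "apk": 1}, nil)
	fmt.Println(key)
}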
func (a Artifact) inspect(ctx context.Context, imageID string, missingImage bool, diffIDs []string) error {
func (a Artifact) inspect(ctx context.Context, missingImage string, layerKeys []string, layerKeyMap map[string]string) error {
	done := make(chan struct{})
	errCh := make(chan error)

	var osFound types.OS
	for _, d := range diffIDs {
		go func(ctx context.Context, versionedDiffID string) {
			diffID := cache.TrimVersionSuffix(versionedDiffID)
	for _, k := range layerKeys {
		go func(ctx context.Context, layerKey string) {
			diffID := layerKeyMap[layerKey]
			layerInfo, err := a.inspectLayer(ctx, diffID)
			if err != nil {
				errCh <- xerrors.Errorf("failed to analyze layer: %s : %w", diffID, err)
				return
			}
			if err = a.cache.PutBlob(versionedDiffID, layerInfo); err != nil {
				errCh <- xerrors.Errorf("failed to store layer: %s in cache: %w", diffID, err)
			if err = a.cache.PutBlob(layerKey, layerInfo); err != nil {
				errCh <- xerrors.Errorf("failed to store layer: %s in cache: %w", layerKey, err)
				return
			}
			if layerInfo.OS != nil {
				osFound = *layerInfo.OS
			}
			done <- struct{}{}
		}(ctx, d)
		}(ctx, k)
	}

	for range diffIDs {
	for range layerKeys {
		select {
		case <-done:
		case err := <-errCh:
@@ -119,8 +156,9 @@ func (a Artifact) inspect(ctx context.Context, imageID string, missingImage bool
		}
	}

	if missingImage {
		if err := a.inspectConfig(imageID, osFound); err != nil {
	if missingImage != "" {
		log.Logger.Debugf("Missing image cache: %s", missingImage)
		if err := a.inspectConfig(missingImage, osFound); err != nil {
			return xerrors.Errorf("unable to analyze config: %w", err)
		}
	}
@@ -130,6 +168,8 @@ func (a Artifact) inspect(ctx context.Context, imageID string, missingImage bool
}

func (a Artifact) inspectLayer(ctx context.Context, diffID string) (types.BlobInfo, error) {
	log.Logger.Debugf("Missing diff ID: %s", diffID)

	layerDigest, r, err := a.uncompressedLayer(diffID)
	if err != nil {
		return types.BlobInfo{}, xerrors.Errorf("unable to get uncompressed layer %s: %w", diffID, err)
@@ -152,8 +192,15 @@ func (a Artifact) inspectLayer(ctx context.Context, diffID string) (types.BlobIn
	// Wait for all the goroutines to finish.
	wg.Wait()

	// Sort the analysis result for consistent results
	result.Sort()

	// Scan config files
	misconfs, err := a.scanner.ScanConfigs(ctx, result.Configs)
	if err != nil {
		return types.BlobInfo{}, xerrors.Errorf("config scan error: %w", err)
	}

	layerInfo := types.BlobInfo{
		SchemaVersion: types.BlobJSONSchemaVersion,
		Digest:        layerDigest,
@@ -161,7 +208,7 @@ func (a Artifact) inspectLayer(ctx context.Context, diffID string) (types.BlobIn
		OS:                result.OS,
		PackageInfos:      result.PackageInfos,
		Applications:      result.Applications,
		Configs:           result.Configs,
		Misconfigurations: misconfs,
		OpaqueDirs:        opqDirs,
		WhiteoutFiles:     whFiles,
	}
@@ -11,13 +11,8 @@ import (
	"golang.org/x/xerrors"

	"github.com/aquasecurity/fanal/analyzer"
	_ "github.com/aquasecurity/fanal/analyzer/command/apk"
	_ "github.com/aquasecurity/fanal/analyzer/library/composer"
	_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
	_ "github.com/aquasecurity/fanal/analyzer/os/debian"
	_ "github.com/aquasecurity/fanal/analyzer/os/ubuntu"
	_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
	_ "github.com/aquasecurity/fanal/analyzer/pkg/dpkg"
	_ "github.com/aquasecurity/fanal/analyzer/all"
	"github.com/aquasecurity/fanal/analyzer/config"
	image2 "github.com/aquasecurity/fanal/artifact/image"
	"github.com/aquasecurity/fanal/cache"
	"github.com/aquasecurity/fanal/image"
@@ -41,18 +36,18 @@ func TestArtifact_Inspect(t *testing.T) {
			imagePath: "../../test/testdata/alpine-311.tar.gz",
			missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
				Args: cache.ArtifactCacheMissingBlobsArgs{
					ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72/1",
					BlobIDs:    []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111"},
					ArtifactID: "sha256:cdb49675542ff0051aaf7bab6c7a81b6fe275a7dd57d1e0317724a51edb7d6a6",
					BlobIDs:    []string{"sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e"},
				},
				Returns: cache.ArtifactCacheMissingBlobsReturns{
					MissingArtifact: true,
					MissingBlobIDs:  []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111"},
					MissingBlobIDs:  []string{"sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e"},
				},
			},
			putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111",
						BlobID: "sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
@@ -64,20 +59,20 @@ func TestArtifact_Inspect(t *testing.T) {
							PackageInfos: []types.PackageInfo{{
								FilePath: "lib/apk/db/installed",
								Packages: []types.Package{
									{Name: "musl", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
									{Name: "busybox", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
									{Name: "alpine-baselayout", Version: "3.2.0-r3", SrcName: "alpine-baselayout", SrcVersion: "3.2.0-r3"},
									{Name: "alpine-keys", Version: "2.1-r2", SrcName: "alpine-keys", SrcVersion: "2.1-r2"},
									{Name: "apk-tools", Version: "2.10.4-r3", SrcName: "apk-tools", SrcVersion: "2.10.4-r3"},
									{Name: "busybox", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
									{Name: "ca-certificates-cacert", Version: "20191127-r1", SrcName: "ca-certificates", SrcVersion: "20191127-r1"},
									{Name: "libc-utils", Version: "0.7.2-r0", SrcName: "libc-dev", SrcVersion: "0.7.2-r0"},
									{Name: "libcrypto1.1", Version: "1.1.1d-r3", SrcName: "openssl", SrcVersion: "1.1.1d-r3"},
									{Name: "libssl1.1", Version: "1.1.1d-r3", SrcName: "openssl", SrcVersion: "1.1.1d-r3"},
									{Name: "ca-certificates-cacert", Version: "20191127-r1", SrcName: "ca-certificates", SrcVersion: "20191127-r1"},
									{Name: "libtls-standalone", Version: "2.9.1-r0", SrcName: "libtls-standalone", SrcVersion: "2.9.1-r0"},
									{Name: "musl", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
									{Name: "musl-utils", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
									{Name: "scanelf", Version: "1.2.4-r0", SrcName: "pax-utils", SrcVersion: "1.2.4-r0"},
									{Name: "ssl_client", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
									{Name: "zlib", Version: "1.2.11-r3", SrcName: "zlib", SrcVersion: "1.2.11-r3"},
									{Name: "apk-tools", Version: "2.10.4-r3", SrcName: "apk-tools", SrcVersion: "2.10.4-r3"},
									{Name: "scanelf", Version: "1.2.4-r0", SrcName: "pax-utils", SrcVersion: "1.2.4-r0"},
									{Name: "musl-utils", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
									{Name: "libc-utils", Version: "0.7.2-r0", SrcName: "libc-dev", SrcVersion: "0.7.2-r0"},
								},
							}},
							Applications: []types.Application(nil),
@@ -91,7 +86,7 @@ func TestArtifact_Inspect(t *testing.T) {
			putArtifactExpectations: []cache.ArtifactCachePutArtifactExpectation{
				{
					Args: cache.ArtifactCachePutArtifactArgs{
						ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72/1",
						ArtifactID: "sha256:cdb49675542ff0051aaf7bab6c7a81b6fe275a7dd57d1e0317724a51edb7d6a6",
						ArtifactInfo: types.ArtifactInfo{
							SchemaVersion: 1,
							Architecture:  "amd64",
@@ -104,8 +99,8 @@ func TestArtifact_Inspect(t *testing.T) {
			},
			want: types.ArtifactReference{
				Name:    "../../test/testdata/alpine-311.tar.gz",
				ID:      "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72/1",
				BlobIDs: []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111"},
				ID:      "sha256:cdb49675542ff0051aaf7bab6c7a81b6fe275a7dd57d1e0317724a51edb7d6a6",
				BlobIDs: []string{"sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e"},
			},
		},
		{
@@ -113,61 +108,52 @@ func TestArtifact_Inspect(t *testing.T) {
			imagePath: "../../test/testdata/vuln-image.tar.gz",
			missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
				Args: cache.ArtifactCacheMissingBlobsArgs{
					ArtifactID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4/1",
					ArtifactID: "sha256:1a0f2e0e3a3ca6bf77692726db8b41793f3ac4edb7b64dd21a93d217ad8257e8",
					BlobIDs: []string{
						"sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02/111111",
						"sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5/111111",
						"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/111111",
						"sha256:a4595c43a874856bf95f3bfc4fbf78bbaa04c92c726276d4f64193a47ced0566/111111",
						"sha256:a09ebb4ede7e82bbf090f832d48711f2922851d32cfd839283a1a3201e548370",
						"sha256:2741194158d66408e5e28cee52bd561772976492e5a1ce1977801b72319e2b91",
						"sha256:7f9b53aad357de25b32717d2224554d94ba354df68ca88e82aa9bb8fc3b899c4",
						"sha256:24aa04b7e263a1e7aadf52910a05878c67455b74d293e73b9fa4607a4c70236a",
					},
				},
				Returns: cache.ArtifactCacheMissingBlobsReturns{
					MissingBlobIDs: []string{
						"sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02/111111",
						"sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5/111111",
						"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/111111",
						"sha256:a09ebb4ede7e82bbf090f832d48711f2922851d32cfd839283a1a3201e548370",
						"sha256:2741194158d66408e5e28cee52bd561772976492e5a1ce1977801b72319e2b91",
						"sha256:7f9b53aad357de25b32717d2224554d94ba354df68ca88e82aa9bb8fc3b899c4",
						"sha256:24aa04b7e263a1e7aadf52910a05878c67455b74d293e73b9fa4607a4c70236a",
					},
				},
			},
			putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02/111111",
						BlobID: "sha256:a09ebb4ede7e82bbf090f832d48711f2922851d32cfd839283a1a3201e548370",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
							DiffID:        "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02",
							OS: &types.OS{Family: "debian", Name: "9.9"},
							OS: &types.OS{
								Family: "debian",
								Name:   "9.9",
							},
							PackageInfos: []types.PackageInfo{
								{
									FilePath: "var/lib/dpkg/status.d/base",
									Packages: []types.Package{
										{Name: "base-files", Version: "9.9+deb9u9", SrcName: "base-files", SrcVersion: "9.9+deb9u9"},
									},
								},
										{
											Name:       "base-files",
											Version:    "9.9+deb9u9",
											SrcName:    "base-files",
											SrcVersion: "9.9+deb9u9",
										},
									},
								}, {
									FilePath: "var/lib/dpkg/status.d/netbase",
									Packages: []types.Package{
										{Name: "netbase", Version: "5.4", SrcName: "netbase", SrcVersion: "5.4"},
									},
								},
										{
											Name:       "netbase",
											Version:    "5.4",
											SrcName:    "netbase",
											SrcVersion: "5.4",
										},
									},
								}, {
									FilePath: "var/lib/dpkg/status.d/tzdata",
									Packages: []types.Package{
										{
											Name:       "tzdata",
											Version:    "2019a-0+deb9u1",
											SrcName:    "tzdata",
											SrcVersion: "2019a-0+deb9u1",
										},
										{Name: "tzdata", Version: "2019a-0+deb9u1", SrcName: "tzdata", SrcVersion: "2019a-0+deb9u1"},
									},
								},
							},
@@ -176,7 +162,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5/111111",
						BlobID: "sha256:2741194158d66408e5e28cee52bd561772976492e5a1ce1977801b72319e2b91",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
@@ -206,7 +192,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/111111",
						BlobID: "sha256:7f9b53aad357de25b32717d2224554d94ba354df68ca88e82aa9bb8fc3b899c4",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
@@ -233,44 +219,116 @@ func TestArtifact_Inspect(t *testing.T) {
						},
					},
				},
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:24aa04b7e263a1e7aadf52910a05878c67455b74d293e73b9fa4607a4c70236a",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
							DiffID:        "sha256:a4595c43a874856bf95f3bfc4fbf78bbaa04c92c726276d4f64193a47ced0566",
							Applications: []types.Application{{Type: types.Bundler, FilePath: "ruby-app/Gemfile.lock",
								Libraries: []types.LibraryInfo{
									{Library: depTypes.Library{Name: "actioncable", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "actionmailer", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "actionpack", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "actionview", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "activejob", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "activemodel", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "activerecord", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "activestorage", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "activesupport", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "arel", Version: "9.0.0"}},
									{Library: depTypes.Library{Name: "ast", Version: "2.4.0"}},
									{Library: depTypes.Library{Name: "builder", Version: "3.2.3"}},
									{Library: depTypes.Library{Name: "coderay", Version: "1.1.2"}},
									{Library: depTypes.Library{Name: "concurrent-ruby", Version: "1.1.5"}},
									{Library: depTypes.Library{Name: "crass", Version: "1.0.4"}},
									{Library: depTypes.Library{Name: "dotenv", Version: "2.7.2"}},
									{Library: depTypes.Library{Name: "erubi", Version: "1.8.0"}},
									{Library: depTypes.Library{Name: "faker", Version: "1.9.3"}},
									{Library: depTypes.Library{Name: "globalid", Version: "0.4.2"}},
									{Library: depTypes.Library{Name: "i18n", Version: "1.6.0"}},
									{Library: depTypes.Library{Name: "jaro_winkler", Version: "1.5.2"}},
									{Library: depTypes.Library{Name: "json", Version: "2.2.0"}},
									{Library: depTypes.Library{Name: "loofah", Version: "2.2.3"}},
									{Library: depTypes.Library{Name: "mail", Version: "2.7.1"}},
									{Library: depTypes.Library{Name: "marcel", Version: "0.3.3"}},
									{Library: depTypes.Library{Name: "method_source", Version: "0.9.2"}},
									{Library: depTypes.Library{Name: "mimemagic", Version: "0.3.3"}},
									{Library: depTypes.Library{Name: "mini_mime", Version: "1.0.1"}},
									{Library: depTypes.Library{Name: "mini_portile2", Version: "2.4.0"}},
									{Library: depTypes.Library{Name: "minitest", Version: "5.11.3"}},
									{Library: depTypes.Library{Name: "nio4r", Version: "2.3.1"}},
									{Library: depTypes.Library{Name: "nokogiri", Version: "1.10.3"}},
									{Library: depTypes.Library{Name: "parallel", Version: "1.17.0"}},
									{Library: depTypes.Library{Name: "parser", Version: "2.6.3.0"}},
									{Library: depTypes.Library{Name: "pry", Version: "0.12.2"}},
									{Library: depTypes.Library{Name: "psych", Version: "3.1.0"}},
									{Library: depTypes.Library{Name: "rack", Version: "2.0.7"}},
									{Library: depTypes.Library{Name: "rack-test", Version: "1.1.0"}},
									{Library: depTypes.Library{Name: "rails", Version: "5.2.0"}},
									{Library: depTypes.Library{Name: "rails-dom-testing", Version: "2.0.3"}},
									{Library: depTypes.Library{Name: "rails-html-sanitizer", Version: "1.0.3"}},
									{Library: depTypes.Library{Name: "railties", Version: "5.2.3"}},
									{Library: depTypes.Library{Name: "rainbow", Version: "3.0.0"}},
									{Library: depTypes.Library{Name: "rake", Version: "12.3.2"}},
									{Library: depTypes.Library{Name: "rubocop", Version: "0.67.2"}},
									{Library: depTypes.Library{Name: "ruby-progressbar", Version: "1.10.0"}},
									{Library: depTypes.Library{Name: "sprockets", Version: "3.7.2"}},
									{Library: depTypes.Library{Name: "sprockets-rails", Version: "3.2.1"}},
									{Library: depTypes.Library{Name: "thor", Version: "0.20.3"}},
									{Library: depTypes.Library{Name: "thread_safe", Version: "0.3.6"}},
									{Library: depTypes.Library{Name: "tzinfo", Version: "1.2.5"}},
									{Library: depTypes.Library{Name: "unicode-display_width", Version: "1.5.0"}},
									{Library: depTypes.Library{Name: "websocket-driver", Version: "0.7.0"}},
									{Library: depTypes.Library{Name: "websocket-extensions", Version: "0.1.3"}},
								},
							}},
							OpaqueDirs: []string{
								"ruby-app/",
							},
						},
					},
				},
			},
			want: types.ArtifactReference{
				Name: "../../test/testdata/vuln-image.tar.gz",
				ID:   "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4/1",
				ID:   "sha256:1a0f2e0e3a3ca6bf77692726db8b41793f3ac4edb7b64dd21a93d217ad8257e8",
				BlobIDs: []string{
					"sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02/111111",
					"sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5/111111",
					"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/111111",
					"sha256:a4595c43a874856bf95f3bfc4fbf78bbaa04c92c726276d4f64193a47ced0566/111111",
					"sha256:a09ebb4ede7e82bbf090f832d48711f2922851d32cfd839283a1a3201e548370",
					"sha256:2741194158d66408e5e28cee52bd561772976492e5a1ce1977801b72319e2b91",
					"sha256:7f9b53aad357de25b32717d2224554d94ba354df68ca88e82aa9bb8fc3b899c4",
					"sha256:24aa04b7e263a1e7aadf52910a05878c67455b74d293e73b9fa4607a4c70236a",
				},
			},
		},
		{
			name:             "happy path: disable analyzers",
			imagePath:        "../../test/testdata/vuln-image.tar.gz",
			disableAnalyzers: []analyzer.Type{analyzer.TypeDebian, analyzer.TypeDpkg, analyzer.TypeComposer},
			disableAnalyzers: []analyzer.Type{analyzer.TypeDebian, analyzer.TypeDpkg, analyzer.TypeComposer, analyzer.TypeBundler},
			missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
				Args: cache.ArtifactCacheMissingBlobsArgs{
					ArtifactID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4/1",
					ArtifactID: "sha256:1a0f2e0e3a3ca6bf77692726db8b41793f3ac4edb7b64dd21a93d217ad8257e8",
					BlobIDs: []string{
						"sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02/101100",
						"sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5/101100",
						"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/101100",
						"sha256:a4595c43a874856bf95f3bfc4fbf78bbaa04c92c726276d4f64193a47ced0566/101100",
						"sha256:02a02d6f53beb5f8034a3bbc711fd91817395b2d9e6cc456bf6c3ebaafa089b0",
						"sha256:bba81c25dfcc54c734b7e03d7a56f1e556093f679c9bcbca11f2a78e1b12245a",
						"sha256:225919d769f4888b35d78cfca193fab8b69558751c5a9355c4711161fe9f8f37",
						"sha256:15f83193639bf53fbc936d1161d4f79665cfe88c4b92dd0c4aa3be895041bada",
					},
				},
				Returns: cache.ArtifactCacheMissingBlobsReturns{
					MissingBlobIDs: []string{
						"sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02/101100",
						"sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5/101100",
						"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/101100",
						"sha256:02a02d6f53beb5f8034a3bbc711fd91817395b2d9e6cc456bf6c3ebaafa089b0",
						"sha256:bba81c25dfcc54c734b7e03d7a56f1e556093f679c9bcbca11f2a78e1b12245a",
						"sha256:225919d769f4888b35d78cfca193fab8b69558751c5a9355c4711161fe9f8f37",
						"sha256:15f83193639bf53fbc936d1161d4f79665cfe88c4b92dd0c4aa3be895041bada",
					},
				},
			},
			putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02/101100",
						BlobID: "sha256:02a02d6f53beb5f8034a3bbc711fd91817395b2d9e6cc456bf6c3ebaafa089b0",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
@@ -280,7 +338,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5/101100",
						BlobID: "sha256:bba81c25dfcc54c734b7e03d7a56f1e556093f679c9bcbca11f2a78e1b12245a",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
@@ -290,7 +348,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/101100",
						BlobID: "sha256:225919d769f4888b35d78cfca193fab8b69558751c5a9355c4711161fe9f8f37",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
@@ -299,15 +357,26 @@ func TestArtifact_Inspect(t *testing.T) {
						},
					},
				},
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:15f83193639bf53fbc936d1161d4f79665cfe88c4b92dd0c4aa3be895041bada",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
							DiffID:        "sha256:a4595c43a874856bf95f3bfc4fbf78bbaa04c92c726276d4f64193a47ced0566",
							OpaqueDirs:    []string{"ruby-app/"},
						},
					},
				},
			},
			want: types.ArtifactReference{
				Name: "../../test/testdata/vuln-image.tar.gz",
				ID:   "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4/1",
				ID:   "sha256:1a0f2e0e3a3ca6bf77692726db8b41793f3ac4edb7b64dd21a93d217ad8257e8",
				BlobIDs: []string{
					"sha256:932da51564135c98a49a34a193d6cd363d8fa4184d957fde16c9d8527b3f3b02/101100",
					"sha256:dffd9992ca398466a663c87c92cfea2a2db0ae0cf33fcb99da60eec52addbfc5/101100",
					"sha256:24df0d4e20c0f42d3703bf1f1db2bdd77346c7956f74f423603d651e8e5ae8a7/101100",
					"sha256:a4595c43a874856bf95f3bfc4fbf78bbaa04c92c726276d4f64193a47ced0566/101100",
					"sha256:02a02d6f53beb5f8034a3bbc711fd91817395b2d9e6cc456bf6c3ebaafa089b0",
					"sha256:bba81c25dfcc54c734b7e03d7a56f1e556093f679c9bcbca11f2a78e1b12245a",
					"sha256:225919d769f4888b35d78cfca193fab8b69558751c5a9355c4711161fe9f8f37",
					"sha256:15f83193639bf53fbc936d1161d4f79665cfe88c4b92dd0c4aa3be895041bada",
				},
			},
		},
@@ -316,8 +385,8 @@ func TestArtifact_Inspect(t *testing.T) {
			imagePath: "../../test/testdata/alpine-311.tar.gz",
			missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
				Args: cache.ArtifactCacheMissingBlobsArgs{
					ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72/1",
					BlobIDs:    []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111"},
					ArtifactID: "sha256:cdb49675542ff0051aaf7bab6c7a81b6fe275a7dd57d1e0317724a51edb7d6a6",
					BlobIDs:    []string{"sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e"},
				},
				Returns: cache.ArtifactCacheMissingBlobsReturns{
					Err: xerrors.New("MissingBlobs failed"),
@@ -330,17 +399,17 @@ func TestArtifact_Inspect(t *testing.T) {
			imagePath: "../../test/testdata/alpine-311.tar.gz",
			missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
				Args: cache.ArtifactCacheMissingBlobsArgs{
					ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72/1",
					BlobIDs:    []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111"},
					ArtifactID: "sha256:cdb49675542ff0051aaf7bab6c7a81b6fe275a7dd57d1e0317724a51edb7d6a6",
					BlobIDs:    []string{"sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e"},
				},
				Returns: cache.ArtifactCacheMissingBlobsReturns{
					MissingBlobIDs: []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111"},
					MissingBlobIDs: []string{"sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e"},
				},
			},
			putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
				{
					Args: cache.ArtifactCachePutBlobArgs{
						BlobID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111",
						BlobID: "sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e",
						BlobInfo: types.BlobInfo{
							SchemaVersion: 1,
							Digest:        "",
@@ -352,20 +421,20 @@ func TestArtifact_Inspect(t *testing.T) {
							PackageInfos: []types.PackageInfo{{
								FilePath: "lib/apk/db/installed",
								Packages: []types.Package{
									{Name: "musl", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
									{Name: "busybox", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
									{Name: "alpine-baselayout", Version: "3.2.0-r3", SrcName: "alpine-baselayout", SrcVersion: "3.2.0-r3"},
									{Name: "alpine-keys", Version: "2.1-r2", SrcName: "alpine-keys", SrcVersion: "2.1-r2"},
									{Name: "apk-tools", Version: "2.10.4-r3", SrcName: "apk-tools", SrcVersion: "2.10.4-r3"},
									{Name: "busybox", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
									{Name: "ca-certificates-cacert", Version: "20191127-r1", SrcName: "ca-certificates", SrcVersion: "20191127-r1"},
									{Name: "libc-utils", Version: "0.7.2-r0", SrcName: "libc-dev", SrcVersion: "0.7.2-r0"},
									{Name: "libcrypto1.1", Version: "1.1.1d-r3", SrcName: "openssl", SrcVersion: "1.1.1d-r3"},
									{Name: "libssl1.1", Version: "1.1.1d-r3", SrcName: "openssl", SrcVersion: "1.1.1d-r3"},
									{Name: "ca-certificates-cacert", Version: "20191127-r1", SrcName: "ca-certificates", SrcVersion: "20191127-r1"},
									{Name: "libtls-standalone", Version: "2.9.1-r0", SrcName: "libtls-standalone", SrcVersion: "2.9.1-r0"},
									{Name: "musl", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
									{Name: "musl-utils", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
									{Name: "scanelf", Version: "1.2.4-r0", SrcName: "pax-utils", SrcVersion: "1.2.4-r0"},
									{Name: "ssl_client", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
									{Name: "zlib", Version: "1.2.11-r3", SrcName: "zlib", SrcVersion: "1.2.11-r3"},
									{Name: "apk-tools", Version: "2.10.4-r3", SrcName: "apk-tools", SrcVersion: "2.10.4-r3"},
									{Name: "scanelf", Version: "1.2.4-r0", SrcName: "pax-utils", SrcVersion: "1.2.4-r0"},
									{Name: "musl-utils", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
									{Name: "libc-utils", Version: "0.7.2-r0", SrcName: "libc-dev", SrcVersion: "0.7.2-r0"},
								},
							}},
							Applications: []types.Application(nil),
@@ -385,18 +454,18 @@ func TestArtifact_Inspect(t *testing.T) {
			imagePath: "../../test/testdata/alpine-311.tar.gz",
			missingBlobsExpectation: cache.ArtifactCacheMissingBlobsExpectation{
				Args: cache.ArtifactCacheMissingBlobsArgs{
					ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72/1",
					BlobIDs:    []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111"},
					ArtifactID: "sha256:cdb49675542ff0051aaf7bab6c7a81b6fe275a7dd57d1e0317724a51edb7d6a6",
					BlobIDs:    []string{"sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e"},
				},
				Returns: cache.ArtifactCacheMissingBlobsReturns{
					MissingArtifact: true,
					MissingBlobIDs:  []string{"sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111"},
					MissingBlobIDs:  []string{"sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e"},
				},
			},
			putBlobExpectations: []cache.ArtifactCachePutBlobExpectation{
				{
					Args: cache.ArtifactCachePutBlobArgs{
|
||||
BlobID: "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203/111111",
|
||||
BlobID: "sha256:72277b4a70d3afd27a87adfd122c8a09ed0e8ecdfa50a96d5926d104607fd85e",
|
||||
BlobInfo: types.BlobInfo{
|
||||
SchemaVersion: 1,
|
||||
Digest: "",
|
||||
@@ -408,20 +477,20 @@ func TestArtifact_Inspect(t *testing.T) {
|
||||
PackageInfos: []types.PackageInfo{{
|
||||
FilePath: "lib/apk/db/installed",
|
||||
Packages: []types.Package{
|
||||
{Name: "musl", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
|
||||
{Name: "busybox", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
|
||||
{Name: "alpine-baselayout", Version: "3.2.0-r3", SrcName: "alpine-baselayout", SrcVersion: "3.2.0-r3"},
|
||||
{Name: "alpine-keys", Version: "2.1-r2", SrcName: "alpine-keys", SrcVersion: "2.1-r2"},
|
||||
{Name: "apk-tools", Version: "2.10.4-r3", SrcName: "apk-tools", SrcVersion: "2.10.4-r3"},
|
||||
{Name: "busybox", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
|
||||
{Name: "ca-certificates-cacert", Version: "20191127-r1", SrcName: "ca-certificates", SrcVersion: "20191127-r1"},
|
||||
{Name: "libc-utils", Version: "0.7.2-r0", SrcName: "libc-dev", SrcVersion: "0.7.2-r0"},
|
||||
{Name: "libcrypto1.1", Version: "1.1.1d-r3", SrcName: "openssl", SrcVersion: "1.1.1d-r3"},
|
||||
{Name: "libssl1.1", Version: "1.1.1d-r3", SrcName: "openssl", SrcVersion: "1.1.1d-r3"},
|
||||
{Name: "ca-certificates-cacert", Version: "20191127-r1", SrcName: "ca-certificates", SrcVersion: "20191127-r1"},
|
||||
{Name: "libtls-standalone", Version: "2.9.1-r0", SrcName: "libtls-standalone", SrcVersion: "2.9.1-r0"},
|
||||
{Name: "musl", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
|
||||
{Name: "musl-utils", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
|
||||
{Name: "scanelf", Version: "1.2.4-r0", SrcName: "pax-utils", SrcVersion: "1.2.4-r0"},
|
||||
{Name: "ssl_client", Version: "1.31.1-r9", SrcName: "busybox", SrcVersion: "1.31.1-r9"},
|
||||
{Name: "zlib", Version: "1.2.11-r3", SrcName: "zlib", SrcVersion: "1.2.11-r3"},
|
||||
{Name: "apk-tools", Version: "2.10.4-r3", SrcName: "apk-tools", SrcVersion: "2.10.4-r3"},
|
||||
{Name: "scanelf", Version: "1.2.4-r0", SrcName: "pax-utils", SrcVersion: "1.2.4-r0"},
|
||||
{Name: "musl-utils", Version: "1.1.24-r2", SrcName: "musl", SrcVersion: "1.1.24-r2"},
|
||||
{Name: "libc-utils", Version: "0.7.2-r0", SrcName: "libc-dev", SrcVersion: "0.7.2-r0"},
|
||||
},
|
||||
}},
|
||||
Applications: []types.Application(nil),
|
||||
@@ -435,7 +504,7 @@ func TestArtifact_Inspect(t *testing.T) {
|
||||
putArtifactExpectations: []cache.ArtifactCachePutArtifactExpectation{
|
||||
{
|
||||
Args: cache.ArtifactCachePutArtifactArgs{
|
||||
ArtifactID: "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72/1",
|
||||
ArtifactID: "sha256:cdb49675542ff0051aaf7bab6c7a81b6fe275a7dd57d1e0317724a51edb7d6a6",
|
||||
ArtifactInfo: types.ArtifactInfo{
|
||||
SchemaVersion: 1,
|
||||
Architecture: "amd64",
|
||||
@@ -462,7 +531,9 @@ func TestArtifact_Inspect(t *testing.T) {
|
||||
img, err := image.NewArchiveImage(tt.imagePath)
|
||||
require.NoError(t, err)
|
||||
|
||||
a := image2.NewArtifact(img, mockCache, tt.disableAnalyzers)
|
||||
a, err := image2.NewArtifact(img, mockCache, tt.disableAnalyzers, config.ScannerOption{})
|
||||
require.NoError(t, err)
|
||||
|
||||
got, err := a.Inspect(context.Background())
|
||||
if tt.wantErr != "" {
|
||||
require.NotNil(t, err)
|
||||
|
||||
14
artifact/image/testdata/valid.rego
vendored
Normal file
14
artifact/image/testdata/valid.rego
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
package testdata.kubernetes.id_100
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "ID-100",
|
||||
"title": "Bad Deployment",
|
||||
"version": "v1.0.0",
|
||||
"severity": "HIGH",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
deny[res] {
|
||||
input.kind == "Deployment"
|
||||
res := {"type": "Kubernetes Check", "id": "ID-100", "msg": "deny", "severity": "CRITICAL"}
|
||||
}
|
||||
@@ -15,10 +15,12 @@ import (
|
||||
"golang.org/x/xerrors"
|
||||
|
||||
"github.com/aquasecurity/fanal/analyzer"
|
||||
"github.com/aquasecurity/fanal/analyzer/config"
|
||||
_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
|
||||
_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
|
||||
"github.com/aquasecurity/fanal/artifact"
|
||||
"github.com/aquasecurity/fanal/cache"
|
||||
"github.com/aquasecurity/fanal/config/scanner"
|
||||
"github.com/aquasecurity/fanal/types"
|
||||
"github.com/aquasecurity/fanal/walker"
|
||||
)
|
||||
@@ -31,14 +33,28 @@ type Artifact struct {
|
||||
dir string
|
||||
cache cache.ArtifactCache
|
||||
analyzer analyzer.Analyzer
|
||||
scanner scanner.Scanner
|
||||
configScannerOption config.ScannerOption
|
||||
}
|
||||
|
||||
func NewArtifact(dir string, c cache.ArtifactCache, disabled []analyzer.Type, opt config.ScannerOption) (artifact.Artifact, error) {
|
||||
// Register config analyzers
|
||||
if err := config.RegisterConfigAnalyzers(opt.FilePatterns); err != nil {
|
||||
return nil, xerrors.Errorf("config analyzer error: %w", err)
|
||||
}
|
||||
|
||||
s, err := scanner.New(opt.Namespaces, opt.PolicyPaths, opt.DataPaths)
|
||||
if err != nil {
|
||||
return nil, xerrors.Errorf("scanner error: %w", err)
|
||||
}
|
||||
|
||||
func NewArtifact(dir string, c cache.ArtifactCache, disabled []analyzer.Type) artifact.Artifact {
|
||||
return Artifact{
|
||||
dir: dir,
|
||||
cache: c,
|
||||
analyzer: analyzer.NewAnalyzer(disabled),
|
||||
}
|
||||
scanner: s,
|
||||
configScannerOption: opt,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error) {
|
||||
@@ -63,28 +79,38 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error)
|
||||
// Wait for all the goroutine to finish.
|
||||
wg.Wait()
|
||||
|
||||
// Sort the analysis result for consistent results
|
||||
result.Sort()
|
||||
|
||||
// Scan config files
|
||||
misconfs, err := a.scanner.ScanConfigs(ctx, result.Configs)
|
||||
if err != nil {
|
||||
return types.ArtifactReference{}, xerrors.Errorf("config scan error: %w", err)
|
||||
}
|
||||
|
||||
blobInfo := types.BlobInfo{
|
||||
SchemaVersion: types.BlobJSONSchemaVersion,
|
||||
OS: result.OS,
|
||||
PackageInfos: result.PackageInfos,
|
||||
Applications: result.Applications,
|
||||
Configs: result.Configs,
|
||||
Misconfigurations: misconfs,
|
||||
}
|
||||
|
||||
// calculate hash of JSON and use it as pseudo artifactID and blobID
|
||||
h := sha256.New()
|
||||
if err = json.NewEncoder(h).Encode(blobInfo); err != nil {
|
||||
return types.ArtifactReference{}, err
|
||||
return types.ArtifactReference{}, xerrors.Errorf("json error: %w", err)
|
||||
}
|
||||
|
||||
d := digest.NewDigest(digest.SHA256, h)
|
||||
diffID := d.String()
|
||||
blobInfo.DiffID = diffID
|
||||
versionedDiffID := cache.WithVersionSuffix(diffID, a.analyzer.AnalyzerVersions())
|
||||
cacheKey, err := cache.CalcKey(diffID, a.analyzer.AnalyzerVersions(), &a.configScannerOption)
|
||||
if err != nil {
|
||||
return types.ArtifactReference{}, xerrors.Errorf("cache key: %w", err)
|
||||
}
|
||||
|
||||
if err = a.cache.PutBlob(versionedDiffID, blobInfo); err != nil {
|
||||
if err = a.cache.PutBlob(cacheKey, blobInfo); err != nil {
|
||||
return types.ArtifactReference{}, xerrors.Errorf("failed to store blob (%s) in cache: %w", diffID, err)
|
||||
}
|
||||
|
||||
@@ -99,7 +125,7 @@ func (a Artifact) Inspect(ctx context.Context) (types.ArtifactReference, error)
|
||||
|
||||
return types.ArtifactReference{
|
||||
Name: hostName,
|
||||
ID: versionedDiffID, // use diffID as pseudo artifactID
|
||||
BlobIDs: []string{versionedDiffID},
|
||||
ID: cacheKey, // use a cache key as pseudo artifact ID
|
||||
BlobIDs: []string{cacheKey},
|
||||
}, nil
|
||||
}
|
||||
|
||||
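For orientation, here is a minimal sketch of how a caller might drive the new filesystem artifact API above. The cache constructor, target path, namespace, and policy directory are illustrative assumptions, not part of this diff:

// Hypothetical caller of the new local.NewArtifact signature (paths are placeholders).
fsCache, err := cache.NewFSCache("/tmp/fanal-cache") // assumed fs-backed cache constructor
if err != nil {
    return err
}
art, err := local.NewArtifact("/path/to/project", fsCache, nil, config.ScannerOption{
    Namespaces:  []string{"main"},              // Rego namespaces to evaluate
    PolicyPaths: []string{"/path/to/policies"}, // directories containing .rego policies
})
if err != nil {
    return err
}
ref, err := art.Inspect(context.Background()) // ref.ID is now the CalcKey digest

Because Inspect now runs the config scanner before hashing, enabling policies changes both the blob contents and the resulting cache key.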
@@ -9,6 +9,8 @@ import (
"github.com/stretchr/testify/require"

"github.com/aquasecurity/fanal/analyzer"
_ "github.com/aquasecurity/fanal/analyzer/all"
"github.com/aquasecurity/fanal/analyzer/config"
"github.com/aquasecurity/fanal/cache"
"github.com/aquasecurity/fanal/types"
)
@@ -20,6 +22,7 @@ func TestArtifact_Inspect(t *testing.T) {
tests := []struct {
name string
fields fields
scannerOpt config.ScannerOption
disabledAnalyzers []analyzer.Type
putBlobExpectation cache.ArtifactCachePutBlobExpectation
want types.ArtifactReference
@@ -32,7 +35,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e/11",
BlobID: "sha256:42410764f3db892ca04760b45c6a6ff4b27f62fc333eb53a1a6a0b81080a22fa",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
DiffID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e",
@@ -54,9 +57,9 @@ func TestArtifact_Inspect(t *testing.T) {
},
want: types.ArtifactReference{
Name: "host",
ID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e/11",
ID: "sha256:42410764f3db892ca04760b45c6a6ff4b27f62fc333eb53a1a6a0b81080a22fa",
BlobIDs: []string{
"sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e/11",
"sha256:42410764f3db892ca04760b45c6a6ff4b27f62fc333eb53a1a6a0b81080a22fa",
},
},
},
@@ -68,7 +71,7 @@ func TestArtifact_Inspect(t *testing.T) {
disabledAnalyzers: []analyzer.Type{analyzer.TypeAlpine, analyzer.TypeApk},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:3404e98968ad338dc60ef74c0dd5bdd893478415cd2296b0c265a5650b3ae4d6/00",
BlobID: "sha256:e02b0ed1535b913fa6518b1d68defc60f1e7e68061d4332db39208eb08094c82",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
DiffID: "sha256:3404e98968ad338dc60ef74c0dd5bdd893478415cd2296b0c265a5650b3ae4d6",
@@ -78,9 +81,9 @@ func TestArtifact_Inspect(t *testing.T) {
},
want: types.ArtifactReference{
Name: "host",
ID: "sha256:3404e98968ad338dc60ef74c0dd5bdd893478415cd2296b0c265a5650b3ae4d6/00",
ID: "sha256:e02b0ed1535b913fa6518b1d68defc60f1e7e68061d4332db39208eb08094c82",
BlobIDs: []string{
"sha256:3404e98968ad338dc60ef74c0dd5bdd893478415cd2296b0c265a5650b3ae4d6/00",
"sha256:e02b0ed1535b913fa6518b1d68defc60f1e7e68061d4332db39208eb08094c82",
},
},
},
@@ -91,7 +94,7 @@ func TestArtifact_Inspect(t *testing.T) {
},
putBlobExpectation: cache.ArtifactCachePutBlobExpectation{
Args: cache.ArtifactCachePutBlobArgs{
BlobID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e/11",
BlobID: "sha256:42410764f3db892ca04760b45c6a6ff4b27f62fc333eb53a1a6a0b81080a22fa",
BlobInfo: types.BlobInfo{
SchemaVersion: types.BlobJSONSchemaVersion,
DiffID: "sha256:94a4586441ddd6599fb64cb407d8c43ffb273a8bd01cd933e525b08527f6296e",
@@ -128,7 +131,9 @@ func TestArtifact_Inspect(t *testing.T) {
c := new(cache.MockArtifactCache)
c.ApplyPutBlobExpectation(tt.putBlobExpectation)

a := NewArtifact(tt.fields.dir, c, tt.disabledAnalyzers)
a, err := NewArtifact(tt.fields.dir, c, tt.disabledAnalyzers, tt.scannerOpt)
require.NoError(t, err)

got, err := a.Inspect(context.Background())
if tt.wantErr != "" {
require.NotNil(t, err)

@@ -5,16 +5,19 @@ import (
"net/url"
"os"

"github.com/aquasecurity/fanal/analyzer"
"golang.org/x/xerrors"

git "github.com/go-git/go-git/v5"

"github.com/aquasecurity/fanal/analyzer"
"github.com/aquasecurity/fanal/analyzer/config"
"github.com/aquasecurity/fanal/artifact"
"github.com/aquasecurity/fanal/artifact/local"
"github.com/aquasecurity/fanal/cache"
)

func NewArtifact(rawurl string, c cache.ArtifactCache, disabled []analyzer.Type) (artifact.Artifact, func(), error) {
func NewArtifact(rawurl string, c cache.ArtifactCache, disabled []analyzer.Type, opt config.ScannerOption) (
artifact.Artifact, func(), error) {
cleanup := func() {}

u, err := newURL(rawurl)
@@ -33,7 +36,7 @@ func NewArtifact(rawurl string, c cache.ArtifactCache, disabled []analyzer.Type)
Depth: 1,
})
if err != nil {
return nil, cleanup, err
return nil, cleanup, xerrors.Errorf("git error: %w", err)
}

cleanup = func() {
@@ -43,7 +46,11 @@ func NewArtifact(rawurl string, c cache.ArtifactCache, disabled []analyzer.Type)
// JAR/WAR/EAR doesn't need to be analyzed in git repositories.
disabled = append(disabled, analyzer.TypeJar)

return local.NewArtifact(tmpDir, c, disabled), cleanup, nil
art, err := local.NewArtifact(tmpDir, c, disabled, opt)
if err != nil {
return nil, cleanup, xerrors.Errorf("fs artifact: %w", err)
}
return art, cleanup, nil
}

func newURL(rawurl string) (*url.URL, error) {

@@ -9,6 +9,7 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/aquasecurity/fanal/analyzer/config"
"github.com/aquasecurity/fanal/cache"
)

@@ -69,7 +70,7 @@ func TestNewArtifact(t *testing.T) {

for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
_, cleanup, err := NewArtifact(tt.args.rawurl, tt.args.c, nil)
_, cleanup, err := NewArtifact(tt.args.rawurl, tt.args.c, nil, config.ScannerOption{})
assert.Equal(t, tt.wantErr, err != nil)
defer cleanup()
})

44
cache/key.go
vendored
@@ -1,24 +1,42 @@
package cache

import (
"crypto/sha256"
"encoding/json"
"fmt"
"strings"

"golang.org/x/mod/sumdb/dirhash"
"golang.org/x/xerrors"

"github.com/aquasecurity/fanal/analyzer/config"
)

const keySeparator = "/"
func CalcKey(id string, versions map[string]int, opt *config.ScannerOption) (string, error) {
// Sort options for consistent results
opt.Sort()

func WithVersionSuffix(key, version string) string {
// e.g. sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e
// => sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e/11201101
return fmt.Sprintf("%s%s%s", key, keySeparator, version)
h := sha256.New()

if _, err := h.Write([]byte(id)); err != nil {
return "", xerrors.Errorf("sha256 error: %w", err)
}

func TrimVersionSuffix(versioned string) string {
// e.g.sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e/11201101
// => sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e
ss := strings.Split(versioned, keySeparator)
if len(ss) < 2 {
return versioned
if err := json.NewEncoder(h).Encode(versions); err != nil {
return "", xerrors.Errorf("json encode error: %w", err)
}
return ss[0]

for _, paths := range [][]string{opt.PolicyPaths, opt.DataPaths} {
for _, p := range paths {
s, err := dirhash.HashDir(p, "", dirhash.DefaultHash)
if err != nil {
return "", xerrors.Errorf("hash dir (%s): %w", p, err)
}

if _, err = h.Write([]byte(s)); err != nil {
return "", xerrors.Errorf("sha256 write error: %w", err)
}
}
}

return fmt.Sprintf("sha256:%x", h.Sum(nil)), nil
}

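In effect, CalcKey replaces the old version-suffix scheme with a single digest over the blob ID, the JSON-encoded analyzer version map, and a dirhash of every policy and data directory. A rough illustration, using the values from the "with policy" case in key_test.go below:

opt := &config.ScannerOption{PolicyPaths: []string{"testdata"}}
key, err := cache.CalcKey(
    "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
    map[string]int{"alpine": 1, "debian": 1},
    opt,
)
// key == "sha256:853fc0e8c43f7c764e2319498ad8e6e9a0ee4791ad5de2d223ce093cb9a8aef7"
// err is non-nil when a policy or data directory cannot be hashed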
144
cache/key_test.go
vendored
@@ -4,55 +4,147 @@ import (
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/aquasecurity/fanal/analyzer/config"
)

func TestWithVersionSuffix(t *testing.T) {
func TestCalcKey(t *testing.T) {
type args struct {
key string
version string
versions map[string]int
patterns []string
policy []string
data []string
}
tests := []struct {
name string
args args
want string
wantErr string
}{
{
name: "happy path",
args: args{
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
version: "111101112110013",
versions: map[string]int{
"alpine": 1,
"debian": 1,
},
want: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e/111101112110013",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := WithVersionSuffix(tt.args.key, tt.args.version)
assert.Equal(t, tt.want, got)
})
}
}

func TestTrimVersionSuffix(t *testing.T) {
type args struct {
versioned string
}
tests := []struct {
name string
args args
want string
}{
want: "sha256:51685eab32590231b0c9b1114e556cb3247ead73bfd86ecf9a11632147eb7333",
},
{
name: "happy path",
name: "with disabled analyzer",
args: args{
versioned: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e/111101112110013",
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
versions: map[string]int{
"alpine": 1,
"debian": 0,
"redhat": 2,
},
want: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
},
want: "sha256:dff5eb1aa155d720a7949d2ca8abb48d91762bf8b39dd4bfc5c5db17d9d3ccc3",
},
{
name: "with empty slice file patterns",
args: args{
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
versions: map[string]int{
"alpine": 1,
"debian": 1,
},
patterns: []string{},
},
want: "sha256:51685eab32590231b0c9b1114e556cb3247ead73bfd86ecf9a11632147eb7333",
},
{
name: "with single empty string in file patterns",
args: args{
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
versions: map[string]int{
"alpine": 1,
"debian": 1,
},
patterns: []string{""},
},
want: "sha256:51685eab32590231b0c9b1114e556cb3247ead73bfd86ecf9a11632147eb7333",
},
{
name: "with single non empty string in file patterns",
args: args{
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
versions: map[string]int{
"alpine": 1,
"debian": 1,
},
patterns: []string{"test"},
},
want: "sha256:51685eab32590231b0c9b1114e556cb3247ead73bfd86ecf9a11632147eb7333",
},
{
name: "with non empty followed by empty string in file patterns",
args: args{
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
versions: map[string]int{
"alpine": 1,
"debian": 1,
},
patterns: []string{"test", ""},
},
want: "sha256:51685eab32590231b0c9b1114e556cb3247ead73bfd86ecf9a11632147eb7333",
},
{
name: "with non empty preceded by empty string in file patterns",
args: args{
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
versions: map[string]int{
"alpine": 1,
"debian": 1,
},
patterns: []string{"", "test"},
},
want: "sha256:51685eab32590231b0c9b1114e556cb3247ead73bfd86ecf9a11632147eb7333",
},
{
name: "with policy",
args: args{
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
versions: map[string]int{
"alpine": 1,
"debian": 1,
},
policy: []string{"testdata"},
},
want: "sha256:853fc0e8c43f7c764e2319498ad8e6e9a0ee4791ad5de2d223ce093cb9a8aef7",
},
{
name: "with policy/non-existent dir",
args: args{
key: "sha256:5c534be56eca62e756ef2ef51523feda0f19cd7c15bb0c015e3d6e3ae090bf6e",
versions: map[string]int{
"alpine": 1,
"debian": 1,
},
policy: []string{"policydir"},
},
wantErr: "no such file or directory",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := TrimVersionSuffix(tt.args.versioned)
opt := &config.ScannerOption{
FilePatterns: tt.args.patterns,
PolicyPaths: tt.args.policy,
DataPaths: tt.args.data,
}
got, err := CalcKey(tt.args.key, tt.args.versions, opt)
if tt.wantErr != "" {
require.Error(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
return
}
assert.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

@@ -13,29 +13,8 @@ import (
"golang.org/x/xerrors"

"github.com/aquasecurity/fanal/analyzer"
_ "github.com/aquasecurity/fanal/analyzer/command/apk"
_ "github.com/aquasecurity/fanal/analyzer/config/yaml"
_ "github.com/aquasecurity/fanal/analyzer/library/bundler"
_ "github.com/aquasecurity/fanal/analyzer/library/cargo"
_ "github.com/aquasecurity/fanal/analyzer/library/composer"
_ "github.com/aquasecurity/fanal/analyzer/library/gobinary"
_ "github.com/aquasecurity/fanal/analyzer/library/gomod"
_ "github.com/aquasecurity/fanal/analyzer/library/jar"
_ "github.com/aquasecurity/fanal/analyzer/library/npm"
_ "github.com/aquasecurity/fanal/analyzer/library/nuget"
_ "github.com/aquasecurity/fanal/analyzer/library/pipenv"
_ "github.com/aquasecurity/fanal/analyzer/library/poetry"
_ "github.com/aquasecurity/fanal/analyzer/library/yarn"
_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
_ "github.com/aquasecurity/fanal/analyzer/os/amazonlinux"
_ "github.com/aquasecurity/fanal/analyzer/os/debian"
_ "github.com/aquasecurity/fanal/analyzer/os/photon"
_ "github.com/aquasecurity/fanal/analyzer/os/redhatbase"
_ "github.com/aquasecurity/fanal/analyzer/os/suse"
_ "github.com/aquasecurity/fanal/analyzer/os/ubuntu"
_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
_ "github.com/aquasecurity/fanal/analyzer/pkg/dpkg"
_ "github.com/aquasecurity/fanal/analyzer/pkg/rpm"
_ "github.com/aquasecurity/fanal/analyzer/all"
"github.com/aquasecurity/fanal/analyzer/config"
"github.com/aquasecurity/fanal/applier"
"github.com/aquasecurity/fanal/artifact"
aimage "github.com/aquasecurity/fanal/artifact/image"
@@ -54,7 +33,6 @@ func main() {
}

func run() (err error) {
ctx := context.Background()
app := &cli.App{
Name: "fanal",
Usage: "A library to analyze a container image, local filesystem and remote repository",
@@ -63,25 +41,37 @@ func run() (err error) {
Name: "image",
Aliases: []string{"img"},
Usage: "inspect a container image",
Action: globalOption(ctx, imageAction),
Flags: []cli.Flag{
&cli.StringSliceFlag{
Name: "conf-policy",
Usage: "policy paths",
},
},
Action: globalOption(imageAction),
},
{
Name: "archive",
Aliases: []string{"ar"},
Usage: "inspect an image archive",
Action: globalOption(ctx, archiveAction),
Action: globalOption(archiveAction),
},
{
Name: "filesystem",
Aliases: []string{"fs"},
Usage: "inspect a local directory",
Action: globalOption(ctx, fsAction),
Flags: []cli.Flag{
&cli.StringSliceFlag{
Name: "policy",
Usage: "policy paths",
},
},
Action: globalOption(fsAction),
},
{
Name: "repository",
Aliases: []string{"repo"},
Usage: "inspect a remote repository",
Action: globalOption(ctx, repoAction),
Action: globalOption(repoAction),
},
},
Flags: []cli.Flag{
@@ -97,7 +87,7 @@ func run() (err error) {
return app.Run(os.Args)
}

func globalOption(ctx context.Context, f func(context.Context, *cli.Context, cache.Cache) error) func(c *cli.Context) error {
func globalOption(f func(*cli.Context, cache.Cache) error) func(c *cli.Context) error {
return func(c *cli.Context) error {
cacheClient, err := initializeCache(c.String("cache"))
if err != nil {
@@ -112,7 +102,7 @@ func globalOption(ctx context.Context, f func(context.Context, *cli.Context, cac
}
return nil
}
return f(ctx, c, cacheClient)
return f(c, cacheClient)
}
}

@@ -130,35 +120,43 @@ func initializeCache(backend string) (cache.Cache, error) {
return cacheClient, err
}

func imageAction(ctx context.Context, c *cli.Context, fsCache cache.Cache) error {
art, cleanup, err := imageArtifact(ctx, c.Args().First(), fsCache)
func imageAction(c *cli.Context, fsCache cache.Cache) error {
art, cleanup, err := imageArtifact(c.Context, c.Args().First(), fsCache, config.ScannerOption{
PolicyPaths: c.StringSlice("conf-policy"),
})
if err != nil {
return err
}
defer cleanup()
return inspect(ctx, art, fsCache)
return inspect(c.Context, art, fsCache)
}

func archiveAction(ctx context.Context, c *cli.Context, fsCache cache.Cache) error {
func archiveAction(c *cli.Context, fsCache cache.Cache) error {
art, err := archiveImageArtifact(c.Args().First(), fsCache)
if err != nil {
return err
}
return inspect(ctx, art, fsCache)
return inspect(c.Context, art, fsCache)
}

func fsAction(ctx context.Context, c *cli.Context, fsCache cache.Cache) error {
art := localArtifact(c.Args().First(), fsCache)
return inspect(ctx, art, fsCache)
func fsAction(c *cli.Context, fsCache cache.Cache) error {
art, err := local.NewArtifact(c.Args().First(), fsCache, nil, config.ScannerOption{
PolicyPaths: c.StringSlice("policy"),
})
if err != nil {
return err
}

func repoAction(ctx context.Context, c *cli.Context, fsCache cache.Cache) error {
return inspect(c.Context, art, fsCache)
}

func repoAction(c *cli.Context, fsCache cache.Cache) error {
art, cleanup, err := remoteArtifact(c.Args().First(), fsCache)
if err != nil {
return err
}
defer cleanup()
return inspect(ctx, art, fsCache)
return inspect(c.Context, art, fsCache)
}

func inspect(ctx context.Context, art artifact.Artifact, c cache.LocalArtifactCache) error {
@@ -185,20 +183,30 @@ func inspect(ctx context.Context, art artifact.Artifact, c cache.LocalArtifactCa
for _, app := range mergedLayer.Applications {
fmt.Printf("%s (%s): %d\n", app.Type, app.FilePath, len(app.Libraries))
}

if len(mergedLayer.Misconfigurations) > 0 {
fmt.Println("Misconfigurations:")
}
for _, misconf := range mergedLayer.Misconfigurations {
fmt.Printf(" %s: failures %d, warnings %d\n", misconf.FilePath, len(misconf.Failures), len(misconf.Warnings))
}
return nil
}

func imageArtifact(ctx context.Context, imageName string, c cache.ArtifactCache) (artifact.Artifact, func(), error) {
opt := types.DockerOption{
func imageArtifact(ctx context.Context, imageName string, c cache.ArtifactCache, opt config.ScannerOption) (artifact.Artifact, func(), error) {
img, cleanup, err := image.NewDockerImage(ctx, imageName, types.DockerOption{
Timeout: 600 * time.Second,
SkipPing: true,
}

img, cleanup, err := image.NewDockerImage(ctx, imageName, opt)
})
if err != nil {
return nil, func() {}, err
}
return aimage.NewArtifact(img, c, nil), cleanup, nil

art, err := aimage.NewArtifact(img, c, nil, opt)
if err != nil {
return nil, func() {}, err
}
return art, cleanup, nil
}

func archiveImageArtifact(imagePath string, c cache.ArtifactCache) (artifact.Artifact, error) {
@@ -207,13 +215,13 @@ func archiveImageArtifact(imagePath string, c cache.ArtifactCache) (artifact.Art
return nil, err
}

return aimage.NewArtifact(img, c, nil), nil
art, err := aimage.NewArtifact(img, c, nil, config.ScannerOption{})
if err != nil {
return nil, err
}

func localArtifact(dir string, c cache.ArtifactCache) artifact.Artifact {
return local.NewArtifact(dir, c, nil)
return art, nil
}

func remoteArtifact(dir string, c cache.ArtifactCache) (artifact.Artifact, func(), error) {
return remote.NewArtifact(dir, c, nil)
return remote.NewArtifact(dir, c, nil, config.ScannerOption{})
}

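Given the flags wired up above, config scanning can be exercised from the rewritten CLI roughly like this; the target path and image name are placeholders:

fanal fs --policy ./policies /path/to/project
fanal image --conf-policy ./policies alpine:3.11

Both flags feed ScannerOption.PolicyPaths; the other ScannerOption fields (namespaces, data paths, file patterns) are left at their zero values by these actions.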
108
config/parser/dockerfile/dockerfile.go
Normal file
@@ -0,0 +1,108 @@
package dockerfile

import (
"bytes"
"encoding/json"
"strings"

"github.com/moby/buildkit/frontend/dockerfile/instructions"
"github.com/moby/buildkit/frontend/dockerfile/parser"
"golang.org/x/xerrors"
)

// Parser is a Dockerfile parser
type Parser struct {
}

// Resource separates the list of commands by file
type Resource struct {
CommandList map[string][]Command `json:"command"`
}

// Command is the struct for each dockerfile command
type Command struct {
Cmd string
SubCmd string
Flags []string
Value []string
Original string
StartLine int
EndLine int
JSON bool
Stage int
}

// Parse parses a Dockerfile
func (p *Parser) Parse(contents []byte) (interface{}, error) {
r := bytes.NewReader(contents)
parsed, err := parser.Parse(r)
if err != nil {
return nil, xerrors.Errorf("dockerfile parse error: %w", err)
}

fromValue := "args"
from := make(map[string][]Command)

var stages []*instructions.Stage
for _, child := range parsed.AST.Children {
instr, err := instructions.ParseInstruction(child)
if err != nil {
return nil, xerrors.Errorf("process dockerfile instructions: %w", err)
}

stage, ok := instr.(*instructions.Stage)
if ok {
stages = append(stages, stage)
}

if child.Value == "from" {
fromValue = strings.TrimPrefix(child.Original, "FROM ")
}

cmd := Command{
Cmd: child.Value,
Original: child.Original,
Flags: child.Flags,
StartLine: child.StartLine,
EndLine: child.EndLine,
Stage: currentStage(stages),
}

if child.Next != nil && len(child.Next.Children) > 0 {
cmd.SubCmd = child.Next.Children[0].Value
child = child.Next.Children[0]
}

cmd.JSON = child.Attributes["json"]
for n := child.Next; n != nil; n = n.Next {
cmd.Value = append(cmd.Value, n.Value)
}

from[fromValue] = append(from[fromValue], cmd)
}

var resource Resource
resource.CommandList = from

j, err := json.Marshal(resource)
if err != nil {
return nil, xerrors.Errorf("json marshal error: %w", err)
}

var res interface{}
if err = json.Unmarshal(j, &res); err != nil {
return nil, xerrors.Errorf("json unmarshal error: %w", err)
}

return res, nil
}

// Return the index of the current stage. If no stages are present,
// the index is zero.
func currentStage(stages []*instructions.Stage) int {
if len(stages) == 0 {
return 0
}

return len(stages) - 1
}
46
config/parser/dockerfile/dockerfile_test.go
Normal file
@@ -0,0 +1,46 @@
package dockerfile_test

import (
"encoding/json"
"io/ioutil"
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/aquasecurity/fanal/config/parser/dockerfile"
)

func Test_dockerParser_Parse(t *testing.T) {
tests := []struct {
name string
inputFile string
want string
wantErr string
}{
{
name: "happy path",
inputFile: "testdata/Dockerfile.deployment",
want: `{"command":{"foo":[{"Cmd":"from","EndLine":1,"Flags":[],"JSON":false,"Original":"FROM foo","Stage":0,"StartLine":1,"SubCmd":"","Value":["foo"]},{"Cmd":"copy","EndLine":2,"Flags":[],"JSON":false,"Original":"COPY . /","Stage":0,"StartLine":2,"SubCmd":"","Value":[".","/"]},{"Cmd":"run","EndLine":3,"Flags":[],"JSON":false,"Original":"RUN echo hello","Stage":0,"StartLine":3,"SubCmd":"","Value":["echo hello"]}]}}`,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
b, err := ioutil.ReadFile(tt.inputFile)
require.NoError(t, err)

p := dockerfile.Parser{}
got, err := p.Parse(b)

if tt.wantErr != "" {
require.NotNil(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
return
}
assert.NoError(t, err)
gotJson, err := json.Marshal(got)
assert.NoError(t, err)
assert.Equal(t, tt.want, string(gotJson))
})
}
}
3
config/parser/dockerfile/testdata/Dockerfile.deployment
vendored
Normal file
@@ -0,0 +1,3 @@
FROM foo
COPY . /
RUN echo hello
6
config/parser/yaml/testdata/deployment.yaml
vendored
Normal file
@@ -0,0 +1,6 @@
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello-kubernetes
spec:
replicas: 4
31
config/parser/yaml/yaml.go
Normal file
@@ -0,0 +1,31 @@
package yaml

import (
"bytes"

"golang.org/x/xerrors"
"gopkg.in/yaml.v3"
)

// Parser is a YAML parser.
type Parser struct{}

// Parse parses YAML files.
func (p *Parser) Parse(b []byte) (interface{}, error) {
var v interface{}
if err := yaml.Unmarshal(b, &v); err != nil {
return nil, xerrors.Errorf("unmarshal yaml: %w", err)
}

return v, nil
}

// SeparateSubDocuments separates a YAML file into its sub-documents.
func (p *Parser) SeparateSubDocuments(data []byte) [][]byte {
linebreak := "\n"
if bytes.Contains(data, []byte("\r\n---\r\n")) {
linebreak = "\r\n"
}

return bytes.Split(data, []byte(linebreak+"---"+linebreak))
}
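As a quick illustration of the line-break sniffing in SeparateSubDocuments, a CRLF-separated file splits the same way as an LF one; the input bytes here are made up:

p := &yaml.Parser{}
docs := p.SeparateSubDocuments([]byte("kind: Pod\r\n---\r\nkind: Service"))
// docs == [][]byte{[]byte("kind: Pod"), []byte("kind: Service")}

Note that the separator must sit on its own line: a literal "---" embedded mid-line is not treated as a document boundary because it lacks the surrounding line breaks.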
76
config/parser/yaml/yaml_test.go
Normal file
@@ -0,0 +1,76 @@
package yaml_test

import (
"io/ioutil"
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"

"github.com/aquasecurity/fanal/config/parser/yaml"
)

func TestParser_Parse(t *testing.T) {
tests := []struct {
name string
inputFile string
want interface{}
wantErr string
}{
{
name: "happy path",
inputFile: "testdata/deployment.yaml",
want: map[string]interface{}{
"apiVersion": "apps/v1",
"kind": "Deployment",
"metadata": map[string]interface{}{
"name": "hello-kubernetes",
},
"spec": map[string]interface{}{
"replicas": 4,
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
b, err := ioutil.ReadFile(tt.inputFile)
require.NoError(t, err)
p := yaml.Parser{}
got, err := p.Parse(b)
if tt.wantErr != "" {
require.NotNil(t, err)
assert.Contains(t, err.Error(), tt.wantErr)
return
}
assert.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}
}

func TestParser_SeparateSubDocuments(t *testing.T) {
tests := []struct {
name string
data []byte
want [][]byte
}{
{
name: "happy path",
data: []byte(`kind: Pod
---
kind: Service`),
want: [][]byte{
[]byte(`kind: Pod`),
[]byte(`kind: Service`),
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
p := &yaml.Parser{}
got := p.SeparateSubDocuments(tt.data)
assert.Equal(t, tt.want, got)
})
}
}
27
config/scanner/detection.rego
Normal file
@@ -0,0 +1,27 @@
package config.type

# Kubernetes
detect[type] {
input.apiVersion != ""
input.kind != ""
input.metadata != ""
input.spec != ""
type := "kubernetes"
}

# AWS CloudFormation
detect[type] {
input.AWSTemplateFormatVersion != ""
type := "cloudformation"
}

# Ansible Playbook
detect[type] {
count(input) > 0
count({x |
input[x].name != "";
input[x].hosts != "";
input[x].tasks != ""
}) == count(input)
type := "ansible"
}
94
config/scanner/detection_test.rego
Normal file
@@ -0,0 +1,94 @@
package config.type

test_detect_kubernetes {
result := detect with input as {
"apiVersion": "apps/v1",
"kind": "Pod",
"metadata": {
"name": "test"
},
"spec": {
"containers": {
"name": "nginx",
"image": "nginx:1.14.2",
}
}
}
result[_] == "kubernetes"
}

test_detect_non_kubernetes {
result := detect with input as {
"apiVersion": "apps/v1",
"kind": "Pod",
"metadata": {
"name": "test"
},
}
count({x | result[x] == "kubernetes"}) == 0
}

test_detect_cloudformation {
result := detect with input as {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "A sample template",
"Resources": {
"MyEC2Instance": {
"Type": "AWS::EC2::Instance"
}
}
}
result[_] == "cloudformation"
}

test_detect_non_cloudformation {
result := detect with input as {
"TemplateFormatVersion": "2010-09-09",
"Description": "A sample template",
"Resources": {
"MyEC2Instance": {
"Type": "AWS::EC2::Instance"
}
}
}
count({x | result[x] == "cloudformation"}) == 0
}

test_detect_ansible {
result := detect with input as [
{
"name": "test",
"hosts": "all",
"tasks": [
{"name": "install dependencies"},
{"name": "setup"}
]
},
{
"name": "test2",
"hosts": "web",
"tasks": [
{"name": "install dependencies"},
{"name": "setup"}
]
}
]
result[_] == "ansible"
}

test_detect_non_ansible {
result := detect with input as [
{
"name": "test",
"hosts": "all",
"tasks": [
{"name": "install dependencies"},
{"name": "setup"}
]
},
{
"name": "test2"
}
]
count({x | result[x] == "ansible"}) == 0
}
89
config/scanner/scanner.go
Normal file
@@ -0,0 +1,89 @@
package scanner

import (
"context"
_ "embed"

"github.com/open-policy-agent/opa/rego"
"golang.org/x/xerrors"

"github.com/aquasecurity/fanal/policy"
"github.com/aquasecurity/fanal/types"
)

var (
//go:embed detection.rego
defaultDetectionModule string
)

type Scanner struct {
namespaces []string
engine *policy.Engine
}

func New(namespaces, policyPaths, dataPaths []string) (Scanner, error) {
if len(namespaces) == 0 || len(policyPaths) == 0 {
return Scanner{}, nil
}

engine, err := policy.Load(policyPaths, dataPaths)
if err != nil {
return Scanner{}, xerrors.Errorf("policy load error: %w", err)
}

return Scanner{
namespaces: namespaces,
engine: engine,
}, nil
}

func (s Scanner) ScanConfigs(ctx context.Context, files []types.Config) ([]types.Misconfiguration, error) {
if len(s.namespaces) == 0 {
return nil, nil
}

var configs []types.Config
for _, file := range files {
// Detect config types such as CloudFormation and Kubernetes.
configType, err := detectType(ctx, file.Content)
if err != nil {
return nil, xerrors.Errorf("unable to detect config type: %w", err)
}
if configType != "" {
file.Type = configType
}

configs = append(configs, file)
}

misconfs, err := s.engine.Check(ctx, configs, s.namespaces)
if err != nil {
return nil, xerrors.Errorf("failed to scan: %w", err)
}

return misconfs, nil
}

func detectType(ctx context.Context, input interface{}) (string, error) {
results, err := rego.New(
rego.Input(input),
rego.Query("x = data.config.type.detect"),
rego.Module("detection.rego", defaultDetectionModule),
).Eval(ctx)

if err != nil {
return "", xerrors.Errorf("rego eval error: %w", err)
}

for _, result := range results {
for _, configType := range result.Bindings["x"].([]interface{}) {
v, ok := configType.(string)
if !ok {
return "", xerrors.Errorf("'detect' must return string")
}
// Return the first element
return v, nil
}
}
return "", nil
}
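A minimal invocation of the scanner, mirroring the test below; the types.Config field names (Type, FilePath, Content) are inferred from the positional literal used in that test, and content is a placeholder:

s, err := scanner.New([]string{"testdata"}, []string{"testdata/valid"}, nil)
if err != nil {
    return err
}
misconfs, err := s.ScanConfigs(context.Background(), []types.Config{
    {Type: types.Kubernetes, FilePath: "deployment.yaml", Content: content},
})
// Both namespaces and policy paths must be non-empty; otherwise New returns a
// no-op Scanner and ScanConfigs yields nil without evaluating any policies.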
110
config/scanner/scanner_test.go
Normal file
110
config/scanner/scanner_test.go
Normal file
@@ -0,0 +1,110 @@
|
||||
package scanner_test
|
||||
|
||||
import (
|
||||
"context"
|
||||
"sort"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/aquasecurity/fanal/config/scanner"
|
||||
"github.com/aquasecurity/fanal/types"
|
||||
)
|
||||
|
||||
func TestScanner_ScanConfig(t *testing.T) {
|
||||
// only does basic tests
|
||||
// check for misconfigurations in implementations
|
||||
tests := []struct {
|
||||
name string
|
||||
policyPaths []string
|
||||
dataPaths []string
|
||||
configType string
|
||||
content interface{}
|
||||
namespaces []string
|
||||
want types.Misconfiguration
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "happy path",
|
||||
policyPaths: []string{"testdata/valid/100.rego"},
|
||||
configType: types.Kubernetes,
|
||||
content: map[string]interface{}{
|
||||
"apiVersion": "apps/v1",
|
||||
"kind": "Deployment",
|
||||
},
|
||||
namespaces: []string{"testdata"},
|
||||
want: types.Misconfiguration{
|
||||
FileType: "kubernetes",
|
||||
FilePath: "deployment.yaml",
|
||||
Failures: []types.MisconfResult{
|
||||
{
|
||||
Namespace: "testdata.kubernetes.id_100",
|
||||
Message: "deny",
|
||||
PolicyMetadata: types.PolicyMetadata{
|
||||
Type: "Kubernetes Security Check",
|
||||
Title: "Bad Deployment",
|
||||
ID: "ID-100",
|
||||
Severity: "HIGH",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "happy path with multiple policies",
|
||||
policyPaths: []string{"testdata/valid/"},
|
||||
configType: types.Kubernetes,
|
||||
content: map[string]interface{}{
|
||||
"apiVersion": "apps/v1",
|
||||
"kind": "Deployment",
|
||||
},
|
||||
namespaces: []string{"testdata"},
|
||||
want: types.Misconfiguration{
|
||||
FileType: "kubernetes",
|
||||
FilePath: "deployment.yaml",
|
||||
Successes: types.MisconfResults(nil),
|
||||
Warnings: types.MisconfResults(nil),
|
||||
Failures: types.MisconfResults{
|
||||
types.MisconfResult{
|
||||
Namespace: "testdata.docker.id_300",
|
||||
Message: "deny",
|
||||
PolicyMetadata: types.PolicyMetadata{ID: "N/A", Type: "N/A", Title: "N/A", Severity: "UNKNOWN"},
|
||||
},
|
||||
types.MisconfResult{
|
||||
Namespace: "testdata.kubernetes.id_100",
|
||||
Message: "deny",
|
||||
PolicyMetadata: types.PolicyMetadata{ID: "ID-100", Type: "Kubernetes Security Check", Title: "Bad Deployment", Severity: "HIGH"},
|
||||
},
|
||||
types.MisconfResult{
|
||||
Namespace: "testdata.kubernetes.id_200",
|
||||
Message: "deny",
|
||||
PolicyMetadata: types.PolicyMetadata{ID: "ID-200", Type: "Kubernetes Security Check", Title: "Bad Deployment", Severity: "CRITICAL"},
|
||||
},
|
||||
}, Exceptions: types.MisconfResults(nil), Layer: types.Layer{Digest: "", DiffID: ""},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
s, err := scanner.New(tt.namespaces, tt.policyPaths, tt.dataPaths)
|
||||
require.NoError(t, err)
|
||||
|
||||
got, err := s.ScanConfigs(context.Background(), []types.Config{{tt.configType, "deployment.yaml", tt.content}})
|
||||
if tt.wantErr != "" {
|
||||
require.Error(t, err)
|
||||
assert.Contains(t, err.Error(), tt.wantErr)
|
||||
assert.Nil(t, got)
|
||||
return
|
||||
}
|
||||
|
||||
sort.Slice(got[0].Failures, func(i, j int) bool {
|
||||
return got[0].Failures[i].Namespace < got[0].Failures[j].Namespace
|
||||
})
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, tt.want, got[0])
|
||||
})
|
||||
}
|
||||
}
|
||||
3
config/scanner/testdata/invalid/invalid.rego
vendored
Normal file
3
config/scanner/testdata/invalid/invalid.rego
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
package testdata
|
||||
|
||||
deny[msg]
|
||||
14
config/scanner/testdata/valid/100.rego
vendored
Normal file
14
config/scanner/testdata/valid/100.rego
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
package testdata.kubernetes.id_100
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "ID-100",
|
||||
"title": "Bad Deployment",
|
||||
"version": "v1.0.0",
|
||||
"severity": "HIGH",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
deny[res] {
|
||||
input.kind == "Deployment"
|
||||
res := {"type": "Kubernetes Check", "id": "ID-100", "msg": "deny", "severity": "CRITICAL"}
|
||||
}
|
||||
14
config/scanner/testdata/valid/200.rego
vendored
Normal file
14
config/scanner/testdata/valid/200.rego
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
package testdata.kubernetes.id_200
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "ID-200",
|
||||
"title": "Bad Deployment",
|
||||
"version": "v1.0.0",
|
||||
"severity": "CRITICAL",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
deny[msg] {
|
||||
input.kind == "Deployment"
|
||||
msg := "deny"
|
||||
}
|
||||
6
config/scanner/testdata/valid/300.rego
vendored
Normal file
6
config/scanner/testdata/valid/300.rego
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
package testdata.docker.id_300
|
||||
|
||||
deny[res] {
|
||||
input.kind = "Deployment"
|
||||
res := {"type": "Docker Check", "id": "ID-300", "msg": "deny", "severity": "HIGH"}
|
||||
}
|
||||
12
go.mod
12
go.mod
@@ -1,6 +1,6 @@
|
||||
module github.com/aquasecurity/fanal
|
||||
|
||||
go 1.13
|
||||
go 1.16
|
||||
|
||||
require (
|
||||
github.com/GoogleCloudPlatform/docker-credential-gcr v1.5.0
|
||||
@@ -28,7 +28,10 @@ require (
|
||||
github.com/knqyf263/nested v0.0.1
|
||||
github.com/kylelemons/godebug v1.1.0
|
||||
github.com/magefile/mage v1.11.0 // indirect
|
||||
github.com/mitchellh/mapstructure v1.4.1
|
||||
github.com/moby/buildkit v0.8.1
|
||||
github.com/open-policy-agent/conftest v0.23.0
|
||||
github.com/open-policy-agent/opa v0.25.2
|
||||
github.com/opencontainers/go-digest v1.0.0
|
||||
github.com/opencontainers/image-spec v1.0.2-0.20190823105129-775207bd45b6
|
||||
github.com/saracen/walker v0.0.0-20191201085201-324a081bae7e
|
||||
@@ -36,13 +39,16 @@ require (
|
||||
github.com/sosedoff/gitkit v0.2.0
|
||||
github.com/stretchr/testify v1.7.0
|
||||
github.com/testcontainers/testcontainers-go v0.9.1-0.20210218153226-c8e070a2f18d
|
||||
github.com/urfave/cli/v2 v2.2.0
|
||||
github.com/urfave/cli/v2 v2.3.0
|
||||
go.etcd.io/bbolt v1.3.5
|
||||
go.opencensus.io v0.22.6 // indirect
|
||||
go.uber.org/zap v1.16.0
|
||||
golang.org/x/mod v0.3.0
|
||||
golang.org/x/net v0.0.0-20210220033124-5f55cee0dc0d // indirect
|
||||
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9
|
||||
golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1
|
||||
google.golang.org/genproto v0.0.0-20210219173056-d891e3cb3b5b // indirect
|
||||
google.golang.org/grpc v1.35.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c
|
||||
)
|
||||
|
||||
17
go.sum
17
go.sum
@@ -116,6 +116,7 @@ github.com/Microsoft/hcsshim v0.8.14/go.mod h1:NtVKoYxQuTLx6gEq0L96c9Ju4JbRJ4nY2
github.com/Microsoft/hcsshim/test v0.0.0-20200826032352-301c83a30e7c/go.mod h1:30A5igQ91GEmhYJF8TaRP79pMBOYynRsyOByfVV0dU4=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8=
github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q=
github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM=
github.com/PuerkitoBio/purell v1.0.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
@@ -369,7 +370,6 @@ github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLi
github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fortytw2/leaktest v1.2.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g=
@@ -450,6 +450,7 @@ github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslW
github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU=
github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4=
github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
@@ -787,6 +788,8 @@ github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0Qu
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.3.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A=
github.com/moby/buildkit v0.8.1 h1:zrGxLwffKM8nVxBvaJa7H404eQLfqlg1GB6YVIzXVQ0=
github.com/moby/buildkit v0.8.1/go.mod h1:/kyU1hKy/aYCuP39GZA9MaKioovHku57N6cqlKZIaiQ=
@@ -862,6 +865,7 @@ github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDs
github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk=
github.com/open-policy-agent/conftest v0.23.0 h1:i/cmUjNKDz973vR1cm+x3DqTei/jBPosPvjeot6+p9M=
github.com/open-policy-agent/conftest v0.23.0/go.mod h1:NA6+vKd93pb04H9jiV3WRGJKLj/pzYdQg7XCdoPPUDI=
github.com/open-policy-agent/opa v0.25.2 h1:zTQuUMvB5xkYixKB9LFVbUd7DcUt1jfS0QKTo+/Vfyc=
github.com/open-policy-agent/opa v0.25.2/go.mod h1:iGThTRECCfKQKICueOZkXUi0opN7BR3qiAnIrNHCmlI=
github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s=
@@ -958,6 +962,7 @@ github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40T
github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI=
github.com/quasilyte/go-ruleguard v0.1.2-0.20200318202121-b00d7a75d3d8/go.mod h1:CGFX09Ci3pq9QZdj86B+VGIdNj4VyCo2iPOGS9esB/k=
github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0 h1:MkV+77GLUNo5oJ0jf870itWm3D0Sjh7+Za9gazKc5LQ=
github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/remyoudompheng/bigfft v0.0.0-20170806203942-52369c62f446/go.mod h1:uYEyJGbgTkfkS4+E/PavXkNJcbFIpEtjt2B0KDQ5+9M=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
@@ -1097,8 +1102,8 @@ github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtX
github.com/urfave/cli v1.22.2 h1:gsqYFH8bb9ekPA12kRo0hfjngWQjkJPlN9R0N78BoUo=
github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ=
github.com/urfave/cli/v2 v2.2.0 h1:JTTnM6wKzdA0Jqodd966MVj4vWbbquZykeX1sKbe2C4=
github.com/urfave/cli/v2 v2.2.0/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ=
github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M=
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
github.com/uudashr/gocognit v1.0.1/go.mod h1:j44Ayx2KW4+oB6SWMv8KsmHzZrOInQav7D3cQMJ5JUM=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.2.0/go.mod h1:4vX61m6KN+xDduDNwXrhIAVZaZaZiQ1luJk8LWSxF3s=
@@ -1112,6 +1117,7 @@ github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6Ac
github.com/vmihailenco/msgpack/v4 v4.3.12/go.mod h1:gborTTJjAo/GWTqqRjrLCn9pgNN+NXzzngzBKDPIqw4=
github.com/vmihailenco/tagparser v0.1.1/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI=
github.com/vmware/govmomi v0.20.3/go.mod h1:URlwyTFZX72RmxtxuaFL2Uj3fD1JTvZdx59bHWk6aFU=
github.com/wasmerio/go-ext-wasm v0.3.1 h1:G95XP3fE2FszQSwIU+fHPBYzD0Csmd2ef33snQXNA5Q=
github.com/wasmerio/go-ext-wasm v0.3.1/go.mod h1:VGyarTzasuS7k5KhSIGpM3tciSZlkP31Mp9VJTHMMeI=
github.com/willf/bitset v1.1.10/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/willf/bitset v1.1.11-0.20200630133818-d5bec3311243/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
@@ -1125,6 +1131,7 @@ github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf
github.com/xi2/xz v0.0.0-20171230120015-48954b6210f8/go.mod h1:HUYIGzjTL3rfEspMxjDjgmT5uz5wzYJKVo23qUhYTos=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b h1:vVRagRXf67ESqAb72hG2C/ZwI8NtJF2u2V76EsuOHGY=
github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b/go.mod h1:HptNXiXVDcJjXe9SqMd0v2FsL9f8dz4GnXgltU6q/co=
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@@ -1304,9 +1311,8 @@ golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9 h1:SQFwaSi55rU7vdNs9Yr0Z324VNlrF+0wMqRXT4St8ck=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -1621,6 +1627,7 @@ gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRN
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

16
log/log.go
Normal file
@@ -0,0 +1,16 @@
package log

import (
    "go.uber.org/zap"
)

var Logger *zap.SugaredLogger

func init() {
    logger, _ := zap.NewProduction()
    Logger = logger.Sugar()
}

func SetLogger(l *zap.SugaredLogger) {
    Logger = l
}
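The package-level logger above defaults to zap's production configuration, and SetLogger lets a consumer swap it out. A minimal sketch of how a caller might inject a development logger (the import path follows the module path; the log message is illustrative):

package main

import (
    "go.uber.org/zap"

    "github.com/aquasecurity/fanal/log"
)

func main() {
    // Replace the default production logger with a development logger
    // that prints human-friendly output.
    logger, err := zap.NewDevelopment()
    if err != nil {
        panic(err)
    }
    log.SetLogger(logger.Sugar())

    log.Logger.Infof("analyzing %s", "alpine:3.13")
}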
728
policy/engine.go
Normal file
@@ -0,0 +1,728 @@
package policy

import (
    "bytes"
    "context"
    "fmt"
    "io/ioutil"
    "os"
    "path/filepath"
    "regexp"
    "sort"
    "strings"

    "github.com/mitchellh/mapstructure"
    "github.com/open-policy-agent/opa/ast"
    "github.com/open-policy-agent/opa/loader"
    "github.com/open-policy-agent/opa/rego"
    "github.com/open-policy-agent/opa/storage"
    "github.com/open-policy-agent/opa/topdown"
    "github.com/open-policy-agent/opa/version"
    "golang.org/x/xerrors"

    "github.com/aquasecurity/fanal/types"
    "github.com/aquasecurity/fanal/utils"
)

// Engine represents the policy engine.
type Engine struct {
    modules  map[string]*ast.Module
    compiler *ast.Compiler
    store    storage.Store
    policies map[string]string
    docs     map[string]string
}

// Load returns an Engine after loading all of the specified policies and data paths.
func Load(policyPaths []string, dataPaths []string) (*Engine, error) {
    policies, err := loader.AllRegos(policyPaths)
    if err != nil {
        return nil, xerrors.Errorf("load: %w", err)
    } else if len(policies.Modules) == 0 {
        return nil, xerrors.Errorf("no policies found in %v", policyPaths)
    }

    compiler, err := policies.Compiler()
    if err != nil {
        return nil, xerrors.Errorf("get compiler: %w", err)
    }

    policyContents := make(map[string]string)
    for path, module := range policies.ParsedModules() {
        path = filepath.Clean(path)
        path = filepath.ToSlash(path)

        policyContents[path] = module.String()
    }

    modules := policies.ParsedModules()

    store, docs, err := loadData(dataPaths, allNamespaces(modules))
    if err != nil {
        return nil, xerrors.Errorf("unable to load data: %w", err)
    }

    return &Engine{
        modules:  modules,
        compiler: compiler,
        policies: policyContents,
        store:    store,
        docs:     docs,
    }, nil
}

func allNamespaces(modules map[string]*ast.Module) []string {
    uniq := map[string]struct{}{}
    for _, module := range modules {
        namespace := strings.Replace(module.Package.Path.String(), "data.", "", 1)
        uniq[namespace] = struct{}{}
    }

    var namespaces []string
    for ns := range uniq {
        namespaces = append(namespaces, ns)
    }
    return namespaces
}

func loadData(dataPaths, namespaces []string) (storage.Store, map[string]string, error) {
    // FilteredPaths will recursively find all file paths that contain a valid document
    // extension from the given list of data paths.
    allDocumentPaths, err := loader.FilteredPaths(dataPaths, func(abspath string, info os.FileInfo, depth int) bool {
        if info.IsDir() {
            return false
        }
        ext := strings.ToLower(filepath.Ext(info.Name()))
        return !utils.StringInSlice(ext, []string{".yaml", ".yml", ".json"})
    })
    if err != nil {
        return nil, nil, xerrors.Errorf("filter data paths: %w", err)
    }

    documents, err := loader.NewFileLoader().All(allDocumentPaths)
    if err != nil {
        return nil, nil, xerrors.Errorf("load documents: %w", err)
    }

    // Pass all namespaces so that Rego rule can refer to namespaces as data.namespaces
    documents.Documents["namespaces"] = namespaces

    store, err := documents.Store()
    if err != nil {
        return nil, nil, xerrors.Errorf("get documents store: %w", err)
    }

    documentContents := make(map[string]string)
    for _, documentPath := range allDocumentPaths {
        contents, err := ioutil.ReadFile(documentPath)
        if err != nil {
            return nil, nil, xerrors.Errorf("read file: %w", err)
        }

        documentPath = filepath.Clean(documentPath)
        documentPath = filepath.ToSlash(documentPath)
        documentContents[documentPath] = string(contents)
    }

    return store, documentContents, nil
}

// Compiler returns the compiler from the loaded policies.
func (e *Engine) Compiler() *ast.Compiler {
    return e.compiler
}

// Store returns the store from the loaded documents.
func (e *Engine) Store() storage.Store {
    return e.store
}

// Modules returns the modules from the loaded policies.
func (e *Engine) Modules() map[string]*ast.Module {
    return e.modules
}

// Runtime returns the runtime of the engine.
func (e *Engine) Runtime() *ast.Term {
    env := ast.NewObject()
    for _, pair := range os.Environ() {
        parts := strings.SplitN(pair, "=", 2)
        if len(parts) == 1 {
            env.Insert(ast.StringTerm(parts[0]), ast.NullTerm())
        } else if len(parts) > 1 {
            env.Insert(ast.StringTerm(parts[0]), ast.StringTerm(parts[1]))
        }
    }

    obj := ast.NewObject()
    obj.Insert(ast.StringTerm("env"), ast.NewTerm(env))
    obj.Insert(ast.StringTerm("version"), ast.StringTerm(version.Version))
    obj.Insert(ast.StringTerm("commit"), ast.StringTerm(version.Vcs))

    return ast.NewTerm(obj)
}

// Check executes all of the loaded policies against the input and returns the results.
func (e *Engine) Check(ctx context.Context, configs []types.Config, namespaces []string) ([]types.Misconfiguration, error) {
    // e.g. kubernetes => {Type: "kubernetes", FilePath: "deployment.yaml", Content: ...}
    typedConfigs := map[string][]types.Config{}
    for _, c := range configs {
        typedConfigs[c.Type] = append(typedConfigs[c.Type], c)
    }

    uniqMisconfs := map[string]types.Misconfiguration{}
    for _, module := range e.Modules() {
        currentNamespace := strings.Replace(module.Package.Path.String(), "data.", "", 1)
        if !underNamespaces(currentNamespace, namespaces) {
            continue
        }

        metadata, err := e.queryMetadata(ctx, currentNamespace)
        if err != nil {
            return nil, xerrors.Errorf("failed to query metadata: %w", err)
        }

        inputOption, err := e.queryInputOption(ctx, currentNamespace)
        if err != nil {
            return nil, xerrors.Errorf("failed to query input option: %w", err)
        }

        var rules []string
        for r := range module.Rules {
            currentRule := module.Rules[r].Head.Name.String()
            if isFailure(currentRule) || isWarning(currentRule) {
                rules = append(rules, currentRule)
            }
        }

        var selectedConfigs []types.Config
        if len(inputOption.Selector.Types) > 0 {
            // Pass only the config files that match the selector types
            for _, t := range inputOption.Selector.Types {
                selectedConfigs = append(selectedConfigs, typedConfigs[t]...)
            }
        } else {
            // When the 'types' is not specified, it means '*'.
            selectedConfigs = configs
        }

        var result map[string]types.Misconfiguration
        if inputOption.Combine {
            result, err = e.checkCombined(ctx, currentNamespace, rules, selectedConfigs, metadata)
        } else {
            result, err = e.check(ctx, currentNamespace, rules, selectedConfigs, metadata)
        }
        if err != nil {
            return nil, xerrors.Errorf("policy check error: %w", err)
        }

        for filePath, misconf := range result {
            uniqMisconfs[filePath] = mergeMisconfs(misconf, uniqMisconfs[filePath])
        }
    }

    return toMisconfigurations(uniqMisconfs), nil
}

func (e Engine) check(ctx context.Context, currentNamespace string, rules []string, configs []types.Config,
    metadata types.PolicyMetadata) (map[string]types.Misconfiguration, error) {

    // Initialize misconfigurations
    misconfs := map[string]types.Misconfiguration{}
    for _, c := range configs {
        misconfs[c.FilePath] = types.Misconfiguration{
            FileType: c.Type,
            FilePath: c.FilePath,
        }
    }

    for _, config := range configs {
        for _, rule := range rules {
            result, err := e.checkRule(ctx, currentNamespace, rule, config.Content, metadata)
            if err != nil {
                return nil, xerrors.Errorf("check rule: %w", err)
            }
            misconfs[config.FilePath] = mergeMisconfs(misconfs[config.FilePath], result)
        }
    }

    return misconfs, nil
}

type combinedInput struct {
    Path     string      `json:"path"`
    Contents interface{} `json:"contents"`
}

func (e Engine) checkCombined(ctx context.Context, currentNamespace string, rules []string, configs []types.Config,
    metadata types.PolicyMetadata) (map[string]types.Misconfiguration, error) {
    var inputs []combinedInput
    misconfs := map[string]types.Misconfiguration{}
    for _, c := range configs {
        inputs = append(inputs, combinedInput{
            Path:     c.FilePath,
            Contents: c.Content,
        })
        misconfs[c.FilePath] = types.Misconfiguration{
            FileType: c.Type,
            FilePath: c.FilePath,
        }
    }

    for _, rule := range rules {
        results, err := e.checkRuleCombined(ctx, currentNamespace, rule, inputs, metadata)
        if err != nil {
            return nil, err
        }

        for filePath, res := range results {
            misconfs[filePath] = mergeMisconfs(misconfs[filePath], res)
        }
    }

    return misconfs, nil
}

func (e *Engine) checkRule(ctx context.Context, namespace, rule string, input interface{}, metadata types.PolicyMetadata) (
    types.Misconfiguration, error) {
    // Exceptions based on namespace and rule
    exceptions, err := e.exceptions(ctx, namespace, rule, input, metadata)
    if err != nil {
        return types.Misconfiguration{}, xerrors.Errorf("exception error: %w", err)
    } else if len(exceptions) > 0 {
        return types.Misconfiguration{
            Exceptions: exceptions,
        }, nil
    }

    ruleQuery := fmt.Sprintf("data.%s.%s", namespace, rule)
    ruleQueryResult, err := e.query(ctx, input, ruleQuery)
    if err != nil {
        return types.Misconfiguration{}, xerrors.Errorf("query rule: %w", err)
    }

    var successes, failures, warnings []types.MisconfResult
    for _, ruleResult := range ruleQueryResult.results {
        result := types.MisconfResult{
            Namespace:      namespace,
            Message:        ruleResult.Message,
            PolicyMetadata: metadata,
        }

        if ruleResult.Message == "" {
            continue
        } else if isFailure(rule) {
            failures = append(failures, result)
        } else {
            warnings = append(warnings, result)
        }
    }

    if len(failures) == 0 && len(warnings) == 0 {
        successes = append(successes, types.MisconfResult{
            Namespace:      namespace,
            PolicyMetadata: metadata,
        })
    }

    return types.Misconfiguration{
        Successes: successes,
        Failures:  failures,
        Warnings:  warnings,
    }, nil
}

func (e *Engine) checkRuleCombined(ctx context.Context, namespace, rule string, inputs []combinedInput, metadata types.PolicyMetadata) (
    map[string]types.Misconfiguration, error) {
    misconfs := map[string]types.Misconfiguration{}

    // Exceptions based on namespace and rule
    exceptions, err := e.exceptions(ctx, namespace, rule, inputs, metadata)
    if err != nil {
        return nil, xerrors.Errorf("exception error: %w", err)
    } else if len(exceptions) > 0 {
        for _, input := range inputs {
            misconfs[input.Path] = types.Misconfiguration{
                FilePath:   input.Path,
                Exceptions: exceptions,
            }
        }
        return misconfs, nil
    }

    ruleQuery := fmt.Sprintf("data.%s.%s", namespace, rule)
    ruleQueryResult, err := e.query(ctx, inputs, ruleQuery)
    if err != nil {
        return nil, xerrors.Errorf("query rule: %w", err)
    }

    // Fill failures and warnings
    for _, ruleResult := range ruleQueryResult.results {
        switch {
        case ruleResult.Message == "":
            continue
        case ruleResult.FilePath == "":
            return nil, xerrors.Errorf("rule missing 'filepath' field")
        }

        misconf := misconfs[ruleResult.FilePath]
        result := types.MisconfResult{
            Namespace:      namespace,
            Message:        ruleResult.Message,
            PolicyMetadata: metadata,
        }

        if isFailure(rule) {
            misconf.Failures = append(misconf.Failures, result)
        } else {
            misconf.Warnings = append(misconf.Warnings, result)
        }
        misconfs[ruleResult.FilePath] = misconf
    }

    // Fill successes
    success := types.MisconfResult{
        Namespace:      namespace,
        PolicyMetadata: metadata,
    }
    for _, input := range inputs {
        misconf, ok := misconfs[input.Path]
        if ok {
            continue
        }
        misconf.Successes = append(misconf.Successes, success)
        misconfs[input.Path] = misconf
    }

    return misconfs, nil
}

func (e *Engine) exceptions(ctx context.Context, namespace, rule string, config interface{},
    metadata types.PolicyMetadata) ([]types.MisconfResult, error) {
    exceptions, err := e.namespaceExceptions(ctx, namespace, config, metadata)
    if err != nil {
        return nil, xerrors.Errorf("namespace exceptions: %w", err)
    } else if len(exceptions) > 0 {
        return exceptions, nil
    }

    exceptions, err = e.ruleExceptions(ctx, namespace, rule, config, metadata)
    if err != nil {
        return nil, xerrors.Errorf("rule exceptions: %w", err)
    } else if len(exceptions) > 0 {
        return exceptions, nil
    }

    return nil, nil
}

func (e *Engine) namespaceExceptions(ctx context.Context, namespace string, config interface{},
    metadata types.PolicyMetadata) ([]types.MisconfResult, error) {
    exceptionQuery := fmt.Sprintf("data.namespace.exceptions.exception[_] == %q", namespace)
    exceptionQueryResult, err := e.query(ctx, config, exceptionQuery)
    if err != nil {
        return nil, xerrors.Errorf("query namespace exceptions: %w", err)
    }

    var exceptions []types.MisconfResult
    for _, exceptionResult := range exceptionQueryResult.results {
        // When an exception is found, set the message of the exception
        // to the query that triggered the exception so that it is known
        // which exception was triggered.
        if exceptionResult.Message == "" {
            exceptions = append(exceptions, types.MisconfResult{
                Namespace:      namespace,
                Message:        exceptionQuery,
                PolicyMetadata: metadata,
            })
        }
    }
    return exceptions, nil
}

func (e *Engine) ruleExceptions(ctx context.Context, namespace, rule string, config interface{},
    metadata types.PolicyMetadata) ([]types.MisconfResult, error) {
    exceptionQuery := fmt.Sprintf("data.%s.exception[_][_] == %q", namespace, removeRulePrefix(rule))
    exceptionQueryResult, err := e.query(ctx, config, exceptionQuery)
    if err != nil {
        return nil, xerrors.Errorf("query rule exceptions: %w", err)
    }

    var exceptions []types.MisconfResult
    for _, exceptionResult := range exceptionQueryResult.results {
        // When an exception is found, set the message of the exception
        // to the query that triggered the exception so that it is known
        // which exception was triggered.
        if exceptionResult.Message == "" {
            exceptions = append(exceptions, types.MisconfResult{
                Namespace:      namespace,
                Message:        exceptionQuery,
                PolicyMetadata: metadata,
            })
        }
    }
    return exceptions, nil
}

// queryResult describes the result of evaluating a query.
type queryResult struct {

    // query is the fully qualified query that was used
    // to determine the result. Ex: (data.main.deny)
    query string

    // results are the individual results of the query.
    // When querying data.main.deny, multiple deny rules can
    // exist, producing multiple results.
    results []queryValue

    // traces represents a single trace of how the query was
    // evaluated. Each trace value is a trace line.
    traces []string
}

type queryValue struct {
    FilePath string
    Message  string
}

// query is a low-level method that has no notion of a failed policy or successful policy.
// It only returns the result of executing a single query against the input.
//
// Example queries could include:
// data.main.deny to query the deny rule in the main namespace
// data.main.warn to query the warn rule in the main namespace
func (e *Engine) query(ctx context.Context, input interface{}, query string) (queryResult, error) {
    stdout := topdown.NewBufferTracer()
    options := []func(r *rego.Rego){
        rego.Input(input),
        rego.Query(query),
        rego.Compiler(e.Compiler()),
        rego.Store(e.Store()),
        rego.Runtime(e.Runtime()),
        rego.QueryTracer(stdout),
    }
    resultSet, err := rego.New(options...).Eval(ctx)
    if err != nil {
        return queryResult{}, xerrors.Errorf("evaluating policy: %w", err)
    }

    // After the evaluation of the policy, the results of the trace (stdout) will be populated
    // for the query. Once populated, format the trace results into a human readable format.
    buf := new(bytes.Buffer)
    topdown.PrettyTrace(buf, *stdout)
    var traces []string
    for _, line := range strings.Split(buf.String(), "\n") {
        if len(line) > 0 {
            traces = append(traces, line)
        }
    }

    var results []queryValue
    for _, result := range resultSet {
        for _, expression := range result.Expressions {
            // Rego rules that are intended for evaluation should return a slice of values.
            // For example, deny[msg] or violation[{"msg": msg}].
            //
            // When an expression does not have a slice of values, the expression did not
            // evaluate to true, and no message was returned.
            var expressionValues []interface{}
            if _, ok := expression.Value.([]interface{}); ok {
                expressionValues = expression.Value.([]interface{})
            }
            if len(expressionValues) == 0 {
                results = append(results, queryValue{})
                continue
            }

            for _, v := range expressionValues {
                switch val := v.(type) {
                case string:
                    // Policies that only return a single string (e.g. deny[msg])
                    results = append(results, queryValue{Message: val})
                case map[string]interface{}:
                    msg, filePath, err := parseResult(val)
                    if err != nil {
                        return queryResult{}, xerrors.Errorf("failed to parse query result: %w", err)
                    }

                    results = append(results, queryValue{
                        Message:  strings.TrimSpace(msg),
                        FilePath: filePath,
                    })
                }
            }
        }
    }

    return queryResult{
        query:   query,
        results: results,
        traces:  traces,
    }, nil
}

func (e *Engine) queryMetadata(ctx context.Context, namespace string) (types.PolicyMetadata, error) {
    query := fmt.Sprintf("x = data.%s.__rego_metadata__", namespace)
    options := []func(r *rego.Rego){
        rego.Query(query),
        rego.Compiler(e.Compiler()),
        rego.Store(e.Store()),
    }
    resultSet, err := rego.New(options...).Eval(ctx)
    if err != nil {
        return types.PolicyMetadata{}, xerrors.Errorf("evaluating '__rego_metadata__': %w", err)
    }

    // Set default values
    metadata := types.PolicyMetadata{
        ID:       "N/A",
        Type:     "N/A",
        Title:    "N/A",
        Severity: "UNKNOWN",
    }

    if len(resultSet) == 0 {
        return metadata, nil
    }

    result, ok := resultSet[0].Bindings["x"].(map[string]interface{})
    if !ok {
        return types.PolicyMetadata{}, xerrors.New("'__rego_metadata__' must be map")
    }

    if err = mapstructure.Decode(result, &metadata); err != nil {
        return types.PolicyMetadata{}, xerrors.Errorf("decode error: %w", err)
    }

    return metadata, nil
}

func (e *Engine) queryInputOption(ctx context.Context, namespace string) (types.PolicyInputOption, error) {
    query := fmt.Sprintf("x = data.%s.__rego_input__", namespace)
    options := []func(r *rego.Rego){
        rego.Query(query),
        rego.Compiler(e.Compiler()),
        rego.Store(e.Store()),
    }
    resultSet, err := rego.New(options...).Eval(ctx)
    if err != nil {
        return types.PolicyInputOption{}, xerrors.Errorf("evaluating '__rego_input__': %w", err)
    }

    if len(resultSet) == 0 {
        return types.PolicyInputOption{}, nil
    }

    result, ok := resultSet[0].Bindings["x"].(map[string]interface{})
    if !ok {
        return types.PolicyInputOption{}, xerrors.New("'__rego_input__' must be map")
    }

    // Set default values
    var inputOption types.PolicyInputOption
    if err = mapstructure.Decode(result, &inputOption); err != nil {
        return types.PolicyInputOption{}, xerrors.Errorf("decode error: %w", err)
    }

    return inputOption, nil
}

func parseResult(r map[string]interface{}) (string, string, error) {
    // Policies that return metadata (e.g. deny[{"msg": msg}])
    if _, ok := r["msg"]; !ok {
        return "", "", xerrors.Errorf("rule missing 'msg' field: %v", r)
    }

    msg, ok := r["msg"].(string)
    if !ok {
        return "", "", xerrors.Errorf("'msg' field must be string: %v", r)
    }

    filePath, ok := r["filepath"].(string)
    if !ok {
        return msg, "", nil
    }

    return msg, filePath, nil
}

func isWarning(rule string) bool {
    warningRegex := regexp.MustCompile("^warn(_[a-zA-Z0-9]+)*$")
    return warningRegex.MatchString(rule)
}

func isFailure(rule string) bool {
    failureRegex := regexp.MustCompile("^(deny|violation)(_[a-zA-Z0-9]+)*$")
    return failureRegex.MatchString(rule)
}

// When matching rules for exceptions, only the name of the rule
// is queried, so the severity prefix must be removed.
func removeRulePrefix(rule string) string {
    rule = strings.TrimPrefix(rule, "violation_")
    rule = strings.TrimPrefix(rule, "deny_")
    rule = strings.TrimPrefix(rule, "warn_")

    return rule
}

func uniqueResults(results []types.MisconfResult) []types.MisconfResult {
    uniq := map[string]types.MisconfResult{}
    for _, result := range results {
        key := fmt.Sprintf("%s::%s::%s", result.ID, result.Namespace, result.Message)
        uniq[key] = result
    }

    var uniqResults []types.MisconfResult
    for _, s := range uniq {
        uniqResults = append(uniqResults, s)
    }
    return uniqResults
}

func underNamespaces(current string, namespaces []string) bool {
    // e.g.
    //  current: 'main',     namespaces: []string{'main'}     => true
    //  current: 'main.foo', namespaces: []string{'main'}     => true
    //  current: 'main.foo', namespaces: []string{'main.bar'} => false
    for _, ns := range namespaces {
        if current == ns || strings.HasPrefix(current, ns+".") {
            return true
        }
    }
    return false
}

func toMisconfigurations(misconfs map[string]types.Misconfiguration) []types.Misconfiguration {
    var results []types.Misconfiguration
    for _, misconf := range misconfs {
        // Remove duplicates
        misconf.Successes = uniqueResults(misconf.Successes)

        // Sort results
        sort.Sort(misconf.Successes)
        sort.Sort(misconf.Warnings)
        sort.Sort(misconf.Failures)
        sort.Sort(misconf.Exceptions)

        results = append(results, misconf)
    }

    // Sort misconfigurations
    sort.Slice(results, func(i, j int) bool {
        if results[i].FileType != results[j].FileType {
            return results[i].FileType < results[j].FileType
        }
        return results[i].FilePath < results[j].FilePath
    })

    return results
}

func mergeMisconfs(a, b types.Misconfiguration) types.Misconfiguration {
    a.Successes = append(a.Successes, b.Successes...)
    a.Warnings = append(a.Warnings, b.Warnings...)
    a.Failures = append(a.Failures, b.Failures...)
    a.Exceptions = append(a.Exceptions, b.Exceptions...)
    return a
}
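Tying the pieces together, a minimal sketch of how a caller might drive the engine; the call shape mirrors the tests below, while the paths, namespace, and config content are illustrative placeholders:

package main

import (
    "context"
    "fmt"

    "github.com/aquasecurity/fanal/policy"
    "github.com/aquasecurity/fanal/types"
)

func main() {
    // Load compiles all Rego policies under the given paths and loads
    // YAML/JSON data documents into the engine's store.
    engine, err := policy.Load([]string{"policies/"}, []string{"data/"})
    if err != nil {
        panic(err)
    }

    configs := []types.Config{
        {
            Type:     types.Kubernetes,
            FilePath: "deployment.yaml",
            Content:  map[string]interface{}{"kind": "Deployment"},
        },
    }

    // Only policies under the "main" namespace (and its children) are evaluated.
    misconfs, err := engine.Check(context.Background(), configs, []string{"main"})
    if err != nil {
        panic(err)
    }
    for _, m := range misconfs {
        fmt.Println(m.FilePath, len(m.Failures), "failures")
    }
}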
682
policy/engine_test.go
Normal file
@@ -0,0 +1,682 @@
package policy_test

import (
    "context"
    "testing"

    "github.com/aquasecurity/fanal/policy"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"

    "github.com/aquasecurity/fanal/types"
)

func TestLoad(t *testing.T) {
    type args struct {
        policyPaths []string
        dataPaths   []string
    }
    tests := []struct {
        name    string
        args    args
        wantErr string
    }{
        {
            name: "happy path",
            args: args{
                policyPaths: []string{"testdata/happy"},
                dataPaths:   []string{"testdata/data"},
            },
        },
        {
            name: "broken policy",
            args: args{
                policyPaths: []string{"testdata/sad/broken_rule.rego"},
                dataPaths:   []string{"testdata/data"},
            },
            wantErr: "var msg is unsafe",
        },
        {
            name: "no policies",
            args: args{
                policyPaths: []string{"testdata/data/"},
            },
            wantErr: "no policies found in [testdata/data/]",
        },
        {
            name: "non-existent policy path",
            args: args{
                policyPaths: []string{"testdata/non-existent/"},
            },
            wantErr: "no such file or directory",
        },
        {
            name: "non-existent data path",
            args: args{
                policyPaths: []string{"testdata/happy"},
                dataPaths:   []string{"testdata/non-existent/"},
            },
            wantErr: "no such file or directory",
        },
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            _, err := policy.Load(tt.args.policyPaths, tt.args.dataPaths)
            if tt.wantErr != "" {
                require.NotNil(t, err)
                assert.Contains(t, err.Error(), tt.wantErr)
                return
            }
            assert.NoError(t, err)
        })
    }
}

func TestEngine_Check(t *testing.T) {
    type args struct {
        configs    []types.Config
        namespaces []string
    }
    tests := []struct {
        name        string
        policyPaths []string
        dataPaths   []string
        args        args
        want        []types.Misconfiguration
        wantErr     string
    }{
        {
            name:        "happy path",
            policyPaths: []string{"testdata/happy"},
            dataPaths:   []string{"testdata/data"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test",
                            },
                        },
                    },
                },
                namespaces: []string{"testdata", "dummy"},
            },
            want: []types.Misconfiguration{
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment.yaml",
                    Successes: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_300",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-300",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Pod",
                                Severity: "CRITICAL",
                            },
                        },
                    },
                    Failures: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_100",
                            Message:   "deny test",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "HIGH",
                            },
                        },
                    },
                },
            },
        },
        {
            name:        "combined files",
            policyPaths: []string{"testdata/combine"},
            dataPaths:   []string{"testdata/data"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment1.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test1",
                            },
                        },
                    },
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment2.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test2",
                            },
                        },
                    },
                },
                namespaces: []string{"dummy", "testdata"},
            },
            want: []types.Misconfiguration{
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment1.yaml",
                    Successes: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_400",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-400",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Combined Pod",
                                Severity: "LOW",
                            },
                        },
                    },
                    Failures: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_100",
                            Message:   "deny combined test1",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Combined Deployment",
                                Severity: "HIGH",
                            },
                        },
                    },
                    Warnings: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_200",
                            Message:   "deny test1",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-200",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "MEDIUM",
                            },
                        },
                    },
                },
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment2.yaml",
                    Successes: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_400",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-400",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Combined Pod",
                                Severity: "LOW",
                            },
                        },
                    },
                    Failures: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_100",
                            Message:   "deny combined test2",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Combined Deployment",
                                Severity: "HIGH",
                            },
                        },
                    },
                    Warnings: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_200",
                            Message:   "deny test2",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-200",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "MEDIUM",
                            },
                        },
                    },
                },
            },
        },
        {
            name:        "sub configs",
            policyPaths: []string{"testdata/happy"},
            dataPaths:   []string{"testdata/data"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test1",
                            },
                        },
                    },
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test2",
                            },
                        },
                    },
                },
                namespaces: []string{"testdata", "dummy"},
            },
            want: []types.Misconfiguration{
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment.yaml",
                    Successes: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_300",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-300",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Pod",
                                Severity: "CRITICAL",
                            },
                        },
                    },
                    Failures: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_100",
                            Message:   "deny test1",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "HIGH",
                            },
                        },
                        {
                            Namespace: "testdata.xyz_100",
                            Message:   "deny test2",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "HIGH",
                            },
                        },
                    },
                },
            },
        },
        {
            name:        "namespace exception",
            policyPaths: []string{"testdata/namespace_exception"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test",
                            },
                        },
                    },
                },
                namespaces: []string{"testdata", "dummy"},
            },
            want: []types.Misconfiguration{
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment.yaml",
                    Failures: []types.MisconfResult{
                        {
                            Namespace: "testdata.kubernetes.xyz_200",
                            Message:   "deny 200 test",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-200",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "HIGH",
                            },
                        },
                    },
                    Exceptions: []types.MisconfResult{
                        {
                            Namespace: "testdata.kubernetes.xyz_100",
                            Message:   `data.namespace.exceptions.exception[_] == "testdata.kubernetes.xyz_100"`,
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "HIGH",
                            },
                        },
                    },
                },
            },
        },
        {
            name:        "namespace exception with combined files",
            policyPaths: []string{"testdata/combine_exception"},
            dataPaths:   []string{"testdata/data"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment1.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test1",
                            },
                        },
                    },
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment2.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test2",
                            },
                        },
                    },
                },
                namespaces: []string{"dummy", "testdata"},
            },
            want: []types.Misconfiguration{
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment1.yaml",
                    Warnings: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_100",
                            Message:   "deny combined test1",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Combined Deployment",
                                Severity: "HIGH",
                            },
                        },
                        {
                            Namespace: "testdata.xyz_200",
                            Message:   "deny test1",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-200",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "MEDIUM",
                            },
                        },
                    },
                    Exceptions: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_300",
                            Message:   `data.namespace.exceptions.exception[_] == "testdata.xyz_300"`,
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-300",
                                Type:     "Kubernetes Security Check",
                                Title:    "Always Fail",
                                Severity: "LOW",
                            },
                        },
                    },
                },
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment2.yaml",
                    Warnings: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_100",
                            Message:   "deny combined test2",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Combined Deployment",
                                Severity: "HIGH",
                            },
                        },
                        {
                            Namespace: "testdata.xyz_200",
                            Message:   "deny test2",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-200",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "MEDIUM",
                            },
                        },
                    },
                    Exceptions: []types.MisconfResult{
                        {
                            Namespace: "testdata.xyz_300",
                            Message:   `data.namespace.exceptions.exception[_] == "testdata.xyz_300"`,
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-300",
                                Type:     "Kubernetes Security Check",
                                Title:    "Always Fail",
                                Severity: "LOW",
                            },
                        },
                    },
                },
            },
        },
        {
            name:        "rule exception",
            policyPaths: []string{"testdata/rule_exception"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test",
                            },
                        },
                    },
                },
                namespaces: []string{"testdata", "dummy"},
            },
            want: []types.Misconfiguration{
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment.yaml",
                    Failures: []types.MisconfResult{
                        {
                            Namespace: "testdata.kubernetes.xyz_100",
                            Message:   "deny bar test",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "HIGH",
                            },
                        },
                    },
                    Exceptions: []types.MisconfResult{
                        {
                            Namespace: "testdata.kubernetes.xyz_100",
                            Message:   `data.testdata.kubernetes.xyz_100.exception[_][_] == "foo"`,
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "XYZ-100",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "HIGH",
                            },
                        },
                    },
                },
            },
        },
        {
            name:        "missing id and severity",
            policyPaths: []string{"testdata/sad/missing_metadata_fields.rego"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test",
                            },
                        },
                    },
                },
                namespaces: []string{"testdata", "dummy"},
            },
            want: []types.Misconfiguration{
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment.yaml",
                    Failures: []types.MisconfResult{
                        {
                            Namespace: "testdata.kubernetes.xyz_100",
                            Message:   "deny test",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "N/A",
                                Type:     "Kubernetes Security Check",
                                Title:    "Bad Deployment",
                                Severity: "UNKNOWN",
                            },
                        },
                    },
                },
            },
        },
        {
            name:        "missing __rego_metadata__",
            policyPaths: []string{"testdata/sad/missing_metadata.rego"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test",
                            },
                        },
                    },
                },
                namespaces: []string{"testdata", "dummy"},
            },
            want: []types.Misconfiguration{
                {
                    FileType: types.Kubernetes,
                    FilePath: "deployment.yaml",
                    Failures: []types.MisconfResult{
                        {
                            Namespace: "testdata.kubernetes.xyz_100",
                            Message:   "deny test",
                            PolicyMetadata: types.PolicyMetadata{
                                ID:       "N/A",
                                Type:     "N/A",
                                Title:    "N/A",
                                Severity: "UNKNOWN",
                            },
                        },
                    },
                },
            },
        },
        {
            name:        "missing filepath",
            policyPaths: []string{"testdata/sad/missing_filepath.rego"},
            dataPaths:   []string{"testdata/data"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment1.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                            "metadata": map[string]interface{}{
                                "name": "test1",
                            },
                        },
                    },
                },
                namespaces: []string{"dummy", "testdata"},
            },
            wantErr: `rule missing 'filepath' field`,
        },
        {
            name:        "broken __rego_metadata__",
            policyPaths: []string{"testdata/sad/broken_metadata.rego"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                        },
                    },
                },
                namespaces: []string{"testdata", "dummy"},
            },
            wantErr: "'__rego_metadata__' must be map",
        },
        {
            name:        "broken msg",
            policyPaths: []string{"testdata/sad/broken_msg.rego"},
            args: args{
                configs: []types.Config{
                    {
                        Type:     types.Kubernetes,
                        FilePath: "deployment.yaml",
                        Content: map[string]interface{}{
                            "apiVersion": "apps/v1",
                            "kind":       "Deployment",
                        },
                    },
                },
                namespaces: []string{"testdata", "dummy"},
            },
            wantErr: "rule missing 'msg' field",
        },
    }
    for _, tt := range tests {
        t.Run(tt.name, func(t *testing.T) {
            engine, err := policy.Load(tt.policyPaths, tt.dataPaths)
            require.NoError(t, err)

            got, err := engine.Check(context.Background(), tt.args.configs, tt.args.namespaces)
            if tt.wantErr != "" {
                require.NotNil(t, err)
                assert.Contains(t, err.Error(), tt.wantErr)
                return
            }

            assert.NoError(t, err)
            assert.Equal(t, tt.want, got)
        })
    }
}
25
policy/testdata/combine/combined_deployment.rego
vendored
Normal file
25
policy/testdata/combine/combined_deployment.rego
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
package testdata.xyz_100
|
||||
|
||||
import data.services
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "XYZ-100",
|
||||
"title": "Bad Combined Deployment",
|
||||
"version": "v1.0.0",
|
||||
"severity": "HIGH",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
__rego_input__ := {
|
||||
"selector": {"types": ["kubernetes"]},
|
||||
"combine": true,
|
||||
}
|
||||
|
||||
deny[res] {
|
||||
input[i].contents.kind == "Deployment"
|
||||
services.ports[_] == 22
|
||||
res := {
|
||||
"filepath": input[i].path,
|
||||
"msg": sprintf("deny combined %s", [input[i].contents.metadata.name]),
|
||||
}
|
||||
}
|
||||
24
policy/testdata/combine/combined_pod.rego
vendored
Normal file
24
policy/testdata/combine/combined_pod.rego
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
package testdata.xyz_400
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "XYZ-400",
|
||||
"title": "Bad Combined Pod",
|
||||
"version": "v1.0.0",
|
||||
"severity": "LOW",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
__rego_input__ := {
|
||||
"selector": {
|
||||
"types": ["kubernetes"]
|
||||
},
|
||||
"combine": true,
|
||||
}
|
||||
|
||||
deny[res] {
|
||||
input[i].contents.kind == "Pod"
|
||||
res := {
|
||||
"filepath": input[i].path,
|
||||
"msg": sprintf("deny combined %s", [input[i].contents.metadata.name]),
|
||||
}
|
||||
}
|
||||
22
policy/testdata/combine/deployment.rego
vendored
Normal file
22
policy/testdata/combine/deployment.rego
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
package testdata.xyz_200
|
||||
|
||||
import data.services
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "XYZ-200",
|
||||
"title": "Bad Deployment",
|
||||
"version": "v1.0.0",
|
||||
"severity": "MEDIUM",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
__rego_input__ := {
|
||||
"selector": {"types": ["kubernetes"]},
|
||||
"combine": false,
|
||||
}
|
||||
|
||||
warn[msg] {
|
||||
input.kind == "Deployment"
|
||||
services.ports[_] == 22
|
||||
msg := sprintf("deny %s", [input.metadata.name])
|
||||
}
|
||||
23
policy/testdata/combine/docker.rego
vendored
Normal file
23
policy/testdata/combine/docker.rego
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
package testdata.xyz_300
|
||||
|
||||
import data.services
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "XYZ-300",
|
||||
"title": "Bad Dockerfile",
|
||||
"version": "v1.0.0",
|
||||
"severity": "CRITICAL",
|
||||
"type": "Docker Security Check",
|
||||
}
|
||||
|
||||
__rego_input__ := {
|
||||
"selector": {"types": ["dockerfile"]},
|
||||
"combine": true,
|
||||
}
|
||||
|
||||
deny[res] {
|
||||
res := {
|
||||
"filepath": input[_].path,
|
||||
"msg": "bad",
|
||||
}
|
||||
}
|
||||
25
policy/testdata/combine_exception/combined_deployment.rego
vendored
Normal file
25
policy/testdata/combine_exception/combined_deployment.rego
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
package testdata.xyz_100
|
||||
|
||||
import data.services
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "XYZ-100",
|
||||
"title": "Bad Combined Deployment",
|
||||
"version": "v1.0.0",
|
||||
"severity": "HIGH",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
__rego_input__ := {
|
||||
"selector": {"types": ["kubernetes"]},
|
||||
"combine": true,
|
||||
}
|
||||
|
||||
warn[res] {
|
||||
input[i].contents.kind == "Deployment"
|
||||
services.ports[_] == 22
|
||||
res := {
|
||||
"filepath": input[i].path,
|
||||
"msg": sprintf("deny combined %s", [input[i].contents.metadata.name]),
|
||||
}
|
||||
}
|
||||
22
policy/testdata/combine_exception/deployment.rego
vendored
Normal file
22
policy/testdata/combine_exception/deployment.rego
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
package testdata.xyz_200
|
||||
|
||||
import data.services
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "XYZ-200",
|
||||
"title": "Bad Deployment",
|
||||
"version": "v1.0.0",
|
||||
"severity": "MEDIUM",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
__rego_input__ := {
|
||||
"selector": {"types": ["kubernetes"]},
|
||||
"combine": false,
|
||||
}
|
||||
|
||||
warn[msg] {
|
||||
input.kind == "Deployment"
|
||||
services.ports[_] == 22
|
||||
msg := sprintf("deny %s", [input.metadata.name])
|
||||
}
|
||||
8
policy/testdata/combine_exception/exceptions.rego
vendored
Normal file
8
policy/testdata/combine_exception/exceptions.rego
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
package namespace.exceptions
|
||||
|
||||
import data.namespaces
|
||||
|
||||
exception[ns] {
|
||||
ns := data.namespaces[_]
|
||||
ns == "testdata.xyz_300"
|
||||
}
|
||||
23
policy/testdata/combine_exception/fail.rego
vendored
Normal file
23
policy/testdata/combine_exception/fail.rego
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
package testdata.xyz_300
|
||||
|
||||
import data.services
|
||||
|
||||
__rego_metadata__ := {
|
||||
"id": "XYZ-300",
|
||||
"title": "Always Fail",
|
||||
"version": "v1.0.0",
|
||||
"severity": "LOW",
|
||||
"type": "Kubernetes Security Check",
|
||||
}
|
||||
|
||||
__rego_input__ := {
|
||||
"selector": {"types": ["kubernetes"]},
|
||||
"combine": true,
|
||||
}
|
||||
|
||||
deny[res] {
|
||||
res := {
|
||||
"filepath": input[i].path,
|
||||
"msg": sprintf("deny combined %s", [input[i].contents.metadata.name]),
|
||||
}
|
||||
}
|
||||
3
policy/testdata/data/service.yaml
vendored
Normal file
@@ -0,0 +1,3 @@
services:
  ports:
    - 22
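service.yaml is a data document rather than a policy: loaded under data., it is what the services.ports[_] == 22 conditions in the surrounding policies consult. A small sketch of the parsed shape; gopkg.in/yaml.v3 is an assumed dependency used only for illustration:

package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	src := []byte("services:\n  ports:\n    - 22\n")

	// Parsed, this becomes data.services.ports = [22] for the policies.
	var doc map[string]interface{}
	if err := yaml.Unmarshal(src, &doc); err != nil {
		panic(err)
	}
	fmt.Println(doc) // map[services:map[ports:[22]]]
}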
24
policy/testdata/happy/deployment.rego
vendored
Normal file
@@ -0,0 +1,24 @@
package testdata.xyz_100

import data.services

__rego_metadata__ := {
	"id": "XYZ-100",
	"title": "Bad Deployment",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Kubernetes Security Check",
}

__rego_input__ := {
	"selector": {
		"types": ["kubernetes"]
	},
	"combine": false,
}

deny[msg] {
	input.kind == "Deployment"
	services.ports[_] == 22
	msg := sprintf("deny %s", [input.metadata.name])
}
20
policy/testdata/happy/docker.rego
vendored
Normal file
@@ -0,0 +1,20 @@
package testdata.xyz_200

__rego_metadata__ := {
	"id": "XYZ-200",
	"title": "Bad FROM",
	"version": "v1.0.0",
	"severity": "LOW",
	"type": "Docker Security Check",
}

__rego_input__ := {
	"selector": {
		"types": ["dockerfile"]
	},
	"combine": false,
}

deny[msg] {
	msg := "bad Dockerfile"
}
21
policy/testdata/happy/pod.rego
vendored
Normal file
@@ -0,0 +1,21 @@
package testdata.xyz_300

__rego_metadata__ := {
	"id": "XYZ-300",
	"title": "Bad Pod",
	"version": "v1.0.0",
	"severity": "CRITICAL",
	"type": "Kubernetes Security Check",
}

__rego_input__ := {
	"selector": {
		"types": ["kubernetes"]
	},
	"combine": false,
}

deny[msg] {
	input.kind == "Pod"
	msg := sprintf("deny %s", [input.metadata.name])
}
14
policy/testdata/namespace_exception/100.rego
vendored
Normal file
@@ -0,0 +1,14 @@
package testdata.kubernetes.xyz_100

__rego_metadata__ := {
	"id": "XYZ-100",
	"title": "Bad Deployment",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Kubernetes Security Check",
}

deny[msg] {
	input.kind == "Deployment"
	msg := sprintf("deny 100 %s", [input.metadata.name])
}
14
policy/testdata/namespace_exception/200.rego
vendored
Normal file
@@ -0,0 +1,14 @@
package testdata.kubernetes.xyz_200

__rego_metadata__ := {
	"id": "XYZ-200",
	"title": "Bad Deployment",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Kubernetes Security Check",
}

deny[res] {
	input.kind == "Deployment"
	res := {"msg": sprintf("deny 200 %s", [input.metadata.name])}
}
8
policy/testdata/namespace_exception/exceptions.rego
vendored
Normal file
@@ -0,0 +1,8 @@
package namespace.exceptions

import data.namespaces

exception[ns] {
	ns := data.namespaces[_]
	ns == "testdata.kubernetes.xyz_100"
}
23
policy/testdata/rule_exception/deployment.rego
vendored
Normal file
@@ -0,0 +1,23 @@
package testdata.kubernetes.xyz_100

__rego_metadata__ := {
	"id": "XYZ-100",
	"title": "Bad Deployment",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Kubernetes Security Check",
}

deny_foo[msg] {
	input.kind == "Deployment"
	msg := sprintf("deny foo %s", [input.metadata.name])
}

deny_bar[msg] {
	input.kind == "Deployment"
	msg := sprintf("deny bar %s", [input.metadata.name])
}

exception[rules] {
	rules = ["foo"]
}
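Here the exception lives in the same package and names rule suffixes rather than namespaces: yielding ["foo"] suppresses deny_foo while deny_bar still fires. A hedged Go sketch of how a runner might apply that filter; the logic is illustrative, not fanal's actual implementation:

package main

import (
	"fmt"
	"strings"
)

// filterRules sketches rule-level exceptions as the testdata above suggests:
// the package's exception rule yields suffixes such as "foo", and the runner
// skips deny_<suffix> / warn_<suffix> rules accordingly.
func filterRules(rules []string, excepted []string) []string {
	skip := make(map[string]bool)
	for _, e := range excepted {
		skip[e] = true
	}
	var kept []string
	for _, r := range rules {
		suffix := strings.TrimPrefix(strings.TrimPrefix(r, "deny_"), "warn_")
		if skip[suffix] {
			continue // e.g. deny_foo is suppressed by exception[rules] above
		}
		kept = append(kept, r)
	}
	return kept
}

func main() {
	fmt.Println(filterRules([]string{"deny_foo", "deny_bar"}, []string{"foo"}))
	// prints [deny_bar]: deny_foo was excepted, deny_bar still fires
}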
8
policy/testdata/sad/broken_metadata.rego
vendored
Normal file
@@ -0,0 +1,8 @@
package testdata.kubernetes.xyz_100

__rego_metadata__ := "broken"

deny[msg] {
	input.kind == "Deployment"
	msg := sprintf("deny %s", [input.metadata.name])
}
14
policy/testdata/sad/broken_msg.rego
vendored
Normal file
@@ -0,0 +1,14 @@
package testdata.kubernetes.xyz_200

__rego_metadata__ := {
	"id": "XYZ-200",
	"title": "Bad Deployment",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Kubernetes Security Check",
}

deny[res] {
	input.kind == "Deployment"
	res := {"hello": "world"}
}
5
policy/testdata/sad/broken_rule.rego
vendored
Normal file
@@ -0,0 +1,5 @@
package testdata.kubernetes.xyz_100

deny[msg] {
	broken
}
24
policy/testdata/sad/missing_filepath.rego
vendored
Normal file
@@ -0,0 +1,24 @@
package testdata.xyz_100

import data.services

__rego_metadata__ := {
	"id": "XYZ-100",
	"title": "Bad Combined Deployment",
	"version": "v1.0.0",
	"severity": "HIGH",
	"type": "Kubernetes Security Check",
}

__rego_input__ := {
	"selector": {"types": ["kubernetes"]},
	"combine": true,
}

warn[res] {
	input[i].contents.kind == "Deployment"
	services.ports[_] == 22
	res := {
		"msg": sprintf("deny combined %s", [input[i].contents.metadata.name]),
	}
}
6
policy/testdata/sad/missing_metadata.rego
vendored
Normal file
@@ -0,0 +1,6 @@
package testdata.kubernetes.xyz_100

deny[msg] {
	input.kind == "Deployment"
	msg := sprintf("deny %s", [input.metadata.name])
}
12
policy/testdata/sad/missing_metadata_fields.rego
vendored
Normal file
@@ -0,0 +1,12 @@
package testdata.kubernetes.xyz_100

__rego_metadata__ := {
	"title": "Bad Deployment",
	"version": "v1.0.0",
	"type": "Kubernetes Security Check",
}

deny[msg] {
	input.kind == "Deployment"
	msg := sprintf("deny %s", [input.metadata.name])
}
@@ -12,24 +12,7 @@ import (
 	"testing"
 	"time"

-	_ "github.com/aquasecurity/fanal/analyzer/command/apk"
-	_ "github.com/aquasecurity/fanal/analyzer/library/bundler"
-	_ "github.com/aquasecurity/fanal/analyzer/library/cargo"
-	_ "github.com/aquasecurity/fanal/analyzer/library/composer"
-	_ "github.com/aquasecurity/fanal/analyzer/library/npm"
-	_ "github.com/aquasecurity/fanal/analyzer/library/pipenv"
-	_ "github.com/aquasecurity/fanal/analyzer/library/poetry"
-	_ "github.com/aquasecurity/fanal/analyzer/library/yarn"
-	_ "github.com/aquasecurity/fanal/analyzer/os/alpine"
-	_ "github.com/aquasecurity/fanal/analyzer/os/amazonlinux"
-	_ "github.com/aquasecurity/fanal/analyzer/os/debian"
-	_ "github.com/aquasecurity/fanal/analyzer/os/photon"
-	_ "github.com/aquasecurity/fanal/analyzer/os/redhatbase"
-	_ "github.com/aquasecurity/fanal/analyzer/os/suse"
-	_ "github.com/aquasecurity/fanal/analyzer/os/ubuntu"
-	_ "github.com/aquasecurity/fanal/analyzer/pkg/apk"
-	_ "github.com/aquasecurity/fanal/analyzer/pkg/dpkg"
-	_ "github.com/aquasecurity/fanal/analyzer/pkg/rpm"
+	_ "github.com/aquasecurity/fanal/analyzer/all"
 	"github.com/aquasecurity/fanal/cache"
 	"github.com/aquasecurity/fanal/types"
 	dtypes "github.com/docker/docker/api/types"
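The hunk above swaps eighteen per-analyzer blank imports for a single umbrella import. The pattern it relies on is init()-time self-registration; a self-contained sketch of that mechanism with illustrative names, not fanal's API:

package main

import "fmt"

// Each analyzer package registers itself from init(), so a blank import
// of the package, or of one umbrella package that imports them all,
// is enough to make it available to the scanner.

type analyzer interface{ Name() string }

var registry []analyzer

func register(a analyzer) { registry = append(registry, a) }

type apkAnalyzer struct{}

func (apkAnalyzer) Name() string { return "apk" }

// init runs when the package is imported, even via a blank import.
func init() { register(apkAnalyzer{}) }

func main() {
	for _, a := range registry {
		fmt.Println("registered:", a.Name()) // registered: apk
	}
}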
Some files were not shown because too many files have changed in this diff