feat(misconf): export raw Terraform data to Rego (#8741)

Signed-off-by: nikpivkin <nikita.pivkin@smartforce.io>
This commit is contained in:
Nikita Pivkin
2025-05-27 10:34:17 +06:00
committed by GitHub
parent 6c7cb7ad2d
commit aaecc29e90
30 changed files with 501 additions and 24 deletions

View File

@@ -51,6 +51,7 @@ trivy config [flags] DIR
--output-plugin-arg string [EXPERIMENTAL] output plugin arguments
--password strings password. Comma-separated passwords allowed. TRIVY_PASSWORD should be used for security reasons.
--password-stdin password from stdin. Comma-separated passwords are not supported.
--raw-config-scanners strings specify the types of scanners that will also scan raw configurations. For example, scanners will scan a non-adapted configuration into a shared state (allowed values: terraform)
--redis-ca string redis ca file location, if using redis as cache backend
--redis-cert string redis certificate file location, if using redis as cache backend
--redis-key string redis key file location, if using redis as cache backend

View File

@@ -99,6 +99,7 @@ trivy filesystem [flags] PATH
- indirect
(default [unknown,root,workspace,direct,indirect])
--pkg-types strings list of package types (allowed values: os,library) (default [os,library])
--raw-config-scanners strings specify the types of scanners that will also scan raw configurations. For example, scanners will scan a non-adapted configuration into a shared state (allowed values: terraform)
--redis-ca string redis ca file location, if using redis as cache backend
--redis-cert string redis certificate file location, if using redis as cache backend
--redis-key string redis key file location, if using redis as cache backend

View File

@@ -120,6 +120,7 @@ trivy image [flags] IMAGE_NAME
--pkg-types strings list of package types (allowed values: os,library) (default [os,library])
--platform string set platform in the form os/arch if image is multi-platform capable
--podman-host string unix podman socket path to use for podman scanning
--raw-config-scanners strings specify the types of scanners that will also scan raw configurations. For example, scanners will scan a non-adapted configuration into a shared state (allowed values: terraform)
--redis-ca string redis ca file location, if using redis as cache backend
--redis-cert string redis certificate file location, if using redis as cache backend
--redis-key string redis key file location, if using redis as cache backend

View File

@@ -111,6 +111,7 @@ trivy kubernetes [flags] [CONTEXT]
(default [unknown,root,workspace,direct,indirect])
--pkg-types strings list of package types (allowed values: os,library) (default [os,library])
--qps float specify the maximum QPS to the master from this client (default 5)
--raw-config-scanners strings specify the types of scanners that will also scan raw configurations. For example, scanners will scan a non-adapted configuration into a shared state (allowed values: terraform)
--redis-ca string redis ca file location, if using redis as cache backend
--redis-cert string redis certificate file location, if using redis as cache backend
--redis-key string redis key file location, if using redis as cache backend

View File

@@ -98,6 +98,7 @@ trivy repository [flags] (REPO_PATH | REPO_URL)
- indirect
(default [unknown,root,workspace,direct,indirect])
--pkg-types strings list of package types (allowed values: os,library) (default [os,library])
--raw-config-scanners strings specify the types of scanners that will also scan raw configurations. For example, scanners will scan a non-adapted configuration into a shared state (allowed values: terraform)
--redis-ca string redis ca file location, if using redis as cache backend
--redis-cert string redis certificate file location, if using redis as cache backend
--redis-key string redis key file location, if using redis as cache backend

View File

@@ -101,6 +101,7 @@ trivy rootfs [flags] ROOTDIR
- indirect
(default [unknown,root,workspace,direct,indirect])
--pkg-types strings list of package types (allowed values: os,library) (default [os,library])
--raw-config-scanners strings specify the types of scanners that will also scan raw configurations. For example, scanners will scan a non-adapted configuration into a shared state (allowed values: terraform)
--redis-ca string redis ca file location, if using redis as cache backend
--redis-cert string redis certificate file location, if using redis as cache backend
--redis-key string redis key file location, if using redis as cache backend

View File

@@ -91,6 +91,7 @@ trivy vm [flags] VM_IMAGE
- indirect
(default [unknown,root,workspace,direct,indirect])
--pkg-types strings list of package types (allowed values: os,library) (default [os,library])
--raw-config-scanners strings specify the types of scanners that will also scan raw configurations. For example, scanners will scan a non-adapted configuration into a shared state (allowed values: terraform)
--redis-ca string redis ca file location, if using redis as cache backend
--redis-cert string redis certificate file location, if using redis as cache backend
--redis-key string redis key file location, if using redis as cache backend

View File

@@ -409,6 +409,9 @@ misconfiguration:
# Same as '--include-non-failures'
include-non-failures: false
# Same as '--raw-config-scanners'
raw-config-scanners: []
# Same as '--render-cause'
render-cause: []
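
The snippet below is a minimal illustrative `trivy.yaml` fragment that opts Terraform into raw scanning; `terraform` is the only allowed value documented for this flag, and the rest of the file is assumed unchanged:

```yaml
misconfiguration:
  # Same as '--raw-config-scanners terraform' on the CLI
  raw-config-scanners:
    - terraform
```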

View File

@@ -207,6 +207,7 @@ You can specify input format via the `custom.input` annotation.
- `yaml` (Generic YAML)
- `json` (Generic JSON)
- `toml` (Generic TOML)
- `terraform-raw` (the Terraform configuration is not converted to the common state, as it is for the Cloud format, allowing more flexible and direct checks on the original code)
When configuration languages such as Kubernetes are not identified, file formats such as JSON will be used as `type`.
When a configuration language is identified, it will overwrite `type`.
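
For example, a custom check that targets the raw Terraform input (rather than the adapted Cloud state) declares the selector as below; this is a minimal sketch mirroring the test fixture added in this commit:

```rego
# METADATA
# custom:
#   input:
#     selector:
#     - type: terraform-raw
```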

View File

@@ -25,6 +25,7 @@ Currently out of the box the following schemas are supported natively:
1. [Docker](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/dockerfile.json)
2. [Kubernetes](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/kubernetes.json)
3. [Cloud](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/cloud.json)
4. [Terraform Raw Format](https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/terraform-raw.json)
You can interactively view these schemas with the [Trivy Schema Explorer](https://aquasecurity.github.io/trivy-schemas/)
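
A check opts into validation against the new schema through its metadata; a minimal sketch, matching the annotation used by the checks in this commit:

```rego
# METADATA
# schemas:
#   - input: schema["terraform-raw"]
```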

View File

@@ -48,7 +48,7 @@ package custom.dockerfile.ID001
import future.keywords.in
```
Every rego check has a package name. In our case, we will call it `custom.dockerfile.ID001` to avoid confusion between custom checks and built-in checks. The group name `dockerfile` has no effect on the package name. Note that each package has to contain only one check. However, we can pass multiple checks into our Trivy scan.
Every Rego check has a package name. In our case, we will call it `custom.dockerfile.ID001` to avoid confusion between custom checks and built-in checks. The group name `dockerfile` has no effect on the package name. Note that each package has to contain only one check. However, we can pass multiple checks into our Trivy scan.
The first keyword of the package, in this case `custom`, will be reused in the `trivy` command as the `--namespace`.
## Allowed data

View File

@@ -90,7 +90,7 @@ trivy config --tf-vars terraform.tfvars ./
```
### Custom Checks
We have lots of examples in the [documentation](https://trivy.dev/latest/docs/scanner/misconfiguration/custom/) on how you can write and pass custom Rego checks into terraform misconfiguration scans.
We have lots of examples in the [documentation](https://trivy.dev/latest/docs/scanner/misconfiguration/custom/) on how you can write and pass custom Rego checks into terraform misconfiguration scans.
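
For illustration, enabling the raw Terraform scanner alongside such custom checks could look like the following; the target directory `./` is a placeholder:

```bash
trivy config --raw-config-scanners terraform ./
```
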
## Secret and vulnerability scans

View File

@@ -703,6 +703,7 @@ func initMisconfScannerOption(ctx context.Context, opts flag.Options) (misconf.S
DisableEmbeddedLibraries: disableEmbedded,
IncludeDeprecatedChecks: opts.IncludeDeprecatedChecks,
TfExcludeDownloaded: opts.TfExcludeDownloaded,
RawConfigScanners: opts.RawConfigScanners,
FilePatterns: opts.FilePatterns,
ConfigFileSchemas: configSchemas,
SkipFiles: opts.SkipFiles,

View File

@@ -622,6 +622,73 @@ func TestTerraformMisconfigurationScan(t *testing.T) {
},
},
},
{
name: "scan raw config",
artifactOpt: artifact.Option{
MisconfScannerOption: misconf.ScannerOption{
RawConfigScanners: []types.ConfigType{types.Terraform},
},
},
fields: fields{
dir: "./testdata/misconfig/terraform/single-failure",
},
wantBlobs: []cachetest.WantBlob{
{
ID: "sha256:6f4672e139d4066fd00391df614cdf42bda5f7a3f005d39e1d8600be86157098",
BlobInfo: types.BlobInfo{
SchemaVersion: 2,
Misconfigurations: []types.Misconfiguration{
{
FileType: "terraform",
FilePath: "main.tf",
Failures: types.MisconfResults{
{
Namespace: "user.something",
Query: "data.user.something.deny",
Message: "Empty bucket name!",
PolicyMetadata: terraformPolicyMetadata,
CauseMetadata: types.CauseMetadata{
Resource: "aws_s3_bucket.asd",
Provider: "Generic",
Service: "general",
StartLine: 1,
EndLine: 3,
},
},
{
Namespace: "user.test002",
Query: "data.user.test002.deny",
Message: "Empty bucket name!",
PolicyMetadata: types.PolicyMetadata{
ID: "TEST002",
AVDID: "AVD-TEST-0002",
Type: "Terraform Security Check",
Title: "Test policy",
Severity: "LOW",
},
CauseMetadata: types.CauseMetadata{
Resource: "aws_s3_bucket.asd",
Provider: "Terraform-Raw",
Service: "general",
StartLine: 1,
EndLine: 3,
},
},
},
},
},
},
},
},
want: artifact.Reference{
Name: "testdata/misconfig/terraform/single-failure",
Type: types.TypeFilesystem,
ID: "sha256:6f4672e139d4066fd00391df614cdf42bda5f7a3f005d39e1d8600be86157098",
BlobIDs: []string{
"sha256:6f4672e139d4066fd00391df614cdf42bda5f7a3f005d39e1d8600be86157098",
},
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {

View File

@@ -0,0 +1,23 @@
# METADATA
# title: Test policy
# schemas:
# - input: schema["terraform-raw"]
# custom:
# id: TEST002
# avd_id: AVD-TEST-0002
# short_code: empty-bucket-name
# severity: LOW
# input:
# selector:
# - type: terraform-raw
package user.test002
import rego.v1
deny contains res if {
some block in input.modules[_].blocks
block.kind == "resource"
block.type == "aws_s3_bucket"
not "bucket" in block.attributes
res := result.new("Empty bucket name!", block)
}

View File

@@ -116,6 +116,13 @@ var (
Values: xstrings.ToStringSlice([]types.ConfigType{types.Terraform}), // TODO: add Plan and JSON?
Default: []string{},
}
RawConfigScanners = Flag[[]string]{
Name: "raw-config-scanners",
ConfigName: "misconfiguration.raw-config-scanners",
Usage: "specify the types of scanners that will also scan raw configurations. For example, scanners will scan a non-adapted configuration into a shared state",
Values: xstrings.ToStringSlice([]types.ConfigType{types.Terraform}),
Default: []string{},
}
)
// MisconfFlagGroup composes common printer flag structs used for commands providing misconfiguration scanning.
@@ -137,6 +144,7 @@ type MisconfFlagGroup struct {
MisconfigScanners *Flag[[]string]
ConfigFileSchemas *Flag[[]string]
RenderCause *Flag[[]string]
RawConfigScanners *Flag[[]string]
}
type MisconfOptions struct {
@@ -157,6 +165,7 @@ type MisconfOptions struct {
MisconfigScanners []analyzer.Type
ConfigFileSchemas []string
RenderCause []types.ConfigType
RawConfigScanners []types.ConfigType
}
func NewMisconfFlagGroup() *MisconfFlagGroup {
@@ -177,6 +186,7 @@ func NewMisconfFlagGroup() *MisconfFlagGroup {
MisconfigScanners: MisconfigScannersFlag.Clone(),
ConfigFileSchemas: ConfigFileSchemasFlag.Clone(),
RenderCause: RenderCauseFlag.Clone(),
RawConfigScanners: RawConfigScanners.Clone(),
}
}
@@ -201,6 +211,7 @@ func (f *MisconfFlagGroup) Flags() []Flagger {
f.MisconfigScanners,
f.ConfigFileSchemas,
f.RenderCause,
f.RawConfigScanners,
}
}
@@ -221,6 +232,7 @@ func (f *MisconfFlagGroup) ToOptions(opts *Options) error {
MisconfigScanners: xstrings.ToTSlice[analyzer.Type](f.MisconfigScanners.Value()),
ConfigFileSchemas: f.ConfigFileSchemas.Value(),
RenderCause: xstrings.ToTSlice[types.ConfigType](f.RenderCause.Value()),
RawConfigScanners: xstrings.ToTSlice[types.ConfigType](f.RawConfigScanners.Value()),
}
return nil
}

View File

@@ -7,6 +7,7 @@ import (
"fmt"
"io"
"io/fs"
"slices"
"strings"
"github.com/open-policy-agent/opa/v1/ast"
@@ -286,7 +287,17 @@ func checkSubtype(ii map[string]any, provider string, subTypes []SubType) bool {
return false
}
var sourcesWithExplicitSelectors = []types.Source{
// apply terraform-specific checks only if selectors exist
types.SourceTerraformRaw,
}
func isPolicyApplicable(sourceType types.Source, staticMetadata *StaticMetadata, inputs ...Input) bool {
if len(staticMetadata.InputOptions.Selectors) == 0 &&
slices.Contains(sourcesWithExplicitSelectors, sourceType) {
return false
}
if len(staticMetadata.InputOptions.Selectors) == 0 { // check always applies if no selectors
return true
}

View File

@@ -19,4 +19,7 @@ var (
//go:embed cloud.json
Cloud Schema
//go:embed terraform-raw.json
TerraformRaw Schema
)

View File

@@ -5,12 +5,13 @@ import (
)
var SchemaMap = map[types.Source]Schema{
types.SourceDefsec: Cloud,
types.SourceCloud: Cloud,
types.SourceKubernetes: Kubernetes,
types.SourceRbac: Kubernetes,
types.SourceDockerfile: Dockerfile,
types.SourceTOML: Anything,
types.SourceYAML: Anything,
types.SourceJSON: Anything,
types.SourceDefsec: Cloud,
types.SourceCloud: Cloud,
types.SourceKubernetes: Kubernetes,
types.SourceRbac: Kubernetes,
types.SourceDockerfile: Dockerfile,
types.SourceTOML: Anything,
types.SourceYAML: Anything,
types.SourceJSON: Anything,
types.SourceTerraformRaw: TerraformRaw,
}

View File

@@ -0,0 +1,137 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://github.com/aquasecurity/trivy/blob/main/pkg/iac/rego/schemas/terraform.json",
"type": "object",
"properties": {
"modules": {
"type": "array",
"items": {
"$ref": "#/$defs/Module"
},
"description": "List of Terraform modules present in the configuration."
}
},
"$defs": {
"Module": {
"type": "object",
"properties": {
"root_path": {
"type": "string",
"description": "The Terraform root directory of the project."
},
"module_path": {
"type": "string",
"description": "Path to the current module. For remote modules, this is the path relative to the module's code directory."
},
"blocks": {
"type": "array",
"items": {
"$ref": "#/$defs/Block"
},
"description": "List of blocks (e.g., resource, data, variable) within the module."
}
}
},
"Block": {
"type": "object",
"properties": {
"__defsec_metadata": {
"type": "object",
"$ref": "#/$defs/Metadata",
"description": "Metadata related to the block."
},
"kind": {
"type": "string",
"description": "Kind of the block (e.g., resource, data, module)."
},
"type": {
"type": "string",
"description": "Type of the block (e.g., aws_s3_bucket for a resource). For blocks that can only have 1 label (such as module or variable) this attribute will be empty."
},
"name": {
"type": "string",
"description": "Name of the block defined by the user."
},
"attributes": {
"type": "object",
"patternProperties": {
".*": {
"$ref": "#/$defs/Attribute"
}
},
"description": "Key-value attributes associated with the block."
}
}
},
"Attribute": {
"type": "object",
"properties": {
"__defsec_metadata": {
"type": "object",
"$ref": "#/$defs/Metadata",
"description": "Metadata related to the attribute."
},
"name": {
"type": "string",
"description": "Name of the attribute."
},
"known": {
"type": "boolean",
"description": "Indicates whether the value of the attribute is known during analysis."
},
"value": {
"description": "The actual value of the attribute. If unknown, then null. Can be a primitive, object, or array.",
"oneOf": [
{ "type": "null" },
{ "type": "string" },
{ "type": "number" },
{ "type": "boolean" },
{ "type": "object" },
{ "type": "array" }
]
}
}
},
"Metadata": {
"type": "object",
"properties": {
"filepath": {
"type": "string",
"description": "Path to the source file where the object is defined relative to the module's file system."
},
"startline": {
"type": "number",
"description": "Line number where the object starts in the source file."
},
"endline": {
"type": "number",
"description": "Line number where the object ends in the source file."
},
"sourceprefix": {
"type": "string",
"description": "Module source. E.g. interface terraform-aws-modules/s3-bucket/aws"
},
"managed": {
"type": "boolean",
"description": "Indicates whether the object is controlled by this source. Not relevant for Terraform."
},
"explicit": {
"type": "boolean",
"description": "True if the object is explicitly defined by the user."
},
"unresolvable": {
"type": "boolean",
"description": "True if the value cannot be determined statically."
},
"fskey": {
"type": "string",
"description": "Internal filesystem key for uniquely identifying the object."
},
"resource": {
"type": "string",
"description": "Fully qualified resource name if applicable. E.g. aws_s3_bucket.test[0]"
}
}
}
}
}
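
To make the shape concrete, here is a hedged sketch of an exported input instance for the trivial `aws_s3_bucket` resource used in the tests below; the `__defsec_metadata` objects are omitted and the path values are placeholders:

```json
{
  "modules": [
    {
      "root_path": ".",
      "module_path": ".",
      "blocks": [
        {
          "kind": "resource",
          "type": "aws_s3_bucket",
          "name": "test",
          "attributes": {
            "bucket": {
              "name": "bucket",
              "known": true,
              "value": "evil"
            }
          }
        }
      ]
    }
  ]
}
```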

View File

@@ -3,3 +3,15 @@ package options
type ConfigurableScanner any
type ScannerOption func(s ConfigurableScanner)
type RawConfigScanner interface {
SetScanRawConfig(v bool)
}
func WithScanRawConfig(v bool) ScannerOption {
return func(s ConfigurableScanner) {
if ss, ok := s.(RawConfigScanner); ok {
ss.SetScanRawConfig(v)
}
}
}

View File

@@ -25,6 +25,7 @@ type Executor struct {
logger *log.Logger
resultsFilters []func(scan.Results) scan.Results
regoScanner *rego.Scanner
scanRawConfig bool
}
// New creates a new Executor
@@ -44,6 +45,7 @@ func (e *Executor) Execute(ctx context.Context, modules terraform.Modules, baseP
infra := adapter.Adapt(modules)
e.logger.Debug("Adapted module(s) into state data.", log.Int("count", len(modules)))
e.logger.Debug("Scan state data")
results, err := e.regoScanner.ScanInput(ctx, types.SourceCloud, rego.Input{
Contents: infra.ToRego(),
Path: basePath,
@@ -52,7 +54,21 @@ func (e *Executor) Execute(ctx context.Context, modules terraform.Modules, baseP
return nil, err
}
e.logger.Debug("Finished applying rules.")
if e.scanRawConfig {
e.logger.Debug("Scan raw Terraform data")
results2, err := e.regoScanner.ScanInput(ctx, types.SourceTerraformRaw, rego.Input{
Contents: terraform.ExportModules(modules),
Path: basePath,
})
if err != nil {
e.logger.Error("Failed to scan raw Terraform data",
log.FilePath(basePath), log.Err(err))
} else {
results = append(results, results2...)
}
}
e.logger.Debug("Finished applying checks")
e.logger.Debug("Applying ignores...")
var ignores ignore.Rules

View File

@@ -24,3 +24,9 @@ func OptionWithRegoScanner(s *rego.Scanner) Option {
e.regoScanner = s
}
}
func OptionWithScanRawConfig(b bool) Option {
return func(e *Executor) {
e.scanRawConfig = b
}
}

View File

@@ -23,6 +23,7 @@ import (
var _ scanners.FSScanner = (*Scanner)(nil)
var _ options.ConfigurableScanner = (*Scanner)(nil)
var _ options.RawConfigScanner = (*Scanner)(nil)
var _ ConfigurableTerraformScanner = (*Scanner)(nil)
type Scanner struct {
@@ -44,6 +45,10 @@ func (s *Scanner) SetForceAllDirs(b bool) {
s.forceAllDirs = b
}
func (s *Scanner) SetScanRawConfig(b bool) {
s.AddExecutorOptions(executor.OptionWithScanRawConfig(b))
}
func (s *Scanner) AddParserOptions(opts ...parser.Option) {
s.parserOpt = append(s.parserOpt, opts...)
}

View File

@@ -13,6 +13,7 @@ import (
"github.com/aquasecurity/trivy/internal/testutil"
"github.com/aquasecurity/trivy/pkg/iac/rego"
"github.com/aquasecurity/trivy/pkg/iac/scan"
"github.com/aquasecurity/trivy/pkg/iac/scanners/options"
)
func Test_OptionWithPolicyDirs(t *testing.T) {
@@ -179,19 +180,19 @@ resource "aws_sqs_queue_policy" "bad_example" {
}`,
"/rules/test.rego": `
# METADATA
# title: Buckets should not be evil
# description: You should not allow buckets to be evil
# title: SQS policies should not allow wildcard actions
# description: SQS queue policies should avoid using "*" for actions, as this allows overly permissive access.
# scope: package
# schemas:
# - input: schema.input
# related_resources:
# - https://google.com/search?q=is+my+bucket+evil
# - https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-using-identity-based-policies.html
# custom:
# id: TEST123
# avd_id: AVD-TEST-0123
# short_code: no-evil-buckets
# short_code: no-wildcard-actions
# severity: CRITICAL
# recommended_action: Use a good bucket instead
# recommended_action: Avoid using "*" for actions in SQS policies and specify only required actions.
# input:
# selector:
# - type: cloud
@@ -1194,3 +1195,50 @@ data "google_storage_transfer_project_service_account" "production" {
})
}
}
func TestScanRawTerraform(t *testing.T) {
check := `# METADATA
# title: Buckets should not be evil
# schemas:
# - input: schema["terraform-raw"]
# custom:
# id: USER0001
# short_code: evil-bucket
# severity: HIGH
# input:
# selector:
# - type: terraform-raw
package user.bucket001
import rego.v1
deny contains res if {
some block in input.modules[_].blocks
block.kind == "resource"
block.type == "aws_s3_bucket"
name := block.attributes["bucket"]
name.value == "evil"
res := result.new("Buckets should not be evil", name)
}`
fsys := fstest.MapFS{
"main.tf": &fstest.MapFile{Data: []byte(`resource "aws_s3_bucket" "test" {
bucket = "evil"
}`)},
}
scanner := New(
ScannerWithAllDirectories(true),
options.WithScanRawConfig(true),
rego.WithEmbeddedLibraries(true),
rego.WithPolicyReader(strings.NewReader(check)),
rego.WithPolicyNamespaces("user"),
)
results, err := scanner.ScanFS(t.Context(), fsys, ".")
require.NoError(t, err)
failed := results.GetFailed()
assert.Len(t, failed, 1)
}

View File

@@ -23,8 +23,11 @@ func (s *Scanner) Name() string {
func New(opts ...options.ScannerOption) *Scanner {
scanner := &Scanner{
inner: tfscanner.New(opts...),
inner: tfscanner.New(
append(opts, options.WithScanRawConfig(false))...,
),
}
return scanner
}

View File

@@ -55,7 +55,9 @@ func (s *Scanner) ScanFS(_ context.Context, fsys fs.FS, dir string) (scan.Result
func New(opts ...options.ScannerOption) *Scanner {
scanner := &Scanner{
inner: terraform.New(opts...),
inner: terraform.New(
append(opts, options.WithScanRawConfig(false))...,
),
parser: parser.New(),
logger: log.WithPrefix("tfjson scanner"),
options: opts,

pkg/iac/terraform/export.go (new file, 109 lines)
View File

@@ -0,0 +1,109 @@
package terraform
import (
"encoding/json"
"github.com/samber/lo"
"github.com/zclconf/go-cty/cty"
ctyjson "github.com/zclconf/go-cty/cty/json"
"github.com/aquasecurity/trivy/pkg/log"
)
func ExportModules(modules Modules) TerraformConfigExport {
return TerraformConfigExport{
Modules: lo.Map(modules, func(m *Module, _ int) ModuleExport {
return m.ToModuleExport()
}),
}
}
// TODO(nikpivkin): export directly to OPA values
type TerraformConfigExport struct {
Modules []ModuleExport `json:"modules"`
}
type ModuleExport struct {
RootPath string `json:"root_path"`
ModulePath string `json:"module_path"`
ParentPath string `json:"parent_path"`
Blocks []BlockExport `json:"blocks"`
}
type BlockExport struct {
Metadata any `json:"__defsec_metadata"`
Kind string `json:"kind"`
Type string `json:"type"`
Name string `json:"name"`
Attributes map[string]AttributeExport `json:"attributes"`
}
type AttributeExport struct {
Metadata any `json:"__defsec_metadata"`
Name string `json:"name"`
Value json.RawMessage `json:"value"`
Known bool `json:"known"`
}
func (c *Module) ToModuleExport() ModuleExport {
var parentPath string
if parentModule := c.Parent(); parentModule != nil {
parentPath = parentModule.ModulePath()
}
return ModuleExport{
RootPath: c.RootPath(),
ModulePath: c.ModulePath(),
ParentPath: parentPath,
Blocks: lo.Map(c.blocks, func(b *Block, _ int) BlockExport {
return b.ToBlockExport()
}),
}
}
func (b *Block) ToBlockExport() BlockExport {
typeLabel := b.TypeLabel()
nameLabel := b.NameLabel()
if len(b.Labels()) == 1 {
nameLabel = typeLabel
typeLabel = ""
}
return BlockExport{
Metadata: b.metadata.ToRego(),
Kind: b.Type(),
Type: typeLabel,
Name: nameLabel,
Attributes: lo.SliceToMap(
b.attributes, func(a *Attribute) (string, AttributeExport) {
return a.Name(), a.ToAttributeExport()
},
),
}
}
func (a *Attribute) ToAttributeExport() AttributeExport {
value, known := ExportCtyValueToJSON(a.Value())
return AttributeExport{
Metadata: a.metadata.ToRego(),
Name: a.Name(),
Known: known,
Value: value,
}
}
func ExportCtyValueToJSON(v cty.Value) (json.RawMessage, bool) {
if v.IsNull() || !v.IsKnown() {
return json.RawMessage("null"), false
}
ty := v.Type()
bytes, err := ctyjson.Marshal(v, ty)
if err != nil {
log.WithPrefix("terraform").Debug("Failed to marshal cty value",
log.String("value", v.GoString()), log.Err(err))
return json.RawMessage("null"), false
}
return json.RawMessage(bytes), true
}

View File

@@ -5,10 +5,13 @@ type Source string
const (
SourceDockerfile Source = "dockerfile"
SourceKubernetes Source = "kubernetes"
SourceRbac Source = "rbac" // deprecated - please use "kubernetes" instead
SourceDefsec Source = "defsec" // deprecated - please use "cloud" instead
SourceCloud Source = "cloud"
SourceYAML Source = "yaml"
SourceJSON Source = "json"
SourceTOML Source = "toml"
// Deprecated: use "kubernetes" instead
SourceRbac Source = "rbac"
// Deprecated: use "cloud" instead
SourceDefsec Source = "defsec"
SourceCloud Source = "cloud"
SourceYAML Source = "yaml"
SourceJSON Source = "json"
SourceTOML Source = "toml"
SourceTerraformRaw Source = "terraform-raw"
)

View File

@@ -8,6 +8,7 @@ import (
"io/fs"
"os"
"path/filepath"
"slices"
"sort"
"strings"
@@ -74,6 +75,7 @@ type ScannerOption struct {
TerraformTFVars []string
CloudFormationParamVars []string
TfExcludeDownloaded bool
RawConfigScanners []types.ConfigType
K8sVersion string
FilePatterns []string
@@ -302,6 +304,10 @@ func scannerOptions(t detection.FileType, opt ScannerOption) ([]options.ScannerO
opts = append(opts, regoOpts...)
}
opts = append(opts, options.WithScanRawConfig(
slices.Contains(opt.RawConfigScanners, enablediacTypes[t])),
)
switch t {
case detection.FileTypeHelm:
return addHelmOpts(opts, opt), nil