feat(misconf): initial ansible scanning support (#9332)
Signed-off-by: nikpivkin <nikita.pivkin@smartforce.io>
Co-authored-by: Simar <simar@linux.com>
Co-authored-by: simar7 <1254783+simar7@users.noreply.github.com>

docs/guide/coverage/iac/ansible.md (new file, 177 lines)
@@ -0,0 +1,177 @@
# Ansible

Trivy analyzes tasks in playbooks and roles for misconfigurations in cloud resources.

!!! warning "EXPERIMENTAL"
    This feature might change without preserving backwards compatibility.

!!! warning "LIMITATIONS"
    Not all Ansible features are supported. See the [Limitations](#limitations) section for a detailed list.

## Misconfigurations

Trivy recursively scans directories starting from the root and detects Ansible projects by the presence of key files and folders (an example layout is sketched after this list):

- `ansible.cfg`, `inventory`, `group_vars`, `host_vars`, `roles` and `playbooks`
- YAML files that resemble playbooks
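
For instance, a minimal layout like the following (the directory and role names here are illustrative, not required) would be detected as an Ansible project:

```bash
my-ansible-project/
├── ansible.cfg
├── inventory
├── group_vars/
│   └── all.yml
├── roles/
│   └── webserver/
│       └── tasks/
│           └── main.yml
└── site.yml
```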

For each project, Trivy performs the following steps:

- **Playbook discovery** — determines entry points, i.e., playbooks that are not used as imports in other playbooks (see the sketch after this list).
- **Task and variable resolution** — Trivy resolves tasks and variables from plays, imports, and roles.
- **Module analysis** — modules used in tasks are scanned for insecure configurations. Currently, only cloud resource modules are supported.
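
To illustrate entry-point discovery (the file names below are hypothetical), a playbook referenced via `import_playbook` is not itself treated as an entry point, while the importing playbook is:

```yaml
# site.yml — an entry point, since no other playbook imports it
- import_playbook: webservers.yml

# webservers.yml is imported above, so it is not considered an entry point
```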

### Project scanning

The Ansible scanner is enabled by default. To run only this scanner, use the `--misconfig-scanners ansible` flag:

```bash
trivy conf --misconfig-scanners ansible .
```

Example playbook:

```yaml
- name: Example playbook
  hosts: localhost
  connection: local
  tasks:
    - name: Create S3 bucket
      amazon.aws.s3_bucket:
        name: "{{ bucket_name }}"
        region: "{{ bucket_region }}"
        state: present
```

Scan result:

```bash
AVD-AWS-0093 (HIGH): Public access block does not restrict public buckets
══════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════════
S3 buckets should restrict public policies for the bucket. By enabling, the restrict_public_buckets, only the bucket owner and AWS Services can access if it has a public policy.

See https://avd.aquasec.com/misconfig/avd-aws-0093
──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
 test.yaml:6-9
   via test.yaml:5-9 (tasks)
    via test.yaml:1-9 (play)
──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
   1   - name: Example playbook
   2     hosts: localhost
   3     connection: local
   4     tasks:
   5       - name: Create S3 bucket
   6 ┌         amazon.aws.s3_bucket:
   7 │           name: "{{ bucket_name }}"
   8 │           region: "{{ bucket_region }}"
   9 └           state: present
──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
```

If the project defines a collection (contains a `galaxy.yaml` file), Trivy can resolve roles using the full name `namespace.collection.role` within the project.

Example `galaxy.yaml`:

```yaml
namespace: myorg
name: mycollection
version: 1.0.0
```

Project structure:

```bash
roles/
  myrole/
    tasks/
      main.yml
galaxy.yaml
```

Using the role in a playbook:

```yaml
- name: Apply custom role
  hosts: localhost
  tasks:
    - name: Run role from collection
      include_role:
        name: myorg.mycollection.myrole
```

Trivy can correctly locate and analyze the `myrole` role via the full collection name.

### Scanning specific playbooks

To limit scanning to specific playbooks instead of automatically discovering them, use the `--ansible-playbook` flag (it can be repeated) with the path to the playbook:

```bash
trivy config --ansible-playbook playbooks/main.yaml .
```

### Using inventory

By default, Trivy searches for inventory [in the default location](https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html#how-to-build-your-inventory): `/etc/ansible/hosts`. If an `ansible.cfg` file exists at the project root, the inventory path is taken from it.

To specify a custom inventory source, use the `--ansible-inventory` flag (same as Ansible’s `--inventory`). The flag can be repeated:

```bash
trivy config --ansible-inventory hosts.ini \
  --ansible-inventory inventory .
```
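
For reference, a minimal INI inventory that could be passed via `--ansible-inventory hosts.ini` might look like the following (hostnames and variables are illustrative; note that host ranges are not supported, see [Limitations](#limitations)):

```ini
# hosts.ini — example inventory
[webservers]
web1.example.com
web2.example.com

[webservers:vars]
bucket_region=us-east-1
```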

### Passing extra variables

To pass extra variables, use the `--ansible-extra-vars` flag (same as Ansible’s `--extra-vars`). The flag can be repeated:

```bash
trivy config --ansible-extra-vars region=us-east-1 \
  --ansible-extra-vars @vars.json .
```
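
Per `resolveAnsibleExtraVars` in this commit, a file passed as `@file` is parsed as JSON when its content starts with `{` and as YAML otherwise. A hypothetical `vars.json` (the variable names are illustrative):

```json
{
  "bucket_name": "my-bucket",
  "bucket_region": "us-east-1"
}
```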

### Rendering misconfiguration snippet

To display the rendered snippet, use the `--render-cause` flag.

Example output for an S3 bucket task using the `amazon.aws.s3_bucket` module:

```bash
trivy config --render-cause ansible .

...
──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
 447   - name: "Hetzner Cloud: Create Object Storage (S3 bucket) {{ hetzner_object_storage_name }}"
 448 ┌   amazon.aws.s3_bucket:
 449 │     endpoint_url: "{{ hetzner_object_storage_endpoint }}"
 450 │     ceph: true
 451 │     aws_access_key: "{{ hetzner_object_storage_access_key }}"
 452 │     aws_secret_key: "{{ hetzner_object_storage_secret_key }}"
 453 │     name: "{{ hetzner_object_storage_name }}"
 454 │     region: "{{ hetzner_object_storage_region }}"
 455 └     requester_pays: false
...
──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
Rendered cause:
──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
amazon.aws.s3_bucket:
  endpoint_url: https://us-east-1.your-objectstorage.com
  ceph: true
  aws_access_key: ""
  aws_secret_key: ""
  name: test-pgcluster-backup
  region: us-east-1
  requester_pays: false
  state: present

──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────
```

## Limitations

Ansible scanning has several limitations and does not support the following:

- Resolving remote collections
- Inventory, lookup, and filter plugins (except `dirname`)
- Setting facts (`set_fact`)
- Loops: `loop`, `with_<lookup>`, etc.
- Patterns in a play’s hosts field
- Host ranges in inventory, e.g., `www[01:50:2].example.com`
- Services other than AWS S3. If you would like to see support for other services or clouds, please open a discussion in the Trivy project.

@@ -8,17 +8,18 @@ Trivy scans Infrastructure as Code (IaC) files for

## Supported configurations

-| Config type                         | File patterns                    |
-|-------------------------------------|----------------------------------|
-| [Kubernetes](kubernetes.md)         | \*.yml, \*.yaml, \*.json         |
-| [Docker](docker.md)                 | Dockerfile, Containerfile        |
-| [Terraform](terraform.md)           | \*.tf, \*.tf.json, \*.tfvars     |
-| [Terraform Plan](terraform.md)      | tfplan, \*.tfplan, \*.json       |
-| [CloudFormation](cloudformation.md) | \*.yml, \*.yaml, \*.json         |
-| [Azure ARM Template](azure-arm.md)  | \*.json                          |
-| [Helm](helm.md)                     | \*.yaml, \*.tpl, \*.tar.gz, etc. |
-| [YAML][json-and-yaml]               | \*.yaml, \*.yml                  |
-| [JSON][json-and-yaml]               | \*.json                          |
+| Config type                         | File patterns                                       |
+|-------------------------------------|-----------------------------------------------------|
+| [Kubernetes](kubernetes.md)         | \*.yml, \*.yaml, \*.json                            |
+| [Docker](docker.md)                 | Dockerfile, Containerfile                           |
+| [Terraform](terraform.md)           | \*.tf, \*.tf.json, \*.tfvars                        |
+| [Terraform Plan](terraform.md)      | tfplan, \*.tfplan, \*.json                          |
+| [CloudFormation](cloudformation.md) | \*.yml, \*.yaml, \*.json                            |
+| [Azure ARM Template](azure-arm.md)  | \*.json                                             |
+| [Helm](helm.md)                     | \*.yaml, \*.tpl, \*.tar.gz, etc.                    |
+| [YAML][json-and-yaml]               | \*.yaml, \*.yml                                     |
+| [JSON][json-and-yaml]               | \*.json                                             |
+| [Ansible](ansible.md)               | \*.yml, \*.yaml, \*.json, \*.ini, without extension |

[misconf]: ../../scanner/misconfiguration/index.md
[secret]: ../../scanner/secret.md
@@ -9,6 +9,9 @@ trivy config [flags] DIR

### Options

```
+      --ansible-extra-vars strings         set additional variables as key=value or @file (YAML/JSON)
+      --ansible-inventory strings          specify inventory host path or comma separated host list
+      --ansible-playbook strings           specify playbook file path(s) to scan
      --cache-backend string               [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "memory")
      --cache-ttl duration                 cache TTL when using redis as cache backend
      --cf-params strings                  specify paths to override the CloudFormation parameters files

@@ -46,7 +49,7 @@ trivy config [flags] DIR
      --include-deprecated-checks          include deprecated checks
      --include-non-failures               include successes, available with '--scanners misconfig'
      --k8s-version string                 specify k8s version to validate outdated api by it (example: 1.21.0)
-      --misconfig-scanners strings         comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot])
+      --misconfig-scanners strings         comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot,ansible])
      --module-dir string                  specify directory to the wasm modules that will be loaded (default "$HOME/.trivy/modules")
  -o, --output string                      output file name
      --output-plugin-arg string           [EXPERIMENTAL] output plugin arguments

@@ -59,7 +62,7 @@ trivy config [flags] DIR
      --redis-tls                          enable redis TLS with public certificates, if using redis as cache backend
      --registry-token string              registry token
      --rego-error-limit int               maximum number of compile errors allowed during Rego policy evaluation (default 10)
-      --render-cause strings               specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform)
+      --render-cause strings               specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform,ansible)
      --report string                      specify a compliance report format for the output (allowed values: all,summary) (default "all")
  -s, --severity strings                   severities of security issues to be displayed
                                           Allowed values:

@@ -19,6 +19,9 @@ trivy filesystem [flags] PATH

### Options

```
+      --ansible-extra-vars strings             set additional variables as key=value or @file (YAML/JSON)
+      --ansible-inventory strings              specify inventory host path or comma separated host list
+      --ansible-playbook strings               specify playbook file path(s) to scan
      --cache-backend string                   [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "memory")
      --cache-ttl duration                     cache TTL when using redis as cache backend
      --cf-params strings                      specify paths to override the CloudFormation parameters files

@@ -82,7 +85,7 @@ trivy filesystem [flags] PATH
      --license-confidence-level float         specify license classifier's confidence level (default 0.9)
      --license-full                           eagerly look for licenses in source code headers and license files
      --list-all-pkgs                          output all packages in the JSON report regardless of vulnerability (default true)
-      --misconfig-scanners strings             comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot])
+      --misconfig-scanners strings             comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot,ansible])
      --module-dir string                      specify directory to the wasm modules that will be loaded (default "$HOME/.trivy/modules")
      --no-progress                            suppress progress bar
      --offline-scan                           do not issue API requests to identify dependencies

@@ -108,7 +111,7 @@ trivy filesystem [flags] PATH
      --registry-token string                  registry token
      --rego-error-limit int                   maximum number of compile errors allowed during Rego policy evaluation (default 10)
      --rekor-url string                       [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev")
-      --render-cause strings                   specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform)
+      --render-cause strings                   specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform,ansible)
      --report string                          specify a compliance report format for the output (allowed values: all,summary) (default "all")
      --sbom-sources strings                   [EXPERIMENTAL] try to retrieve SBOM from the specified sources (allowed values: oci,rekor)
      --scanners strings                       comma-separated list of what security issues to detect (allowed values: vuln,misconfig,secret,license) (default [vuln,secret])

@@ -34,6 +34,9 @@ trivy image [flags] IMAGE_NAME

### Options

```
+      --ansible-extra-vars strings         set additional variables as key=value or @file (YAML/JSON)
+      --ansible-inventory strings          specify inventory host path or comma separated host list
+      --ansible-playbook strings           specify playbook file path(s) to scan
      --cache-backend string               [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "fs")
      --cache-ttl duration                 cache TTL when using redis as cache backend
      --check-namespaces strings           Rego namespaces

@@ -101,7 +104,7 @@ trivy image [flags] IMAGE_NAME
      --license-full                       eagerly look for licenses in source code headers and license files
      --list-all-pkgs                      output all packages in the JSON report regardless of vulnerability (default true)
      --max-image-size string              [EXPERIMENTAL] maximum image size to process, specified in a human-readable format (e.g., '44kB', '17MB'); an error will be returned if the image exceeds this size
-      --misconfig-scanners strings         comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot])
+      --misconfig-scanners strings         comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot,ansible])
      --module-dir string                  specify directory to the wasm modules that will be loaded (default "$HOME/.trivy/modules")
      --no-progress                        suppress progress bar
      --offline-scan                       do not issue API requests to identify dependencies

@@ -130,7 +133,7 @@ trivy image [flags] IMAGE_NAME
      --rego-error-limit int               maximum number of compile errors allowed during Rego policy evaluation (default 10)
      --rekor-url string                   [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev")
      --removed-pkgs                       detect vulnerabilities of removed packages (only for Alpine)
-      --render-cause strings               specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform)
+      --render-cause strings               specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform,ansible)
      --report string                      specify a format for the compliance report. (allowed values: all,summary) (default "summary")
      --sbom-sources strings               [EXPERIMENTAL] try to retrieve SBOM from the specified sources (allowed values: oci,rekor)
      --scanners strings                   comma-separated list of what security issues to detect (allowed values: vuln,misconfig,secret,license) (default [vuln,secret])

@@ -29,6 +29,9 @@ trivy kubernetes [flags] [CONTEXT]

### Options

```
+      --ansible-extra-vars strings              set additional variables as key=value or @file (YAML/JSON)
+      --ansible-inventory strings               specify inventory host path or comma separated host list
+      --ansible-playbook strings                specify playbook file path(s) to scan
      --burst int                               specify the maximum burst for throttle (default 10)
      --cache-backend string                    [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "fs")
      --cache-ttl duration                      cache TTL when using redis as cache backend

@@ -92,7 +95,7 @@ trivy kubernetes [flags] [CONTEXT]
      --k8s-version string                      specify k8s version to validate outdated api by it (example: 1.21.0)
      --kubeconfig string                       specify the kubeconfig file path to use
      --list-all-pkgs                           output all packages in the JSON report regardless of vulnerability (default true)
-      --misconfig-scanners strings              comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot])
+      --misconfig-scanners strings              comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot,ansible])
      --no-progress                             suppress progress bar
      --node-collector-imageref string          indicate the image reference for the node-collector scan job (default "ghcr.io/aquasecurity/node-collector:0.3.1")
      --node-collector-namespace string         specify the namespace in which the node-collector job should be deployed (default "trivy-temp")

@@ -120,7 +123,7 @@ trivy kubernetes [flags] [CONTEXT]
      --registry-token string                   registry token
      --rego-error-limit int                    maximum number of compile errors allowed during Rego policy evaluation (default 10)
      --rekor-url string                        [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev")
-      --render-cause strings                    specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform)
+      --render-cause strings                    specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform,ansible)
      --report string                           specify a report format for the output (allowed values: all,summary) (default "all")
      --sbom-sources strings                    [EXPERIMENTAL] try to retrieve SBOM from the specified sources (allowed values: oci,rekor)
      --scanners strings                        comma-separated list of what security issues to detect (allowed values: vuln,misconfig,secret,rbac) (default [vuln,misconfig,secret,rbac])

@@ -18,6 +18,9 @@ trivy repository [flags] (REPO_PATH | REPO_URL)

### Options

```
+      --ansible-extra-vars strings             set additional variables as key=value or @file (YAML/JSON)
+      --ansible-inventory strings              specify inventory host path or comma separated host list
+      --ansible-playbook strings               specify playbook file path(s) to scan
      --branch string                          pass the branch name to be scanned
      --cache-backend string                   [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "fs")
      --cache-ttl duration                     cache TTL when using redis as cache backend

@@ -81,7 +84,7 @@ trivy repository [flags] (REPO_PATH | REPO_URL)
      --license-confidence-level float         specify license classifier's confidence level (default 0.9)
      --license-full                           eagerly look for licenses in source code headers and license files
      --list-all-pkgs                          output all packages in the JSON report regardless of vulnerability (default true)
-      --misconfig-scanners strings             comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot])
+      --misconfig-scanners strings             comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot,ansible])
      --module-dir string                      specify directory to the wasm modules that will be loaded (default "$HOME/.trivy/modules")
      --no-progress                            suppress progress bar
      --offline-scan                           do not issue API requests to identify dependencies

@@ -107,7 +110,7 @@ trivy repository [flags] (REPO_PATH | REPO_URL)
      --registry-token string                  registry token
      --rego-error-limit int                   maximum number of compile errors allowed during Rego policy evaluation (default 10)
      --rekor-url string                       [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev")
-      --render-cause strings                   specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform)
+      --render-cause strings                   specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform,ansible)
      --sbom-sources strings                   [EXPERIMENTAL] try to retrieve SBOM from the specified sources (allowed values: oci,rekor)
      --scanners strings                       comma-separated list of what security issues to detect (allowed values: vuln,misconfig,secret,license) (default [vuln,secret])
      --secret-config string                   specify a path to config file for secret scanning (default "trivy-secret.yaml")

@@ -22,6 +22,9 @@ trivy rootfs [flags] ROOTDIR

### Options

```
+      --ansible-extra-vars strings             set additional variables as key=value or @file (YAML/JSON)
+      --ansible-inventory strings              specify inventory host path or comma separated host list
+      --ansible-playbook strings               specify playbook file path(s) to scan
      --cache-backend string                   [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "memory")
      --cache-ttl duration                     cache TTL when using redis as cache backend
      --cf-params strings                      specify paths to override the CloudFormation parameters files

@@ -84,7 +87,7 @@ trivy rootfs [flags] ROOTDIR
      --license-confidence-level float         specify license classifier's confidence level (default 0.9)
      --license-full                           eagerly look for licenses in source code headers and license files
      --list-all-pkgs                          output all packages in the JSON report regardless of vulnerability (default true)
-      --misconfig-scanners strings             comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot])
+      --misconfig-scanners strings             comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot,ansible])
      --module-dir string                      specify directory to the wasm modules that will be loaded (default "$HOME/.trivy/modules")
      --no-progress                            suppress progress bar
      --offline-scan                           do not issue API requests to identify dependencies

@@ -110,7 +113,7 @@ trivy rootfs [flags] ROOTDIR
      --registry-token string                  registry token
      --rego-error-limit int                   maximum number of compile errors allowed during Rego policy evaluation (default 10)
      --rekor-url string                       [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev")
-      --render-cause strings                   specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform)
+      --render-cause strings                   specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform,ansible)
      --sbom-sources strings                   [EXPERIMENTAL] try to retrieve SBOM from the specified sources (allowed values: oci,rekor)
      --scanners strings                       comma-separated list of what security issues to detect (allowed values: vuln,misconfig,secret,license) (default [vuln,secret])
      --secret-config string                   specify a path to config file for secret scanning (default "trivy-secret.yaml")

@@ -20,6 +20,9 @@ trivy vm [flags] VM_IMAGE

### Options

```
+      --ansible-extra-vars strings       set additional variables as key=value or @file (YAML/JSON)
+      --ansible-inventory strings        specify inventory host path or comma separated host list
+      --ansible-playbook strings         specify playbook file path(s) to scan
      --aws-region string                AWS region to scan
      --cache-backend string             [EXPERIMENTAL] cache backend (e.g. redis://localhost:6379) (default "fs")
      --cache-ttl duration               cache TTL when using redis as cache backend

@@ -76,7 +79,7 @@ trivy vm [flags] VM_IMAGE
      --include-non-failures             include successes, available with '--scanners misconfig'
      --java-db-repository strings       OCI repository(ies) to retrieve trivy-java-db in order of priority (default [mirror.gcr.io/aquasec/trivy-java-db:1,ghcr.io/aquasecurity/trivy-java-db:1])
      --list-all-pkgs                    output all packages in the JSON report regardless of vulnerability (default true)
-      --misconfig-scanners strings       comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot])
+      --misconfig-scanners strings       comma-separated list of misconfig scanners to use for misconfiguration scanning (default [azure-arm,cloudformation,dockerfile,helm,kubernetes,terraform,terraformplan-json,terraformplan-snapshot,ansible])
      --module-dir string                specify directory to the wasm modules that will be loaded (default "$HOME/.trivy/modules")
      --no-progress                      suppress progress bar
      --offline-scan                     do not issue API requests to identify dependencies

@@ -98,7 +101,7 @@ trivy vm [flags] VM_IMAGE
      --redis-key string                 redis key file location, if using redis as cache backend
      --redis-tls                        enable redis TLS with public certificates, if using redis as cache backend
      --rekor-url string                 [EXPERIMENTAL] address of rekor STL server (default "https://rekor.sigstore.dev")
-      --render-cause strings             specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform)
+      --render-cause strings             specify configuration types for which the rendered causes will be shown in the table report (allowed values: terraform,ansible)
      --sbom-sources strings             [EXPERIMENTAL] try to retrieve SBOM from the specified sources (allowed values: oci,rekor)
      --scanners strings                 comma-separated list of what security issues to detect (allowed values: vuln,misconfig,secret,license) (default [vuln,secret])
      --secret-config string             specify a path to config file for secret scanning (default "trivy-secret.yaml")

@@ -379,6 +379,16 @@ license:

## Misconfiguration options

```yaml
+ansible:
+  # Same as '--ansible-extra-vars'
+  extra-vars: []
+
+  # Same as '--ansible-inventory'
+  inventories: []
+
+  # Same as '--ansible-playbook'
+  playbooks: []
+
misconfiguration:
  # Same as '--checks-bundle-repository'
  checks-bundle-repository: "mirror.gcr.io/aquasec/trivy-checks:1"

@@ -428,6 +438,7 @@ misconfiguration:
    - terraform
    - terraformplan-json
    - terraformplan-snapshot
+    - ansible

  terraform:
    # Same as '--tf-exclude-downloaded-modules'

go.mod (8 lines changed)
@@ -132,6 +132,11 @@ require (
	modernc.org/sqlite v1.40.1
)

+require (
+	github.com/go-ini/ini v1.67.0
+	github.com/nikolalohinski/gonja/v2 v2.3.5
+)
+
require (
	buf.build/gen/go/bufbuild/bufplugin/protocolbuffers/go v1.36.6-20250718181942-e35f9b667443.1 // indirect
	buf.build/gen/go/bufbuild/protovalidate/protocolbuffers/go v1.36.6-20250717185734-6c6e0d3c608e.1 // indirect

@@ -253,7 +258,6 @@ require (
	github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect
	github.com/go-git/go-billy/v5 v5.6.2 // indirect
	github.com/go-gorp/gorp/v3 v3.1.0 // indirect
-	github.com/go-ini/ini v1.67.0 // indirect
	github.com/go-jose/go-jose/v4 v4.1.2 // indirect
	github.com/go-logr/logr v1.4.3 // indirect
	github.com/go-logr/stdr v1.2.2 // indirect

@@ -365,8 +369,6 @@ require (
	github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481 // indirect
	github.com/oklog/ulid v1.3.1 // indirect
	github.com/oklog/ulid/v2 v2.1.1 // indirect
-	github.com/onsi/ginkgo/v2 v2.23.4 // indirect
-	github.com/onsi/gomega v1.36.3 // indirect
	github.com/opencontainers/runtime-spec v1.2.1 // indirect
	github.com/opencontainers/selinux v1.13.0 // indirect
	github.com/owenrumney/squealer v1.2.11 // indirect

go.sum (6 lines changed)
@@ -949,6 +949,8 @@ github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+
github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4=
github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
+github.com/nikolalohinski/gonja/v2 v2.3.5 h1:7ukCnsokmOIGXOjgW/WrM+xqgwjsQcU0ejFrrz4HQXk=
+github.com/nikolalohinski/gonja/v2 v2.3.5/go.mod h1:UIzXPVuOsr5h7dZ5DUbqk3/Z7oFA/NLGQGMjqT4L2aU=
github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481 h1:Up6+btDp321ZG5/zdSLo48H9Iaq0UQGthrhWC6pCxzE=
github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481/go.mod h1:yKZQO8QE2bHlgozqWDiRVqTFlLQSj30K/6SAK8EeYFw=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=

@@ -973,8 +975,8 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY=
github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro=
-github.com/onsi/gomega v1.36.3 h1:hID7cr8t3Wp26+cYnfcjR6HpJ00fdogN6dqZ1t6IylU=
-github.com/onsi/gomega v1.36.3/go.mod h1:8D9+Txp43QWKhM24yyOBEdpkzN8FvJyAwecBgsU4KU0=
+github.com/onsi/gomega v1.37.0 h1:CdEG8g0S133B4OswTDC/5XPSzE1OeP29QOioj2PID2Y=
+github.com/onsi/gomega v1.37.0/go.mod h1:8D9+Txp43QWKhM24yyOBEdpkzN8FvJyAwecBgsU4KU0=
github.com/open-policy-agent/opa v1.10.1 h1:haIvxZSPky8HLjRrvQwWAjCPLg8JDFSZMbbG4yyUHgY=
github.com/open-policy-agent/opa v1.10.1/go.mod h1:7uPI3iRpOalJ0BhK6s1JALWPU9HvaV1XeBSSMZnr/PM=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
@@ -108,6 +108,7 @@ nav:
        - Julia: guide/coverage/language/julia.md
      - IaC:
        - Overview: guide/coverage/iac/index.md
+       - Ansible: guide/coverage/iac/ansible.md
        - Azure ARM Template: guide/coverage/iac/azure-arm.md
        - CloudFormation: guide/coverage/iac/cloudformation.md
        - Docker: guide/coverage/iac/docker.md
@@ -1,9 +1,12 @@
package artifact

import (
+	"bytes"
	"context"
+	"encoding/json"
	"errors"
	"fmt"
+	"maps"
	"os"
	"slices"
	"strings"

@@ -12,6 +15,7 @@ import (
	"github.com/samber/lo"
	"github.com/spf13/viper"
	"golang.org/x/xerrors"
+	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/cache"
	"github.com/aquasecurity/trivy/pkg/commands/operation"

@@ -734,6 +738,12 @@ func initMisconfScannerOption(ctx context.Context, opts flag.Options) (misconf.S
		return misconf.ScannerOption{}, xerrors.Errorf("load schemas error: %w", err)
	}

+	ansibleExtraVars, err := resolveAnsibleExtraVars(opts.AnsibleExtraVars)
+	if err != nil {
+		log.DebugContext(ctx, "Failed to resolve Ansible extra-vars", log.Err(err))
+		ansibleExtraVars = make(map[string]any)
+	}
+
	misconfOpts := misconf.ScannerOption{
		Trace:      opts.RegoOptions.Trace,
		Namespaces: append(opts.CheckNamespaces, rego.BuiltinNamespaces()...),

@@ -758,6 +768,9 @@ func initMisconfScannerOption(ctx context.Context, opts flag.Options) (misconf.S
		ConfigFileSchemas:  configSchemas,
		SkipFiles:          opts.SkipFiles,
		SkipDirs:           opts.SkipDirs,
+		AnsiblePlaybooks:   opts.AnsiblePlaybooks,
+		AnsibleInventories: opts.AnsibleInventories,
+		AnsibleExtraVars:   ansibleExtraVars,
	}

	regoScanner, err := misconf.InitRegoScanner(misconfOpts)

@@ -768,3 +781,44 @@ func initMisconfScannerOption(ctx context.Context, opts flag.Options) (misconf.S
	misconfOpts.RegoScanner = regoScanner
	return misconfOpts, nil
}
+
+func resolveAnsibleExtraVars(inputs []string) (map[string]any, error) {
+	result := make(map[string]any)
+
+	for _, input := range inputs {
+		var vars map[string]any
+
+		switch {
+		case strings.HasPrefix(input, "@"):
+			data, err := os.ReadFile(input[1:])
+			if err != nil {
+				return nil, fmt.Errorf("read extra-vars file %s: %w", input[1:], err)
+			}
+			trimmed := bytes.TrimSpace(data)
+			if len(trimmed) > 0 && trimmed[0] == '{' {
+				// parse as JSON object
+				if err := json.Unmarshal(trimmed, &vars); err != nil {
+					return nil, fmt.Errorf("parse extra-vars JSON file %s: %w", input[1:], err)
+				}
+			} else {
+				// parse as YAML
+				if err := yaml.Unmarshal(trimmed, &vars); err != nil {
+					return nil, fmt.Errorf("parse extra-vars YAML file %s: %w", input[1:], err)
+				}
+			}
+		case strings.Contains(input, "="):
+			kv := strings.SplitN(input, "=", 2)
+			var val string
+			if len(kv) == 2 {
+				val = kv[1]
+			}
+			vars = map[string]any{kv[0]: val}
+		default:
+			return nil, fmt.Errorf("invalid extra-vars input: %s", input)
+		}
+
+		maps.Copy(result, vars)
+	}
+
+	return result, nil
+}
@@ -1,6 +1,7 @@
package all

import (
+	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/ansible"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/azurearm"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/cloudformation"
	_ "github.com/aquasecurity/trivy/pkg/fanal/analyzer/config/dockerfile"

pkg/fanal/analyzer/config/ansible/ansible.go (new file, 37 lines)
@@ -0,0 +1,37 @@
package ansible

import (
	"os"
	"path/filepath"
	"slices"

	"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
	"github.com/aquasecurity/trivy/pkg/fanal/analyzer/config"
	"github.com/aquasecurity/trivy/pkg/iac/detection"
)

const (
	version      = 1
	analyzerType = analyzer.TypeAnsible
)

func init() {
	analyzer.RegisterPostAnalyzer(analyzerType, newAnsibleConfigAnalyzer)
}

type ansibleConfigAnalyzer struct {
	*config.Analyzer
}

func newAnsibleConfigAnalyzer(opts analyzer.AnalyzerOptions) (analyzer.PostAnalyzer, error) {
	a, err := config.NewAnalyzer(analyzerType, version, detection.FileTypeAnsible, opts)
	if err != nil {
		return nil, err
	}
	return &ansibleConfigAnalyzer{Analyzer: a}, nil
}

func (a *ansibleConfigAnalyzer) Required(filePath string, _ os.FileInfo) bool {
	return filepath.Base(filePath) == "ansible.cfg" ||
		slices.Contains([]string{"", ".yml", ".yaml", ".json", ".ini"}, filepath.Ext(filePath))
}

pkg/fanal/analyzer/config/ansible/ansible_test.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package ansible

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
)

func Test_ansibleConfigAnalyzer_Required(t *testing.T) {
	tests := []struct {
		name     string
		filePath string
		want     bool
	}{
		{
			name:     "yaml",
			filePath: "test.yaml",
			want:     true,
		},
		{
			name:     "yml",
			filePath: "test.yml",
			want:     true,
		},
		{
			name:     "json",
			filePath: "test.json",
			want:     true,
		},
		{
			name:     "init",
			filePath: "test.ini",
			want:     true,
		},
		{
			name:     "without extension",
			filePath: "test",
			want:     true,
		},
		{
			name:     "config file",
			filePath: "ansible.cfg",
			want:     true,
		},
		{
			name:     "just cfg",
			filePath: "test.cfg",
			want:     false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			a, err := newAnsibleConfigAnalyzer(analyzer.AnalyzerOptions{})
			require.NoError(t, err)
			assert.Equal(t, tt.want, a.Required(tt.filePath, nil))
		})
	}
}
@@ -132,6 +132,7 @@ const (
	TypeTerraformPlanSnapshot Type = Type(detection.FileTypeTerraformPlanSnapshot)
	TypeYAML                  Type = Type(detection.FileTypeYAML)
	TypeJSON                  Type = Type(detection.FileTypeJSON)
+	TypeAnsible               Type = Type(detection.FileTypeAnsible)

	// ========
	// License

@@ -266,5 +267,6 @@ var (
		TypeTerraformPlanSnapshot,
		TypeYAML,
		TypeJSON,
+		TypeAnsible,
	}
)
@@ -36,7 +36,7 @@ import (

// Common blob IDs used across multiple test cases to reduce duplication
const (
-	alpineBaseLayerID     = "sha256:be60f1fe61fc63ab50b10fe0779614e605a973a38cd7d2a02f3f20b081e56d4a"
+	alpineBaseLayerID     = "sha256:5fa8e7300cfe1b8f70c304e3b04f9b1f022942a0dc57d3fc0d4d3f04327e6d2a"
	alpineBaseLayerDiffID = "sha256:beee9f30bc1f711043e78d4a2be0668955d4b761d587d6f60c2c8dc081efb203"
	alpineArtifactID      = "sha256:3c709d2a158be3a97051e10cd0e30f047225cb9505101feb3fadcd395c2e0408"
	composerImageID       = "sha256:a187dde48cd289ac374ad8539930628314bc581a481cdb41409c9289419ddb72"

@@ -510,7 +510,7 @@ func TestArtifact_Inspect(t *testing.T) {
			},
			wantBlobs: []cachetest.WantBlob{
				{
-					ID: "sha256:f2a647dcf780c603f864e491dca1a042b1e98062b530c813681d1bb4a85bcb18",
+					ID: "sha256:5b61242ed7786d642c7037c5d42c97ef4eb77e79b5cee7d47c3a2476bdd37e54",
					BlobInfo: types.BlobInfo{
						SchemaVersion: types.BlobJSONSchemaVersion,
						Size:          3061760,

@@ -598,7 +598,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
			},
			{
-				ID: "sha256:c988cc5a0b8f3dc542c15c303d9200dee47d4fbed0e498a5bfbf3b4bef7a5af7",
+				ID: "sha256:1a8ac8af11a039295f3fffd3e058c034dae966ac7ace649121f0559146133ee5",
				BlobInfo: types.BlobInfo{
					SchemaVersion: types.BlobJSONSchemaVersion,
					Size:          15441920,

@@ -693,7 +693,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
			},
			{
-				ID: "sha256:05c19ffd5d898588400522070abd98c770b2965a7f4867d5c882c2a8783e40cc",
+				ID: "sha256:a686ab4c4132800a0d67a8ddf33dd89387d750a7b3427c01b9ce7bf3219cadfb",
				BlobInfo: types.BlobInfo{
					SchemaVersion: types.BlobJSONSchemaVersion,
					Size:          29696,

@@ -900,7 +900,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
			},
			{
-				ID: "sha256:c737743c0f8b35906650a02125f05c8b35916c0febf64984f4dfaacd0f72509d",
+				ID: "sha256:789b01e58c608d3a3021ce18cf6c8bd21e701116134089d949da35a25f73d9ec",
				BlobInfo: types.BlobInfo{
					SchemaVersion: types.BlobJSONSchemaVersion,
					Size:          6656,

@@ -1763,10 +1763,10 @@ func TestArtifact_Inspect(t *testing.T) {
			Type: types.TypeContainerImage,
			ID:   "sha256:0bebf0773ffd87baa7c64fbdbdf79a24ae125e3f99a8adebe52d1ccbe6bed16b",
			BlobIDs: []string{
-				"sha256:f2a647dcf780c603f864e491dca1a042b1e98062b530c813681d1bb4a85bcb18",
-				"sha256:c988cc5a0b8f3dc542c15c303d9200dee47d4fbed0e498a5bfbf3b4bef7a5af7",
-				"sha256:05c19ffd5d898588400522070abd98c770b2965a7f4867d5c882c2a8783e40cc",
-				"sha256:c737743c0f8b35906650a02125f05c8b35916c0febf64984f4dfaacd0f72509d",
+				"sha256:5b61242ed7786d642c7037c5d42c97ef4eb77e79b5cee7d47c3a2476bdd37e54",
+				"sha256:1a8ac8af11a039295f3fffd3e058c034dae966ac7ace649121f0559146133ee5",
+				"sha256:a686ab4c4132800a0d67a8ddf33dd89387d750a7b3427c01b9ce7bf3219cadfb",
+				"sha256:789b01e58c608d3a3021ce18cf6c8bd21e701116134089d949da35a25f73d9ec",
			},
			ImageMetadata: artifact.ImageMetadata{
				ID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4",

@@ -1874,7 +1874,7 @@ func TestArtifact_Inspect(t *testing.T) {
			},
			wantBlobs: []cachetest.WantBlob{
				{
-					ID: "sha256:48b4a983ef1ec8f0d19934ccf7fca3d2114466ad32207e16371620628f149984",
+					ID: "sha256:a83985cade3970577a9af328db9c88c0bf15cad40f7d2cf6d76e83882bc8146d",
					BlobInfo: types.BlobInfo{
						SchemaVersion: types.BlobJSONSchemaVersion,
						Size:          3061760,

@@ -1884,7 +1884,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
			},
			{
-				ID: "sha256:a4d2820bd2c076f6153a9053843d4a56d31147ce486ec5e4a2c0405cec506d6c",
+				ID: "sha256:b109622c2d106193db505762f1f3e78cf0035a69e559caf07c305c92ddb89356",
				BlobInfo: types.BlobInfo{
					SchemaVersion: types.BlobJSONSchemaVersion,
					Size:          15441920,

@@ -1894,7 +1894,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
			},
			{
-				ID: "sha256:c5fa5e736cee843c563c222963eb89fc775f0620020ff9d51d5e5db8ef62eec4",
+				ID: "sha256:115f689385cb66077c338c52f2c9d6f3018a18c89be7fe7d23f1645422d7d59d",
				BlobInfo: types.BlobInfo{
					SchemaVersion: types.BlobJSONSchemaVersion,
					Size:          29696,

@@ -1905,7 +1905,7 @@ func TestArtifact_Inspect(t *testing.T) {
				},
			},
			{
-				ID: "sha256:7e223b95d6d589cdb196e29ef6c6ac0acdd2c471350dd9880a420b4249f6e7bb",
+				ID: "sha256:60129d309cd4f16d69262106d6074f37c6d37f6c9089a9710ec96ae067716636",
				BlobInfo: types.BlobInfo{
					SchemaVersion: types.BlobJSONSchemaVersion,
					Size:          6656,

@@ -1921,10 +1921,10 @@ func TestArtifact_Inspect(t *testing.T) {
			Type: types.TypeContainerImage,
			ID:   "sha256:0bebf0773ffd87baa7c64fbdbdf79a24ae125e3f99a8adebe52d1ccbe6bed16b",
			BlobIDs: []string{
-				"sha256:48b4a983ef1ec8f0d19934ccf7fca3d2114466ad32207e16371620628f149984",
-				"sha256:a4d2820bd2c076f6153a9053843d4a56d31147ce486ec5e4a2c0405cec506d6c",
-				"sha256:c5fa5e736cee843c563c222963eb89fc775f0620020ff9d51d5e5db8ef62eec4",
-				"sha256:7e223b95d6d589cdb196e29ef6c6ac0acdd2c471350dd9880a420b4249f6e7bb",
+				"sha256:a83985cade3970577a9af328db9c88c0bf15cad40f7d2cf6d76e83882bc8146d",
+				"sha256:b109622c2d106193db505762f1f3e78cf0035a69e559caf07c305c92ddb89356",
+				"sha256:115f689385cb66077c338c52f2c9d6f3018a18c89be7fe7d23f1645422d7d59d",
+				"sha256:60129d309cd4f16d69262106d6074f37c6d37f6c9089a9710ec96ae067716636",
			},
			ImageMetadata: artifact.ImageMetadata{
				ID: "sha256:58701fd185bda36cab0557bb6438661831267aa4a9e0b54211c4d5317a48aff4",

@@ -226,7 +226,7 @@ func TestArtifact_Inspect(t *testing.T) {
		wantBlobs: []cachetest.WantBlob{
			{
				// Cache key is based on commit hash (8a19b492a589955c3e70c6ad8efd1e4ec6ae0d35)
-				ID: "sha256:c7173e152a268c038257b877794285986c52ac569de7e516b2963f557f4e26ee",
+				ID: "sha256:d37c788d6fe832712cce9020943746b8764c04f7e323ed4ad68de36c5bf7d846",
				BlobInfo: types.BlobInfo{
					SchemaVersion: types.BlobJSONSchemaVersion,
				},

@@ -235,9 +235,9 @@ func TestArtifact_Inspect(t *testing.T) {
		want: artifact.Reference{
			Name: "../../../../internal/gittest/testdata/test-repo",
			Type: types.TypeRepository,
-			ID:   "sha256:c7173e152a268c038257b877794285986c52ac569de7e516b2963f557f4e26ee",
+			ID:   "sha256:d37c788d6fe832712cce9020943746b8764c04f7e323ed4ad68de36c5bf7d846",
			BlobIDs: []string{
-				"sha256:c7173e152a268c038257b877794285986c52ac569de7e516b2963f557f4e26ee",
+				"sha256:d37c788d6fe832712cce9020943746b8764c04f7e323ed4ad68de36c5bf7d846",
			},
			RepoMetadata: artifact.RepoMetadata{
				RepoURL: "https://github.com/aquasecurity/trivy-test-repo/",

@@ -2383,7 +2383,7 @@ func TestYAMLConfigScan(t *testing.T) {
					Severity: "LOW",
				},
				CauseMetadata: types.CauseMetadata{
-					Provider: "Generic",
+					Provider: "Yaml",
					Service:  "general",
				},
			},

@@ -2405,7 +2405,7 @@ func TestYAMLConfigScan(t *testing.T) {
					Severity: "LOW",
				},
				CauseMetadata: types.CauseMetadata{
-					Provider: "Generic",
+					Provider: "Yaml",
					Service:  "general",
				},
			},

@@ -2454,7 +2454,7 @@ func TestYAMLConfigScan(t *testing.T) {
					Severity: "LOW",
				},
				CauseMetadata: types.CauseMetadata{
-					Provider: "Generic",
+					Provider: "Yaml",
					Service:  "general",
				},
			},
@@ -4,6 +4,9 @@
# id: TEST001
# avd_id: TEST001
# severity: LOW
+# input:
+#   selector:
+#     - type: yaml
package user.test_yaml_check

deny[res] {

@@ -4,6 +4,9 @@
# id: TEST001
# avd_id: TEST001
# severity: LOW
+# input:
+#   selector:
+#     - type: yaml
package user.test_yaml_check

deny[res] {
@@ -183,9 +183,9 @@ func TestArtifact_Inspect(t *testing.T) {
			want: artifact.Reference{
				Name: ts.URL + "/test-repo.git",
				Type: types.TypeRepository,
-				ID:   "sha256:dc7c6039424c9fce969d3c2972d261af442a33f13e7494464386dbe280612d4c", // Calculated from commit hash
+				ID:   "sha256:1587f4be90cf95b3e1b733512d674301f5fe4200055f10efa4dbf0d5e590d32d", // Calculated from commit hash
				BlobIDs: []string{
-					"sha256:dc7c6039424c9fce969d3c2972d261af442a33f13e7494464386dbe280612d4c", // Calculated from commit hash
+					"sha256:1587f4be90cf95b3e1b733512d674301f5fe4200055f10efa4dbf0d5e590d32d", // Calculated from commit hash
				},
				RepoMetadata: artifact.RepoMetadata{
					RepoURL: ts.URL + "/test-repo.git",

@@ -207,9 +207,9 @@ func TestArtifact_Inspect(t *testing.T) {
			want: artifact.Reference{
				Name: "../../../../internal/gittest/testdata/test-repo",
				Type: types.TypeRepository,
-				ID:   "sha256:dc7c6039424c9fce969d3c2972d261af442a33f13e7494464386dbe280612d4c", // Calculated from commit hash
+				ID:   "sha256:1587f4be90cf95b3e1b733512d674301f5fe4200055f10efa4dbf0d5e590d32d", // Calculated from commit hash
				BlobIDs: []string{
-					"sha256:dc7c6039424c9fce969d3c2972d261af442a33f13e7494464386dbe280612d4c", // Calculated from commit hash
+					"sha256:1587f4be90cf95b3e1b733512d674301f5fe4200055f10efa4dbf0d5e590d32d", // Calculated from commit hash
				},
				RepoMetadata: artifact.RepoMetadata{
					RepoURL: "https://github.com/aquasecurity/trivy-test-repo/",

@@ -267,16 +267,16 @@ func TestArtifact_Inspect(t *testing.T) {
				},
			}
			// Store the blob info in the cache to test cache hit
-			cacheKey := "sha256:dc7c6039424c9fce969d3c2972d261af442a33f13e7494464386dbe280612d4c"
+			cacheKey := "sha256:1587f4be90cf95b3e1b733512d674301f5fe4200055f10efa4dbf0d5e590d32d"
			err := c.PutBlob(t.Context(), cacheKey, blobInfo)
			require.NoError(t, err)
		},
		want: artifact.Reference{
			Name: "../../../../internal/gittest/testdata/test-repo",
			Type: types.TypeRepository,
-			ID:   "sha256:dc7c6039424c9fce969d3c2972d261af442a33f13e7494464386dbe280612d4c",
+			ID:   "sha256:1587f4be90cf95b3e1b733512d674301f5fe4200055f10efa4dbf0d5e590d32d",
			BlobIDs: []string{
-				"sha256:dc7c6039424c9fce969d3c2972d261af442a33f13e7494464386dbe280612d4c",
+				"sha256:1587f4be90cf95b3e1b733512d674301f5fe4200055f10efa4dbf0d5e590d32d",
			},
			RepoMetadata: artifact.RepoMetadata{
				RepoURL: "https://github.com/aquasecurity/trivy-test-repo/",
@@ -154,6 +154,7 @@ const (
	Helm     ConfigType = "helm"
	Cloud    ConfigType = "cloud"
	AzureARM ConfigType = "azure-arm"
+	Ansible  ConfigType = "ansible"
)

// Language-specific file names
@@ -116,7 +116,7 @@ var (
		Name:       "render-cause",
		ConfigName: "misconfiguration.render-cause",
		Usage:      "specify configuration types for which the rendered causes will be shown in the table report",
-		Values:     xstrings.ToStringSlice([]types.ConfigType{types.Terraform}), // TODO: add Plan and JSON?
+		Values:     xstrings.ToStringSlice([]types.ConfigType{types.Terraform, types.Ansible}), // TODO: add Plan and JSON?
		Default:    []string{},
	}
	RawConfigScanners = Flag[[]string]{

@@ -126,6 +126,21 @@ var (
		Values:  xstrings.ToStringSlice([]types.ConfigType{types.Terraform}),
		Default: []string{},
	}
+	AnsiblePlaybooks = Flag[[]string]{
+		Name:       "ansible-playbook",
+		ConfigName: "ansible.playbooks",
+		Usage:      "specify playbook file path(s) to scan",
+	}
+	AnsibleInventories = Flag[[]string]{
+		Name:       "ansible-inventory",
+		ConfigName: "ansible.inventories",
+		Usage:      "specify inventory host path or comma separated host list",
+	}
+	AnsibleExtraVars = Flag[[]string]{
+		Name:       "ansible-extra-vars",
+		ConfigName: "ansible.extra-vars",
+		Usage:      "set additional variables as key=value or @file (YAML/JSON)",
+	}
)

// MisconfFlagGroup composes common printer flag structs used for commands providing misconfiguration scanning.

@@ -148,6 +163,10 @@ type MisconfFlagGroup struct {
	ConfigFileSchemas *Flag[[]string]
	RenderCause       *Flag[[]string]
	RawConfigScanners *Flag[[]string]
+
+	AnsiblePlaybooks   *Flag[[]string]
+	AnsibleInventories *Flag[[]string]
+	AnsibleExtraVars   *Flag[[]string]
}

type MisconfOptions struct {

@@ -169,6 +188,10 @@ type MisconfOptions struct {
	ConfigFileSchemas []string
	RenderCause       []types.ConfigType
	RawConfigScanners []types.ConfigType
+
+	AnsiblePlaybooks   []string
+	AnsibleInventories []string
+	AnsibleExtraVars   []string
}

func NewMisconfFlagGroup() *MisconfFlagGroup {

@@ -190,6 +213,10 @@ func NewMisconfFlagGroup() *MisconfFlagGroup {
		ConfigFileSchemas: ConfigFileSchemasFlag.Clone(),
		RenderCause:       RenderCauseFlag.Clone(),
		RawConfigScanners: RawConfigScanners.Clone(),
+
+		AnsiblePlaybooks:   AnsiblePlaybooks.Clone(),
+		AnsibleInventories: AnsibleInventories.Clone(),
+		AnsibleExtraVars:   AnsibleExtraVars.Clone(),
	}
}

@@ -215,6 +242,9 @@ func (f *MisconfFlagGroup) Flags() []Flagger {
		f.ConfigFileSchemas,
		f.RenderCause,
		f.RawConfigScanners,
+		f.AnsiblePlaybooks,
+		f.AnsibleInventories,
+		f.AnsibleExtraVars,
	}
}

@@ -236,6 +266,9 @@ func (f *MisconfFlagGroup) ToOptions(opts *Options) error {
		ConfigFileSchemas: f.ConfigFileSchemas.Value(),
		RenderCause:       xstrings.ToTSlice[types.ConfigType](f.RenderCause.Value()),
		RawConfigScanners: xstrings.ToTSlice[types.ConfigType](f.RawConfigScanners.Value()),
+		AnsiblePlaybooks:   f.AnsiblePlaybooks.Value(),
+		AnsibleInventories: f.AnsibleInventories.Value(),
+		AnsibleExtraVars:   f.AnsibleExtraVars.Value(),
	}
	return nil
}

pkg/iac/adapters/ansible/adapt.go (new file, 17 lines)
@@ -0,0 +1,17 @@
package ansible

import (
	"github.com/aquasecurity/trivy/pkg/iac/adapters/ansible/aws/s3"
	"github.com/aquasecurity/trivy/pkg/iac/providers/aws"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/parser"
	"github.com/aquasecurity/trivy/pkg/iac/state"
)

func Adapt(tasks parser.ResolvedTasks) state.State {
	return state.State{
		AWS: aws.AWS{
			S3: s3.Adapt(tasks),
			// TODO(simar): Add other AWS services
		},
	}
}

pkg/iac/adapters/ansible/aws/s3/adapt.go (new file, 17 lines)
@@ -0,0 +1,17 @@
package s3

import (
	"github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/parser"
)

func Adapt(tasks parser.ResolvedTasks) s3.S3 {
	a := &adapter{
		tasks:     tasks,
		bucketMap: make(map[string]*s3.Bucket),
	}

	return s3.S3{
		Buckets: a.adaptBuckets(),
	}
}

pkg/iac/adapters/ansible/aws/s3/adapt_test.go (new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
package s3
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"testing/fstest"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/aquasecurity/trivy/internal/testutil"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3"
|
||||
"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/parser"
|
||||
iacTypes "github.com/aquasecurity/trivy/pkg/iac/types"
|
||||
)
|
||||
|
||||
func TestAdapt(t *testing.T) {
|
||||
fsys := fstest.MapFS{
|
||||
"playbook.yaml": {
|
||||
Data: []byte(`---
|
||||
- name: Update web servers
|
||||
hosts: localhost
|
||||
|
||||
tasks:
|
||||
- name: Ensure apache is at the latest version
|
||||
s3_bucket:
|
||||
name: mys3bucket
|
||||
encryption: "aws:kms"
|
||||
encryption_key_id: "arn:aws:kms:us-east-1:1234/5678example"
|
||||
public_access:
|
||||
block_public_acls: true
|
||||
block_public_policy: true
|
||||
ignore_public_acls: true
|
||||
restrict_public_buckets: true
|
||||
`),
|
||||
},
|
||||
}
|
||||
|
||||
project, err := parser.New(fsys, ".").Parse()
|
||||
require.NoError(t, err)
|
||||
|
||||
tasks := project.ListTasks()
|
||||
|
||||
got := Adapt(tasks)
|
||||
want := s3.S3{
|
||||
Buckets: []s3.Bucket{
|
||||
{
|
||||
Name: iacTypes.String("mys3bucket", iacTypes.NewTestMetadata()),
|
||||
Encryption: s3.Encryption{
|
||||
Enabled: iacTypes.Bool(false, iacTypes.NewTestMetadata()),
|
||||
Algorithm: iacTypes.String("aws:kms", iacTypes.NewTestMetadata()),
|
||||
KMSKeyId: iacTypes.String("arn:aws:kms:us-east-1:1234/5678example", iacTypes.NewTestMetadata()),
|
||||
},
|
||||
PublicAccessBlock: &s3.PublicAccessBlock{
|
||||
BlockPublicACLs: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
|
||||
BlockPublicPolicy: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
|
||||
IgnorePublicACLs: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
|
||||
RestrictPublicBuckets: iacTypes.Bool(true, iacTypes.NewTestMetadata()),
|
||||
},
|
||||
Website: &s3.Website{},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
testutil.AssertDefsecEqual(t, want, got)
|
||||
}

82
pkg/iac/adapters/ansible/aws/s3/bucket.go
Normal file
@@ -0,0 +1,82 @@
package s3

import (
	"github.com/aquasecurity/trivy/pkg/iac/providers/aws/s3"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/parser"
	iacTypes "github.com/aquasecurity/trivy/pkg/iac/types"
)

type adapter struct {
	tasks     parser.ResolvedTasks
	bucketMap map[string]*s3.Bucket
}

func (a *adapter) adaptBuckets() []s3.Bucket {
	var buckets []s3.Bucket

	for _, module := range a.tasks.GetModules("s3_bucket", "amazon.aws.s3_bucket") {
		buckets = append(buckets, a.adaptBucket(module))
	}

	return buckets
}

func (a *adapter) adaptBucket(module parser.Module) s3.Bucket {
	return s3.Bucket{
		Metadata:          module.Metadata(),
		Name:              module.StringValue("name"),
		Versioning:        getVersioning(module),
		Encryption:        getEncryption(module),
		PublicAccessBlock: getPublicAccessBlock(module),
		Logging:           a.getLogging(module),
		ACL:               module.StringValue("acl"),
		Website:           a.getWebsite(module),
	}
}

func getVersioning(module parser.Module) s3.Versioning {
	return s3.Versioning{
		Metadata:  module.Metadata(),
		Enabled:   module.BoolValue("versioning"),
		MFADelete: iacTypes.BoolUnresolvable(module.Metadata()),
	}
}

func getEncryption(module parser.Module) s3.Encryption {
	return s3.Encryption{
		Metadata:  module.Metadata(),
		Algorithm: module.StringValue("encryption"),
		KMSKeyId:  module.StringValue("encryption_key_id"),
		Enabled:   iacTypes.Bool(false, module.Metadata()), // TODO: handle
	}
}

func getPublicAccessBlock(module parser.Module) *s3.PublicAccessBlock {
	publicAccess := module.NodeAt("public_access")
	if publicAccess.IsNil() {
		return &s3.PublicAccessBlock{
			Metadata: module.Metadata(),
		}
	}
	return &s3.PublicAccessBlock{
		Metadata:              publicAccess.Metadata(),
		BlockPublicACLs:       publicAccess.BoolValue("block_public_acls"),
		BlockPublicPolicy:     publicAccess.BoolValue("block_public_policy"),
		IgnorePublicACLs:      publicAccess.BoolValue("ignore_public_acls"),
		RestrictPublicBuckets: publicAccess.BoolValue("restrict_public_buckets"),
	}
}

func (a *adapter) getLogging(module parser.Module) s3.Logging {
	// TODO: adapt
	return s3.Logging{
		Metadata: module.Metadata(),
	}
}

func (a *adapter) getWebsite(module parser.Module) *s3.Website {
	// TODO: adapt
	return &s3.Website{
		Metadata: module.Metadata(),
	}
}

@@ -5,6 +5,7 @@ import (
	"encoding/json"
	"io"
	"path/filepath"
	"slices"
	"strings"

	"github.com/xeipuuv/gojsonschema"
@@ -30,6 +31,7 @@ const (
	FileTypeJSON     FileType = "json"
	FileTypeHelm     FileType = "helm"
	FileTypeAzureARM FileType = "azure-arm"
	FileTypeAnsible  FileType = "ansible"
)

var matchers = make(map[FileType]func(name string, r io.ReadSeeker) bool)
@@ -262,6 +264,12 @@ func init() {

		return false
	}

	// TODO: improve detection
	matchers[FileTypeAnsible] = func(name string, r io.ReadSeeker) bool {
		return filepath.Base(name) == "ansible.cfg" ||
			slices.Contains([]string{"", ".yml", ".yaml", ".json", ".ini"}, filepath.Ext(name))
	}
}
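Because this heuristic accepts any YAML, JSON, INI, or extension-less file, nearly every file in a project currently matches; the TODO above acknowledges that. A standalone replica of the check, for illustration only (the matcher itself is package-internal):

```go
package main

import (
	"fmt"
	"path/filepath"
	"slices"
)

// matchesAnsible replicates the FileTypeAnsible matcher heuristic above.
func matchesAnsible(name string) bool {
	return filepath.Base(name) == "ansible.cfg" ||
		slices.Contains([]string{"", ".yml", ".yaml", ".json", ".ini"}, filepath.Ext(name))
}

func main() {
	for _, name := range []string{"ansible.cfg", "site.yml", "inventory", "hosts.ini", "main.cfg"} {
		fmt.Println(name, matchesAnsible(name)) // only main.cfg is rejected
	}
}
```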

func IsTerraformFile(path string) bool {
@@ -53,6 +53,7 @@ func Test_Detection(t *testing.T) {
			expected: []FileType{
				FileTypeTerraform,
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -70,6 +71,7 @@ func Test_Detection(t *testing.T) {
			expected: []FileType{
				FileTypeTerraform,
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -108,6 +110,7 @@ func Test_Detection(t *testing.T) {
			expected: []FileType{
				FileTypeTerraform,
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -125,6 +128,7 @@ func Test_Detection(t *testing.T) {
			expected: []FileType{
				FileTypeTerraform,
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -133,6 +137,7 @@ func Test_Detection(t *testing.T) {
			expected: []FileType{
				FileTypeYAML,
				FileTypeHelm,
				FileTypeAnsible,
			},
		},
		{
@@ -154,6 +159,7 @@ func Test_Detection(t *testing.T) {
			expected: []FileType{
				FileTypeTerraformPlanJSON,
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -185,6 +191,7 @@ Resources:
				FileTypeCloudFormation,
				FileTypeYAML,
				FileTypeHelm,
				FileTypeAnsible,
			},
		},
		{
@@ -195,6 +202,7 @@ Resources:
}`),
			expected: []FileType{
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -203,6 +211,7 @@ Resources:
			r: nil,
			expected: []FileType{
				FileTypeDockerfile,
				FileTypeAnsible,
			},
		},
		{
@@ -211,6 +220,7 @@ Resources:
			r: nil,
			expected: []FileType{
				FileTypeDockerfile,
				FileTypeAnsible,
			},
		},
		{
@@ -219,6 +229,7 @@ Resources:
			r: strings.NewReader("FROM ubuntu\n"),
			expected: []FileType{
				FileTypeDockerfile,
				FileTypeAnsible,
			},
		},
		{
@@ -235,6 +246,7 @@ Resources:
			r: nil,
			expected: []FileType{
				FileTypeYAML,
				FileTypeAnsible,
			},
		},
		{
@@ -264,6 +276,7 @@ spec:
			expected: []FileType{
				FileTypeKubernetes,
				FileTypeYAML,
				FileTypeAnsible,
			},
		},
		{
@@ -310,6 +323,7 @@ spec:
			expected: []FileType{
				FileTypeKubernetes,
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -319,6 +333,7 @@ spec:
			expected: []FileType{
				FileTypeYAML,
				FileTypeHelm,
				FileTypeAnsible,
			},
		},
		{
@@ -327,6 +342,7 @@ spec:
			r: nil,
			expected: []FileType{
				FileTypeYAML,
				FileTypeAnsible,
			},
		},
		{
@@ -351,6 +367,7 @@ spec:
			r: nil,
			expected: []FileType{
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -367,6 +384,7 @@ data:
			expected: []FileType{
				FileTypeKubernetes,
				FileTypeYAML,
				FileTypeAnsible,
			},
		},
		{
@@ -395,6 +413,7 @@ rules:
			expected: []FileType{
				FileTypeKubernetes,
				FileTypeYAML,
				FileTypeAnsible,
			},
		},
		{
@@ -424,6 +443,7 @@ rules:
			expected: []FileType{
				FileTypeJSON,
				FileTypeAzureARM,
				FileTypeAnsible,
			},
		},
		{
@@ -445,6 +465,7 @@ rules:
			expected: []FileType{
				FileTypeJSON,
				FileTypeAzureARM,
				FileTypeAnsible,
			},
		},
		{
@@ -459,6 +480,7 @@ rules:
`),
			expected: []FileType{
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
@@ -481,8 +503,35 @@ rules:
}`),
			expected: []FileType{
				FileTypeJSON,
				FileTypeAnsible,
			},
		},
		{
			name: "without extension",
			path: "something",
			expected: []FileType{
				FileTypeAnsible,
			},
		},
		{
			name: "Ansible inventory INI file",
			path: "something.ini",
			expected: []FileType{
				FileTypeAnsible,
			},
		},
		{
			name: "Ansible config file",
			path: "ansible.cfg",
			expected: []FileType{
				FileTypeAnsible,
			},
		},
		{
			name:     "not Ansible config file",
			path:     "something.cfg",
			expected: []FileType{},
		},
	}

	for _, test := range tests {
172
pkg/iac/scanners/ansible/fsutils/fsutils.go
Normal file
@@ -0,0 +1,172 @@
package fsutils

import (
	"errors"
	"io/fs"
	"os"
	"path"
	"path/filepath"
	"sort"
)

// FileSource represents a file together with the filesystem it belongs to.
//
// It abstracts over virtual filesystems and real disk paths, allowing
// consistent access to files whether they reside in a virtual FS or on disk.
type FileSource struct {
	// FS is the filesystem used to access the file.
	// It is ignored if Path is absolute.
	FS fs.FS

	// Path is the relative or absolute path to the file in Unix-like format.
	// If Path is relative, FS is used; if absolute, the file is accessed directly on disk.
	Path string
}

func NewFileSource(fsys fs.FS, p string) FileSource {
	if filepath.IsAbs(p) {
		return FileSource{
			FS:   nil,
			Path: filepath.ToSlash(p),
		}
	}
	return FileSource{
		FS:   fsys,
		Path: path.Clean(p),
	}
}

func (f FileSource) String() string {
	if f.FS != nil {
		return f.Path
	}
	return f.osPath()
}

func (f FileSource) osPath() string {
	return filepath.FromSlash(f.Path)
}

// FSAndRelPath returns the fs.FS and relative path to use for opening the file.
// If the FileSource has an embedded FS, it is used as-is.
// For absolute paths without FS, it returns an os.DirFS rooted at the parent directory
// and the file name as the relative path.
func (f FileSource) FSAndRelPath() (fs.FS, string) {
	if f.FS != nil {
		return f.FS, f.Path
	}

	absPath := filepath.FromSlash(f.Path)
	return os.DirFS(filepath.Dir(absPath)), filepath.Base(absPath)
}

func (f FileSource) Join(elem ...string) FileSource {
	for i, e := range elem {
		elem[i] = filepath.ToSlash(e)
	}
	return FileSource{
		FS:   f.FS,
		Path: path.Join(append([]string{f.Path}, elem...)...),
	}
}

func (f FileSource) Stat() (fs.FileInfo, error) {
	if f.FS != nil {
		return fs.Stat(f.FS, f.Path)
	}
	return os.Stat(f.osPath())
}

func (f FileSource) Exists() (bool, error) {
	_, err := f.Stat()
	if err != nil {
		if errors.Is(err, fs.ErrNotExist) {
			return false, nil
		}
		return false, err
	}
	return true, nil
}

func (f FileSource) ReadFile() ([]byte, error) {
	if f.FS != nil {
		return fs.ReadFile(f.FS, f.Path)
	}
	return os.ReadFile(f.osPath())
}

func (f FileSource) Open() (fs.File, error) {
	if f.FS != nil {
		return f.FS.Open(f.Path)
	}
	return os.Open(f.osPath())
}

func (f FileSource) Dir() FileSource {
	return FileSource{
		FS:   f.FS,
		Path: path.Dir(f.Path),
	}
}

func (f FileSource) ReadDir() ([]fs.DirEntry, error) {
	if f.FS != nil {
		return fs.ReadDir(f.FS, f.Path)
	}
	return os.ReadDir(f.osPath())
}

func (f FileSource) walkDir(fn fs.WalkDirFunc) error {
	if f.FS != nil {
		return fs.WalkDir(f.FS, f.Path, fn)
	}

	return filepath.WalkDir(f.osPath(), fn)
}

func (f FileSource) WalkDirFS(fn func(FileSource, fs.DirEntry) error) error {
	walkFn := func(path string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}
		return fn(FileSource{FS: f.FS, Path: filepath.ToSlash(path)}, d)
	}
	return f.walkDir(walkFn)
}

func WalkDirsFirstAlpha(root FileSource, fn func(FileSource, fs.DirEntry) error) error {
	var walk func(fileSrc FileSource) error
	walk = func(fileSrc FileSource) error {
		entries, err := fileSrc.ReadDir()
		if err != nil {
			return err
		}

		SortDirsFirstAlpha(entries)

		for _, entry := range entries {
			entrySrc := fileSrc.Join(entry.Name())
			if err := fn(entrySrc, entry); err != nil {
				return err
			}

			if entry.IsDir() {
				if err := walk(entrySrc); err != nil {
					return err
				}
			}
		}
		return nil
	}

	return walk(root)
}

func SortDirsFirstAlpha(entries []fs.DirEntry) {
	sort.Slice(entries, func(i, j int) bool {
		if entries[i].IsDir() != entries[j].IsDir() {
			return entries[i].IsDir()
		}
		return entries[i].Name() < entries[j].Name()
	})
}
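As the doc comment says, `FileSource` dispatches on whether the path is absolute. A minimal sketch of the two modes (paths and contents are illustrative):

```go
package main

import (
	"fmt"
	"testing/fstest"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
)

func main() {
	// Relative path: reads go through the supplied fs.FS.
	fsys := fstest.MapFS{
		"inventory/hosts": &fstest.MapFile{Data: []byte("web1\n")},
	}
	rel := fsutils.NewFileSource(fsys, "inventory/hosts")
	data, err := rel.ReadFile()
	if err != nil {
		panic(err)
	}
	fmt.Print(string(data)) // web1

	// Absolute path: the FS is ignored and the file is read from disk.
	abs := fsutils.NewFileSource(fsys, "/etc/ansible/hosts")
	if ok, _ := abs.Exists(); !ok {
		fmt.Println("no system-wide hosts file on this machine")
	}
}
```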

174
pkg/iac/scanners/ansible/inventory/ini.go
Normal file
@@ -0,0 +1,174 @@
package inventory

import (
	"bufio"
	"bytes"
	"strings"

	"golang.org/x/xerrors"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	"github.com/aquasecurity/trivy/pkg/set"
)

const (
	sectionHosts = iota
	sectionVars
	sectionChildren
)

func ParseINI(data []byte) (*Inventory, error) {
	inv := newInventory()

	currentGroup := "ungrouped"
	sectionType := sectionHosts

	scanner := bufio.NewScanner(bytes.NewReader(data))
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		line = removeComment(line)
		if line == "" {
			continue
		}

		// handle section
		if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") {

			sectionName := line[1 : len(line)-1]
			parts := strings.SplitN(sectionName, ":", 2)
			currentGroup = parts[0]
			sectionType = sectionHosts

			// group related sections with :modifiers
			if len(parts) == 2 {
				switch parts[1] {
				case "vars":
					sectionType = sectionVars
				case "children":
					sectionType = sectionChildren
				}
			}
			continue
		}

		switch sectionType {
		case sectionHosts:
			fields := splitFields(line)
			if len(fields) == 0 {
				// skip empty line
				continue
			}

			hostName := fields[0]
			plainHostVars := make(vars.PlainVars)

			for _, f := range fields[1:] {
				kv := strings.SplitN(f, "=", 2)
				if len(kv) == 2 {
					plainHostVars[kv[0]] = kv[1]
				}
			}

			hostVars := vars.NewVars(plainHostVars, vars.InvFileHostPriority)
			inv.addHost(hostName, newHost(hostVars, set.New(currentGroup)))
		case sectionVars:
			kv := strings.SplitN(line, "=", 2)
			key := strings.TrimSpace(kv[0])
			var val string
			if len(kv) == 2 {
				val = strings.TrimSpace(kv[1])
			}
			plainGroupVars := vars.PlainVars{key: val}
			groupVars := vars.NewVars(plainGroupVars, vars.InvFileGroupPriority)
			inv.addGroup(currentGroup, newGroup(groupVars, set.New[string]()))
		case sectionChildren:
			inv.addGroup(line, newGroup(make(vars.Vars), set.New(currentGroup)))
		}
	}

	if err := scanner.Err(); err != nil {
		return nil, xerrors.Errorf("file scan: %w", err)
	}

	inv.initDefaultGroups()
	return inv, nil
}

func removeComment(line string) string {
	var (
		inQuotes  bool
		quoteChar rune
	)

	for i, r := range line {
		switch r {
		case '"', '\'':
			if inQuotes {
				if r == quoteChar {
					inQuotes = false
				}
			} else {
				inQuotes = true
				quoteChar = r
			}
		case '#', ';':
			if !inQuotes {
				return strings.TrimSpace(line[:i])
			}
		}
	}
	return strings.TrimSpace(line)
}

func splitFields(input string) []string {
	var (
		inQuotes  bool
		quoteChar rune
		escape    bool
		field     strings.Builder
		fields    []string
	)

	flush := func() {
		if field.Len() > 0 {
			fields = append(fields, field.String())
			field.Reset()
		}
	}

	for _, r := range input {
		if escape {
			field.WriteRune(r)
			escape = false
			continue
		}

		switch r {
		case '\\':
			escape = true
		case '"', '\'':
			if !inQuotes {
				inQuotes = true
				quoteChar = r
				continue
			}

			if r == quoteChar {
				inQuotes = false
				continue
			}
			field.WriteRune(r)
		case ' ', '\t':
			if inQuotes {
				field.WriteRune(r)
			} else {
				flush()
			}
		default:
			field.WriteRune(r)
		}
	}
	flush()

	return fields
}
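The comment and quoting rules above are easy to get wrong by eye: `#` and `;` start a comment only outside quotes, and quotes are stripped when a host line is split into fields. A small sketch of the resulting behavior (inventory content is illustrative; the resolved values are `vars.Variable` wrappers):

```go
package main

import (
	"fmt"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/inventory"
)

func main() {
	inv, err := inventory.ParseINI([]byte(`
[web]
web1 ansible_host=10.0.0.1 motd="hello # not a comment" ; trailing comment

[web:vars]
http_port=80
`))
	if err != nil {
		panic(err)
	}

	// Host vars win over [web:vars]; the quoted '#' survives, the ';' comment is dropped.
	resolved := inv.ResolveVars("web1", nil)
	fmt.Println(resolved["motd"])      // value: hello # not a comment
	fmt.Println(resolved["http_port"]) // value: 80
}
```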

46
pkg/iac/scanners/ansible/inventory/init_test.go
Normal file
@@ -0,0 +1,46 @@
package inventory

import (
	"testing"

	"github.com/stretchr/testify/assert"
)

func TestRemoveComment(t *testing.T) {
	tests := []struct {
		in       string
		expected string
	}{
		{"foo # bar", "foo"},
		{"foo ; bar", "foo"},
		{`foo "# not comment" bar`, `foo "# not comment" bar`},
		{"foo", "foo"},
		{" foo ", "foo"},
		{"", ""},
	}

	for _, tt := range tests {
		got := removeComment(tt.in)
		assert.Equal(t, tt.expected, got)
	}
}

func TestSplitFields(t *testing.T) {
	tests := []struct {
		in       string
		expected []string
	}{
		{"foo bar baz", []string{"foo", "bar", "baz"}},
		{"foo bar\tbaz", []string{"foo", "bar", "baz"}},
		{`foo "bar baz" qux`, []string{"foo", "bar baz", "qux"}},
		{`foo 'bar baz'`, []string{"foo", "bar baz"}},
		{`foo bar\ baz`, []string{"foo", "bar baz"}},
		{`"foo"`, []string{"foo"}},
		{"", []string{}},
	}

	for _, tt := range tests {
		got := splitFields(tt.in)
		assert.ElementsMatch(t, tt.expected, got)
	}
}

238
pkg/iac/scanners/ansible/inventory/inventory.go
Normal file
@@ -0,0 +1,238 @@
package inventory

import (
	"fmt"
	"maps"
	"slices"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	"github.com/aquasecurity/trivy/pkg/log"
	"github.com/aquasecurity/trivy/pkg/set"
)

type Host struct {
	Vars   vars.Vars
	Groups set.Set[string]
}

func newHost(vars vars.Vars, groups set.Set[string]) *Host {
	return &Host{Vars: vars, Groups: groups}
}

func (h *Host) merge(other *Host) {
	h.Vars = vars.MergeVars(h.Vars, other.Vars)
	h.Groups = h.Groups.Union(other.Groups)
}

type Group struct {
	Vars    vars.Vars
	Parents set.Set[string]
}

func newGroup(vars vars.Vars, parents set.Set[string]) *Group {
	return &Group{Vars: vars, Parents: parents}
}

func (g *Group) merge(other *Group) {
	g.Vars = vars.MergeVars(g.Vars, other.Vars)
	g.Parents = g.Parents.Union(other.Parents)
}

type Inventory struct {
	hosts  map[string]*Host
	groups map[string]*Group

	externalVars LoadedVars
}

func newInventory() *Inventory {
	return &Inventory{
		hosts:        make(map[string]*Host),
		groups:       make(map[string]*Group),
		externalVars: make(LoadedVars),
	}
}

func (inv *Inventory) addHost(name string, newHost *Host) {
	if inv.hosts == nil {
		inv.hosts = make(map[string]*Host)
	}

	if h, ok := inv.hosts[name]; ok {
		h.merge(newHost)
	} else {
		// Add new host
		inv.hosts[name] = newHost
	}
}

func (inv *Inventory) addGroup(name string, newGroup *Group) {
	if inv.groups == nil {
		inv.groups = make(map[string]*Group)
	}

	if g, exists := inv.groups[name]; exists {
		g.merge(newGroup)
	} else {
		inv.groups[name] = newGroup
	}
}

// ResolveVars evaluates the effective variables for the given host,
// merging values from the host itself, its groups, and parent groups,
// according to Ansible variable precedence rules.
// https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html#understanding-variable-precedence
// TODO: Add support for "ansible_group_priority"
// See https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html#how-variables-are-merged
func (inv *Inventory) ResolveVars(hostName string, playbookVars LoadedVars) vars.Vars {
	effective := make(vars.Vars)

	host, ok := inv.hosts[hostName]
	if !ok {
		log.WithPrefix("ansible").Debug("ResolveVars: host not found in inventory",
			log.String("host", hostName))
		return nil
	}

	groupsOrder, err := inv.groupTraversalOrder(hostName)
	if err != nil {
		log.WithPrefix("ansible").Debug("ResolveVars: failed to get group traversal order for host",
			log.String("host", hostName), log.Err(err))
		return nil
	}

	// Resolve internal group vars
	for _, groupName := range groupsOrder {
		if g, ok := inv.groups[groupName]; ok {
			maps.Copy(effective, g.Vars)
		}
	}

	// Resolve external group_vars/all
	mergeScopeVars(effective, inv.externalVars, ScopeGroupAll, "all")
	// Resolve playbook group_vars/all
	mergeScopeVars(effective, playbookVars, ScopeGroupAll, "all")
	// Resolve external group_vars/*
	mergeScopeVars(effective, inv.externalVars, ScopeGroupSpecific, groupsOrder...)
	// Resolve playbook group_vars/*
	mergeScopeVars(effective, playbookVars, ScopeGroupSpecific, groupsOrder...)
	// Resolve internal host vars
	maps.Copy(effective, host.Vars)
	// Resolve external host_vars/*
	mergeScopeVars(effective, inv.externalVars, ScopeHost, hostName)
	// Resolve playbook host_vars/*
	mergeScopeVars(effective, playbookVars, ScopeHost, hostName)
	return effective
}

func mergeScopeVars(effective vars.Vars, src LoadedVars, scope VarScope, keys ...string) {
	s, ok := src[scope]
	if !ok {
		return
	}
	for _, key := range keys {
		if v, exists := s[key]; exists {
			maps.Copy(effective, v)
		}
	}
}

func (inv *Inventory) groupTraversalOrder(hostName string) ([]string, error) {
	visited := set.New[string]()
	temp := set.New[string]()
	order := make([]string, 0, len(inv.groups))

	var visit func(string) error
	visit = func(name string) error {
		if temp.Contains(name) {
			return fmt.Errorf("cycle detected in group hierarchy at %q", name)
		}
		if visited.Contains(name) {
			return nil
		}

		temp.Append(name)
		group, ok := inv.groups[name]
		if ok {
			// By default, Ansible merges groups at the same parent/child level in alphabetical order.
			parents := group.Parents.Items()
			slices.Sort(parents)
			for _, parent := range parents {
				if err := visit(parent); err != nil {
					return err
				}
			}
		}
		temp.Remove(name)
		visited.Append(name)
		order = append(order, name)
		return nil
	}

	host, exists := inv.hosts[hostName]
	if !exists {
		return nil, fmt.Errorf("host %q not found", hostName)
	}

	// By default, Ansible merges groups at the same parent/child level in alphabetical order.
	sortedHostGroups := host.Groups.Items()
	slices.Sort(sortedHostGroups)
	for _, name := range sortedHostGroups {
		if err := visit(name); err != nil {
			return nil, err
		}
	}

	return order, nil
}

// initDefaultGroups creates two default groups: "all" and "ungrouped".
// The "all" group contains all hosts. The "ungrouped" group contains all hosts
// that do not belong to any other group.
// See https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html#default-groups
func (inv *Inventory) initDefaultGroups() {
	allGroup := newGroup(make(vars.Vars), set.New[string]())
	inv.addGroup("all", allGroup)

	ungroupedGroup := newGroup(make(vars.Vars), set.New("all"))
	inv.addGroup("ungrouped", ungroupedGroup)

	for _, host := range inv.hosts {
		if host.Groups.Size() == 0 {
			host.Groups = set.New("ungrouped")
		}
	}

	for groupName, group := range inv.groups {
		if groupName != "all" && group.Parents.Size() == 0 {
			group.Parents = set.New("all")
		}
	}
}

// applyVars applies a list of external variables to the inventory
func (inv *Inventory) applyVars(externalVars LoadedVars) {
	inv.externalVars = externalVars
}

// Merge merges another [Inventory] into this one.
func (inv *Inventory) Merge(other *Inventory) {
	// Merge hosts
	for name, h := range other.hosts {
		inv.addHost(name, h)
	}

	// Merge groups
	for name, g := range other.groups {
		inv.addGroup(name, g)
	}
}

func newInlineInventory(hosts []string) *Inventory {
	inv := &Inventory{}
	for _, hostName := range hosts {
		inv.addHost(hostName, newHost(make(vars.Vars), set.New[string]()))
	}
	inv.initDefaultGroups()
	return inv
}
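The precedence implemented by `ResolveVars` mirrors Ansible's: `all` < parent group < child group < host, with external `group_vars`/`host_vars` layered in between. A compact sketch using only `ParseYAML` and `ResolveVars` (inventory content is illustrative):

```go
package main

import (
	"fmt"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/inventory"
)

func main() {
	inv, err := inventory.ParseYAML([]byte(`
all:
  vars:
    os: linux
web:
  vars:
    os: bsd
  hosts:
    web1:
      os: plan9
    web2:
      ansible_host: 10.0.0.2
`))
	if err != nil {
		panic(err)
	}

	// Host var beats the group var, which beats "all".
	fmt.Println(inv.ResolveVars("web1", nil)["os"]) // plan9 (host)
	fmt.Println(inv.ResolveVars("web2", nil)["os"]) // bsd (group)
}
```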

301
pkg/iac/scanners/ansible/inventory/inventory_test.go
Normal file
@@ -0,0 +1,301 @@
package inventory_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/inventory"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
)

func groupVar(val any) vars.Variable {
	return vars.NewVariable(val, vars.InvFileGroupPriority)
}

func hostVar(val any) vars.Variable {
	return vars.NewVariable(val, vars.InvFileHostPriority)
}

func extAllGroupVar(val any) vars.Variable {
	return vars.NewVariable(val, vars.InvExtAllGroupPriority)
}

func extGroupVar(val any) vars.Variable {
	return vars.NewVariable(val, vars.InvExtGroupPriority)
}

func extHostVar(val any) vars.Variable {
	return vars.NewVariable(val, vars.InvExtHostPriority)
}

func TestInventory_ResolveVars_YAML(t *testing.T) {
	src := `
all:
  vars:
    location: dc1
    os: linux

leafs1:
  vars:
    os: eos
    role: leaf
  hosts:
    leaf01:
      role: custom-leaf2
      tag: leafs1
    leaf02:
      ansible_host: 192.0.2.110

leafs0:
  vars:
    os: nxos
  hosts:
    leaf01:
      ansible_host: 192.0.2.100
      role: custom-leaf

spines:
  vars:
    role: spine
  hosts:
    spine01:
      ansible_host: 192.0.2.120
    spine02:
      ansible_host: 192.0.2.130
      os: nxos

network0:
  vars:
    environment: dev
  children:
    leafs0:
    leafs1:
    spines:

network1:
  vars:
    environment: prod
  children:
    network0:
    leafs1:
    spines:

webservers:
  vars:
    role: web
  hosts:
    webserver01:
      ansible_host: 192.0.2.140
    webserver02:
      ansible_host: 192.0.2.150

datacenter:
  children:
    network0:
    network1:
    webservers:
`

	inv, err := inventory.ParseYAML([]byte(src))
	require.NoError(t, err)

	tests := []struct {
		hostName string
		expected vars.Vars
	}{
		{
			hostName: "leaf01",
			expected: vars.Vars{
				"location":     groupVar("dc1"),
				"tag":          hostVar("leafs1"),
				"os":           groupVar("eos"),
				"role":         hostVar("custom-leaf"),
				"environment":  groupVar("dev"),
				"ansible_host": hostVar("192.0.2.100"),
			},
		},
		{
			hostName: "leaf02",
			expected: vars.Vars{
				"location":     groupVar("dc1"),
				"os":           groupVar("eos"),
				"role":         groupVar("leaf"),
				"environment":  groupVar("dev"),
				"ansible_host": hostVar("192.0.2.110"),
			},
		},
		{
			hostName: "spine01",
			expected: vars.Vars{
				"location":     groupVar("dc1"),
				"os":           groupVar("linux"),
				"role":         groupVar("spine"),
				"environment":  groupVar("dev"),
				"ansible_host": hostVar("192.0.2.120"),
			},
		},
		{
			hostName: "spine02",
			expected: vars.Vars{
				"location":     groupVar("dc1"),
				"os":           hostVar("nxos"),
				"role":         groupVar("spine"),
				"environment":  groupVar("dev"),
				"ansible_host": hostVar("192.0.2.130"),
			},
		},
		{
			hostName: "webserver01",
			expected: vars.Vars{
				"location":     groupVar("dc1"),
				"os":           groupVar("linux"),
				"role":         groupVar("web"),
				"ansible_host": hostVar("192.0.2.140"),
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.hostName, func(t *testing.T) {
			got := inv.ResolveVars(tt.hostName, make(inventory.LoadedVars))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestInventory_ResolveVars_INI(t *testing.T) {

	src := `
ungrouped1 ansible_host=10.0.0.99 description="standalone;# \"host"

[ungrouped:vars]
# comment
; comment
http_port=8080

[all:vars]
ansible_user=global_user ; comment
timezone=UTC # comment

[web]
web1 ansible_host=192.168.1.11 http_port=8080
web2 ansible_host=192.168.1.12

[web:vars]
timezone = Europe/Stockholm
http_port=80

[db]
db1 ansible_host=192.168.1.21
db2 ansible_host=192.168.1.22 db_engine=postgres

[db:vars]
ansible_user= "db_admin"
backup_enabled=true

[app:children]
web
db

[app:vars]
timezone=Europe/Berlin
app_env=production

[test]
test1 ansible_host=10.0.0.11
test2 ansible_host=10.0.0.12 ansible_user=test_user

[test:vars]
app_env=staging
http_port=8081
`

	inv, err := inventory.ParseINI([]byte(src))
	require.NoError(t, err)

	tests := []struct {
		hostName string
		expected vars.Vars
	}{
		{
			hostName: "web1",
			expected: vars.Vars{
				"ansible_user": groupVar("global_user"),
				"timezone":     groupVar("Europe/Stockholm"),
				"http_port":    hostVar("8080"),
				"ansible_host": hostVar("192.168.1.11"),
				"app_env":      groupVar("production"),
			},
		},
		{
			hostName: "web2",
			expected: vars.Vars{
				"ansible_user": groupVar("global_user"),
				"timezone":     groupVar("Europe/Stockholm"),
				"http_port":    groupVar("80"),
				"ansible_host": hostVar("192.168.1.12"),
				"app_env":      groupVar("production"),
			},
		},
		{
			hostName: "db1",
			expected: vars.Vars{
				"ansible_user":   groupVar("\"db_admin\""),
				"timezone":       groupVar("Europe/Berlin"),
				"backup_enabled": groupVar("true"),
				"ansible_host":   hostVar("192.168.1.21"),
				"app_env":        groupVar("production"),
			},
		},
		{
			hostName: "db2",
			expected: vars.Vars{
				"ansible_user":   groupVar("\"db_admin\""),
				"timezone":       groupVar("Europe/Berlin"),
				"backup_enabled": groupVar("true"),
				"db_engine":      hostVar("postgres"),
				"ansible_host":   hostVar("192.168.1.22"),
				"app_env":        groupVar("production"),
			},
		},
		{
			hostName: "test1",
			expected: vars.Vars{
				"ansible_user": groupVar("global_user"),
				"timezone":     groupVar("UTC"),
				"http_port":    groupVar("8081"),
				"app_env":      groupVar("staging"),
				"ansible_host": hostVar("10.0.0.11"),
			},
		},
		{
			hostName: "test2",
			expected: vars.Vars{
				"ansible_user": hostVar("test_user"),
				"timezone":     groupVar("UTC"),
				"http_port":    groupVar("8081"),
				"app_env":      groupVar("staging"),
				"ansible_host": hostVar("10.0.0.12"),
			},
		},
		{
			hostName: "ungrouped1",
			expected: vars.Vars{
				"ansible_user": groupVar("global_user"),
				"timezone":     groupVar("UTC"),
				"description":  hostVar("standalone;# \"host"),
				"ansible_host": hostVar("10.0.0.99"),
				"http_port":    groupVar("8080"),
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.hostName, func(t *testing.T) {
			got := inv.ResolveVars(tt.hostName, nil)
			assert.Equal(t, tt.expected, got)
		})
	}
}

284
pkg/iac/scanners/ansible/inventory/inventory_vars.go
Normal file
@@ -0,0 +1,284 @@
package inventory

import (
	"bytes"
	"encoding/json"
	"io/fs"
	"maps"
	"path"
	"path/filepath"
	"slices"
	"strings"

	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	"github.com/aquasecurity/trivy/pkg/log"
	"github.com/aquasecurity/trivy/pkg/set"
)

type VarScope int

const (
	ScopeGroupAll VarScope = iota
	ScopeGroupSpecific
	ScopeHost
)

func (s VarScope) String() string {
	switch s {
	case ScopeGroupAll:
		return "group_vars/all (external)"
	case ScopeGroupSpecific:
		return "group_vars/* (external)"
	case ScopeHost:
		return "host_vars (external)"
	default:
		return ""
	}
}

func fileBaseName(p string) string {
	return strings.TrimSuffix(filepath.Base(p), filepath.Ext(p))
}

func isAllGroup(path string) bool {
	return fileBaseName(path) == "all"
}

func notAllGroup(path string) bool {
	return !isAllGroup(path)
}

func InventoryVarsSources(fileSrc fsutils.FileSource) []VarsSource {
	return []VarsSource{
		{
			FileSrc:  fileSrc.Join("group_vars"),
			Scope:    ScopeGroupAll,
			Priority: vars.InvExtAllGroupPriority,
			Match:    isAllGroup,
		},
		{
			FileSrc:  fileSrc.Join("group_vars"),
			Scope:    ScopeGroupSpecific,
			Priority: vars.InvExtGroupPriority,
			Match:    notAllGroup,
		},
		{
			FileSrc:  fileSrc.Join("host_vars"),
			Scope:    ScopeHost,
			Priority: vars.InvExtHostPriority,
		},
	}
}

func PlaybookVarsSources(fileSrc fsutils.FileSource) []VarsSource {
	return []VarsSource{
		{
			FileSrc:  fileSrc.Join("group_vars"),
			Scope:    ScopeGroupAll,
			Priority: vars.PbExtAllGroupPriority,
			Match:    isAllGroup,
		},
		{
			FileSrc:  fileSrc.Join("group_vars"),
			Scope:    ScopeGroupSpecific,
			Priority: vars.PbExtGroupPriority,
			Match:    notAllGroup,
		},
		{
			FileSrc:  fileSrc.Join("host_vars"),
			Scope:    ScopeHost,
			Priority: vars.PbExtHostPriority,
		},
	}
}

type VarsSource struct {
	FileSrc  fsutils.FileSource
	Scope    VarScope // variables scope
	Priority vars.VarPriority
	Match    func(path string) bool
}

// LoadedVars stores all loaded variables organized by scope and key (host or group).
// The first map is by VarScope, the second by host/group name, each holding Vars.
type LoadedVars map[VarScope]map[string]vars.Vars

func (v *LoadedVars) Merge(other LoadedVars) {
	if *v == nil {
		*v = make(LoadedVars)
	}
	for scope, objs := range other {
		if (*v)[scope] == nil {
			(*v)[scope] = make(map[string]vars.Vars)
		}
		for name, targetVars := range objs {
			existing, ok := (*v)[scope][name]
			if !ok {
				(*v)[scope][name] = targetVars
				continue
			}
			merged := make(vars.Vars)
			maps.Copy(merged, existing)
			maps.Copy(merged, targetVars)
			(*v)[scope][name] = merged
		}
	}
}

func LoadVars(sources []VarsSource) LoadedVars {
	logger := log.WithPrefix("ansible")
	allVars := make(LoadedVars)

	for _, src := range sources {
		srcVars, err := LoadSourceVars(src)
		if err != nil {
			continue
		}

		if allVars[src.Scope] == nil {
			allVars[src.Scope] = make(map[string]vars.Vars)
		}

		for key, v := range srcVars {
			allVars[src.Scope][key] = vars.MergeVars(allVars[src.Scope][key], v)

			logger.Debug("Loaded vars from directory",
				log.String("scope", src.Scope.String()), log.String("target", key))
		}
	}

	return allVars
}

func LoadSourceVars(src VarsSource) (map[string]vars.Vars, error) {
	info, err := src.FileSrc.Stat()
	if err != nil {
		return nil, err
	}

	result := make(map[string]vars.Vars)

	if info.IsDir() {
		entries, err := listEntries(src.FileSrc)
		if err != nil {
			return nil, err
		}

		fsutils.SortDirsFirstAlpha(entries)

		for _, e := range entries {
			name := e.Name()
			entrySrc := src.FileSrc.Join(name)
			target := strings.TrimSuffix(name, path.Ext(name))

			if src.Match != nil && !src.Match(entrySrc.Path) {
				continue
			}

			if e.IsDir() {
				walkFn := func(fileSrc fsutils.FileSource, d fs.DirEntry) error {
					if !d.IsDir() {
						plain := processFile(fileSrc)
						v := vars.NewVars(plain, src.Priority)
						result[target] = vars.MergeVars(result[target], v)
					}
					return nil
				}
				if err := fsutils.WalkDirsFirstAlpha(entrySrc, walkFn); err != nil {
					log.WithPrefix("ansible").Debug("Walk error", log.FilePath(entrySrc.Path))
					continue
				}
			} else {
				plain := processFile(entrySrc)
				v := vars.NewVars(plain, src.Priority)
				result[target] = vars.MergeVars(result[target], v)
			}

		}
	}

	return result, nil
}

func processFile(fileSrc fsutils.FileSource) vars.PlainVars {
	if shouldSkipFile(fileSrc.Path) {
		return nil
	}

	v, err := readVars(fileSrc)
	if err != nil {
		log.WithPrefix("ansible").Debug("Failed to read vars",
			log.FilePath(fileSrc.Path), log.Err(err))
		return nil
	}

	return v
}

// listEntries returns directory entries sorted alphabetically,
// with directories listed before files.
//
// At the top level, if a directory and a file share the same
// base name, the directory takes precedence and the file is ignored.
//
// For example, if "group_vars" (or "host_vars") contains both "all/"
// and "all.yaml", "all.yaml" will be skipped in favor of the directory.
func listEntries(root fsutils.FileSource) ([]fs.DirEntry, error) {
	entries, err := root.ReadDir()
	if err != nil {
		return nil, err
	}

	dirs := set.New[string]()
	for _, e := range entries {
		if e.IsDir() {
			dirs.Append(e.Name())
		}
	}

	filtered := make([]fs.DirEntry, 0, len(entries))
	for _, e := range entries {
		if !e.IsDir() {
			name := strings.TrimSuffix(e.Name(), path.Ext(e.Name()))
			if dirs.Contains(name) {
				continue
			}
		}
		filtered = append(filtered, e)
	}
	return filtered, nil
}

func shouldSkipFile(filePath string) bool {
	base := path.Base(filePath)
	if strings.HasPrefix(base, ".") || strings.HasSuffix(base, "~") {
		return true
	}
	if !slices.Contains(vars.VarFilesExtensions, filepath.Ext(base)) {
		return true
	}
	return false
}

func readVars(fileSrc fsutils.FileSource) (map[string]any, error) {
	data, err := fileSrc.ReadFile()
	if err != nil {
		return nil, err
	}

	var vars map[string]any
	dataTrim := bytes.TrimSpace(data)
	if len(dataTrim) > 0 && dataTrim[0] == '{' {
		err = json.Unmarshal(dataTrim, &vars)
	} else {
		err = yaml.Unmarshal(dataTrim, &vars)
	}
	if err != nil {
		return nil, err
	}

	return vars, nil
}
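The scope split matters because `ResolveVars` applies `group_vars/all`, specific `group_vars/*`, and `host_vars/*` at different precedence levels. A sketch of how a vars directory maps into `LoadedVars` (file names and values are illustrative):

```go
package main

import (
	"fmt"
	"testing/fstest"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/inventory"
)

func main() {
	fsys := fstest.MapFS{
		"group_vars/all.yaml": &fstest.MapFile{Data: []byte("tz: UTC\n")},
		"group_vars/web.yaml": &fstest.MapFile{Data: []byte("port: 80\n")},
		"host_vars/web1.yaml": &fstest.MapFile{Data: []byte("port: 8080\n")},
	}

	root := fsutils.NewFileSource(fsys, ".")
	loaded := inventory.LoadVars(inventory.InventoryVarsSources(root))

	fmt.Println(loaded[inventory.ScopeGroupAll]["all"])      // tz: UTC
	fmt.Println(loaded[inventory.ScopeGroupSpecific]["web"]) // port: 80
	fmt.Println(loaded[inventory.ScopeHost]["web1"])         // port: 8080
}
```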

71
pkg/iac/scanners/ansible/inventory/inventory_vars_test.go
Normal file
@@ -0,0 +1,71 @@
package inventory_test

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/aquasecurity/trivy/internal/testutil"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/inventory"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
)

func TestLoader_Load(t *testing.T) {
	files := map[string]string{
		"host_vars/host1.yaml": `
var1: value1
var2: value2
`,
		"host_vars/host2.yaml": `
var1: value1
var2: value2
`,
		"group_vars/group1.yaml": `
group_var1: gvalue1
group_var2: skipped
`,
		"group_vars/group1/vars.yaml": `
group_var1: gvalue1_1
`,
		"group_vars/group1/vars2.yaml": `
group_var1: gvalue1_2
`,
		"group_vars/group1/first/vars2.yaml": `
group_var1: gvalue1_0
`,
		"group_vars/all": `
all_var: allvalue
`,
	}

	fsys := testutil.CreateFS(files)
	rootSrc := fsutils.NewFileSource(fsys, ".")
	sources := inventory.InventoryVarsSources(rootSrc)
	got := inventory.LoadVars(sources)

	expected := inventory.LoadedVars{
		inventory.ScopeHost: map[string]vars.Vars{
			"host1": {
				"var1": extHostVar("value1"),
				"var2": extHostVar("value2"),
			},
			"host2": {
				"var1": extHostVar("value1"),
				"var2": extHostVar("value2"),
			},
		},
		inventory.ScopeGroupAll: map[string]vars.Vars{
			"all": {
				"all_var": extAllGroupVar("allvalue"),
			},
		},
		inventory.ScopeGroupSpecific: map[string]vars.Vars{
			"group1": {
				"group_var1": extGroupVar("gvalue1_2"),
			},
		},
	}

	assert.Equal(t, expected, got)
}

305
pkg/iac/scanners/ansible/inventory/load.go
Normal file
@@ -0,0 +1,305 @@
package inventory

import (
	"io/fs"
	"os"
	"path"
	"strings"

	"golang.org/x/xerrors"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/log"
	"github.com/aquasecurity/trivy/pkg/set"
)

const defaultHostsFile = "/etc/ansible/hosts"

// InventorySource represents a source from which inventory data can be loaded.
type InventorySource interface {
	isInventorySource()
}

type InlineHostsSource struct {
	// Hosts is a list of hosts provided directly in-memory instead of
	// loading them from files.
	Hosts []string
}

func (InlineHostsSource) isInventorySource() {}

type HostsDirsSource struct {
	// Dirs is a list of paths to directories containing hosts files.
	Dirs []fsutils.FileSource
	// VarsDir is the path to the directory containing host_vars and group_vars.
	VarsDir fsutils.FileSource
}

func (HostsDirsSource) isInventorySource() {}

type HostFileSource struct {
	// File is a path to hosts file.
	File fsutils.FileSource
	// VarsDir is the path to the directory containing host_vars and group_vars.
	VarsDir fsutils.FileSource
}

func (HostFileSource) isInventorySource() {}

type LoadOptions struct {
	// InventoryPath is the path from the "inventory" config
	// in ansible.cfg.
	InventoryPath string

	// Sources are explicit inventory sources (CLI args, env vars, etc.).
	Sources []string
}

// LoadAuto resolves inventory sources from configuration, environment variables,
// and command-line flags, then loads the resulting inventory.
func LoadAuto(fsys fs.FS, opts LoadOptions) *Inventory {
	sources, err := ResolveSources(fsys, opts)
	if err != nil {
		log.WithPrefix("ansible").Debug("Failed to resolve inventory sources", log.Err(err))
	}

	if len(sources) == 0 {
		log.WithPrefix("ansible").Debug(
			"No inventory sources provided, falling back to implicit host 'localhost'")
		// https://docs.ansible.com/ansible/latest/inventory/implicit_localhost.html#implicit-localhost
		return newInlineInventory([]string{"localhost"})
	}

	return LoadFromSources(sources)
}

// ResolveSources resolves one or more Ansible inventory sources into a list of InventorySource.
//
// If `sources` is empty, the function falls back to defaults:
//   - cfg.Inventory directory, if configured
//   - "/etc/ansible/hosts" file, if no config is provided
//
// Inline host lists are detected by the presence of commas (e.g., "host1,host2").
//
// The returned slice of InventorySource can be used to load hosts, group_vars, and host_vars
// for each inventory source.
func ResolveSources(fsys fs.FS, opts LoadOptions) ([]InventorySource, error) {
	logger := log.WithPrefix("ansible")
	if len(opts.Sources) == 0 {
		if opts.InventoryPath != "" {
			// TODO: This is a comma-separated list of Ansible inventory sources
			logger.Debug("Resolve inventory source from config", log.FilePath(opts.InventoryPath))
			fileSrc := fsutils.NewFileSource(fsys, opts.InventoryPath)
			src, err := resolveSource(fileSrc, set.New[string]())
			if err != nil {
				return nil, xerrors.Errorf("resolve source from config: %w", err)
			}
			return []InventorySource{src}, nil
		}
		return defaultInventorySources()
	}

	var result []InventorySource

	seen := set.New[string]()

	for _, s := range opts.Sources {
		logger.Debug("Resolve inventory source", log.String("source", s))
		fileSrc := fsutils.NewFileSource(fsys, s)
		src, err := resolveSource(fileSrc, seen)
		if err != nil {
			return nil, err
		}
		result = append(result, src)
	}

	return result, nil
}

func makeHostFileSource(fileSrc fsutils.FileSource) InventorySource {
	return HostFileSource{
		File:    fileSrc,
		VarsDir: fileSrc.Dir(),
	}
}

// defaultInventorySources returns sources from cfg or system defaults.
func defaultInventorySources() ([]InventorySource, error) {
	// TODO: use ANSIBLE_INVENTORY env
	if _, err := os.Stat(defaultHostsFile); err == nil {
		log.WithPrefix("ansible").Debug("Use default hosts file", log.FilePath(defaultHostsFile))
		fileSrc := fsutils.NewFileSource(nil, defaultHostsFile)
		return []InventorySource{makeHostFileSource(fileSrc)}, nil
	}
	return nil, nil
}

// resolveSource resolves a single source path: file, dir, or dir tree.
func resolveSource(fileSrc fsutils.FileSource, seen set.Set[string]) (InventorySource, error) {
	// TODO: handle inline host list, e.g. "host1,host2"
	if looksLikeInlineHosts(fileSrc.Path) {
		return resolveInlineHosts(fileSrc.Path)
	}

	info, err := fileSrc.Stat()
	if err != nil {
		return nil, err
	}

	if !info.IsDir() {
		return makeHostFileSource(fileSrc), nil
	}

	return walkInventoryDir(fileSrc, seen)
}

// Stub: determine if path looks like inline host list
func looksLikeInlineHosts(path string) bool {
	// simple heuristic: contains comma but no wildcard
	return strings.Contains(path, ",")
}

// Stub: resolve inline hosts
func resolveInlineHosts(path string) (InventorySource, error) {
	hosts := strings.Split(path, ",")
	if len(hosts) > 0 {
		return InlineHostsSource{
			Hosts: hosts,
		}, nil
	}
	return nil, nil
}

// walkInventoryDir recursively walks a directory and returns all inventory dirs containing files.
func walkInventoryDir(fileSrc fsutils.FileSource, seen set.Set[string]) (InventorySource, error) {
	result := HostsDirsSource{
		VarsDir: fileSrc,
	}

	if err := fileSrc.WalkDirFS(func(fileSrc fsutils.FileSource, de fs.DirEntry) error {
		if !de.IsDir() {
			return nil
		}

		// TODO: allow files with no extension or with the extensions .json, .yml, or .yaml
		base := path.Base(fileSrc.Path)
		if base == "group_vars" || base == "host_vars" {
			// TODO: use fs.SkipDir?
			return nil // skip vars directories
		}

		hasFiles, err := dirHasFiles(fileSrc)
		if err != nil {
			log.WithPrefix("ansible").Debug("Failed to read directory",
				log.FilePath(fileSrc.Path), log.Err(err))
			return nil
		}

		if !hasFiles {
			return nil
		}

		cleanPath := path.Clean(fileSrc.Path)
		if !seen.Contains(cleanPath) {
			seen.Append(cleanPath)
			result.Dirs = append(result.Dirs, fileSrc)
		}

		return nil
	}); err != nil {
		return nil, xerrors.Errorf("walk dir: %w", err)
	}

	return result, nil
}

// dirHasFiles returns true if the directory contains at least one non-directory entry.
func dirHasFiles(fileSrc fsutils.FileSource) (bool, error) {
	ents, err := fileSrc.ReadDir()
	if err != nil {
		return false, err
	}
	for _, e := range ents {
		if !e.IsDir() {
			return true, nil
		}
	}
	return false, nil
}

// LoadFromSources loads inventory files or directories from
// the given sources and merges them into a single Inventory.
//
// When multiple inventory sources are provided, Ansible merges
// variables in the order the sources are specified.
// See https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html#managing-inventory-variable-load-order
func LoadFromSources(sources []InventorySource) *Inventory {
	logger := log.WithPrefix("ansible")

	res := newInventory()
	externalVars := make(LoadedVars)

	for _, source := range sources {

		// Ansible loads host and group variable files by searching paths
		// relative to the inventory source.
		// See https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html#organizing-host-and-group-variables
		switch src := source.(type) {
		case InlineHostsSource:
			logger.Debug("Processing inline hosts source", log.Any("hosts", src.Hosts))
			inv := newInlineInventory(src.Hosts)
			res.Merge(inv)
		case HostFileSource:
			inv, err := readAndParseHosts(src.File)
			if err != nil {
				logger.Debug("Failed to parse hosts file",
					log.FilePath(src.File.Path), log.Err(err))
				continue
			}
			res.Merge(inv)
			logger.Debug("Loaded hosts file", log.FilePath(src.File.Path))

			vars := LoadVars(InventoryVarsSources(src.VarsDir))
			externalVars.Merge(vars)
		case HostsDirsSource:
			for _, hostsDirSrc := range src.Dirs {
				entries, err := hostsDirSrc.ReadDir()
				if err != nil {
					logger.Debug("Failed to read dir with hosts files",
						log.FilePath(hostsDirSrc.Path), log.Err(err))
					continue
				}

				for _, entry := range entries {
					hostFileSrc := hostsDirSrc.Join(entry.Name())
					inv, err := readAndParseHosts(hostFileSrc)
					if err != nil {
						logger.Debug("Failed to parse hosts file",
							log.FilePath(hostFileSrc.Path), log.Err(err))
						continue
					}
					res.Merge(inv)
					logger.Debug("Loaded hosts file", log.FilePath(hostFileSrc.Path))
				}
			}
			vars := LoadVars(InventoryVarsSources(src.VarsDir))
			externalVars.Merge(vars)
		}
	}

	res.applyVars(externalVars)
	return res
}

func readAndParseHosts(fileSrc fsutils.FileSource) (*Inventory, error) {
	b, err := fileSrc.ReadFile()
	if err != nil {
		return nil, err
	}

	if inv, err := ParseYAML(b); err == nil {
		return inv, nil
	}

	return ParseINI(b)
}
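Tying the loaders together: a comma in a source string is taken as an inline host list rather than a path, and an empty source list falls back to the implicit `localhost`. A sketch (host names are illustrative):

```go
package main

import (
	"fmt"
	"testing/fstest"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/inventory"
)

func main() {
	// "web1,web2" is detected as an inline host list, not a file path.
	inv := inventory.LoadAuto(fstest.MapFS{}, inventory.LoadOptions{
		Sources: []string{"web1,web2"},
	})
	fmt.Println(inv.ResolveVars("web1", nil)) // empty vars, but the host resolves

	// With no sources at all, the implicit "localhost" inventory is used
	// (assuming /etc/ansible/hosts does not exist on this machine).
	implicit := inventory.LoadAuto(fstest.MapFS{}, inventory.LoadOptions{})
	fmt.Println(implicit.ResolveVars("localhost", nil)) // map[]
}
```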
|
||||
345
pkg/iac/scanners/ansible/inventory/load_test.go
Normal file
345
pkg/iac/scanners/ansible/inventory/load_test.go
Normal file
@@ -0,0 +1,345 @@
package inventory_test

import (
	"io/fs"
	"os"
	"runtime"
	"testing"
	"testing/fstest"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/trivy/internal/testutil"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/inventory"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
)

func TestLoadAuto(t *testing.T) {
	files := map[string]string{

		"dev/hosts": `
group1:
  hosts:
    host1:
      baz: 10
    host2:
      bar: 15
  vars:
    bar: 10
`,

		// host vars
		"dev/host_vars/host1.yaml": `
foo: 1
bar: 2
`,
		"dev/host_vars/host2.yaml": `
foo: 10
baz: 20
`,

		// group vars
		"dev/group_vars/group1.yaml": `
common: "yes"
foo: 5
`,

		// test inventory
		"common/hosts": `
group1:
  hosts:
    host1:
      common: 10
`,

		// common
		"common/group_vars/group1.yaml": `
foo: 5
`,

		"common/host_vars/host1.yaml": `
foo: 10
`,
	}

	fsys := testutil.CreateFS(files)
	inv := inventory.LoadAuto(fsys, inventory.LoadOptions{
		Sources: []string{"dev", "common"},
	})

	tests := []struct {
		name     string
		hostName string
		expected vars.Vars
	}{
		{
			hostName: "host1",
			expected: vars.Vars{
				"foo":    extHostVar(10), // external host_var from common
				"bar":    extHostVar(2),  // external host_var override file group
				"baz":    hostVar(10),    // file host
				"common": hostVar(10),    // host var from common
			},
		},
		{
			hostName: "host2",
			expected: vars.Vars{
				"foo":    extHostVar(10),     // external host_var override external group
				"bar":    hostVar(15),        // file host
				"baz":    extHostVar(20),     // external host_var override file host
				"common": extGroupVar("yes"), // from group_vars
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.hostName, func(t *testing.T) {
			got := inv.ResolveVars(tt.hostName, make(inventory.LoadedVars))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestLoadAuto_AbsolutePath(t *testing.T) {
	// Skipped on Windows: the temporary file cannot be reopened while it is
	// still open ("The process cannot access the file because it is being used by another process").
	if runtime.GOOS == "windows" {
		t.Skip("TODO")
	}

	tmpFile, err := os.CreateTemp(t.TempDir(), "hosts-*.yml")
	require.NoError(t, err)

	_, err = tmpFile.WriteString(`
group1:
  hosts:
    host1:
      baz: 5
  vars:
    bar: 20
`)
	require.NoError(t, err)

	files := map[string]string{

		"dev/hosts": `
group1:
  hosts:
    host1:
      baz: 10
    host2:
      bar: 15
  vars:
    bar: 10
`,
	}
	fsys := testutil.CreateFS(files)

	opts := inventory.LoadOptions{
		Sources: []string{"dev", tmpFile.Name()},
	}

	inv := inventory.LoadAuto(fsys, opts)

	tests := []struct {
		name     string
		hostName string
		expected vars.Vars
	}{
		{
			hostName: "host1",
			expected: vars.Vars{
				"bar": groupVar(20), // from second file (group)
				"baz": hostVar(5),   // from second file (host)
			},
		},
		{
			hostName: "host2",
			expected: vars.Vars{
				"bar": hostVar(15), // from first file (host)
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.hostName, func(t *testing.T) {
			got := inv.ResolveVars(tt.hostName, make(inventory.LoadedVars))
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestLoadAuto_EmptySources(t *testing.T) {
	inv := inventory.LoadAuto(fstest.MapFS{}, inventory.LoadOptions{})

	localhostVars := vars.Vars{
		"foo": hostVar("test"),
	}
	got := inv.ResolveVars("localhost", inventory.LoadedVars{
		inventory.ScopeHost: map[string]vars.Vars{
			"localhost": localhostVars,
		},
	})

	assert.Equal(t, localhostVars, got)
}

func TestLoadAuto_NonExistentSource(t *testing.T) {
	opts := inventory.LoadOptions{
		InventoryPath: "nonexistent",
	}

	inv := inventory.LoadAuto(fstest.MapFS{}, opts)
	localhostVars := vars.Vars{
		"foo": hostVar("test"),
	}
	got := inv.ResolveVars("localhost", inventory.LoadedVars{
		inventory.ScopeHost: map[string]vars.Vars{
			"localhost": localhostVars,
		},
	})

	assert.Equal(t, localhostVars, got)
}

func TestResolveInventorySources(t *testing.T) {

	files := map[string]*fstest.MapFile{
		"emptydir": {
			Mode: fs.ModeDir,
		},
		"hosts.yml":                      {},
		"inv/hosts.yml":                  {},
		"inv/group_vars/group1.yml":      {},
		"inv/one/hosts.yml":              {},
		"inv/one/host_vars/hosts1.yml":   {},
		"inv/two/hosts.yml":              {},
		"inv/two/group_vars/group1.yml":  {},
	}

	fsys := fstest.MapFS(files)
	rootSrc := fsutils.NewFileSource(fsys, ".")

	tests := []struct {
		name     string
		opts     inventory.LoadOptions
		expected []inventory.InventorySource
	}{
		{
			name: "single file",
			opts: inventory.LoadOptions{Sources: []string{"hosts.yml"}},
			expected: []inventory.InventorySource{
				inventory.HostFileSource{
					File:    rootSrc.Join("hosts.yml"),
					VarsDir: rootSrc,
				},
			},
		},
		{
			name: "single file in subdirectory",
			opts: inventory.LoadOptions{Sources: []string{"inv/hosts.yml"}},
			expected: []inventory.InventorySource{
				inventory.HostFileSource{
					File:    rootSrc.Join("inv", "hosts.yml"),
					VarsDir: rootSrc.Join("inv"),
				},
			},
		},
		{
			name: "nested directories with vars",
			opts: inventory.LoadOptions{Sources: []string{"inv"}},
			expected: []inventory.InventorySource{
				inventory.HostsDirsSource{
					Dirs: []fsutils.FileSource{
						rootSrc.Join("inv"),
						rootSrc.Join("inv", "one"),
						rootSrc.Join("inv", "two"),
					},
					VarsDir: rootSrc.Join("inv"),
				},
			},
		},
		{
			name: "inline hosts list",
			opts: inventory.LoadOptions{Sources: []string{"host1,host2"}},
			expected: []inventory.InventorySource{
				inventory.InlineHostsSource{Hosts: []string{"host1", "host2"}},
			},
		},
		{
			name: "empty sources with cfg path",
			opts: inventory.LoadOptions{
				InventoryPath: "hosts.yml",
			},
			expected: []inventory.InventorySource{
				inventory.HostFileSource{File: rootSrc.Join("hosts.yml"), VarsDir: rootSrc},
			},
		},
		{
			name: "directory without hosts files",
			opts: inventory.LoadOptions{Sources: []string{"emptydir"}},
			expected: []inventory.InventorySource{
				inventory.HostsDirsSource{VarsDir: rootSrc.Join("emptydir")},
			},
		},
		{
			name: "multiple sources",
			opts: inventory.LoadOptions{Sources: []string{"inv/one", "inv/two"}},
			expected: []inventory.InventorySource{
				inventory.HostsDirsSource{
					Dirs:    []fsutils.FileSource{rootSrc.Join("inv", "one")},
					VarsDir: rootSrc.Join("inv", "one"),
				},
				inventory.HostsDirsSource{
					Dirs:    []fsutils.FileSource{rootSrc.Join("inv", "two")},
					VarsDir: rootSrc.Join("inv", "two"),
				},
			},
		},
		{
			name:     "no sources",
			opts:     inventory.LoadOptions{},
			expected: []inventory.InventorySource{},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			res, err := inventory.ResolveSources(fsys, tt.opts)
			require.NoError(t, err)
			require.ElementsMatch(t, tt.expected, res)
		})
	}
}

func TestResolveInventorySources_AbsolutePath(t *testing.T) {
	// Skipped on Windows: the temporary file cannot be reopened while it is
	// still open ("The process cannot access the file because it is being used by another process").
	if runtime.GOOS == "windows" {
		t.Skip("TODO")
	}

	// create a temporary inventory file
	tmpFile, err := os.CreateTemp(t.TempDir(), "hosts-*.yml")
	require.NoError(t, err)

	opts := inventory.LoadOptions{
		InventoryPath: tmpFile.Name(),
	}

	fsys := fstest.MapFS{}

	res, err := inventory.ResolveSources(fsys, opts)
	require.NoError(t, err)

	fileSrc := fsutils.NewFileSource(nil, tmpFile.Name())

	expected := []inventory.InventorySource{
		inventory.HostFileSource{
			File:    fileSrc,
			VarsDir: fileSrc.Dir(),
		},
	}

	require.ElementsMatch(t, expected, res)
}
59
pkg/iac/scanners/ansible/inventory/yaml.go
Normal file
@@ -0,0 +1,59 @@
package inventory

import (
	"golang.org/x/xerrors"
	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/orderedmap"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	"github.com/aquasecurity/trivy/pkg/set"
)

type rawGroup struct {
	Hosts    map[string]vars.PlainVars               `yaml:"hosts"`
	Children orderedmap.OrderedMap[string, rawGroup] `yaml:"children"`
	Vars     vars.PlainVars                          `yaml:"vars"`
}

func ParseYAML(data []byte) (*Inventory, error) {
	var raw orderedmap.OrderedMap[string, rawGroup]
	if err := yaml.Unmarshal(data, &raw); err != nil {
		return nil, xerrors.Errorf("unmarshal inventory yaml: %w", err)
	}

	inv := newInventory()

	for groupName, groupRaw := range raw.Iter() {
		if err := parseGroup(groupName, groupRaw, inv, nil); err != nil {
			return nil, err
		}
	}

	inv.initDefaultGroups()
	return inv, nil
}

// parseGroup recursively parses a rawGroup and adds it to Inventory
func parseGroup(name string, rg rawGroup, inv *Inventory, parents []string) error {
	// Add group
	groupVars := vars.NewVars(rg.Vars, vars.InvFileGroupPriority)
	newGroup := newGroup(groupVars, set.New(parents...))
	inv.addGroup(name, newGroup)

	// Add hosts
	// A host can be in multiple groups, but Ansible processes only one instance of the host at runtime.
	// Ansible merges the data from multiple groups.
	for hostName, plainHostVars := range rg.Hosts {
		groups := set.New(append(parents, name)...)
		// TODO: support for host ranges, e.g. www[01:50:2].example.com
		// https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html#adding-ranges-of-hosts
		hostVars := vars.NewVars(plainHostVars, vars.InvFileHostPriority)
		inv.addHost(hostName, newHost(hostVars, groups))
	}

	// Recursively parse children groups
	for childName, childRg := range rg.Children.Iter() {
		if err := parseGroup(childName, childRg, inv, append(parents, name)); err != nil {
			return err
		}
	}
	return nil
}
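For orientation, a minimal sketch (not part of the commit) of feeding `ParseYAML` the standard YAML inventory shape it is written to handle, with nested groups, hosts, and group vars:

```go
// Hypothetical usage; ParseYAML and Inventory come from the file above.
data := []byte(`
group1:
  hosts:
    host1:
      baz: 10
  vars:
    bar: 10
  children:
    group2:
      hosts:
        host2:
`)
inv, err := ParseYAML(data)
if err != nil {
	// handle the unmarshal error
}
_ = inv // group2 is registered with group1 as its parent
```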
93
pkg/iac/scanners/ansible/orderedmap/orderedmap.go
Normal file
@@ -0,0 +1,93 @@
package orderedmap

import (
	"iter"
	"maps"

	"golang.org/x/xerrors"
	"gopkg.in/yaml.v3"
)

// OrderedMap is a map that preserves insertion order of keys.
type OrderedMap[K comparable, V any] struct {
	keys []K
	data map[K]V
}

func New[K comparable, V any](capacity int) *OrderedMap[K, V] {
	return &OrderedMap[K, V]{
		keys: make([]K, 0, capacity),
		data: make(map[K]V, capacity),
	}
}

func NewWithData[K comparable, V any](keys []K, data map[K]V) *OrderedMap[K, V] {
	return &OrderedMap[K, V]{
		keys: keys,
		data: data,
	}
}

// Len returns the number of entries in the map
func (m *OrderedMap[K, V]) Len() int {
	return len(m.keys)
}

func (m *OrderedMap[K, V]) Get(key K) (V, bool) {
	val, ok := m.data[key]
	return val, ok
}

func (m *OrderedMap[K, V]) Set(key K, value V) {
	if _, exists := m.data[key]; !exists {
		m.keys = append(m.keys, key)
	}
	m.data[key] = value
}

// Iter returns an iterator over the map entries in insertion order
func (m *OrderedMap[K, V]) Iter() iter.Seq2[K, V] {
	return func(yield func(K, V) bool) {
		for _, k := range m.keys {
			v := m.data[k]
			if !yield(k, v) {
				return
			}
		}
	}
}

func (m *OrderedMap[K, V]) AsMap() map[K]V {
	return maps.Clone(m.data)
}

func (m *OrderedMap[K, V]) UnmarshalYAML(n *yaml.Node) error {
	if n.Kind != yaml.MappingNode {
		return xerrors.Errorf("expected map node, got %s", n.Tag)
	}

	if len(n.Content)%2 != 0 {
		return xerrors.New("invalid map node content length")
	}

	size := len(n.Content) / 2
	m.keys = make([]K, 0, size)
	m.data = make(map[K]V, size)

	for i := 0; i < len(n.Content); i += 2 {
		keyNode, valueNode := n.Content[i], n.Content[i+1]

		var key K
		if err := keyNode.Decode(&key); err != nil {
			return err
		}
		m.keys = append(m.keys, key)

		var value V
		if err := valueNode.Decode(&value); err != nil {
			return err
		}
		m.data[key] = value
	}
	return nil
}
57
pkg/iac/scanners/ansible/orderedmap/orderedmap_test.go
Normal file
@@ -0,0 +1,57 @@
package orderedmap_test

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/orderedmap"
)

func makeSampleMap() *orderedmap.OrderedMap[string, int] {
	m := orderedmap.New[string, int](3)
	m.Set("a", 1)
	m.Set("b", 2)
	m.Set("c", 3)
	return m
}

func TestOrderedMap_UnmarshalYAML(t *testing.T) {
	yamlData := `
a: 1
b: 2
c: 3
`
	var om orderedmap.OrderedMap[string, int]
	err := yaml.Unmarshal([]byte(yamlData), &om)
	require.NoError(t, err)

	m := make(map[string]int)
	var keys []string
	for k, v := range om.Iter() {
		m[k] = v
		keys = append(keys, k)
	}

	expectedMap := map[string]int{"a": 1, "b": 2, "c": 3}
	assert.Equal(t, expectedMap, m)

	expectedKeys := []string{"a", "b", "c"}
	assert.Equal(t, expectedKeys, keys)
}

func TestOrderedMap_Iter(t *testing.T) {
	m := makeSampleMap()

	collected := make(map[string]int)
	var order []string
	for k, v := range m.Iter() {
		collected[k] = v
		order = append(order, k)
	}

	assert.Equal(t, map[string]int{"a": 1, "b": 2, "c": 3}, collected)
	assert.Equal(t, []string{"a", "b", "c"}, order)
}
85
pkg/iac/scanners/ansible/parser/config.go
Normal file
@@ -0,0 +1,85 @@
package parser

import (
	"errors"
	"io/fs"
	"os"
	"path"
	"path/filepath"

	"github.com/go-ini/ini"
	"golang.org/x/xerrors"

	"github.com/aquasecurity/trivy/pkg/log"
	"github.com/aquasecurity/trivy/pkg/utils/fsutils"
)

type AnsibleConfig struct {
	Inventory string
}

func LoadConfig(fsys fs.FS, dir string) (AnsibleConfig, error) {
	logger := log.WithPrefix("ansible")
	// https://docs.ansible.com/ansible/latest/reference_appendices/config.html#the-configuration-file
	cfgPaths := []struct {
		path  string
		useOS bool
	}{
		{os.Getenv("ANSIBLE_CONFIG"), true},
		{path.Join(dir, "ansible.cfg"), false},
		{filepath.Join(fsutils.HomeDir(), ".ansible.cfg"), true},
		{"/etc/ansible/ansible.cfg", true},
	}

	for _, p := range cfgPaths {
		if p.path == "" {
			continue
		}

		var b []byte
		var err error

		logger.Debug("Trying config", log.FilePath(p.path))

		if p.useOS {
			b, err = os.ReadFile(p.path)
		} else {
			b, err = fs.ReadFile(fsys, p.path)
		}

		if errors.Is(err, fs.ErrNotExist) {
			continue
		}
		if err != nil {
			return AnsibleConfig{}, xerrors.Errorf("read file %q: %w", p.path, err)
		}

		cfg, err := parseConfig(b)
		if err != nil {
			return AnsibleConfig{}, xerrors.Errorf("parse config %q: %w", p.path, err)
		}

		logger.Debug("Loaded config", log.FilePath(p.path))
		return cfg, nil
	}

	logger.Debug("No config found in search paths")
	return AnsibleConfig{}, nil
}

func parseConfig(b []byte) (AnsibleConfig, error) {
	// TODO: expand vars using os.ExpandEnv() ?
	f, err := ini.Load(b)
	if err != nil {
		return AnsibleConfig{}, xerrors.Errorf("load config file: %w", err)
	}

	cfg := AnsibleConfig{}

	section := f.Section("defaults")
	if k, err := section.GetKey("inventory"); err == nil {
		// TODO: trim spaces ?
		cfg.Inventory = k.String()
	}
	return cfg, nil
}
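A small sketch (illustrative only, not from the commit) of the INI shape that `parseConfig` above reads; only the `inventory` key of the `[defaults]` section is consumed:

```go
// Hypothetical input; parseConfig is defined above.
cfg, err := parseConfig([]byte(`
[defaults]
inventory = ./inventory/hosts.yml
`))
if err == nil {
	_ = cfg.Inventory // "./inventory/hosts.yml"
}
```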
89
pkg/iac/scanners/ansible/parser/discovery.go
Normal file
@@ -0,0 +1,89 @@
package parser

import (
	"fmt"
	"io/fs"
	"path"
	"slices"

	"github.com/bmatcuk/doublestar/v4"
)

// FindProjects locates Ansible project roots within fsys starting from root.
// A directory is recognized as a project root if it contains key files or directories
// like ansible.cfg, inventory, group_vars, host_vars, roles, playbooks, or YAML playbooks.
//
// Returns a slice of project root paths.
func FindProjects(fsys fs.FS, root string) ([]string, error) {
	var roots []string
	walkFn := func(filePath string, d fs.DirEntry, err error) error {
		if err != nil {
			return err
		}

		if !d.IsDir() {
			return nil
		}

		if isAnsibleProject(fsys, filePath) {
			roots = append(roots, filePath)
			return fs.SkipDir
		}

		return nil
	}

	if err := fs.WalkDir(fsys, root, walkFn); err != nil {
		return nil, fmt.Errorf("walk dir: %w", err)
	}

	return roots, nil
}

func isAnsibleProject(fsys fs.FS, dir string) bool {
	anchors := []string{
		"ansible.cfg",
		"inventory", "group_vars", "host_vars", "roles", "playbooks",
	}

	for _, name := range anchors {
		if pathExists(fsys, path.Join(dir, name)) {
			return true
		}
	}

	if entries, err := doublestar.Glob(fsys, dir+"/roles/**/{tasks,defaults,vars}"); err == nil && len(entries) > 0 {
		return true
	}

	if entries, err := doublestar.Glob(fsys, dir+"/*.{yml,yaml}"); err == nil && len(entries) > 0 {
		for _, entry := range entries {
			if isPlaybookFile(fsys, entry) {
				return true
			}
		}
	}

	return false
}

func isPlaybookFile(fsys fs.FS, filePath string) bool {
	data, err := fs.ReadFile(fsys, filePath)
	if err != nil {
		return false
	}

	var plays []*Play
	if err := decodeYAML(data, &plays); err != nil {
		return false
	}

	return slices.ContainsFunc(plays, func(play *Play) bool {
		return play.Hosts() != ""
	})
}

func pathExists(fsys fs.FS, filePath string) bool {
	_, err := fs.Stat(fsys, filePath)
	return err == nil
}
72
pkg/iac/scanners/ansible/parser/discovery_test.go
Normal file
@@ -0,0 +1,72 @@
package parser_test

import (
	"io/fs"
	"testing"
	"testing/fstest"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/parser"
)

func TestFindProjects(t *testing.T) {
	tests := []struct {
		name     string
		fsys     fs.FS
		dir      string
		expected []string
	}{
		{
			name: "single project with ansible.cfg",
			fsys: fstest.MapFS{
				"project1/ansible.cfg":     &fstest.MapFile{Data: []byte("[defaults]\n")},
				"project1/inventory/hosts": &fstest.MapFile{Data: []byte("[all]\nlocalhost\n")},
			},
			dir:      "project1",
			expected: []string{"project1"},
		},
		{
			name: "no projects",
			fsys: fstest.MapFS{
				"random/file.txt": &fstest.MapFile{Data: []byte("hello")},
			},
			dir:      ".",
			expected: nil,
		},
		{
			name: "project detected by playbook yaml",
			fsys: fstest.MapFS{
				"proj/main.yml": &fstest.MapFile{Data: []byte(`- hosts: all
  tasks:
    - debug: msg=hello
`)},
			},
			dir:      "proj",
			expected: []string{"proj"},
		},
		{
			name: "nested projects",
			fsys: fstest.MapFS{
				"proj1/ansible.cfg": &fstest.MapFile{Data: []byte("[defaults]\n")},
				"proj2/main.yaml": &fstest.MapFile{Data: []byte(`- hosts: all
  tasks:
    - debug: msg=hello
`)},
				"proj1/roles/role1/tasks/main.yml": &fstest.MapFile{Data: []byte("- debug: msg=ok")},
			},
			dir:      ".",
			expected: []string{"proj1", "proj2"},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got, err := parser.FindProjects(tt.fsys, tt.dir)
			require.NoError(t, err)

			assert.ElementsMatch(t, tt.expected, got)
		})
	}
}
30
pkg/iac/scanners/ansible/parser/module.go
Normal file
@@ -0,0 +1,30 @@
package parser

// Module represents a logical module in a playbook or task.
// It wraps a Node and provides module-specific utility methods.
//
// All the data and metadata for the module is stored in the embedded Node.
type Module struct {
	*Node

	Name string
}

// IsFreeForm returns true if the module is a free-form Ansible module.
// In Ansible, a free-form module is called using a single scalar value
// instead of a key-value mapping.
//
// Example:
//
//	# Free-form
//	- command: echo "Hello"
//	# IsFreeForm() -> true
//
//	# Structured
//	- ansible.builtin.yum:
//	    name: vim
//	    state: present
//	# IsFreeForm() -> false
func (m *Module) IsFreeForm() bool {
	return m.Node.IsString()
}
544
pkg/iac/scanners/ansible/parser/node.go
Normal file
@@ -0,0 +1,544 @@
package parser

import (
	"cmp"
	"fmt"
	"strconv"
	"strings"

	"github.com/hashicorp/go-multierror"
	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/orderedmap"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	iacTypes "github.com/aquasecurity/trivy/pkg/iac/types"
	"github.com/aquasecurity/trivy/pkg/set"
)

type NodeValue interface {
	MarshalYAML() (any, error)

	nodeValueMarker()
}

type Scalar struct {
	Val any
}

func (s *Scalar) MarshalYAML() (any, error) {
	if s.Val == nil {
		return nil, nil
	}
	return s.Val, nil
}

type Mapping struct {
	Fields *orderedmap.OrderedMap[string, *Node]
}

func (m *Mapping) MarshalYAML() (any, error) {
	node := &yaml.Node{
		Kind:    yaml.MappingNode,
		Tag:     "!!map",
		Content: nil,
	}
	for key, child := range m.Fields.Iter() {
		keyNode := &yaml.Node{
			Kind:  yaml.ScalarNode,
			Tag:   "!!str",
			Value: key,
		}
		valYAML, err := child.MarshalYAML()
		if err != nil {
			return nil, err
		}
		valNode := &yaml.Node{}
		if err := valNode.Encode(valYAML); err != nil {
			return nil, err
		}

		node.Content = append(node.Content, keyNode, valNode)
	}
	return node, nil
}

type Sequence struct {
	Items []*Node
}

func (s *Sequence) MarshalYAML() (any, error) {
	node := &yaml.Node{
		Kind:    yaml.SequenceNode,
		Tag:     "!!seq",
		Content: nil,
	}
	for _, item := range s.Items {
		itemYAML, err := item.MarshalYAML()
		if err != nil {
			return nil, err
		}
		itemNode := &yaml.Node{}
		if err := itemNode.Encode(itemYAML); err != nil {
			return nil, err
		}
		node.Content = append(node.Content, itemNode)
	}
	return node, nil
}

func (s *Scalar) nodeValueMarker()   {}
func (m *Mapping) nodeValueMarker()  {}
func (s *Sequence) nodeValueMarker() {}

type Node struct {
	rng      Range
	metadata iacTypes.Metadata
	val      NodeValue

	unknown bool
}

func (n *Node) Metadata() iacTypes.Metadata {
	return n.metadata
}

func (n *Node) IsKnown() bool {
	return !n.unknown
}

func (n *Node) UnmarshalYAML(node *yaml.Node) error {
	n.rng = rangeFromNode(node)

	switch node.Kind {
	case yaml.ScalarNode:
		switch node.Tag {
		case "!!int":
			v, err := strconv.Atoi(node.Value)
			if err == nil {
				n.val = &Scalar{Val: v}
			}
		case "!!float":
			// TODO: handle float properly
		case "!!bool":
			v, err := strconv.ParseBool(node.Value)
			if err == nil {
				n.val = &Scalar{Val: v}
			}
		case StrTag, "!!string":
			n.val = &Scalar{Val: node.Value}
		}
		return nil
	case yaml.MappingNode:
		n.rng.Start--
		childData, err := decodeMapNode(node)
		if err != nil {
			return err
		}
		n.val = childData
		return nil
	case yaml.SequenceNode:
		n.rng.Start--
		childData, err := decodeSequenceNode(node)
		if err != nil {
			return err
		}
		n.val = childData
		return nil
	}
	return nil
}

func decodeMapNode(node *yaml.Node) (*Mapping, error) {
	childMap := orderedmap.New[string, *Node](len(node.Content) / 2)

	for i := 0; i < len(node.Content); i += 2 {
		keyNode, valueNode := node.Content[i], node.Content[i+1]

		childNode, err := decodeChildNode(valueNode)
		if err != nil {
			return nil, err
		}

		childMap.Set(keyNode.Value, &childNode)
	}

	return &Mapping{Fields: childMap}, nil
}

func decodeSequenceNode(node *yaml.Node) (*Sequence, error) {
	items := make([]*Node, 0, len(node.Content))

	for _, elemNode := range node.Content {
		childNode, err := decodeChildNode(elemNode)
		if err != nil {
			return nil, err
		}
		if elemNode.Kind == yaml.MappingNode {
			childNode.rng.Start++
		}
		items = append(items, &childNode)
	}

	return &Sequence{Items: items}, nil
}

func decodeChildNode(yNode *yaml.Node) (Node, error) {
	if yNode.Kind == yaml.ScalarNode && yNode.Tag == NullTag {
		return Node{
			rng: rangeFromNode(yNode),
			val: nil,
		}, nil
	}

	var n Node
	if err := yNode.Decode(&n); err != nil {
		return Node{}, err
	}
	return n, nil
}

func (n *Node) initMetadata(fileSrc fsutils.FileSource, parent *iacTypes.Metadata, nodePath []string) {
	fsys, relPath := fileSrc.FSAndRelPath()
	ref := cmp.Or(strings.Join(nodePath, "."), ".")
	rng := iacTypes.NewRange(relPath, n.rng.Start, n.rng.End, "", fsys)

	n.metadata = iacTypes.NewMetadata(rng, ref)
	n.metadata.SetParentPtr(parent)

	switch val := n.val.(type) {
	case *Mapping:
		for key, attr := range val.Fields.Iter() {
			if attr == nil {
				continue
			}
			childPath := append(nodePath, key)
			attr.initMetadata(fileSrc, parent, childPath)
		}
	case *Sequence:
		for idx, attr := range val.Items {
			if attr == nil {
				continue
			}
			childPath := append(nodePath, fmt.Sprintf("[%d]", idx))
			attr.initMetadata(fileSrc, parent, childPath)
		}
	}
}

func (n *Node) Render(variables vars.Vars) (*Node, error) {
	return n.render(variables, set.New[string]())
}

func (n *Node) render(variables vars.Vars, visited set.Set[string]) (*Node, error) {
	if n == nil {
		return nil, nil
	}

	switch v := n.val.(type) {
	case *Scalar:
		if s, ok := v.Val.(string); ok {
			if found := visited.Contains(s); found {
				return n, fmt.Errorf("cyclic reference detected: %q", s)
			}

			visited.Append(s)
			rendered, err := evaluateTemplate(s, variables)
			if err != nil {
				n.unknown = true
				return n, fmt.Errorf("node ref %q: %w", n.metadata.Reference(), err)
			}

			newNode := n.withValue(&Scalar{Val: rendered})
			if strings.Contains(rendered, "{{") {
				return newNode.render(variables, visited)
			}
			visited.Remove(s)

			return newNode, nil
		}
		return n, nil
	case *Mapping:
		var errs error
		fields := orderedmap.New[string, *Node](v.Fields.Len())
		for key, val := range v.Fields.Iter() {
			r, err := val.Render(variables)
			if err != nil {
				errs = multierror.Append(errs, err)
			}
			fields.Set(key, r)
		}
		return n.withValue(&Mapping{Fields: fields}), errs
	case *Sequence:
		var errs error
		items := make([]*Node, 0, len(v.Items))
		for _, val := range v.Items {
			r, err := val.Render(variables)
			if err != nil {
				errs = multierror.Append(errs, err)
			}
			items = append(items, r)
		}
		return n.withValue(&Sequence{Items: items}), errs
	default:
		return n, nil
	}
}

func (n *Node) withValue(val NodeValue) *Node {
	return &Node{val: val, rng: n.rng, metadata: n.metadata}
}

func (n *Node) MarshalYAML() (any, error) {
	if n.val == nil {
		return nil, nil
	}
	return n.val.MarshalYAML()
}

func (n *Node) IsNil() bool {
	return n == nil || n.val == nil
}

func (n *Node) IsMap() bool {
	return safeOp(n, func(nv NodeValue) bool {
		_, ok := nv.(*Mapping)
		return ok
	})
}

func (n *Node) IsList() bool {
	return safeOp(n, func(nv NodeValue) bool {
		_, ok := nv.(*Sequence)
		return ok
	})
}

func (n *Node) IsBool() bool {
	return checkScalarType[bool](n)
}

func (n *Node) IsString() bool {
	return checkScalarType[string](n)
}

func (n *Node) ToList() []*Node {
	return safeOp(n, func(nv NodeValue) []*Node {
		val, ok := nv.(*Sequence)
		if !ok || val == nil {
			return nil
		}

		return val.Items
	})
}

func (n *Node) ToMap() map[string]*Node {
	return safeOp(n, func(nv NodeValue) map[string]*Node {
		val, ok := nv.(*Mapping)
		if !ok || val == nil {
			return make(map[string]*Node)
		}

		return val.Fields.AsMap()
	})
}

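// NodeAt returns the nested node at a dot-separated path; for example,
// NodeAt("tags.example") on the mapping {tags: {example: tag1}} returns the
// node holding "tag1". It returns nil if the receiver is not a mapping or
// any path segment is missing.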
func (n *Node) NodeAt(path string) *Node {
	if path == "" || !n.IsMap() {
		return nil
	}

	parts := strings.SplitN(path, ".", 2)

	attr, exists := n.ToMap()[parts[0]]
	if !exists {
		return nil
	}

	if len(parts) == 1 {
		return attr
	}

	return attr.NodeAt(parts[1])
}

func (n *Node) StringValue(path string) iacTypes.StringValue {
	def := iacTypes.StringDefault("", n.metadata)
	if n.IsNil() {
		return def
	}

	if n.unknown {
		return iacTypes.StringUnresolvable(n.metadata)
	}

	nested := n.NodeAt(path)
	val, ok := nested.AsString()
	if !ok {
		return iacTypes.StringUnresolvable(n.metadata)
	}
	return iacTypes.String(val, n.metadata)
}

func (n *Node) BoolValue(path string) iacTypes.BoolValue {
	def := iacTypes.BoolDefault(false, n.metadata)
	if n.IsNil() {
		return def
	}

	if n.unknown {
		return iacTypes.BoolUnresolvable(n.metadata)
	}

	nested := n.NodeAt(path)
	val, ok := nested.AsBool()
	if !ok {
		return iacTypes.BoolUnresolvable(n.metadata)
	}

	return iacTypes.Bool(val, n.metadata)
}

func (n *Node) AsBool() (bool, bool) {
	if n.IsNil() {
		return false, false
	}

	scalar, ok := n.val.(*Scalar)
	if !ok {
		return false, false
	}

	switch val := scalar.Val.(type) {
	case bool:
		return val, true
	case string:
		return parseAnsibleBool(val)
	case int:
		switch val {
		case 0:
			return false, true
		case 1:
			return true, true
		}
	}
	return false, false
}

// parseAnsibleBool implements Ansible's string→bool conversion.
func parseAnsibleBool(s string) (bool, bool) {
	normalized := strings.ToLower(strings.TrimSpace(s))

	switch normalized {
	case "1", "t", "true", "y", "yes", "on":
		return true, true
	case "0", "f", "false", "n", "no", "off":
		return false, true
	default:
		return false, false
	}
}

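// For illustration: parseAnsibleBool("Yes") returns (true, true),
// parseAnsibleBool(" off ") returns (false, true), and parseAnsibleBool("2")
// returns (false, false); the second result reports whether the input is a
// recognized Ansible boolean.
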
func (n *Node) AsString() (string, bool) {
	if !n.IsString() {
		return "", false
	}

	scalar, _ := n.val.(*Scalar)
	val, ok := scalar.Val.(string)
	return val, ok
}

func (n *Node) Value() any {
	return safeOp(n, func(nv NodeValue) any {
		scalar, ok := nv.(*Scalar)
		if ok {
			return scalar.Val
		}
		return nil
	})
}

func checkScalarType[T any](n *Node) bool {
	return safeOp(n, func(nv NodeValue) bool {
		scalar, ok := nv.(*Scalar)
		if !ok || scalar.Val == nil {
			return false
		}
		_, ok = scalar.Val.(T)
		return ok
	})
}

func safeOp[T any](n *Node, op func(NodeValue) T) T {
	var zero T
	if n.IsNil() || n.val == nil {
		return zero
	}
	return op(n.val)
}

func (n *Node) Subtree(r Range) *Node {
	sub, _ := n.subtree(r)
	return sub
}

func (n *Node) subtree(r Range) (*Node, bool) {
	if n == nil {
		return nil, false
	}

	if r.Covers(n.rng) {
		return n, false
	}

	switch val := n.val.(type) {
	case *Mapping:
		subFields := orderedmap.New[string, *Node](val.Fields.Len())
		for k, child := range val.Fields.Iter() {
			if child == nil {
				continue
			}
			sub, partial := child.subtree(r)
			if sub == nil {
				continue
			}

			if partial && r.Start > child.rng.Start {
				return sub, true
			}
			subFields.Set(k, sub)
		}
		if subFields.Len() > 0 {
			return n.withValue(&Mapping{Fields: subFields}), true
		}
		return nil, false
	case *Sequence:
		items := make([]*Node, 0, len(val.Items))
		for _, item := range val.Items {
			if item == nil {
				continue
			}
			sub, partial := item.subtree(r)
			if sub == nil {
				continue
			}
			if partial {
				// Return a new sequence containing only the partially matched element.
				return n.withValue(&Sequence{Items: []*Node{sub}}), true
			}

			items = append(items, sub)
		}
		if len(items) > 0 {
			return n.withValue(&Sequence{Items: items}), true
		}
		return nil, false
	default:
		if r.Overlaps(n.rng) {
			return n, false
		}
		return nil, false
	}
}
293
pkg/iac/scanners/ansible/parser/node_test.go
Normal file
@@ -0,0 +1,293 @@
package parser

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/orderedmap"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
)

func mustNodeFromYAML(t *testing.T, src string) *Node {
	t.Helper()
	var n Node
	require.NoError(t, decodeYAML([]byte(src), &n))
	return &n
}

func TestNode_UnmarshalYAML(t *testing.T) {
	src := `name: {{ testname }}
len: 100
keys:
  - a
  - 101
  - true
  - null
state:
  name: test
  len: 200
  foo: null
`
	expected := &Node{
		rng: Range{0, 11},
		val: &Mapping{
			Fields: func() *orderedmap.OrderedMap[string, *Node] {
				m := orderedmap.New[string, *Node](4)
				m.Set("name", &Node{rng: Range{1, 1}, val: &Scalar{Val: "{{ testname }}"}})
				m.Set("len", &Node{rng: Range{2, 2}, val: &Scalar{Val: 100}})
				m.Set("keys", &Node{
					rng: Range{3, 7},
					val: &Sequence{
						Items: []*Node{
							{rng: Range{4, 4}, val: &Scalar{Val: "a"}},
							{rng: Range{5, 5}, val: &Scalar{Val: 101}},
							{rng: Range{6, 6}, val: &Scalar{Val: true}},
							{rng: Range{7, 7}, val: nil},
						},
					},
				})
				m.Set("state", &Node{
					rng: Range{8, 11},
					val: &Mapping{
						Fields: func() *orderedmap.OrderedMap[string, *Node] {
							sm := orderedmap.New[string, *Node](3)
							sm.Set("name", &Node{rng: Range{9, 9}, val: &Scalar{Val: "test"}})
							sm.Set("len", &Node{rng: Range{10, 10}, val: &Scalar{Val: 200}})
							sm.Set("foo", &Node{rng: Range{11, 11}, val: nil})
							return sm
						}(),
					},
				})
				return m
			}(),
		},
	}

	n := mustNodeFromYAML(t, src)
	assert.Equal(t, expected, n)
}

func TestNode_NodeAt(t *testing.T) {
	tests := []struct {
		name     string
		src      string
		path     string
		expected any
	}{
		{
			name:     "first level",
			src:      `name: mys3bucket`,
			path:     "name",
			expected: "mys3bucket",
		},
		{
			name: "happy",
			src: `tags:
  example: tag1`,
			path:     "tags.example",
			expected: "tag1",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			n := mustNodeFromYAML(t, tt.src)
			got := n.NodeAt(tt.path).Value()
			assert.Equal(t, tt.expected, got)
		})
	}
}

func TestNode_Render(t *testing.T) {
	tests := []struct {
		name    string
		yamlSrc string
		vars    map[string]any
		want    string
		wantErr bool
	}{
		{
			name:    "simple interpolation",
			yamlSrc: `"{{ b }} {{ c }}"`,
			vars: map[string]any{
				"b": "hello",
				"c": "world",
			},
			want:    "hello world\n",
			wantErr: false,
		},
		{
			name:    "chained references",
			yamlSrc: `"{{ b }}"`,
			vars: map[string]any{
				"b": "{{ c }}",
				"c": "final",
			},
			want:    "final\n",
			wantErr: false,
		},
		{
			name:    "cyclic reference",
			yamlSrc: `"{{ a }}"`,
			vars: map[string]any{
				"a": "{{ a }}",
			},
			wantErr: true,
		},
		{
			name:    "shared variable",
			yamlSrc: `"{{ x }} and {{ y }}"`,
			vars: map[string]any{
				"x":      "{{ shared }}",
				"y":      "{{ shared }}",
				"shared": "value",
			},
			want:    "value and value\n",
			wantErr: false,
		},
		{
			name:    "undefined variable",
			yamlSrc: `"{{ missing }}"`,
			vars:    make(map[string]any),
			wantErr: true,
		},
		{
			name:    "empty template",
			yamlSrc: `""`,
			vars:    make(map[string]any),
			want: `""
`,
			wantErr: false,
		},
		{
			name:    "mixed literal and template",
			yamlSrc: `"start {{ a }} end"`,
			vars: map[string]any{
				"a": "{{ b }}",
				"b": "middle",
			},
			want:    "start middle end\n",
			wantErr: false,
		},
		{
			name: "sequence and mapping",
			yamlSrc: `
list:
  - "{{ x }}"
  - "{{ y }}"
dict:
  key1: "{{ a }}"
  key2: "{{ b }}"
`,
			vars: map[string]any{"x": "1", "y": "2", "a": "A", "b": "B"},
			want: `list:
    - "1"
    - "2"
dict:
    key1: A
    key2: B
`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			n := mustNodeFromYAML(t, tt.yamlSrc)
			got, err := n.Render(vars.NewVars(tt.vars, 0))
			if tt.wantErr {
				require.Error(t, err)
				return
			}
			require.NoError(t, err)

			marshaled, err := yaml.Marshal(got)
			require.NoError(t, err)
			assert.Equal(t, tt.want, string(marshaled))
		})
	}
}

func TestNode_Subtree(t *testing.T) {
	src := `name: test
len: 100
state:
    foo: bar
    num: 42
elems:
    - foo: 1
      baz: 2
    - bar
`
	tests := []struct {
		name     string
		query    Range
		expected string
	}{
		{
			name:     "no cover",
			query:    Range{0, 0},
			expected: "null\n",
		},
		{
			name:     "single top-level field",
			query:    Range{1, 1},
			expected: "name: test\n",
		},
		{
			name:     "single nested field",
			query:    Range{5, 5},
			expected: "num: 42\n",
		},
		{
			name:  "nested mapping",
			query: Range{4, 5},
			expected: `foo: bar
num: 42
`,
		},
		{
			name:  "map with some fields",
			query: Range{6, 7},
			expected: `elems:
    - foo: 1
`,
		},
		{
			name:  "single nested element",
			query: Range{9, 9},
			expected: `- bar
`,
		},
		{
			name:  "nested slice",
			query: Range{7, 9},
			expected: `- foo: 1
  baz: 2
- bar
`,
		},
		{
			name:  "nested slice elem",
			query: Range{9, 9},
			expected: `- bar
`,
		},
		{
			name:     "full range",
			query:    Range{1, 9},
			expected: src,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			node := mustNodeFromYAML(t, src)
			subtree := node.Subtree(tt.query)
			marshaled, err := yaml.Marshal(subtree)
			require.NoError(t, err)
			assert.Equal(t, tt.expected, string(marshaled))
		})
	}
}
761
pkg/iac/scanners/ansible/parser/parser.go
Normal file
@@ -0,0 +1,761 @@
package parser

import (
	"cmp"
	"errors"
	"fmt"
	"io/fs"
	"os"
	"path/filepath"
	"slices"
	"strings"

	"github.com/hashicorp/go-multierror"
	"github.com/samber/lo"
	"golang.org/x/xerrors"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/inventory"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	iacTypes "github.com/aquasecurity/trivy/pkg/iac/types"
	"github.com/aquasecurity/trivy/pkg/log"
	"github.com/aquasecurity/trivy/pkg/set"
)

const (
	ansibleBuiltinPrefix = "ansible.builtin."
)

func withBuiltinPrefix(actions ...string) []string {
	result := make([]string, 0, len(actions)*2)
	for _, action := range actions {
		result = append(result, action, ansibleBuiltinPrefix+action)
	}
	return result
}

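// For illustration: withBuiltinPrefix("copy", "yum") returns
// []string{"copy", "ansible.builtin.copy", "yum", "ansible.builtin.yum"},
// so lookups match both the short and fully qualified module names.
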
type AnsibleProject struct {
	path string

	cfg            AnsibleConfig
	inventory      *inventory.Inventory
	galaxyManifest *GalaxyManifest

	tasks ResolvedTasks
}

func (p *AnsibleProject) Path() string {
	return p.path
}

// TODO(nikita): some tasks do not contain metadata
func (p *AnsibleProject) ListTasks() ResolvedTasks {
	return p.tasks
}

type GalaxyManifest struct {
	Namespace string `yaml:"namespace"`
	Name      string `yaml:"name"`
}

type Option func(p *Parser)

func WithPlaybooks(playbooks ...string) Option {
	return func(p *Parser) {
		p.playbooks = playbooks
	}
}

func WithInventories(inventories ...string) Option {
	return func(p *Parser) {
		p.inventories = inventories
	}
}

func WithExtraVars(v map[string]any) Option {
	return func(p *Parser) {
		p.extraVars = vars.NewVars(v, vars.ExtraVarsPriority)
	}
}

type Parser struct {
	fsys    fs.FS
	rootSrc fsutils.FileSource
	logger  *log.Logger

	inventories []string
	playbooks   []string
	extraVars   vars.Vars

	project *AnsibleProject

	// resolvedTasks caches the fully expanded list of tasks for each playbook,
	// keyed by the playbook's file path to avoid redundant parsing and resolution.
	resolvedTasks map[string]ResolvedTasks

	// roleCache stores loaded role data keyed by role name,
	// enabling reuse of roles across multiple playbooks without repeated loading.
	roleCache map[string]*Role
}

func New(fsys fs.FS, root string, opts ...Option) *Parser {
	p := &Parser{
		fsys:      fsys,
		rootSrc:   fsutils.NewFileSource(fsys, root),
		logger:    log.WithPrefix("ansible"),
		extraVars: make(vars.Vars),

		resolvedTasks: make(map[string]ResolvedTasks),
		roleCache:     make(map[string]*Role),
	}

	for _, opt := range opts {
		opt(p)
	}

	return p
}

func ParseProject(fsys fs.FS, root string, opts ...Option) (*AnsibleProject, error) {
	parser := New(fsys, root, opts...)
	project, err := parser.Parse()
	if err != nil {
		return nil, err
	}
	return project, nil
}

func (p *Parser) Parse() (*AnsibleProject, error) {
	p.logger.Debug("Parse Ansible project", log.FilePath(p.rootSrc.Path))

	err := p.initProject()
	if err != nil {
		return nil, err
	}

	playbookSources := lo.Map(p.playbooks, func(playbookPath string, _ int) fsutils.FileSource {
		return p.rootSrc.Join(playbookPath)
	})

	if len(playbookSources) == 0 {
		playbookSources, err = p.resolvePlaybooksPaths()
		if err != nil {
			return nil, err
		}
	}

	tasks, err := p.parsePlaybooks(playbookSources)
	if err != nil {
		return nil, err
	}

	p.project.tasks = tasks
	return p.project, nil
}

func (p *Parser) initProject() error {
	cfg, err := p.readAnsibleConfig()
	if err != nil {
		return xerrors.Errorf("read config: %w", err)
	}

	inv := inventory.LoadAuto(p.fsys, inventory.LoadOptions{
		InventoryPath: cfg.Inventory,
		Sources:       p.inventories,
	})

	p.project = &AnsibleProject{
		path:           p.rootSrc.Path,
		cfg:            cfg,
		inventory:      inv,
		galaxyManifest: p.findGalaxyManifest(),
	}
	return nil
}

func (p *Parser) findGalaxyManifest() *GalaxyManifest {
	var manifest GalaxyManifest
	if err := decodeYAMLFileWithExtension(p.rootSrc.Join("galaxy"), &manifest, yamlExtensions); err != nil {
		return nil
	}

	p.logger.Debug("Found Galaxy manifest",
		log.String("namespace", manifest.Namespace), log.String("name", manifest.Name))

	return &manifest
}

func (p *Parser) parsePlaybooks(sources []fsutils.FileSource) (ResolvedTasks, error) {
	playbooks := make(map[string]*Playbook)

	for _, src := range sources {
		pb, err := p.loadPlaybook(src)
		if err != nil {
			// Skip files that are YAML but not valid playbooks.
			p.logger.Debug("Skipping YAML file: not a playbook",
				log.FilePath(src.Path), log.Err(err))
			continue
		}
		playbooks[src.Path] = pb
	}

	entryPoints := findEntryPoints(playbooks)

	// TODO: Filter entrypoint playbooks by hosts and inventory.
	// For each play, check its 'hosts' field against the inventory (hosts and groups).
	// Include playbooks targeting at least one host from the inventory.
	// Handle special cases such as 'all', 'localhost', and dynamic variables.
	// Optionally, add a mode to bypass this filtering for full scans or debugging.

	// Resolve tasks from entrypoint playbooks — those that are not imported/included by others.
	var allTasks ResolvedTasks
	for _, playbookSrc := range entryPoints {
		tasks, err := p.resolvePlaybook(nil, nil, playbookSrc, playbooks)
		if err != nil {
			return nil, xerrors.Errorf("resolve playbook: %w", err)
		}
		allTasks = append(allTasks, tasks...)
	}

	return allTasks, nil
}

func (p *Parser) loadPlaybook(f fsutils.FileSource) (*Playbook, error) {
	var plays []*Play
	if err := decodeYAMLFile(f, &plays); err != nil {
		return nil, xerrors.Errorf("decode YAML file: %w", err)
	}

	p.logger.Debug("Loaded playbook",
		log.FilePath(f.Path), log.Int("plays_count", len(plays)))
	return &Playbook{
		Src:   f,
		Plays: plays,
	}, nil
}

func findEntryPoints(playbooks map[string]*Playbook) []fsutils.FileSource {
	included := set.New[string]()

	for _, pb := range playbooks {
		for _, p := range pb.Plays {
			if incPath, ok := p.includedPlaybook(); ok {
				includedSrc := pb.resolveIncludedSrc(incPath)
				included.Append(includedSrc.Path)
			}
		}
	}

	var entryPoints []fsutils.FileSource
	for path, pb := range playbooks {
		if !included.Contains(path) {
			entryPoints = append(entryPoints, pb.Src)
		}
	}

	return entryPoints
}

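// For illustration: if site.yml contains "- import_playbook: web.yml",
// web.yml is recorded as included, so only site.yml is returned as an
// entry point by findEntryPoints above.
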
// resolvePlaybook recursively expands tasks, roles, and included playbooks within the given playbook.
func (p *Parser) resolvePlaybook(
	parent *iacTypes.Metadata, parentVars vars.Vars,
	playbookSrc fsutils.FileSource, playbooks map[string]*Playbook,
) (ResolvedTasks, error) {
	pb, exists := playbooks[playbookSrc.Path]
	if !exists {
		// Attempt to load a playbook outside the scan directory (it may be an included playbook).
		var err error
		pb, err = p.loadPlaybook(playbookSrc)
		if err != nil {
			return nil, xerrors.Errorf("load playbook: %w", err)
		}
		// Cache the externally loaded playbook for reuse.
		playbooks[playbookSrc.Path] = pb
	}

	if cached, exists := p.resolvedTasks[pb.Src.Path]; exists {
		return cached, nil
	}

	p.logger.Debug("Resolve playbook tasks", log.FilePath(pb.Src.Path))

	// Ansible loads host and group variable files by searching paths
	// relative to the playbook file.
	// See https://docs.ansible.com/ansible/latest/inventory_guide/intro_inventory.html#organizing-host-and-group-variables
	playbookInvVars := inventory.LoadVars(inventory.PlaybookVarsSources(playbookSrc.Dir()))

	var tasks ResolvedTasks
	for _, play := range pb.Plays {

		// Initialize the metadata of the play and its nested elements.
		play.initMetadata(playbookSrc, parent)

		// TODO: resolve hosts by pattern:
		// https://docs.ansible.com/ansible/latest/inventory_guide/intro_patterns.html#common-patterns
		hosts := play.inner.Hosts

		// TODO: iterate over hosts
		hostVars := p.project.inventory.ResolveVars(hosts, playbookInvVars)
		playVars := vars.MergeVars(hostVars, parentVars, play.Variables())

		for _, playTask := range play.listTasks() {
			// TODO: pass parent metadata

			// TODO: Support expanding loops (e.g. 'loop', 'with_items') in tasks.
			// Example:
			//   - name: Install multiple packages
			//     ansible.builtin.yum:
			//       name: "{{ item }}"
			//       state: present
			//     loop:
			//       - httpd
			//       - memcached
			//       - mariadb
			//
			// During expansion, the task should be duplicated for each item with `item` rendered.

			childrenTasks, err := p.expandTask(playVars, playTask)
			if err != nil {
				p.logger.Debug("Failed to expand playbook task",
					log.String("source", playTask.metadata.Range().String()), log.Err(err))
			}
			tasks = append(tasks, childrenTasks...)
		}

		// https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_reuse_roles.html#using-roles-at-the-play-level
		for _, roleDef := range play.roleDefinitions() {
			roleTasks, err := p.resolveRoleDefinitionTasks(roleDef, play, playVars)
			if err != nil {
				p.logger.Debug("Failed to load role", log.String("role", roleDef.name()), log.Err(err))
				continue
			}
			tasks = append(tasks, roleTasks...)
		}

		// https://docs.ansible.com/ansible/latest/collections/ansible/builtin/import_playbook_module.html
		if incPath, ok := play.includedPlaybook(); ok {
			p.logger.Debug("Resolve playbook include",
				log.String("source", play.metadata.Range().String()),
				log.String("include", incPath),
			)

			effectiveVars := vars.MergeVars(playVars, play.specialVars())
			renderedPath, err := evaluateTemplate(incPath, effectiveVars)
			if err != nil {
				p.logger.Debug("Failed to render path",
					log.FilePath(incPath), log.Err(err))
				continue
			}

			fullIncSrc := pb.resolveIncludedSrc(renderedPath)
			includedTasks, err := p.resolvePlaybook(&play.metadata, playVars, fullIncSrc, playbooks)
			if err != nil && errors.Is(err, fs.ErrNotExist) {
				p.logger.Debug("Failed to load included playbook",
					log.FilePath(fullIncSrc.Path), log.Err(err))
			} else {
				if err != nil {
					p.logger.Debug("An error occurred while resolving playbook tasks",
						log.FilePath(fullIncSrc.Path), log.Err(err))
				}
				p.logger.Debug("Loaded included playbook tasks",
					log.FilePath(fullIncSrc.Path), log.Int("tasks_count", len(includedTasks)))
				tasks = append(tasks, includedTasks...)
			}
		}
	}

	p.logger.Debug("Resolved playbook tasks",
		log.FilePath(pb.Src.Path), log.Int("tasks_count", len(tasks)))

	p.resolvedTasks[pb.Src.Path] = tasks
	return tasks, nil
}

func (p *Parser) resolveRoleDefinitionTasks(
	roleDef *RoleDefinition, play *Play, playVars vars.Vars,
) (ResolvedTasks, error) {
	p.logger.Debug("Resolve role at play level",
		log.String("name", roleDef.name()),
		log.String("source", roleDef.metadata.Range().String()))

	role, err := p.loadRole(&roleDef.metadata, play, roleDef.name())
	if err != nil {
		return nil, xerrors.Errorf("load role %q: %w", roleDef.name(), err)
	}

	// When using the roles option at the play level, each role "x"
	// looks for files named main.yml, main.yaml, or main (without extension)
	// in its internal directories (tasks, defaults, vars, etc.).

	// Ignore non-existent files, as they are loaded by default and may be missing.
	roleDefaults, err := role.defaultVariables("main")
	if err != nil && !errors.Is(err, fs.ErrNotExist) {
		p.logger.Debug("Failed to load role default variables", log.Err(err))
	}
	roleVariables, err := role.fileVariables("main")
	if err != nil && !errors.Is(err, fs.ErrNotExist) {
		p.logger.Debug("Failed to load role variables", log.Err(err))
	}

	roleScopeVars := vars.MergeVars(
		// Role default variables have the lowest priority
		roleDefaults,
		playVars,
		roleVariables,
	)

	roleTasks, err := role.getTasks("main")
	if err != nil {
		return nil, xerrors.Errorf("load role tasks: %w", err)
	}

	var tasks ResolvedTasks
	for _, roleTask := range roleTasks {
		childrenTasks, err := p.expandTask(roleScopeVars, roleTask)
		if err != nil {
			p.logger.Debug("Failed to expand role tasks", log.Err(err))
			continue
		}
		tasks = append(tasks, childrenTasks...)
	}

	p.logger.Debug("Included role loaded",
		log.FilePath(role.src.Path), log.Int("tasks_count", len(tasks)))
	return tasks, nil
}

// loadRole loads a role by name.
func (p *Parser) loadRole(parent *iacTypes.Metadata, play *Play, roleName string) (*Role, error) {
	cachedRole, exists := p.roleCache[roleName]
	if exists {
		return cachedRole, nil
	}

	roleSrc, exists := p.resolveRolePath(play.src.Dir(), roleName)
	if !exists || roleSrc.Path == "" {
		return nil, xerrors.Errorf("role %q not found", roleName)
	}

	r := &Role{
		name:        roleName,
		src:         roleSrc,
		play:        play,
		cachedTasks: make(map[string][]*Task),
	}
	r.initMetadata(roleSrc, parent)

	if err := p.loadRoleDependencies(r); err != nil {
		return nil, xerrors.Errorf("load role deps: %w", err)
	}

	p.roleCache[roleName] = r

	p.logger.Debug("Role found",
		log.String("name", roleName),
		log.String("source", parent.GetMetadata().Range().String()),
		log.FilePath(roleSrc.Path))
	return r, nil
}

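// loadRoleDependencies reads the role's meta/main file and loads each
// declared dependency as a direct dependency of the role.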
func (p *Parser) loadRoleDependencies(r *Role) error {
	// The meta directory is an exception: it always uses the standard
	// main.yml (or main.yaml/main) file without allowing custom filenames or overrides.
	metaSrc := r.src.Join("meta", "main")

	var roleMeta RoleMeta
	if err := decodeYAMLFileWithExtension(metaSrc, &roleMeta, yamlExtensions); err != nil && !errors.Is(err, os.ErrNotExist) {
		return xerrors.Errorf("load meta: %w", err)
	}

	roleMeta.updateMetadata(metaSrc.FS, &r.metadata, metaSrc.Path)

	for _, dep := range roleMeta.dependencies() {
		depRole, err := p.loadRole(&roleMeta.metadata, r.play, dep.name())
		if err != nil {
			return xerrors.Errorf("load role dependency %q: %w", dep.name(), err)
		}
		r.directDeps = append(r.directDeps, depRole)
	}
	return nil
}

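// resolveRolePath resolves a role reference to a location on disk. Path-like
// names (absolute, or starting with ".") are used as-is; plain names are
// searched under the base roles/ directory and, if set, DEFAULT_ROLES_PATH.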
// TODO: support all possible locations of the role
// https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_reuse_roles.html
func (p *Parser) resolveRolePath(playbookDirSrc fsutils.FileSource, name string) (fsutils.FileSource, bool) {
	isPath := filepath.IsAbs(name) || strings.HasPrefix(name, ".")
	if isPath {
		if !filepath.IsAbs(name) {
			return playbookDirSrc.Join(name), true
		}
		return fsutils.NewFileSource(nil, name), true
	}

	roleName := name
	baseSrc := playbookDirSrc

	parts := strings.SplitN(roleName, ".", 3)
	if len(parts) == 3 {
		if m := p.project.galaxyManifest; m != nil &&
			m.Namespace == parts[0] && m.Name == parts[1] {
			roleName = parts[2]
			baseSrc = p.rootSrc

			// TODO: support resolving roles from namespace.collection
			// by searching in the collections/ansible_collections directory
		}
	}

	roleSources := []fsutils.FileSource{
		baseSrc.Join("roles", roleName),
	}

	if defaultRolesPath, exists := os.LookupEnv("DEFAULT_ROLES_PATH"); exists {
		rolesSrc := fsutils.NewFileSource(nil, defaultRolesPath)
		roleSources = append(roleSources, rolesSrc.Join(roleName))
	}

	for _, roleSrc := range roleSources {
		if exists, _ := roleSrc.Exists(); exists {
			return roleSrc, true
		}
	}

	return fsutils.FileSource{}, false
}

// expandTask dispatches task expansion based on task type (block, include, role).
func (p *Parser) expandTask(parentVars vars.Vars, t *Task) (ResolvedTasks, error) {
	// TODO: pass parentVars ?
	effectiveVars := vars.MergeVars(parentVars, t.Variables())

	taskSource := t.metadata.Range().String()
	switch {
	case t.isBlock():
		tasks, err := p.expandBlockTasks(effectiveVars, t)
		return wrapIfErr(tasks, fmt.Sprintf("expand block tasks %s", taskSource), err)
	case t.isTaskInclude():
		tasks, err := p.resolveTasksInclude(effectiveVars, t)
		return wrapIfErr(tasks, fmt.Sprintf("resolve tasks include %s", taskSource), err)
	case t.isRoleInclude():
		tasks, err := p.resolveRoleInclude(effectiveVars, t)
		return wrapIfErr(tasks, fmt.Sprintf("resolve role include %s", taskSource), err)
	default:
		resolvedTask := p.resolveTask(t, parentVars)
		// TODO: check that the task is not absent
		return ResolvedTasks{resolvedTask}, nil
	}
}

// wrapIfErr adds context to err but still returns val even if err != nil.
func wrapIfErr[T any](val T, msg string, err error) (T, error) {
	if err != nil {
		return val, xerrors.Errorf("%s: %w", msg, err)
	}
	return val, nil
}

// expandBlockTasks expands a block task into its constituent tasks.
//
// Blocks group multiple tasks under a single block in a playbook.
// See https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_blocks.html
func (p *Parser) expandBlockTasks(parentVars vars.Vars, t *Task) (ResolvedTasks, error) {
	var res ResolvedTasks
	var errs error
	for _, task := range t.inner.Block {
		children, err := p.expandTask(parentVars, task)
		if err != nil {
			errs = multierror.Append(errs, err)
		}
		res = append(res, children...)
	}

	p.logger.Debug("Expanded block tasks",
		log.String("source", t.metadata.Range().String()),
		log.Int("tasks_count", len(res)),
	)
	return res, errs
}

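// resolveTask binds the task to the variables that are effective for it
// (parent scope, task vars, extra vars, and special vars).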
func (p *Parser) resolveTask(task *Task, parentVars vars.Vars) *ResolvedTask {
	return task.resolved(p.effectiveVarsForTask(task, parentVars))
}

func (p *Parser) effectiveVarsForTask(task *Task, parentVars vars.Vars) vars.Vars {
	return vars.MergeVars(parentVars, task.Variables(), p.extraVars, specialVarsForTask(task))
}

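// specialVarsForTask returns the Ansible special variables (such as
// playbook_dir or role_name) for the task's play and, if set, its role.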
func specialVarsForTask(task *Task) vars.Vars {
	variables := task.play.specialVars()
	if task.role != nil {
		variables = vars.MergeVars(variables, task.role.specialVars())
	}

	return variables
}

// resolveTasksInclude locates a tasks include or import file and loads its tasks.
//
// Supports Ansible modules 'include_tasks' and 'import_tasks'.
// See https://docs.ansible.com/ansible/latest/collections/ansible/builtin/include_tasks_module.html
func (p *Parser) resolveTasksInclude(parentVars vars.Vars, task *Task) (ResolvedTasks, error) {
	resolvedTask := p.resolveTask(task, parentVars)
	moduleKeys := withBuiltinPrefix(ModuleIncludeTasks, ModuleImportTasks)
	m, err := resolvedTask.ResolveModule(moduleKeys, true)
	if err != nil {
		return nil, xerrors.Errorf("resolving module for keys %v: %w", moduleKeys, err)
	}

	var tasksFilePath string
	if m.IsFreeForm() {
		tasksFilePath, _ = m.AsString()
	} else {
		tasksFilePath = getStringParam(m, "file")
	}

	if tasksFilePath == "" {
		return nil, xerrors.New("tasks file is empty")
	}

	taskSrc := task.src.Dir().Join(tasksFilePath)
	includedTasks, err := loadTasks(task.play, &task.metadata, taskSrc)
	if err != nil {
		return nil, xerrors.Errorf("load tasks from %q: %w", taskSrc.Path, err)
	}

	var allTasks ResolvedTasks

	var errs error
	for _, loadedTask := range includedTasks {
		children, err := p.expandTask(parentVars, loadedTask)
		if err != nil {
			errs = multierror.Append(errs, xerrors.Errorf("expand included task: %w", err))
		}
		allTasks = append(allTasks, children...)
	}

	p.logger.Debug("Included tasks loaded",
		log.String("source", task.metadata.Range().String()),
		log.FilePath(taskSrc.Path),
		log.Int("tasks_count", len(allTasks)))
	return allTasks, errs
}

// resolveRoleInclude locates an included or imported role and loads its tasks.
//
// Supports Ansible modules 'include_role' and 'import_role'.
// See https://docs.ansible.com/ansible/latest/collections/ansible/builtin/include_role_module.html
func (p *Parser) resolveRoleInclude(parentVars vars.Vars, task *Task) (ResolvedTasks, error) {
	resolvedTask := p.resolveTask(task, parentVars)
	moduleKeys := withBuiltinPrefix(ModuleIncludeRole, ModuleImportRole)
	m, err := resolvedTask.ResolveModule(moduleKeys, true)
	if err != nil {
		return nil, xerrors.Errorf("resolving module for keys %v: %w", moduleKeys, err)
	}

	module := RoleIncludeModule{
		Name:         getStringParam(m, "name"),
		TasksFrom:    cmp.Or(getStringParam(m, "tasks_from"), "main"),
		VarsFrom:     cmp.Or(getStringParam(m, "vars_from"), "main"),
		DefaultsFrom: cmp.Or(getStringParam(m, "defaults_from"), "main"),
	}

	if module.Name == "" {
		return nil, xerrors.New("role name is empty")
	}

	// When using include_role/import_role, custom file names or paths can be specified
	// for various role components instead of the default "main". This applies to tasks,
	// defaults, vars, handlers, meta, etc.
	// See: https://docs.ansible.com/ansible/latest/collections/ansible/builtin/include_role_module.html
	role, err := p.loadRole(&task.metadata, task.play, module.Name)
	if err != nil {
		return nil, xerrors.Errorf("load included role %q: %w", module.Name, err)
	}

	roleDefaults, err := role.defaultVariables(module.DefaultsFrom)
	if err != nil && !errors.Is(err, fs.ErrNotExist) {
		p.logger.Debug("Failed to load role default variables", log.Err(err))
	}
	roleVariables, err := role.fileVariables(module.VarsFrom)
	if err != nil && !errors.Is(err, fs.ErrNotExist) {
		p.logger.Debug("Failed to load role variables", log.Err(err))
	}

	roleScopeVars := vars.MergeVars(
		// Role default variables have the lowest priority
		roleDefaults,
		parentVars,
		roleVariables,
	)

	var allTasks ResolvedTasks

	roleTasks, err := role.getTasks(module.TasksFrom)
	if err != nil {
		return nil, xerrors.Errorf("load tasks from %q: %w", module.TasksFrom, err)
	}

	var errs error
	for _, roleTask := range roleTasks {
		// TODO: do not update the parent in the metadata here, as the dependency chain may be lost
		// if the task is a role dependency task
		// task.updateParent(t)
		children, err := p.expandTask(roleScopeVars, roleTask)
		if err != nil {
			errs = multierror.Append(errs, xerrors.Errorf("expand task: %w", err))
		}
		allTasks = append(allTasks, children...)
	}

	p.logger.Debug("Included role loaded",
		log.String("source", task.metadata.Range().String()),
		log.FilePath(role.src.Path),
		log.Int("tasks_count", len(allTasks)))
	return allTasks, errs
}

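// getStringParam returns the module parameter as a string,
// or an empty string if the parameter is missing or not a string.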
func getStringParam(m Module, paramKey string) string {
	val, _ := m.NodeAt(paramKey).AsString()
	return val
}

func (p *Parser) readAnsibleConfig() (AnsibleConfig, error) {
	return LoadConfig(p.fsys, p.rootSrc.Path)
}

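// resolvePlaybooksPaths returns the YAML files in the project root,
// which are treated as playbook candidates.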
func (p *Parser) resolvePlaybooksPaths() ([]fsutils.FileSource, error) {
	entries, err := p.rootSrc.ReadDir()
	if err != nil {
		return nil, err
	}

	var res []fsutils.FileSource

	for _, entry := range entries {
		if isYAMLFile(entry.Name()) {
			res = append(res, p.rootSrc.Join(entry.Name()))
		}
	}

	return res, nil
}

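// loadTasks decodes a list of tasks from a YAML file (the extension may be
// omitted) and initializes each task with its play and parent metadata.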
func loadTasks(play *Play, parentMetadata *iacTypes.Metadata, fileSrc fsutils.FileSource) ([]*Task, error) {
	var fileTasks []*Task
	tasksExtensions := append(yamlExtensions, "")
	if err := decodeYAMLFileWithExtension(fileSrc, &fileTasks, tasksExtensions); err != nil {
		return nil, xerrors.Errorf("decode tasks file %q: %w", fileSrc.Path, err)
	}
	for _, task := range fileTasks {
		task.init(play, fileSrc, parentMetadata)
	}
	return fileTasks, nil
}

var yamlExtensions = []string{".yml", ".yaml"}

func isYAMLFile(filePath string) bool {
	ext := filepath.Ext(filePath)
	return slices.Contains(yamlExtensions, ext)
}
911
pkg/iac/scanners/ansible/parser/parser_test.go
Normal file
@@ -0,0 +1,911 @@
package parser_test

import (
	"cmp"
	"fmt"
	"os"
	"path/filepath"
	"runtime"
	"testing"

	"github.com/samber/lo"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/trivy/internal/testutil"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/parser"
)

func TestParseProject(t *testing.T) {
	fsys := os.DirFS(filepath.Join("testdata", "sample-proj"))

	project, err := parser.New(fsys, ".").Parse()
	require.NoError(t, err)

	tasks := project.ListTasks()
	assert.Len(t, tasks, 6)
}

func TestParser_Parse(t *testing.T) {
	tests := []struct {
		name          string
		dir           string
		opts          []parser.Option
		files         map[string]string
		expectedTasks []string
	}{
		{
			name: "tasks in play",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: localhost
  pre_tasks:
    - name: Pre-task
      debug:
        msg: test
  tasks:
    - name: Task
      debug:
        msg: test
  post_tasks:
    - name: Post-task
      debug:
        msg: test
`,
			},
			expectedTasks: []string{"Pre-task", "Task", "Post-task"},
		},
		{
			name: "task name with unquoted template",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: localhost
  tasks:
    - name: {{ Task }}
      debug:
        msg: test
`,
			},
			expectedTasks: []string{"{{ Task }}"},
		},
		{
			name: "tasks in role",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: localhost
  roles:
    - test
`,
				"roles/test/tasks/main.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "role with dependencies",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: localhost
  roles:
    - test
`,
				"roles/test/tasks/main.yaml": `---
- name: Role task
  debug:
    msg: Test task
`,
				"roles/test/meta/main.yaml": `---
dependencies:
  - role: role2
`,
				"roles/role2/tasks/main.yaml": `---
- name: Dependent task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Dependent task", "Role task"},
		},
		{
			name: "block task",
			files: map[string]string{
				"playbook.yaml": `---
- tasks:
    - name: Test block
      block:
        - name: Test task 1
          debug:
            msg: Test task
        - name: Test task 2
          debug:
            msg: Test task
`,
			},
			expectedTasks: []string{"Test task 1", "Test task 2"},
		},
		{
			name: "block task includes role",
			files: map[string]string{
				"playbook.yaml": `---
- tasks:
    - name: Test block
      block:
        - name: Test task
          include_role:
            name: test
`,
				"roles/test/tasks/main.yaml": `---
- name: Role task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Role task"},
		},
		{
			name: "nested block includes tasks",
			dir:  "playbooks",
			files: map[string]string{
				"playbooks/playbook.yaml": `---
- tasks:
    - name: Test block
      block:
        - name: nested block
          block:
            - include_tasks:
                file: test.yaml
`,
				"playbooks/test.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "include and import tasks in play",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  tasks:
    - name: Test task
      debug:
        msg: Test task

    - name: Include task list in play
      ansible.builtin.include_tasks:
        file: test.yaml

    - name: Import task list in play
      ansible.builtin.import_tasks:
        file: test2.yaml
`,
				"test.yaml": `---
- name: Included task
  debug:
    msg: Included task
`,
				"test2.yaml": `---
- name: Imported task
  debug:
    msg: Imported task
`,
			},
			expectedTasks: []string{"Test task", "Included task", "Imported task"},
		},
		{
			name: "unresolved template in include",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  tasks:
    - name: Test task
      debug:
        msg: Test task

    - include_tasks: "{{item}}"
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "include and import tasks in role",
			files: map[string]string{
				"playbook.yaml": `---
- name: Update web servers
  hosts: localhost
  roles:
    - test
`,
				"roles/test/tasks/main.yaml": `---
- name: Test task
  debug:
    msg: Test task

- name: Include task list in role
  ansible.builtin.include_tasks:
    file: test.yaml

- name: Import task list in role
  ansible.builtin.import_tasks:
    file: test2.yaml
`,
				"roles/test/tasks/test.yaml": `---
- name: Included task
  debug:
    msg: Included task
`,
				"roles/test/tasks/test2.yaml": `---
- name: Imported task
  debug:
    msg: Imported task
`,
			},
			expectedTasks: []string{"Test task", "Included task", "Imported task"},
		},
		{
			name: "include role in play",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  tasks:
    - name: Test task
      include_role:
        name: test
        tasks_from: test
`,
				"roles/test/tasks/main.yaml": `---
- name: Main task
  debug:
    msg: Main task
`,
				"roles/test/tasks/test.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "inline include role in play",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  tasks:
    - {include_role: {name: test, tasks_from: test}}
`,
				"roles/test/tasks/test.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "include role in play by path",
			dir:  "playbooks",
			files: map[string]string{
				"playbooks/playbook.yaml": `---
- hosts: all
  tasks:
    - {include_role: {name: ../roles/test, tasks_from: test}}
`,
				"roles/test/tasks/test.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "role task from subdir",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  tasks:
    - {include_role: {name: test, tasks_from: subdir/test.yaml}}
`,
				"roles/test/tasks/subdir/test.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "import role in play",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  tasks:
    - name: Test task
      import_role:
        name: test
        tasks_from: test
`,
				"roles/test/tasks/main.yaml": `---
- name: Main task
  debug:
    msg: Main task
`,
				"roles/test/tasks/test.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "include role in role",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  roles:
    - main
`,
				"roles/main/tasks/main.yaml": `---
- name: Main task
  include_role:
    name: test
    tasks_from: test
`,
				"roles/test/tasks/test.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "import role in role",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  roles:
    - main
`,
				"roles/main/tasks/main.yaml": `---
- name: Main task
  ansible.builtin.import_role:
    name: test
    tasks_from: test
`,
				"roles/test/tasks/test.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "include tasks is free form",
			dir:  "playbooks",
			files: map[string]string{
				"playbooks/playbook.yaml": `---
- hosts: all
  tasks:
    - include_tasks: "{{ playbook_dir |dirname }}/tasks/test.yml"
`,
				"playbooks/tasks/test.yml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "include_tasks with templated path",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: all
  tasks:
    - include_tasks: "{{ tasks_file }}"
      vars:
        tasks_file: tasks/test.yml
`,
				"tasks/test.yml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
		{
			name: "import playbook",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: localhost
  tasks:
    - name: Task
      debug:
        msg: test

- name: Include playbook
  ansible.builtin.import_playbook: other.yaml
`,
				"other.yaml": `---
- hosts: localhost
  tasks:
    - name: Included Task
      debug:
        msg: test
`,
			},
			expectedTasks: []string{"Task", "Included Task"},
		},
		{
			name: "with unused role",
			files: map[string]string{
				"playbook.yaml": `---
- hosts: localhost
  roles:
    - main
`,
				"roles/main/tasks/main.yaml": `---
- name: Main role task
  debug:
    msg: test
`,
				"roles/unused/tasks/main.yaml": `---
- name: Unused task
  debug:
    msg: test
`,
			},
			expectedTasks: []string{"Main role task"},
		},
		{
			name: "multiple playbooks",
			files: map[string]string{
				"site.yaml": `---
- hosts: all
  tasks:
    - name: Foo task
      debug:
        msg: Test
`,
				"playbook.yaml": `---
- hosts: all
  tasks:
    - name: Bar task
      debug:
        msg: Test
`,
			},
			expectedTasks: []string{"Foo task", "Bar task"},
		},
		{
			name: "included playbook outside root directory",
			dir:  "project",
			files: map[string]string{
				"project/main.yml": `
- name: Main play
  hosts: all
  import_playbook: ../common/common.yml
`,
				"common/common.yml": `
- name: Common play
  hosts: all
  tasks:
    - name: task from common playbook
      debug:
        msg: hello from common
`,
			},
			expectedTasks: []string{
				"task from common playbook",
			},
		},
		{
			name: "role from namespace.collection",
			dir:  "project",
			files: map[string]string{
				"project/galaxy.yaml": `
namespace: myns
name: mycol
`,
				"project/playbook.yml": `
- name: Play with collection role
  hosts: all
  roles:
    - myns.mycol.myrole
`,
				"project/roles/myrole/tasks/main.yml": `
- name: task from collection role
  debug:
    msg: hello from collection role
`,
			},
			expectedTasks: []string{
				"task from collection role",
			},
		},
		{
			name: "with playbook sources",
			dir:  "project",
			opts: []parser.Option{parser.WithPlaybooks("playbooks/playbook.yaml")},
			files: map[string]string{
				"project/playbooks/playbook.yaml": `---
- hosts: localhost
  roles:
    - "../roles/test"
`,
				"project/roles/test/tasks/main.yaml": `---
- name: Test task
  debug:
    msg: Test task
`,
			},
			expectedTasks: []string{"Test task"},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			fsys := testutil.CreateFS(tt.files)
			dir := cmp.Or(tt.dir, ".")
			p := parser.New(fsys, dir, tt.opts...)
			project, err := p.Parse()
			require.NoError(t, err)

			tasks := project.ListTasks()

			taskNames := lo.Map(tasks, func(task *parser.ResolvedTask, _ int) string {
				return task.Name
			})

			assert.ElementsMatch(t, tt.expectedTasks, taskNames)
		})
	}
}

func TestParser_AbsolutePath(t *testing.T) {
	// The process cannot access the file because it is being used by another process.
	if runtime.GOOS == "windows" {
		t.Skip("TODO")
	}

	tmpDir := t.TempDir()

	tasksFile, err := os.CreateTemp(tmpDir, "tasks-*.yml")
	require.NoError(t, err)

	_, err = tasksFile.WriteString(`- name: Test task
  debug:
    msg: "From task"
`)
	require.NoError(t, err)

	playbookFile, err := os.CreateTemp(tmpDir, "playbook-*.yml")
	require.NoError(t, err)

	playbookSrc := fmt.Sprintf(`- name: test
  hosts: localhost
  connection: local
  tasks:
    - name: test
      include_tasks: "{{ playbook_dir }}/%s"
`, filepath.Base(tasksFile.Name()))

	_, err = playbookFile.WriteString(playbookSrc)
	require.NoError(t, err)

	project, err := parser.New(os.DirFS(tmpDir), ".").Parse()
	require.NoError(t, err)
	tasks := project.ListTasks()
	taskNames := lo.Map(tasks, func(task *parser.ResolvedTask, _ int) string {
		return task.Name
	})

	expected := []string{
		"Test task",
	}
	assert.ElementsMatch(t, expected, taskNames)
}

func TestParse_ResolveVariables(t *testing.T) {
	tests := []struct {
		name  string
		files map[string]string
		opts  []parser.Option
	}{
		{
			name: "vars in task",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
  tasks:
    - name: create bucket
      vars:
        public_access: "true"
      s3_bucket:
        name: "{{ bucket }}"
        public_access: "{{ public_access }}"
`,
			},
		},
		{
			name: "vars in play",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
    public_access: "true"
  tasks:
    - name: create bucket
      s3_bucket:
        name: '{{ bucket }}'
        public_access: '{{ public_access }}'
`,
			},
		},
		{
			name: "vars in block",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
  tasks:
    - block:
        - name: create bucket
          s3_bucket:
            name: '{{ bucket }}'
            public_access: '{{ public_access }}'
      vars:
        public_access: "true"
`,
			},
		},
		// {
		// 	name: "vars from vars_files",
		// 	files: map[string]string{
		// 		"main.yaml": `---
		// - name: test
		//   vars_files:
		//     - vars.yaml
		//   vars:
		//     bucket: test
		//   tasks:
		//     - name: create bucket
		//       s3_bucket:
		//         name: '{{ bucket }}'
		//         public_access: '{{ public_access }}'
		// `,
		// 		"vars.yaml": `public_access: "true"`,
		// 	},
		// },
		// {
		// 	name: "vars from include_vars",
		// 	files: map[string]string{
		// 		"main.yaml": `---
		// - name: test
		//   vars:
		//     bucket: test
		//   tasks:
		//     - include_vars: vars.yaml
		//     - name: create bucket
		//       s3_bucket:
		//         name: '{{ bucket }}'
		//         public_access: '{{ public_access }}'
		// `,
		// 		"vars.yaml": `public_access: "true"`,
		// 	},
		// },
		// {
		// 	name: "vars from set_fact",
		// 	files: map[string]string{
		// 		"main.yaml": `---
		// - name: test
		//   vars:
		//     bucket: test
		//   tasks:
		//     - set_fact:
		//         public_access: "true"
		//     - name: create bucket
		//       s3_bucket:
		//         name: '{{ bucket }}'
		//         public_access: '{{ public_access }}'
		// `,
		// 	},
		// },
		{
			name: "vars from included tasks",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
  tasks:
    - include_tasks: included.yaml
`,
				"included.yaml": `
- name: create bucket
  vars:
    public_access: "true"
  s3_bucket:
    name: '{{ bucket }}'
    public_access: '{{ public_access }}'
`,
			},
		},
		{
			name: "vars from imported tasks",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
  tasks:
    - import_tasks: imported.yaml
`,
				"imported.yaml": `
- name: create bucket
  vars:
    public_access: "true"
  s3_bucket:
    name: '{{ bucket }}'
    public_access: '{{ public_access }}'
`,
			},
		},
		{
			name: "vars from role defaults",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
  roles:
    - myrole
`,
				"roles/myrole/defaults/main.yaml": `public_access: "true"
bucket: "from-role"
`,
				"roles/myrole/tasks/main.yaml": `
- name: create bucket
  s3_bucket:
    name: '{{ bucket }}'
    public_access: '{{ public_access }}'
`,
			},
		},
		{
			name: "vars from role vars",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
  roles:
    - myrole
`,
				"roles/myrole/vars/main.yaml": `public_access: "true"`,
				"roles/myrole/tasks/main.yaml": `
- name: create bucket
  s3_bucket:
    name: '{{ bucket }}'
    public_access: '{{ public_access }}'
`,
			},
		},
		{
			name: "vars from nested role dirs",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
  roles:
    - myrole
`,
				"roles/myrole/vars/main/subdir/vars.yaml": `public_access: "foo"`,
				"roles/myrole/vars/main/vars.yaml":        `public_access: "true"`,
				"roles/myrole/tasks/main.yaml": `
- name: create bucket
  s3_bucket:
    name: '{{ bucket }}'
    public_access: '{{ public_access }}'
`,
			},
		},
		{
			name: "vars for host",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: test
  hosts: webservers
  tasks:
    - name: create bucket
      vars:
        public_access: "true"
      s3_bucket:
        name: '{{ bucket }}'
        public_access: '{{ public_access }}'
`,
			},
		},
		{
			name: "with extra vars",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: from-play
  hosts: webservers
  tasks:
    - name: create bucket
      vars:
        public_access: "true"
      s3_bucket:
        name: '{{ bucket }}'
        public_access: '{{ public_access }}'
`,
			},
			opts: []parser.Option{
				parser.WithExtraVars(map[string]any{
					"bucket": "test",
				}),
			},
		},
		{
			name: "with host_vars",
			files: map[string]string{
				"main.yaml": `---
- name: test
  vars:
    bucket: from-play
  hosts: webservers
  tasks:
    - name: create bucket
      vars:
        public_access: "true"
      s3_bucket:
        name: '{{ bucket }}'
        public_access: '{{ public_access }}'
`,
				"host_vars/webservers": ``,
			},
			opts: []parser.Option{
				parser.WithExtraVars(map[string]any{
					"bucket": "test",
				}),
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			fsys := testutil.CreateFS(tt.files)
			p := parser.New(fsys, ".", tt.opts...)
			project, err := p.Parse()
			require.NoError(t, err)

			tasks := project.ListTasks()
			modules := tasks.GetModules("s3_bucket")
			require.Len(t, modules, 1)

			m := modules[0]
			assert.Equal(t, "test", m.StringValue("name").Value())
			assert.Equal(t, "true", m.StringValue("public_access").Value())
		})
	}
}
218
pkg/iac/scanners/ansible/parser/play.go
Normal file
@@ -0,0 +1,218 @@
package parser

import (
	"path/filepath"

	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	iacTypes "github.com/aquasecurity/trivy/pkg/iac/types"
)

// Playbook represents a sequence of plays in an Ansible playbook.
//
// A playbook is typically loaded from a YAML file and contains a list
// of plays that are executed in order.
//
// The playbook corresponds to a YAML list, for example:
//
//	- name: First play
//	  hosts: all
//	  tasks:
//	    - ...
//
//	- name: Second play
//	  hosts: dbservers
//	  tasks:
//	    - ...
type Playbook struct {
	Src   fsutils.FileSource
	Plays []*Play
	Tasks []*Task
}

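// resolveIncludedSrc resolves an included playbook path relative to
// the directory of the current playbook.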
func (pb *Playbook) resolveIncludedSrc(incPath string) fsutils.FileSource {
	return pb.Src.Dir().Join(incPath)
}

// Play represents a single play in an Ansible playbook.
//
// An Ansible playbook is a list of such plays, where each play defines
// settings and tasks for a specific group of hosts.
//
// Example playbook YAML:
//
//	- name: My first play
//	  hosts: myhosts
//	  tasks:
//	    - name: Ping my hosts
//	      ping:
//
// This play contains a name, target hosts, and a list of tasks.
type Play struct {
	inner playInner

	src      fsutils.FileSource
	metadata iacTypes.Metadata
	rng      Range

	raw map[string]any
}

func (p *Play) Hosts() string {
	return p.inner.Hosts
}

func (p *Play) Variables() vars.Vars {
	return vars.NewVars(p.inner.Vars, vars.PlayVarsPriority)
}

type playInner struct {
	Name            string            `yaml:"name"`
	ImportPlaybook  string            `yaml:"import_playbook"`
	Hosts           string            `yaml:"hosts"`
	RoleDefinitions []*RoleDefinition `yaml:"roles"`
	PreTasks        []*Task           `yaml:"pre_tasks"`
	Tasks           []*Task           `yaml:"tasks"`
	PostTasks       []*Task           `yaml:"post_tasks"`
	Vars            vars.PlainVars    `yaml:"vars"`
	VarFiles        []string          `yaml:"vars_files"`
}

func (p *Play) UnmarshalYAML(node *yaml.Node) error {
	p.rng = rangeFromNode(node)

	if err := node.Decode(&p.raw); err != nil {
		return err
	}

	if err := node.Decode(&p.inner); err != nil {
		return err
	}

	for _, task := range p.listTasks() {
		task.play = p
	}

	return nil
}

// includedPlaybook returns the path of an included or imported playbook within the play.
func (p *Play) includedPlaybook() (string, bool) {
	for _, k := range withBuiltinPrefix("import_playbook", "include_playbook") {
		val, exists := p.raw[k]
		if !exists {
			continue
		}

		// TODO: render Jinja2 template in playbookPath before returning.
		// For example, if playbookPath == "{{ playbook_dir }}/common.yml",
		// then use a template engine to replace {{ playbook_dir }} with the actual path.

		// TODO: support collections syntax like "my_namespace.my_collection.my_playbook"
		//
		// Example:
		// - name: Include a playbook from a collection
		//   ansible.builtin.import_playbook: my_namespace.my_collection.my_playbook
		//
		// Convert this to a real file path by locating the collection directory
		// and appending "my_playbook.yml" or similar.

		playbookPath, ok := val.(string)
		return filepath.ToSlash(playbookPath), ok
	}

	return "", false
}

func (p *Play) roleDefinitions() []*RoleDefinition {
	return p.inner.RoleDefinitions
}

func (p *Play) initMetadata(fileSrc fsutils.FileSource, parent *iacTypes.Metadata) {
	fsys, relPath := fileSrc.FSAndRelPath()

	p.src = fileSrc
	p.metadata = iacTypes.NewMetadata(
		iacTypes.NewRange(relPath, p.rng.Start, p.rng.End, "", fsys),
		"play",
	)
	p.metadata.SetParentPtr(parent)

	for _, roleDef := range p.inner.RoleDefinitions {
		roleDef.initMetadata(fileSrc, &p.metadata)
	}

	for _, task := range p.listTasks() {
		task.init(p, fileSrc, &p.metadata)
	}
}

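// listTasks returns the play's pre_tasks, tasks, and post_tasks in order.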
func (p *Play) listTasks() []*Task {
	res := make([]*Task, 0, len(p.inner.PreTasks)+len(p.inner.Tasks)+len(p.inner.PostTasks))
	res = append(res, p.inner.PreTasks...)
	res = append(res, p.inner.Tasks...)
	res = append(res, p.inner.PostTasks...)
	return res
}

// https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html
func (p *Play) specialVars() vars.Vars {
	plainVars := vars.PlainVars{
		"ansible_play_name": p.inner.Name,
		"playbook_dir":      p.src.Dir(),
	}
	return vars.NewVars(plainVars, vars.SpecialVarsPriority)
}

// RoleDefinition represents a role reference within a play.
//
// It typically contains the role name and optional parameters
// that customize how the role is applied.
//
// Example usage in a playbook:
//
//	roles:
//	  - common
//	  - role: webserver
//	    vars:
//	      http_port: 80
type RoleDefinition struct {
	inner roleDefinitionInner

	metadata iacTypes.Metadata
	rng      Range
}

type roleDefinitionInner struct {
	Name string         `yaml:"role"`
	Vars map[string]any `yaml:"vars"`
}

func (r *RoleDefinition) UnmarshalYAML(node *yaml.Node) error {
	r.rng = rangeFromNode(node)

	// a role can be a string or a dictionary
	if node.Kind == yaml.ScalarNode {
		r.inner.Name = node.Value
		return nil
	}

	return node.Decode(&r.inner)
}

func (r *RoleDefinition) initMetadata(fileSrc fsutils.FileSource, parent *iacTypes.Metadata) {
	fsys, relPath := fileSrc.FSAndRelPath()
	r.metadata = iacTypes.NewMetadata(
		iacTypes.NewRange(relPath, r.rng.Start, r.rng.End, "", fsys),
		"role-def",
	)
	r.metadata.SetParentPtr(parent)
}

func (r *RoleDefinition) name() string {
	return r.inner.Name
}
31
pkg/iac/scanners/ansible/parser/range.go
Normal file
@@ -0,0 +1,31 @@
package parser

import "gopkg.in/yaml.v3"

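// Range is a span of lines within a YAML file.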
type Range struct {
	Start int
	End   int
}

// Covers returns true if 'r' fully contains 'other'.
func (r Range) Covers(other Range) bool {
	return r.Start <= other.Start && r.End >= other.End
}

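// Overlaps returns true if 'r' and 'o' intersect.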
func (r Range) Overlaps(o Range) bool {
	return r.Start < o.End && o.Start < r.End
}

func rangeFromNode(node *yaml.Node) Range {
	return Range{
		Start: node.Line,
		End:   calculateEndLine(node),
	}
}

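// calculateEndLine descends into the last nested node to find
// the final line covered by the given YAML node.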
func calculateEndLine(node *yaml.Node) int {
	for node.Content != nil {
		node = node.Content[len(node.Content)-1]
	}
	return node.Line
}
148
pkg/iac/scanners/ansible/parser/role.go
Normal file
@@ -0,0 +1,148 @@
package parser

import (
	"errors"
	"io/fs"

	"golang.org/x/xerrors"
	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	iacTypes "github.com/aquasecurity/trivy/pkg/iac/types"
	"github.com/aquasecurity/trivy/pkg/log"
)

// Role represents a project role.
type Role struct {
	name     string
	src      fsutils.FileSource
	metadata iacTypes.Metadata
	play     *Play

	cachedTasks map[string][]*Task

	directDeps []*Role
}

func (r *Role) initMetadata(fileSrc fsutils.FileSource, parent *iacTypes.Metadata) {
	fsys, relPath := fileSrc.FSAndRelPath()
	rng := iacTypes.NewRange(relPath, 0, 0, "", fsys)
	r.metadata = iacTypes.NewMetadata(rng, "role")
	r.metadata.SetParentPtr(parent)
}

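// getTasks loads the role tasks from the given tasks file, prepending the
// tasks of direct dependencies and caching the result per file name.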
func (r *Role) getTasks(tasksFile string) ([]*Task, error) {
	if cached, ok := r.cachedTasks[tasksFile]; ok {
		return cached, nil
	}

	var allTasks []*Task

	for _, dep := range r.directDeps {
		// TODO: find out how direct dependency tasks are loaded
		depTasks, err := dep.getTasks("main")
		if err != nil && !errors.Is(err, fs.ErrNotExist) {
			log.WithPrefix("ansible").Debug("Failed to load dependency tasks",
				log.String("dependency", dep.name))
		} else if err == nil {
			allTasks = append(allTasks, depTasks...)
		}
	}

	tasksFileSrc := r.src.Join("tasks", tasksFile)
	fileTasks, err := loadTasks(r.play, &r.metadata, tasksFileSrc)
	if err != nil {
		return nil, err
	}

	for _, roleTask := range fileTasks {
		roleTask.role = r
	}
	allTasks = append(allTasks, fileTasks...)
	log.WithPrefix("ansible").Debug("Role tasks loaded",
		log.FilePath(tasksFileSrc.Path), log.Int("tasks_count", len(allTasks)))

	r.cachedTasks[tasksFile] = allTasks
	return allTasks, nil
}

func (r *Role) fileVariables(from string) (vars.Vars, error) {
	return r.loadVars("vars", vars.RoleVarsPriority, from)
}

func (r *Role) defaultVariables(from string) (vars.Vars, error) {
	return r.loadVars("defaults", vars.RoleDefaultsPriority, from)
}

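// loadVars loads variables for the given scope ("vars" or "defaults") from
// a single file or, if the source is a directory, from all files within it.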
func (r *Role) loadVars(scope string, priority vars.VarPriority, from string) (vars.Vars, error) {
	var variables vars.Vars

	walkFn := func(fileSrc fsutils.FileSource, de fs.DirEntry) error {
		if de.IsDir() {
			return nil
		}

		var plainFileVars vars.PlainVars
		if err := decodeYAMLFileWithExtension(fileSrc, &plainFileVars, vars.VarFilesExtensions); err != nil {
			return xerrors.Errorf("load vars: %w", err)
		}
		fileVars := vars.NewVars(plainFileVars, priority)
		variables = vars.MergeVars(variables, fileVars)
		return nil
	}

	varsSrc := r.src.Join(scope, from)

	// try to load from a single file first
	var plainFileVars vars.PlainVars
	if err := decodeYAMLFileWithExtension(varsSrc, &plainFileVars, vars.VarFilesExtensions); err == nil {
		log.WithPrefix("ansible").Debug("Loaded vars file", log.FilePath(varsSrc.Path))
		variables = vars.NewVars(plainFileVars, priority)
		return variables, nil
	}

	if err := fsutils.WalkDirsFirstAlpha(varsSrc, walkFn); err != nil {
		return nil, xerrors.Errorf("collect variables from %q: %w", varsSrc.Path, err)
	}

	log.WithPrefix("ansible").Debug("Loaded vars from directory",
		log.String("scope", scope), log.FilePath(varsSrc.Path))
	return variables, nil
}

// https://docs.ansible.com/ansible/latest/reference_appendices/special_variables.html
func (r *Role) specialVars() vars.Vars {
	plainVars := vars.PlainVars{
		"role_name": r.name,
		"role_path": r.src,
	}
	return vars.NewVars(plainVars, vars.SpecialVarsPriority)
}

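// RoleMeta represents role metadata (meta/main), including role dependencies.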
type RoleMeta struct {
	metadata iacTypes.Metadata
	rng      Range
	inner    roleMetaInner
}

func (m *RoleMeta) updateMetadata(fsys fs.FS, parent *iacTypes.Metadata, path string) {
	m.metadata = iacTypes.NewMetadata(
		iacTypes.NewRange(path, m.rng.Start, m.rng.End, "", fsys),
		"role-metadata",
	)
	m.metadata.SetParentPtr(parent)
}

func (m RoleMeta) dependencies() []*RoleDefinition {
	return m.inner.Dependencies
}

type roleMetaInner struct {
	Dependencies []*RoleDefinition `yaml:"dependencies"`
}

func (m *RoleMeta) UnmarshalYAML(node *yaml.Node) error {
	m.rng = rangeFromNode(node)
	return node.Decode(&m.inner)
}
240
pkg/iac/scanners/ansible/parser/task.go
Normal file
@@ -0,0 +1,240 @@
package parser

import (
	"errors"

	"github.com/samber/lo"
	"golang.org/x/xerrors"
	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/orderedmap"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
	iacTypes "github.com/aquasecurity/trivy/pkg/iac/types"
	"github.com/aquasecurity/trivy/pkg/log"
	"github.com/aquasecurity/trivy/pkg/set"
)

const (
	ModuleIncludeRole  = "include_role"
	ModuleImportRole   = "import_role"
	ModuleIncludeTasks = "include_tasks"
	ModuleImportTasks  = "import_tasks"
)

// RoleIncludeModule represents the "include_role" or "import_role" module.
type RoleIncludeModule struct {
	Name         string
	TasksFrom    string
	DefaultsFrom string
	VarsFrom     string
}

// Task represents a single Ansible task.
//
// A task defines a single unit of work, which may include running a module,
// calling a role, or including other task files.
//
// Tasks can contain parameters, conditions (when), loops, and other
// Ansible constructs.
//
// Example task:
//
//	- name: Install nginx
//	  apt:
//	    name: nginx
//	    state: present
type Task struct {
	inner taskInner
	raw   orderedmap.OrderedMap[string, *Node]

	rng      Range
	src      fsutils.FileSource
	metadata iacTypes.Metadata

	role *Role
	play *Play
}

func (t *Task) Variables() vars.Vars {
	if t.isBlock() {
		return vars.NewVars(t.inner.Vars, vars.BlockVarsPriority)
	}
	return vars.NewVars(t.inner.Vars, vars.TaskVarsPriority)
}

type taskInner struct {
	Name  string         `yaml:"name"`
	Block []*Task        `yaml:"block"`
	Vars  vars.PlainVars `yaml:"vars"`
}

func (t *Task) UnmarshalYAML(node *yaml.Node) error {
	t.rng = rangeFromNode(node)

	var rawMap orderedmap.OrderedMap[string, *Node]
	if err := node.Decode(&rawMap); err != nil {
		return err
	}

	t.raw = rawMap
	return node.Decode(&t.inner)
}

func (t *Task) isBlock() bool {
	return len(t.inner.Block) > 0
}

func (t *Task) init(play *Play, fileSrc fsutils.FileSource, parent *iacTypes.Metadata) {
	fsys, relPath := fileSrc.FSAndRelPath()
	ref := lo.Ternary(t.isBlock(), "tasks-block", "tasks")
	rng := iacTypes.NewRange(relPath, t.rng.Start, t.rng.End, "", fsys)
	t.play = play
	t.src = fileSrc
	t.metadata = iacTypes.NewMetadata(rng, ref)
	t.metadata.SetParentPtr(parent)

	for _, tt := range t.inner.Block {
		tt.init(play, fileSrc, parent)
	}

	for _, n := range t.raw.Iter() {
		if n == nil {
			continue
		}
		n.initMetadata(fileSrc, &t.metadata, nil)
	}
}

// hasModuleKey checks if the task has any of the given module keys in its raw map.
func (t *Task) hasModuleKey(keys []string) bool {
	for _, module := range keys {
		if _, exists := t.raw.Get(module); exists {
			return true
		}
	}
	return false
}

// isTaskInclude returns true if the task includes or imports other tasks (task include modules).
func (t *Task) isTaskInclude() bool {
	return t.hasModuleKey(withBuiltinPrefix(ModuleImportTasks, ModuleIncludeTasks))
}

// isRoleInclude returns true if the task includes or imports a role (role include modules).
func (t *Task) isRoleInclude() bool {
	return t.hasModuleKey(withBuiltinPrefix(ModuleImportRole, ModuleIncludeRole))
}

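// resolved creates a ResolvedTask snapshot of the task bound to the given
// variables, leaving the original task unmodified.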
func (t *Task) resolved(variables vars.Vars) *ResolvedTask {
	if variables == nil {
		variables = make(vars.Vars)
	}
	resolved := &ResolvedTask{
		Name:     t.inner.Name,
		Metadata: t.metadata,
		Vars:     variables,
		Range:    t.rng,
		Fields:   t.raw,
	}

	return resolved
}

type ResolvedTasks []*ResolvedTask

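// GetModules returns the rendered modules matching the given keys across
// all resolved tasks.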
func (t ResolvedTasks) GetModules(keys ...string) []Module {
	var modules []Module

	for _, task := range t {
		m, err := task.ResolveModule(keys, false)
		if err != nil {
			if errors.Is(err, ErrModuleNotFound) {
				continue
			}
			log.WithPrefix("ansible").Debug("Failed to find module", log.Err(err))
			continue
		}
		modules = append(modules, m)
	}

	return modules
}

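// FilterByState drops tasks whose "state" field matches one of the excluded
// values, keeping tasks whose state is missing or unknown.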
func (t ResolvedTasks) FilterByState(exclude ...string) ResolvedTasks {
	excludeSet := set.New(exclude...)
	return lo.Filter(t, func(task *ResolvedTask, _ int) bool {
		state, exists := task.Fields.Get("state")
		if !exists || state == nil || !state.IsKnown() {
			return true
		}
		if v, ok := state.AsString(); ok && excludeSet.Contains(v) {
			return false
		}
		return true
	})
}

// ResolvedTask represents an Ansible task with all variables resolved.
//
// It holds only the essential data needed for execution and
// ensures the original Task remains unmodified.
type ResolvedTask struct {
	Name   string
	Fields orderedmap.OrderedMap[string, *Node]
	Vars   vars.Vars

	Metadata iacTypes.Metadata
	Range    Range
}

var ErrModuleNotFound = errors.New("module not found")

// ResolveModule searches for the first module from given keys in task fields,
// renders its parameters using task variables, and returns the module.
// The module can be either structured (map of parameters) or free-form (string).
// Returns an error if no module is found or if rendering fails.
func (t *ResolvedTask) ResolveModule(keys []string, strict bool) (Module, error) {
	for _, key := range keys {
		f, exists := t.Fields.Get(key)
		if !exists {
			continue
		}

		// TODO: cache the module?
		rendered, err := f.Render(t.Vars)
		if err != nil {
			if strict {
				return Module{}, xerrors.Errorf("render: %w", err)
			}
			log.WithPrefix("ansible").Debug("Failed to render module params",
				log.String("source", t.Metadata.Range().String()),
				log.Err(err))
		}
		return Module{Node: rendered, Name: key}, nil
	}
	return Module{}, ErrModuleNotFound
}

func (t *ResolvedTask) MarshalYAML() (any, error) {
	out := make(map[string]any, t.Fields.Len())
	for fieldName, field := range t.Fields.Iter() {
		rendered, _ := field.Render(t.Vars)
		out[fieldName] = rendered.val
	}
	return out, nil
}

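// GetFieldsByRange returns rendered copies of the task fields whose YAML
// nodes fall within the given line range.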
func (t *ResolvedTask) GetFieldsByRange(r Range) map[string]*Node {
	out := make(map[string]*Node)
	for key, node := range t.Fields.Iter() {
		if node == nil {
			continue
		}
		sub := node.Subtree(r)
		if sub != nil {
			out[key], _ = sub.Render(t.Vars)
		}
	}
	return out
}
126
pkg/iac/scanners/ansible/parser/task_test.go
Normal file
@@ -0,0 +1,126 @@
package parser

import (
	"testing"

	"github.com/samber/lo"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
)

func TestResolvedTask_MarshalYAML(t *testing.T) {
	src := `name: "{{ name }}"
msg: "{{ msg }}"
num: "{{ num }}"
nested:
  foo: "foo"
`

	var task Task
	require.NoError(t, decodeYAML([]byte(src), &task))

	plainVars := vars.PlainVars{
		"name": "test task",
		"msg":  "hello",
		"num":  42,
	}
	resolved := task.resolved(vars.NewVars(plainVars, 0))

	data, err := yaml.Marshal(resolved)
	require.NoError(t, err)

	got := string(data)
	wantSubstrs := []string{
		"name: test task",
		"msg: hello",
		`num: "42"`,
		`foo: foo`,
	}
	for _, substr := range wantSubstrs {
		assert.Contains(t, got, substr)
	}
}

func TestResolvedTasks_GetModules(t *testing.T) {
	src := `
- name: s3 bucket present
  amazon.aws.s3_bucket:
    name: mybucket

- name: user absent
  ansible.builtin.user:
    name: old_user

- name: debug task
  ansible.builtin.debug:
    msg: "Hello"
`
	var tasks []*Task
	require.NoError(t, decodeYAML([]byte(src), &tasks))

	var resolved ResolvedTasks
	for _, task := range tasks {
		resolved = append(resolved, task.resolved(nil))
	}

	modules := resolved.GetModules("amazon.aws.s3_bucket", "ansible.builtin.user")
	var names []string
	for _, m := range modules {
		names = append(names, m.Name)
	}

	expected := []string{"amazon.aws.s3_bucket", "ansible.builtin.user"}
	assert.ElementsMatch(t, expected, names)
}

func TestResolvedTasks_FilterByState(t *testing.T) {
	src := `
- name: task1
  state: present
- name: task2
  state: absent
- name: task3
  state: present
- name: task4
  # no state
`
	var tasks []*Task
	require.NoError(t, decodeYAML([]byte(src), &tasks))

	var resolved ResolvedTasks
	for _, task := range tasks {
		resolved = append(resolved, task.resolved(nil))
	}
	filtered := resolved.FilterByState("absent")

	names := lo.Map(filtered, func(t *ResolvedTask, _ int) string { return t.Name })
	assert.ElementsMatch(t, []string{"task1", "task3", "task4"}, names)
}

func TestResolvedTask_GetFieldsByRange(t *testing.T) {
	src := `a: valueA
b: valueB
c: valueC
d: valueD
`

	var task *Task
	require.NoError(t, decodeYAML([]byte(src), &task))

	resolved := task.resolved(nil)
	r := Range{Start: 2, End: 3}

	fields := resolved.GetFieldsByRange(r)

	expected := `b: valueB
c: valueC
`

	marshaled, err := yaml.Marshal(fields)
	require.NoError(t, err)

	assert.Equal(t, expected, string(marshaled))
}
143
pkg/iac/scanners/ansible/parser/template.go
Normal file
@@ -0,0 +1,143 @@
package parser

import (
	"bytes"
	"crypto/sha256"
	"errors"
	"fmt"
	"strings"
	"time"

	"github.com/nikolalohinski/gonja/v2"
	"github.com/nikolalohinski/gonja/v2/config"
	"github.com/nikolalohinski/gonja/v2/exec"
	"github.com/nikolalohinski/gonja/v2/loaders"
	"golang.org/x/xerrors"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
)

var gonjaConfig *config.Config
var gonjaEnv *exec.Environment

// TODO: implement support for a subset of popular Ansible filter plugins.
// https://docs.ansible.com/ansible/latest/collections/ansible/builtin/index.html#filter-plugins
// Example: see dirnameFilter for how the "dirname" filter is implemented.
func init() {
	gonjaConfig = gonja.DefaultConfig.Inherit()
	gonjaConfig.StrictUndefined = true

	gonjaEnv = gonja.DefaultEnvironment
	gonjaEnv.Filters.Register("dirname", dirnameFilter)
}

// TODO: add support for a subset of popular Ansible lookup plugins.
// Reference: https://docs.ansible.com/ansible/latest/collections/ansible/builtin/index.html#lookup-plugins
//
// Examples: "template" lookup, "vars" lookup.
//
// Idea: register lookup plugins in the execution context as functions,
// and then use this context as a parent context
//
//	ectx := exec.NewContext(map[string]any{
//		"lookup": func(args *exec.VarArgs) (any, error) {
//			...
//			if args.Args[0].String() == "template" {
//				// read and evaluate template
//			}
//
//			...
//			return nil, fmt.Errorf("unsupported lookup plugin %s", args.Args[0].String())
//		},
//	})

// evaluateTemplate executes a Gonja template with the given variables safely.
// It prevents infinite loops and recovers from panics.
// Added due to infinite loop issue: https://github.com/NikolaLohinski/gonja/issues/52
func evaluateTemplate(input string, variables vars.Vars) (string, error) {
	type result struct {
		res string
		err error
	}

	resultCh := make(chan result, 1)

	// Run the template evaluation in a separate goroutine
	// to prevent infinite loops or long-running evaluation
	go func() {
		// Catch any panic that may occur during template evaluation
		defer func() {
			if r := recover(); r != nil {
				resultCh <- result{"", fmt.Errorf("template evaluation panic: %v", r)}
			}
		}()

		res, err := evaluateTemplateUnsafe(input, variables)
		resultCh <- result{res, err}
	}()

	// Wait for evaluation to finish or timeout after 2 seconds
	timeout := time.Second * 2
	select {
	case r := <-resultCh:
		return r.res, r.err
	case <-time.After(timeout):
		return "", fmt.Errorf("template evaluation timeout after %s", timeout)
	}
}

// evaluateTemplateUnsafe evaluates a template with given variables,
// without any timeout or panic protection.
func evaluateTemplateUnsafe(input string, variables vars.Vars) (string, error) {
	tpl, err := newTemplate(input)
	if err != nil {
		return "", xerrors.Errorf("init template: %w", err)
	}

	var buf bytes.Buffer

	if err := tpl.Execute(&buf, exec.NewContext(variables.ToPlain())); err != nil {
		return "", xerrors.Errorf("execute template: %w", err)
	}
	return buf.String(), nil
}

// newTemplate creates a new template. This function is similar to [gonja.FromBytes],
// but applies a custom configuration.
func newTemplate(input string) (*exec.Template, error) {
	// Derive a stable, unique root ID from the template contents.
	rootID := fmt.Sprintf("root-%x", sha256.Sum256([]byte(input)))

	loader, err := loaders.NewFileSystemLoader("")
	if err != nil {
		return nil, xerrors.Errorf("create fs loader: %w", err)
	}

	shiftedLoader, err := loaders.NewShiftedLoader(rootID, strings.NewReader(input), loader)
	if err != nil {
		return nil, xerrors.Errorf("create shifted loader: %w", err)
	}

	tpl, err := exec.NewTemplate(rootID, gonjaConfig, shiftedLoader, gonjaEnv)
	if err != nil {
		return nil, xerrors.Errorf("create new template: %w", err)
	}

	return tpl, nil
}

func dirnameFilter(_ *exec.Evaluator, in *exec.Value, params *exec.VarArgs) *exec.Value {
	if in == nil {
		return exec.ValueError(errors.New("input value is nil"))
	}

	if params != nil && len(params.Args) > 0 {
		return exec.ValueError(errors.New("no parameters allowed"))
	}

	switch val := in.Val.Interface().(type) {
	case fsutils.FileSource:
		return exec.AsSafeValue(val.Dir())
	default:
		return exec.ValueError(fmt.Errorf("unsupported type %T", in.Val.Interface()))
	}
}
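The guard in `evaluateTemplate` combines three standard pieces: a buffered result channel, a deferred `recover`, and a `select` with a timeout. Below is a stripped-down, self-contained version of the same pattern; the names here are illustrative and not part of the scanner's API.

```go
package main

import (
	"fmt"
	"time"
)

func guarded(work func() (string, error), timeout time.Duration) (string, error) {
	type result struct {
		res string
		err error
	}
	ch := make(chan result, 1) // buffered: the goroutine never blocks on send

	go func() {
		// Convert a panic inside work into an ordinary error.
		defer func() {
			if r := recover(); r != nil {
				ch <- result{"", fmt.Errorf("panic: %v", r)}
			}
		}()
		res, err := work()
		ch <- result{res, err}
	}()

	select {
	case r := <-ch:
		return r.res, r.err
	case <-time.After(timeout):
		// The goroutine may still be running, but the caller moves on.
		return "", fmt.Errorf("timeout after %s", timeout)
	}
}

func main() {
	_, err := guarded(func() (string, error) {
		select {} // simulate a template evaluation that never finishes
	}, 100*time.Millisecond)
	fmt.Println(err) // timeout after 100ms
}
```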
76
pkg/iac/scanners/ansible/parser/template_test.go
Normal file
@@ -0,0 +1,76 @@
package parser

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
)

func TestEvaluateTemplate(t *testing.T) {
	tests := []struct {
		name      string
		input     string
		vars      vars.PlainVars
		expected  string
		expectErr bool
	}{
		{
			name:     "simple variable",
			input:    "Hello {{ name }}",
			vars:     vars.PlainVars{"name": "World"},
			expected: "Hello World",
		},
		{
			name:     "arithmetic",
			input:    "{{ a + b }}",
			vars:     vars.PlainVars{"a": 2, "b": 3},
			expected: "5",
		},
		{
			name:     "if else true",
			input:    "{% if flag %}Yes{% else %}No{% endif %}",
			vars:     vars.PlainVars{"flag": true},
			expected: "Yes",
		},
		{
			name:     "if else false",
			input:    "{% if flag %}Yes{% else %}No{% endif %}",
			vars:     vars.PlainVars{"flag": false},
			expected: "No",
		},
		{
			name:      "invalid template",
			input:     "{{ foo ",
			vars:      vars.PlainVars{},
			expectErr: true,
		},
		{
			name:      "missing variable",
			input:     "Hello {{ name }}",
			vars:      vars.PlainVars{},
			expectErr: true,
		},
		{
			name:     "multiple variables",
			input:    "{{ greeting }}, {{ name }}!",
			vars:     vars.PlainVars{"greeting": "Hi", "name": "Alice"},
			expected: "Hi, Alice!",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			variables := vars.NewVars(tt.vars, 0)
			got, err := evaluateTemplate(tt.input, variables)
			if tt.expectErr {
				require.Error(t, err)
				return
			}
			require.NoError(t, err)
			assert.Equal(t, tt.expected, got)
		})
	}
}
0
pkg/iac/scanners/ansible/parser/testdata/sample-proj/inventory/.gitkeep
vendored
Normal file
34
pkg/iac/scanners/ansible/parser/testdata/sample-proj/playbook.yaml
vendored
Normal file
@@ -0,0 +1,34 @@
---
- name: Update web servers
  hosts: webservers
  remote_user: root

  tasks:
    - name: Ensure apache is at the latest version
      s3_bucket:
        name: mys3bucket
        public_access:
          ignore_public_acls: true

    - name: Write the apache config file
      ansible.builtin.template:
        src: /srv/httpd.j2
        dest: /etc/httpd.conf

  roles:
    - test

- name: Update db servers
  hosts: databases
  remote_user: root

  tasks:
    - name: Ensure postgresql is at the latest version
      ansible.builtin.yum:
        name: postgresql
        state: latest

    - name: Ensure that postgresql is started
      ansible.builtin.service:
        name: postgresql
        state: started
2
pkg/iac/scanners/ansible/parser/testdata/sample-proj/roles/test/meta/main.yaml
vendored
Normal file
@@ -0,0 +1,2 @@
dependencies:
  - role: test2
3
pkg/iac/scanners/ansible/parser/testdata/sample-proj/roles/test/tasks/main.yaml
vendored
Normal file
@@ -0,0 +1,3 @@
- name: Role task
  s3_bucket:
    name: test1-bucket
3
pkg/iac/scanners/ansible/parser/testdata/sample-proj/roles/test2/tasks/main.yaml
vendored
Normal file
@@ -0,0 +1,3 @@
- name: Role2 task
  s3_bucket:
    name: test2-bucket
109
pkg/iac/scanners/ansible/parser/yaml.go
Normal file
@@ -0,0 +1,109 @@
package parser

import (
	"io/fs"

	"golang.org/x/xerrors"
	"gopkg.in/yaml.v3"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/fsutils"
)

const (
	NullTag = "!!null"
	StrTag  = "!!str"
)

func decodeYAMLFileWithExtension(fileSrc fsutils.FileSource, dst any, extensions []string) error {
	for _, ext := range extensions {
		f := fsutils.FileSource{FS: fileSrc.FS, Path: fileSrc.Path + ext}
		if exists, _ := f.Exists(); exists {
			return decodeYAMLFile(f, dst)
		}
	}
	return fs.ErrNotExist
}

func decodeYAMLFile(f fsutils.FileSource, dst any) error {
	data, err := f.ReadFile()
	if err != nil {
		return xerrors.Errorf("read file %s: %w", f.Path, err)
	}

	if err := decodeYAML(data, dst); err != nil {
		return xerrors.Errorf("decode %s: %w", f.Path, err)
	}
	return nil
}

func decodeYAML(data []byte, dst any) error {
	var root yaml.Node
	if err := yaml.Unmarshal(data, &root); err != nil {
		return err
	}
	unwrapTemplates(&root)
	return root.Decode(dst)
}

// unwrapTemplates recursively traverses a YAML node tree and converts
// any double-mapping nodes that represent templates (like {{ key }})
// into scalar nodes with the template string.
//
// Specifically, it detects nodes of the form:
//
//	MappingNode
//	  Content[0]: MappingNode
//	    Content[0]: ScalarNode (key)
//	    Content[1]: ScalarNode null
//	  Content[1]: ScalarNode null
//
// and converts them into:
//
//	ScalarNode "{{ key }}"
func unwrapTemplates(n *yaml.Node) {
	walk(n, func(node *yaml.Node) bool {
		if node.Kind != yaml.MappingNode || len(node.Content) != 2 {
			return false
		}

		innerKey := node.Content[0]
		innerVal := node.Content[1]

		if innerKey.Kind == yaml.MappingNode &&
			len(innerKey.Content) == 2 &&
			innerVal.Tag == NullTag &&
			innerKey.Content[0].Kind == yaml.ScalarNode &&
			innerKey.Content[0].Tag == StrTag &&
			innerKey.Content[1].Tag == NullTag {

			node.Kind = yaml.ScalarNode
			node.Tag = StrTag
			node.Value = "{{ " + innerKey.Content[0].Value + " }}"
			node.Content = nil
			return true
		}

		return false
	})
}

// walk traverses a YAML node tree and calls fn on each node.
// If fn returns true, walk stops traversing that branch.
func walk(n *yaml.Node, fn func(*yaml.Node) (stop bool)) bool {
	if fn(n) {
		return true
	}
	switch n.Kind {
	case yaml.DocumentNode, yaml.SequenceNode:
		for _, c := range n.Content {
			walk(c, fn)
		}
	case yaml.MappingNode:
		for i := 0; i < len(n.Content); i += 2 {
			walk(n.Content[i], fn)
			walk(n.Content[i+1], fn)
		}
	case yaml.ScalarNode:
		// leaf node: nothing to descend into
	}
	return false
}
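The "double mapping" shape that `unwrapTemplates` looks for comes from how yaml.v3 parses a bare `{{ name }}`: a flow mapping whose only key is itself the mapping `{name: null}`, with a null value. A small standalone check of that node shape:

```go
package main

import (
	"fmt"

	"gopkg.in/yaml.v3"
)

func main() {
	var root yaml.Node
	if err := yaml.Unmarshal([]byte("{{ name }}"), &root); err != nil {
		panic(err)
	}

	node := root.Content[0] // document content: the outer mapping
	fmt.Println(node.Kind == yaml.MappingNode) // true

	key, val := node.Content[0], node.Content[1]
	fmt.Println(key.Kind == yaml.MappingNode) // true: the key is {name: null}
	fmt.Println(val.Tag)                      // !!null
	fmt.Println(key.Content[0].Value)         // name
}
```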
90
pkg/iac/scanners/ansible/parser/yaml_test.go
Normal file
@@ -0,0 +1,90 @@
package parser

import (
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
	"gopkg.in/yaml.v3"
)

func TestUnwrapTemplates(t *testing.T) {
	tests := []struct {
		name     string
		src      string
		expected string
	}{
		{
			name: "single template",
			src: `
{{ name }}
`,
			expected: `'{{ name }}'
`,
		},
		{
			name: "nested template in mapping",
			src: `test:
  {{ nested }}
`,
			expected: `test: '{{ nested }}'
`,
		},
		{
			name: "sequence with templates",
			src: `- {{ first }}
- value
- {{ second }}
`,
			expected: `- '{{ first }}'
- value
- '{{ second }}'
`,
		},
		{
			name: "non-template values remain",
			src: `plain: value
seq:
  - 123
  - text
`,
			expected: `plain: value
seq:
    - 123
    - text
`,
		},
		{
			name: "mapping remains unchanged",
			src: `simple_map: { key1: val1, key2: val2 }
nested_map:
  inner: { a: 1, b: 2 }
`,
			expected: `simple_map: {key1: val1, key2: val2}
nested_map:
    inner: {a: 1, b: 2}
`,
		},
		{
			name: "template inside inline mapping",
			src:  `inline_map: { value: {{ name }}, static: fixed }`,
			expected: `inline_map: {value: '{{ name }}', static: fixed}
`,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			var root yaml.Node
			err := yaml.Unmarshal([]byte(tt.src), &root)
			require.NoError(t, err)

			unwrapTemplates(&root)

			out, err := yaml.Marshal(&root)
			require.NoError(t, err)

			assert.Equal(t, tt.expected, string(out))
		})
	}
}
150
pkg/iac/scanners/ansible/scanner.go
Normal file
@@ -0,0 +1,150 @@
package ansible

import (
	"context"
	"fmt"
	"io/fs"

	"gopkg.in/yaml.v3"

	adapter "github.com/aquasecurity/trivy/pkg/iac/adapters/ansible"
	"github.com/aquasecurity/trivy/pkg/iac/rego"
	"github.com/aquasecurity/trivy/pkg/iac/scan"
	"github.com/aquasecurity/trivy/pkg/iac/scanners"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/parser"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/options"
	"github.com/aquasecurity/trivy/pkg/iac/types"
	"github.com/aquasecurity/trivy/pkg/log"
)

var (
	_ scanners.FSScanner          = (*Scanner)(nil)
	_ options.ConfigurableScanner = (*Scanner)(nil)
)

type Scanner struct {
	*rego.RegoScannerProvider
	opts       []options.ScannerOption
	parserOpts []parser.Option
}

func WithPlaybooks(playbooks []string) options.ScannerOption {
	return func(s options.ConfigurableScanner) {
		if ss, ok := s.(*Scanner); ok {
			ss.parserOpts = append(ss.parserOpts, parser.WithPlaybooks(playbooks...))
		}
	}
}

func WithInventories(inventories []string) options.ScannerOption {
	return func(s options.ConfigurableScanner) {
		if ss, ok := s.(*Scanner); ok {
			ss.parserOpts = append(ss.parserOpts, parser.WithInventories(inventories...))
		}
	}
}

func WithExtraVars(evars map[string]any) options.ScannerOption {
	return func(s options.ConfigurableScanner) {
		if ss, ok := s.(*Scanner); ok {
			ss.parserOpts = append(ss.parserOpts, parser.WithExtraVars(evars))
		}
	}
}

func New(opts ...options.ScannerOption) *Scanner {
	scanner := &Scanner{
		RegoScannerProvider: rego.NewRegoScannerProvider(opts...),
		opts:                opts,
	}
	for _, opt := range opts {
		opt(scanner)
	}
	return scanner
}

func (s *Scanner) Name() string {
	return "Ansible"
}

func (s *Scanner) ScanFS(ctx context.Context, fsys fs.FS, dir string) (scan.Results, error) {
	roots, err := parser.FindProjects(fsys, dir)
	if err != nil {
		return nil, fmt.Errorf("find projects: %w", err)
	}

	var results scan.Results
	for _, projectRoot := range roots {
		log.WithPrefix("ansible").Debug("Detected ansible project", log.FilePath(projectRoot))
		project, err := parser.ParseProject(fsys, projectRoot, s.parserOpts...)
		if err != nil {
			return nil, fmt.Errorf("parse project: %w", err)
		}
		res, err := s.scanProject(ctx, fsys, project)
		if err != nil {
			return nil, fmt.Errorf("scan project: %w", err)
		}
		results = append(results, res...)
	}
	return results, nil
}

func (s *Scanner) scanProject(ctx context.Context, fsys fs.FS, project *parser.AnsibleProject) (scan.Results, error) {
	// Tasks with state "absent" describe resources being removed,
	// so they are excluded from misconfiguration checks.
	tasks := project.ListTasks().FilterByState("absent")
	state := adapter.Adapt(tasks)

	rs, err := s.InitRegoScanner(fsys, s.opts)
	if err != nil {
		return nil, fmt.Errorf("init rego scanner: %w", err)
	}

	results, err := rs.ScanInput(ctx, types.SourceCloud, rego.Input{
		Path:     project.Path(),
		FS:       fsys,
		Contents: state.ToRego(),
	})
	if err != nil {
		return nil, fmt.Errorf("rego scan: %w", err)
	}

	for i, res := range results {
		if res.Status() != scan.StatusFailed {
			continue
		}

		rendered, ok := renderCause(tasks, res.Range())
		if ok {
			res.WithRenderedCause(rendered)
			results[i] = res
		}
	}

	return results, nil
}

func renderCause(tasks parser.ResolvedTasks, causeRng types.Range) (scan.RenderedCause, bool) {
	fields := fieldsForRange(tasks, causeRng)
	if fields == nil {
		return scan.RenderedCause{}, false
	}

	b, err := yaml.Marshal(fields)
	if err != nil {
		return scan.RenderedCause{}, false
	}
	return scan.RenderedCause{Raw: string(b)}, true
}

func fieldsForRange(tasks parser.ResolvedTasks, causeRng types.Range) any {
	for _, task := range tasks {
		taskRng := task.Metadata.Range()
		if taskRng.GetFilename() == causeRng.GetFilename() && taskRng.Includes(causeRng) {
			queryRange := parser.Range{
				Start: causeRng.GetStartLine(),
				End:   causeRng.GetEndLine(),
			}
			return task.GetFieldsByRange(queryRange)
		}
	}
	return nil
}
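A minimal sketch of driving this scanner directly, in the same way the test below does; the playbook path and the extra vars are illustrative, and without the embedded policies the scan would return no built-in check results.

```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/aquasecurity/trivy/pkg/iac/rego"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible"
)

func main() {
	scanner := ansible.New(
		rego.WithEmbeddedPolicies(true), // load Trivy's built-in checks
		rego.WithEmbeddedLibraries(true),
		ansible.WithPlaybooks([]string{"site.yml"}),          // hypothetical entry point
		ansible.WithExtraVars(map[string]any{"env": "prod"}), // highest-priority vars
	)

	results, err := scanner.ScanFS(context.Background(), os.DirFS("."), ".")
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d failed checks\n", len(results.GetFailed()))
}
```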
39
pkg/iac/scanners/ansible/scanner_test.go
Normal file
@@ -0,0 +1,39 @@
package ansible

import (
	"testing"
	"testing/fstest"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/aquasecurity/trivy/pkg/iac/rego"
)

func TestBasicScan(t *testing.T) {
	fsys := fstest.MapFS{
		"playbook.yaml": {
			Data: []byte(`---
- name: Update web servers
  hosts: localhost

  tasks:
    - name: Ensure apache is at the latest version
      s3_bucket:
        name: mys3bucket
        public_access:
`),
		},
	}

	scanner := New(
		rego.WithEmbeddedLibraries(true),
		rego.WithEmbeddedPolicies(true),
	)

	results, err := scanner.ScanFS(t.Context(), fsys, ".")
	require.NoError(t, err)

	failed := results.GetFailed()
	assert.NotEmpty(t, failed)
}
148
pkg/iac/scanners/ansible/vars/vars.go
Normal file
@@ -0,0 +1,148 @@
package vars

import (
	"fmt"
	"maps"

	"github.com/aquasecurity/trivy/pkg/log"
)

// VarPriority represents the priority level of a variable.
// Higher values indicate higher precedence when merging variables.
type VarPriority int

const (
	RoleDefaultsPriority VarPriority = iota + 1
	InvFileGroupPriority
	InvExtAllGroupPriority
	PbExtAllGroupPriority
	InvExtGroupPriority
	PbExtGroupPriority
	InvFileHostPriority
	InvExtHostPriority
	PbExtHostPriority
	PlayVarsPriority
	PlayVarsFilesPriority
	RoleVarsPriority
	BlockVarsPriority
	TaskVarsPriority
	ExtraVarsPriority
	// Special variables cannot be set directly by the user;
	// Ansible will always override them to reflect internal state.
	SpecialVarsPriority
)

func (p VarPriority) Source() string {
	switch p {
	case RoleDefaultsPriority:
		return "role defaults"
	case InvFileGroupPriority:
		return "group vars (file)"
	case InvExtAllGroupPriority:
		return "group_vars/all (inv)"
	case PbExtAllGroupPriority:
		return "group_vars/all (pb)"
	case InvExtGroupPriority:
		return "group_vars/* (inv)"
	case PbExtGroupPriority:
		return "group_vars/* (pb)"
	case InvFileHostPriority:
		return "host vars (file)"
	case InvExtHostPriority:
		return "host_vars/* (inv)"
	case PbExtHostPriority:
		return "host_vars/* (pb)"
	case PlayVarsPriority:
		return "play"
	case PlayVarsFilesPriority:
		return "play vars_files"
	case RoleVarsPriority:
		return "role"
	case BlockVarsPriority:
		return "block"
	case TaskVarsPriority:
		return "task"
	case ExtraVarsPriority:
		return "extra"
	case SpecialVarsPriority:
		return "special"
	default:
		return "unknown"
	}
}

var VarFilesExtensions = []string{"", ".yml", ".yaml", ".json"}

// Variable represents a variable with its value and priority.
type Variable struct {
	Value    any
	Priority VarPriority
}

func NewVariable(val any, priority VarPriority) Variable {
	return Variable{
		Value:    val,
		Priority: priority,
	}
}

// PlainVars is a simple map from variable names to their values.
type PlainVars map[string]any

// Vars represents a set of variables as a map from string keys to Variable.
type Vars map[string]Variable

// NewVars creates a Vars map from a plain map[string]any, assigning
// the given priority to each variable.
func NewVars(values PlainVars, priority VarPriority) Vars {
	v := make(Vars, len(values))
	for k, val := range values {
		v[k] = Variable{
			Value:    val,
			Priority: priority,
		}
	}
	return v
}

// ToPlain returns a plain map[string]any with only variable values,
// discarding Priority and Source information.
func (v Vars) ToPlain() map[string]any {
	plain := make(map[string]any, len(v))
	for k, variable := range v {
		plain[k] = variable.Value
	}
	return plain
}

// Clone creates a shallow copy of Vars.
func (v Vars) Clone() Vars {
	if v == nil {
		return nil
	}
	c := make(Vars, len(v))
	maps.Copy(c, v)
	return c
}

// MergeVars merges the given variable sets in order: a later set always
// overwrites an earlier one, and a debug message is logged when the
// overwriting variable has a lower priority than the one it replaces.
func MergeVars(varsList ...Vars) Vars {
	result := Vars{}
	for _, vars := range varsList {
		for k, newVar := range vars {
			if existing, ok := result[k]; ok {
				if newVar.Priority < existing.Priority {
					log.WithPrefix("ansible").Debug(
						fmt.Sprintf(
							"Overwriting variable %q from %s (priority %d) with value from %s (priority %d)",
							k,
							existing.Priority.Source(), existing.Priority,
							newVar.Priority.Source(), newVar.Priority,
						),
					)
				}
			}
			result[k] = newVar
		}
	}
	return result
}
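A short sketch of the merge semantics, assuming the `vars` package above: the set merged last wins regardless of priority, and the debug log only flags the case where a lower-priority source replaces a higher-priority one.

```go
package main

import (
	"fmt"

	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible/vars"
)

func main() {
	roleDefaults := vars.NewVars(vars.PlainVars{"region": "us-east-1"}, vars.RoleDefaultsPriority)
	extraVars := vars.NewVars(vars.PlainVars{"region": "eu-west-1"}, vars.ExtraVarsPriority)

	// Later arguments overwrite earlier ones; here the higher-priority
	// extra vars are merged last, so no debug message is logged.
	merged := vars.MergeVars(roleDefaults, extraVars)
	fmt.Println(merged["region"].Value) // eu-west-1
}
```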
@@ -21,6 +21,7 @@ import (
	"github.com/aquasecurity/trivy/pkg/iac/rego"
	"github.com/aquasecurity/trivy/pkg/iac/scan"
	"github.com/aquasecurity/trivy/pkg/iac/scanners"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/ansible"
	"github.com/aquasecurity/trivy/pkg/iac/scanners/azure/arm"
	cfscanner "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation"
	cfparser "github.com/aquasecurity/trivy/pkg/iac/scanners/cloudformation/parser"
@@ -50,6 +51,7 @@ var enablediacTypes = map[detection.FileType]types.ConfigType{
	detection.FileTypeTerraformPlanSnapshot: types.TerraformPlanSnapshot,
	detection.FileTypeJSON:                  types.JSON,
	detection.FileTypeYAML:                  types.YAML,
	detection.FileTypeAnsible:               types.Ansible,
}

type ScannerOption struct {
@@ -77,6 +79,10 @@ type ScannerOption struct {
	FilePatterns      []string
	ConfigFileSchemas []*ConfigFileSchema

	AnsiblePlaybooks   []string
	AnsibleInventories []string
	AnsibleExtraVars   map[string]any

	SkipFiles []string
	SkipDirs  []string

@@ -124,6 +130,8 @@ func NewScanner(t detection.FileType, opt ScannerOption) (*Scanner, error) {
		scanner = generic.NewYamlScanner(opts...)
	case detection.FileTypeJSON:
		scanner = generic.NewJsonScanner(opts...)
	case detection.FileTypeAnsible:
		scanner = ansible.New(opts...)
	default:
		return nil, xerrors.Errorf("unknown file type: %s", t)
	}
@@ -308,6 +316,8 @@ func scannerOptions(t detection.FileType, opt ScannerOption) ([]options.ScannerO
		return addTFOpts(opts, opt)
	case detection.FileTypeCloudFormation:
		return addCFOpts(opts, opt)
	case detection.FileTypeAnsible:
		return addAnsibleOpts(opts, opt), nil
	default:
		return opts, nil
	}
@@ -388,6 +398,14 @@ func addHelmOpts(opts []options.ScannerOption, scannerOption ScannerOption) []op
	return opts
}

func addAnsibleOpts(opts []options.ScannerOption, scannerOpt ScannerOption) []options.ScannerOption {
	return append(opts,
		ansible.WithPlaybooks(scannerOpt.AnsiblePlaybooks),
		ansible.WithInventories(scannerOpt.AnsibleInventories),
		ansible.WithExtraVars(scannerOpt.AnsibleExtraVars),
	)
}

func createConfigFS(paths []string) (fs.FS, error) {
	mfs := mapfs.New()
	for _, path := range paths {