From cd9939def6ef00fcbd5ba3207386b010ea5f8d7d Mon Sep 17 00:00:00 2001 From: Carlos Polop Date: Mon, 16 Feb 2026 12:04:08 +0100 Subject: [PATCH] f --- .../gcp-dataflow-post-exploitation.md | 6 ++++++ .../gcp-dataflow-privesc.md | 19 +++++++++++++++---- .../gcp-services/gcp-dataflow-enum.md | 8 +++++++- 3 files changed, 28 insertions(+), 5 deletions(-) diff --git a/src/pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-dataflow-post-exploitation.md b/src/pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-dataflow-post-exploitation.md index ae2ce9859..a59815657 100644 --- a/src/pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-dataflow-post-exploitation.md +++ b/src/pentesting-cloud/gcp-security/gcp-post-exploitation/gcp-dataflow-post-exploitation.md @@ -46,4 +46,10 @@ Dataflow templates exist to export BigQuery data. Use the appropriate template f Streaming pipelines can read from Pub/Sub (or other sources) and write to GCS. Launch a job with a template that reads from the target Pub/Sub subscription and writes to your controlled bucket. 
+## References + +- [Dataflow templates](https://cloud.google.com/dataflow/docs/guides/templates/provided-templates) +- [Control access with IAM (Dataflow)](https://cloud.google.com/dataflow/docs/concepts/security-and-permissions) +- [GCP - Bigtable Post Exploitation](gcp-bigtable-post-exploitation.md) + {{#include ../../../banners/hacktricks-training.md}} diff --git a/src/pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-dataflow-privesc.md b/src/pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-dataflow-privesc.md index ddbbf3b5c..45490352f 100644 --- a/src/pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-dataflow-privesc.md +++ b/src/pentesting-cloud/gcp-security/gcp-privilege-escalation/gcp-dataflow-privesc.md @@ -37,7 +37,7 @@ gcloud dataflow jobs list --region= gcloud dataflow jobs list --project= # Describe a job to get template GCS path, staging location, and any UDF/template references -gcloud dataflow jobs describe --region= --format="yaml" +gcloud dataflow jobs describe --region= --full --format="yaml" # Look for: currentState, createTime, jobMetadata, type (JOB_TYPE_STREAMING or JOB_TYPE_BATCH) # Pipeline options often include: tempLocation, stagingLocation, templateLocation, or flexTemplateGcsPath ``` @@ -99,8 +99,10 @@ def _malicious_func(): coordination_file = "/tmp/pwnd.txt" if os.path.exists(coordination_file): return - + # malicious code goes here + with open(coordination_file, "w", encoding="utf-8") as f: + f.write("done") def transform(line): # Malicous code entry point - runs per line but coordination ensures once per worker @@ -140,10 +142,15 @@ Add the cleanup step (`drop: [malicious_step]`) so the pipeline still writes val from datetime import datetime coordination_file = "/tmp/pwnd.txt" if os.path.exists(coordination_file): - return + return True try: import urllib.request - # malicious code goes here + # malicious code goes here + with open(coordination_file, "w", encoding="utf-8") as f: + f.write("done") + 
except Exception: + pass + return True append: true - name: CleanupTransform type: MapToFields @@ -169,5 +176,9 @@ For exploitation details, see: ## References - [Dataflow Rider: How Attackers can Abuse Shadow Resources in Google Cloud Dataflow](https://www.varonis.com/blog/dataflow-rider) +- [Control access with IAM (Dataflow)](https://cloud.google.com/dataflow/docs/concepts/security-and-permissions) +- [gcloud dataflow jobs describe](https://cloud.google.com/sdk/gcloud/reference/dataflow/jobs/describe) +- [Apache Beam YAML: User-defined functions](https://beam.apache.org/documentation/sdks/yaml-udf/) +- [Apache Beam YAML Transform Reference](https://beam.apache.org/releases/yamldoc/current/) {{#include ../../../banners/hacktricks-training.md}} diff --git a/src/pentesting-cloud/gcp-security/gcp-services/gcp-dataflow-enum.md b/src/pentesting-cloud/gcp-security/gcp-services/gcp-dataflow-enum.md index 767d8cbae..101c0f2c5 100644 --- a/src/pentesting-cloud/gcp-security/gcp-services/gcp-dataflow-enum.md +++ b/src/pentesting-cloud/gcp-security/gcp-services/gcp-dataflow-enum.md @@ -12,7 +12,7 @@ A Dataflow pipeline typically includes: **Template:** YAML or JSON definitions (and Python/Java code for flex templates) stored in GCS that define the pipeline structure and steps. -**Launcher:** A short-lived Compute Engine instance that validates the template and prepares containers before the job runs. +**Launcher (Flex Templates):** A short-lived Compute Engine instance may be used for Flex Template launches to validate the template and prepare containers before the job runs. **Workers:** Compute Engine VMs that execute the actual data processing tasks, pulling UDFs and instructions from the template. 
@@ -76,4 +76,10 @@ gcloud storage ls gs:///** ../gcp-persistence/gcp-dataflow-persistence.md {{#endref}} +## References + +- [Dataflow overview](https://cloud.google.com/dataflow) +- [Pipeline workflow execution in Dataflow](https://cloud.google.com/dataflow/docs/guides/pipeline-workflows) +- [Troubleshoot templates](https://cloud.google.com/dataflow/docs/guides/troubleshoot-templates) + {{#include ../../../banners/hacktricks-training.md}}