mirror of
https://github.com/HackTricks-wiki/hacktricks-cloud.git
synced 2026-03-12 21:22:57 -07:00
f
This commit is contained in:
@@ -46,4 +46,10 @@ Dataflow templates exist to export BigQuery data. Use the appropriate template f
|
||||
|
||||
Streaming pipelines can read from Pub/Sub (or other sources) and write to GCS. Launch a job with a template that reads from the target Pub/Sub subscription and writes to your controlled bucket.
|
||||
|
||||
## References
|
||||
|
||||
- [Dataflow templates](https://cloud.google.com/dataflow/docs/guides/templates/provided-templates)
|
||||
- [Control access with IAM (Dataflow)](https://cloud.google.com/dataflow/docs/concepts/security-and-permissions)
|
||||
- [GCP - Bigtable Post Exploitation](gcp-bigtable-post-exploitation.md)
|
||||
|
||||
{{#include ../../../banners/hacktricks-training.md}}
|
||||
|
||||
@@ -37,7 +37,7 @@ gcloud dataflow jobs list --region=<region>
|
||||
gcloud dataflow jobs list --project=<PROJECT_ID>
|
||||
|
||||
# Describe a job to get template GCS path, staging location, and any UDF/template references
|
||||
gcloud dataflow jobs describe <JOB_ID> --region=<region> --format="yaml"
|
||||
gcloud dataflow jobs describe <JOB_ID> --region=<region> --full --format="yaml"
|
||||
# Look for: currentState, createTime, jobMetadata, type (JOB_TYPE_STREAMING or JOB_TYPE_BATCH)
|
||||
# Pipeline options often include: tempLocation, stagingLocation, templateLocation, or flexTemplateGcsPath
|
||||
```
|
||||
@@ -99,8 +99,10 @@ def _malicious_func():
|
||||
coordination_file = "/tmp/pwnd.txt"
|
||||
if os.path.exists(coordination_file):
|
||||
return
|
||||
|
||||
|
||||
# malicious code goes here
|
||||
with open(coordination_file, "w", encoding="utf-8") as f:
|
||||
f.write("done")
|
||||
|
||||
def transform(line):
|
||||
# Malicious code entry point - runs per line but coordination ensures once per worker
|
||||
@@ -140,10 +142,15 @@ Add the cleanup step (`drop: [malicious_step]`) so the pipeline still writes val
|
||||
from datetime import datetime
|
||||
coordination_file = "/tmp/pwnd.txt"
|
||||
if os.path.exists(coordination_file):
|
||||
return
|
||||
return True
|
||||
try:
|
||||
import urllib.request
|
||||
# malicious code goes here
|
||||
# malicious code goes here
|
||||
with open(coordination_file, "w", encoding="utf-8") as f:
|
||||
f.write("done")
|
||||
except Exception:
|
||||
pass
|
||||
return True
|
||||
append: true
|
||||
- name: CleanupTransform
|
||||
type: MapToFields
|
||||
@@ -169,5 +176,9 @@ For exploitation details, see:
|
||||
## References
|
||||
|
||||
- [Dataflow Rider: How Attackers can Abuse Shadow Resources in Google Cloud Dataflow](https://www.varonis.com/blog/dataflow-rider)
|
||||
- [Control access with IAM (Dataflow)](https://cloud.google.com/dataflow/docs/concepts/security-and-permissions)
|
||||
- [gcloud dataflow jobs describe](https://cloud.google.com/sdk/gcloud/reference/dataflow/jobs/describe)
|
||||
- [Apache Beam YAML: User-defined functions](https://beam.apache.org/documentation/sdks/yaml-udf/)
|
||||
- [Apache Beam YAML Transform Reference](https://beam.apache.org/releases/yamldoc/current/)
|
||||
|
||||
{{#include ../../../banners/hacktricks-training.md}}
|
||||
|
||||
@@ -12,7 +12,7 @@ A Dataflow pipeline typically includes:
|
||||
|
||||
**Template:** YAML or JSON definitions (and Python/Java code for flex templates) stored in GCS that define the pipeline structure and steps.
|
||||
|
||||
**Launcher:** A short-lived Compute Engine instance that validates the template and prepares containers before the job runs.
|
||||
**Launcher (Flex Templates):** A short-lived Compute Engine instance may be used for Flex Template launches to validate the template and prepare containers before the job runs.
|
||||
|
||||
**Workers:** Compute Engine VMs that execute the actual data processing tasks, pulling UDFs and instructions from the template.
|
||||
|
||||
@@ -76,4 +76,10 @@ gcloud storage ls gs://<bucket>/**
|
||||
../gcp-persistence/gcp-dataflow-persistence.md
|
||||
{{#endref}}
|
||||
|
||||
## References
|
||||
|
||||
- [Dataflow overview](https://cloud.google.com/dataflow)
|
||||
- [Pipeline workflow execution in Dataflow](https://cloud.google.com/dataflow/docs/guides/pipeline-workflows)
|
||||
- [Troubleshoot templates](https://cloud.google.com/dataflow/docs/guides/troubleshoot-templates)
|
||||
|
||||
{{#include ../../../banners/hacktricks-training.md}}
|
||||
|
||||
Reference in New Issue
Block a user