feat(secret): implement streaming secret scanner with byte offset tracking (#9264)

Co-authored-by: knqyf263 <knqyf263@users.noreply.github.com>
This commit is contained in:
Teppei Fukuda
2025-08-01 12:17:54 +04:00
committed by GitHub
parent 1473e88b74
commit 5a5e0972c7
16 changed files with 955 additions and 200 deletions

View File

@@ -3,18 +3,6 @@
"CreatedAt": "2021-08-25T12:20:30.000000005Z",
"ArtifactName": "testdata/fixtures/repo/secrets",
"ArtifactType": "repository",
"Metadata": {
"ImageConfig": {
"architecture": "",
"created": "0001-01-01T00:00:00Z",
"os": "",
"rootfs": {
"type": "",
"diff_ids": null
},
"config": {}
}
},
"Results": [
{
"Target": "deploy.sh",
@@ -70,7 +58,7 @@
]
},
"Match": "export AWS_ACCESS_KEY_ID=********************",
"Layer": {}
"Offset": 36
},
{
"RuleID": "mysecret",
@@ -113,7 +101,7 @@
]
},
"Match": "echo ********",
"Layer": {}
"Offset": 123
}
]
}

View File

@@ -1,6 +1,7 @@
package secret
import (
"bytes"
"context"
"encoding/json"
@@ -48,7 +49,7 @@ func (a *secretAnalyzer) Analyze(_ context.Context, input analyzer.ConfigAnalysi
result := a.scanner.Scan(secret.ScanArgs{
FilePath: "config.json",
Content: b,
Content: bytes.NewReader(b),
})
if len(result.Findings) == 0 {

View File

@@ -66,7 +66,8 @@ func Test_secretAnalyzer_Analyze(t *testing.T) {
},
},
},
Match: " \"secret=****************************************\"",
Match: " \"secret=****************************************\"",
Offset: 231,
},
},
},

View File

@@ -4,7 +4,6 @@ import (
"bytes"
"context"
"fmt"
"io"
"os"
"path/filepath"
"slices"
@@ -111,21 +110,6 @@ func (a *SecretAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput
log.WithPrefix("secret").Warn("The size of the scanned file is too large. It is recommended to use `--skip-files` for this file to avoid high memory consumption.", log.FilePath(input.FilePath), log.Int64("size (MB)", size/1048576))
}
var content []byte
if !binary {
content, err = io.ReadAll(input.Content)
if err != nil {
return nil, xerrors.Errorf("read error %s: %w", input.FilePath, err)
}
content = bytes.ReplaceAll(content, []byte("\r"), []byte(""))
} else {
content, err = utils.ExtractPrintableBytes(input.Content)
if err != nil {
return nil, xerrors.Errorf("binary read error %s: %w", input.FilePath, err)
}
}
filePath := input.FilePath
// Files extracted from the image have an empty input.Dir.
// Also, paths to these files do not have "/" prefix.
@@ -134,9 +118,18 @@ func (a *SecretAnalyzer) Analyze(_ context.Context, input analyzer.AnalysisInput
filePath = fmt.Sprintf("/%s", filePath)
}
reader := input.Content
if binary {
content, err := utils.ExtractPrintableBytes(input.Content)
if err != nil {
return nil, xerrors.Errorf("binary read error %s: %w", input.FilePath, err)
}
reader = bytes.NewReader(content)
}
result := a.scanner.Scan(secret.ScanArgs{
FilePath: filePath,
Content: content,
Content: reader,
Binary: binary,
})

View File

@@ -55,6 +55,7 @@ func TestSecretAnalyzer(t *testing.T) {
},
},
},
Offset: 55,
}
wantFinding2 := types.SecretFinding{
RuleID: "rule1",
@@ -93,6 +94,7 @@ func TestSecretAnalyzer(t *testing.T) {
},
},
},
Offset: 100,
}
wantFindingGH_PAT := types.SecretFinding{
RuleID: "github-fine-grained-pat",
@@ -102,6 +104,7 @@ func TestSecretAnalyzer(t *testing.T) {
StartLine: 1,
EndLine: 1,
Match: "Binary file \"/testdata/secret.cpython-310.pyc\" matches a rule \"GitHub Fine-grained personal access tokens\"",
Offset: 2,
}
tests := []struct {

View File

@@ -4,6 +4,7 @@ import (
"bytes"
"errors"
"fmt"
"io"
"os"
"regexp"
"slices"
@@ -27,8 +28,24 @@ var (
})
)
const (
// DefaultBufferSize is the default chunk size for streaming secret scanning
// 64KB provides a good balance between memory usage and I/O efficiency
// Larger buffers reduce I/O operations but use more memory
// Smaller buffers use less memory but may increase I/O overhead
DefaultBufferSize = 64 * 1024 // 64KB default buffer size
// DefaultOverlap is the number of bytes to overlap between chunks
// This ensures that secrets spanning chunk boundaries are not missed
// Must be large enough to contain the longest possible secret pattern
// 4KB is sufficient for all secret types including RSA 4096-bit keys
DefaultOverlap = 4096 // 4KB overlap for boundary handling
)
type Scanner struct {
logger *log.Logger
logger *log.Logger
bufferSize int
overlapSize int
*Global
}
@@ -103,6 +120,7 @@ type Rule struct {
AllowRules AllowRules `yaml:"allow-rules"`
ExcludeBlock ExcludeBlock `yaml:"exclude-block"`
SecretGroupName string `yaml:"secret-group-name"`
keywordsLower [][]byte // Pre-computed lowercase keywords
}
func (s *Scanner) FindLocations(r Rule, content []byte) []Location {
@@ -177,13 +195,13 @@ func (r *Rule) MatchPath(path string) bool {
return r.Path == nil || r.Path.MatchString(path)
}
func (r *Rule) MatchKeywords(content []byte) bool {
func (r *Rule) MatchKeywords(contentLower []byte) bool {
if len(r.Keywords) == 0 {
return true
}
contentLower := bytes.ToLower(content)
for _, kw := range r.Keywords {
if bytes.Contains(contentLower, []byte(strings.ToLower(kw))) {
for _, kwLower := range r.keywordsLower {
if bytes.Contains(contentLower, kwLower) {
return true
}
}
@@ -323,18 +341,63 @@ func convertSeverity(logger *log.Logger, severity string) string {
}
}
func NewScanner(config *Config) Scanner {
logger := log.WithPrefix(log.PrefixSecret)
// Option represents a functional option for configuring Scanner.
// Options (e.g. WithBufferSize, WithOverlapSize) are applied in order by NewScanner.
type Option func(*Scanner)
// WithBufferSize configures the chunk size used for streaming secret scanning.
// Non-positive sizes are ignored and the current value (DefaultBufferSize by
// default) is kept: a zero-length read buffer would prevent the streaming
// loop from ever making progress.
func WithBufferSize(size int) Option {
	return func(s *Scanner) {
		if size > 0 {
			s.bufferSize = size
		}
	}
}
// WithOverlapSize configures the overlap size between consecutive chunks.
// The overlap must be large enough to contain the longest secret pattern so
// secrets spanning a chunk boundary are not missed. Non-positive sizes are
// ignored and the current value (DefaultOverlap by default) is kept.
func WithOverlapSize(size int) Option {
	return func(s *Scanner) {
		if size > 0 {
			s.overlapSize = size
		}
	}
}
// precomputeLowercaseKeywords caches the lowercase form of every rule's
// keywords, so keyword matching does not have to re-lowercase them on each
// scanned chunk.
func precomputeLowercaseKeywords(rules []Rule) {
	for i := range rules {
		r := &rules[i]
		lowered := make([][]byte, 0, len(r.Keywords))
		for _, kw := range r.Keywords {
			lowered = append(lowered, []byte(strings.ToLower(kw)))
		}
		r.keywordsLower = lowered
	}
}
func NewScanner(config *Config, opts ...Option) Scanner {
scanner := Scanner{
logger: log.WithPrefix(log.PrefixSecret),
bufferSize: DefaultBufferSize,
overlapSize: DefaultOverlap,
}
// Apply functional options
for _, opt := range opts {
opt(&scanner)
}
// Validate configuration
if scanner.overlapSize >= scanner.bufferSize {
scanner.logger.Warn("Overlap size exceeds buffer size, adjusting to 1/4 of buffer size",
log.Int("overlap_size", scanner.overlapSize),
log.Int("buffer_size", scanner.bufferSize))
scanner.overlapSize = scanner.bufferSize / 4
}
// Use the default rules
if config == nil {
return Scanner{
logger: logger,
Global: &Global{
Rules: builtinRules,
AllowRules: builtinAllowRules,
},
// Pre-compute lowercase keywords for builtin rules
precomputeLowercaseKeywords(builtinRules)
scanner.Global = &Global{
Rules: builtinRules,
AllowRules: builtinAllowRules,
}
return scanner
}
enabledRules := builtinRules
@@ -359,19 +422,21 @@ func NewScanner(config *Config) Scanner {
return !slices.Contains(config.DisableAllowRuleIDs, v.ID)
})
return Scanner{
logger: logger,
Global: &Global{
Rules: rules,
AllowRules: allowRules,
ExcludeBlock: config.ExcludeBlock,
},
// Pre-compute lowercase keywords for all rules
precomputeLowercaseKeywords(rules)
scanner.Global = &Global{
Rules: rules,
AllowRules: allowRules,
ExcludeBlock: config.ExcludeBlock,
}
return scanner
}
type ScanArgs struct {
FilePath string
Content []byte
Content io.Reader
Binary bool
}
@@ -380,10 +445,20 @@ type Match struct {
Location Location
}
// Scan performs secret scanning on the provided content using streaming approach
// This method processes files in configurable chunks to maintain constant memory usage
// regardless of file size, making it suitable for scanning very large files
//
// The streaming approach:
// 1. Reads file content in chunks (default 64KB)
// 2. Maintains overlap between chunks to catch secrets at boundaries
// 3. Processes each chunk independently for secrets
// 4. Adjusts line numbers to account for chunk positioning
// 5. Combines results from all chunks
func (s *Scanner) Scan(args ScanArgs) types.Secret {
logger := s.logger.With("file_path", args.FilePath)
logger := s.logger.With(log.FilePath(args.FilePath))
// Global allowed paths
// Check if path is globally allowed (skip scanning entirely)
if s.AllowPath(args.FilePath) {
logger.Debug("Skipped secret scanning matching allowed paths")
return types.Secret{
@@ -391,38 +466,240 @@ func (s *Scanner) Scan(args ScanArgs) types.Secret {
}
}
// Perform streaming secret scanning
// This approach keeps memory usage constant regardless of file size
logger.Debug("Using streaming scanner")
result := s.scanStream(args.FilePath, args.Content, args.Binary)
return result
}
// scanStream performs streaming secret scanning by processing the file in
// chunks. Memory usage stays constant (O(bufferSize)) regardless of file size.
//
// Each chunk carries an overlap of the previous chunk's tail so secrets that
// span a boundary are still detected; duplicates found in the overlap are
// removed in finalizeScanResults.
func (s *Scanner) scanStream(filePath string, reader io.Reader, binary bool) types.Secret {
	logger := s.logger.With(log.FilePath(filePath))
	logger.Debug("scanStream called", log.Int("buffer_size", s.bufferSize), log.Int("overlap_size", s.overlapSize))

	// Per-file streaming state: reusable read buffer, overlap carry-over,
	// and cumulative line/byte offsets for globalizing chunk-local findings.
	state := s.initializeStreamingContext()

	// Process the file chunk by chunk until EOF.
	var allFindings []types.SecretFinding
	for {
		// Read the next chunk, prefixed with the overlap from the previous one.
		chunk, isEOF, err := s.readNextChunk(reader, state)
		if err != nil {
			logger.Error("Failed to read content during streaming", log.Err(err))
			break
		}
		if len(chunk) > 0 {
			chunkFindings := s.processChunkForSecrets(filePath, chunk, state, binary)
			allFindings = append(allFindings, chunkFindings...)
			// On the final chunk there is no next iteration, so the
			// overlap/offset bookkeeping can be skipped.
			if !isEOF {
				s.updateStreamingContext(chunk, state)
			}
		}
		if isEOF {
			break
		}
	}

	// Nothing found: return the zero value so callers see an empty result.
	if len(allFindings) == 0 {
		return types.Secret{}
	}

	// Debug: log findings before boundary deduplication.
	s.logger.Debug("Findings before deduplication", log.Int("count", len(allFindings)))
	for i, f := range allFindings {
		s.logger.Debug("Finding", log.Int("index", i), log.String("rule_id", f.RuleID), log.Int("offset", f.Offset), log.Int("line", f.StartLine))
	}

	// Deduplicate overlap re-detections and sort for deterministic output.
	allFindings = s.finalizeScanResults(allFindings)
	s.logger.Debug("Findings after deduplication", log.Int("count", len(allFindings)))

	return types.Secret{
		FilePath: filePath,
		Findings: allFindings,
	}
}
// chunkState holds the mutable per-file state for streaming secret scanning.
// One instance lives for the duration of a single scanStream call and is
// threaded through readNextChunk/processChunkForSecrets/updateStreamingContext.
type chunkState struct {
	buffer        []byte // reusable read buffer, sized to the scanner's bufferSize
	overlapBuffer []byte // tail of the previous chunk, re-prepended to the next read
	lineOffset    int    // newlines fully consumed so far (excludes the overlap region)
	byteOffset    int    // bytes fully consumed so far (excludes the overlap region)
}
// initializeStreamingContext allocates the per-file streaming state: a read
// buffer of bufferSize bytes and an initially-empty overlap buffer with
// capacity overlapSize. The line and byte offsets start at their zero values,
// so spelling them out explicitly is unnecessary.
func (s *Scanner) initializeStreamingContext() *chunkState {
	return &chunkState{
		buffer:        make([]byte, s.bufferSize),
		overlapBuffer: make([]byte, 0, s.overlapSize),
	}
}
// readNextChunk reads the next chunk of data, prefixed with the overlap saved
// from the previous chunk. It returns the chunk, whether EOF was reached, and
// any read error.
//
// io.ReadFull is used instead of a single reader.Read call: Read is allowed to
// return far fewer bytes than requested (common with bufio or network
// readers), which would produce many tiny chunks, repeated overlap
// reprocessing, and no forward progress for a reader that returns (0, nil).
// ReadFull keeps reading until the buffer is full or the reader is exhausted,
// so every non-final chunk has a predictable size.
func (s *Scanner) readNextChunk(reader io.Reader, state *chunkState) ([]byte, bool, error) {
	// Carry the overlap from the previous chunk so secrets spanning a chunk
	// boundary are fully visible in this chunk.
	overlapLen := len(state.overlapBuffer)
	copy(state.buffer[:overlapLen], state.overlapBuffer)

	// Fill the remainder of the buffer with new data.
	n, err := io.ReadFull(reader, state.buffer[overlapLen:])
	// io.EOF means nothing was read; io.ErrUnexpectedEOF means a partial fill.
	// Both signal that the underlying reader is exhausted.
	isEOF := errors.Is(err, io.EOF) || errors.Is(err, io.ErrUnexpectedEOF)

	// EOF with no new data: flush any remaining overlap as the final chunk so
	// it is still scanned on its own.
	if n == 0 && isEOF {
		if overlapLen > 0 {
			return state.overlapBuffer, true, nil
		}
		// No more data to process.
		return nil, true, nil
	}
	if err != nil && !isEOF {
		return nil, false, xerrors.Errorf("failed to read next chunk: %w", err)
	}

	// The chunk is the overlap followed by the newly read bytes. Returning it
	// together with the EOF flag lets the caller process a final short chunk.
	return state.buffer[:overlapLen+n], isEOF, nil
}
// processChunkForSecrets scans a single chunk and translates the chunk-local
// line numbers and byte offsets of any findings into file-global values using
// the cumulative offsets tracked in state.
func (s *Scanner) processChunkForSecrets(filePath string, chunk []byte, state *chunkState, binary bool) []types.SecretFinding {
	result := s.scanChunk(filePath, chunk, binary)
	if len(result.Findings) > 0 {
		s.logger.Debug("Found secrets in chunk", log.Int("count", len(result.Findings)), log.Int("line_offset", state.lineOffset))
	}

	findings := result.Findings
	for i := range findings {
		f := &findings[i]
		// Shift chunk-relative positions by what earlier chunks already consumed.
		f.StartLine += state.lineOffset
		f.EndLine += state.lineOffset
		f.Offset += state.byteOffset
		// The surrounding code snippet carries per-line numbers as well.
		for j := range f.Code.Lines {
			f.Code.Lines[j].Number += state.lineOffset
		}
	}
	return findings
}
// updateStreamingContext prepares state for the next iteration: it saves the
// tail of the current chunk as overlap and advances the cumulative line/byte
// offsets past the bytes that will not be re-read.
func (s *Scanner) updateStreamingContext(chunk []byte, state *chunkState) {
	total := len(chunk)
	if total <= s.overlapSize {
		// The whole chunk fits in the overlap window (possible near EOF):
		// keep it all and leave the offsets untouched, because every byte
		// will be reprocessed together with the next chunk.
		state.overlapBuffer = state.overlapBuffer[:total]
		copy(state.overlapBuffer, chunk)
		return
	}

	// Keep the last overlapSize bytes so a secret crossing the boundary is
	// fully contained in the next chunk.
	boundary := total - s.overlapSize
	state.overlapBuffer = state.overlapBuffer[:s.overlapSize]
	copy(state.overlapBuffer, chunk[boundary:])

	// Only the non-overlapping prefix is definitively consumed; count its
	// newlines and bytes toward the global offsets.
	consumed := chunk[:boundary]
	state.lineOffset += bytes.Count(consumed, lineSep)
	state.byteOffset += len(consumed)
}
// finalizeScanResults removes findings that were detected twice in chunk
// overlap regions and sorts the result for deterministic output.
func (s *Scanner) finalizeScanResults(findings []types.SecretFinding) []types.SecretFinding {
	// Remove duplicates re-detected at chunk boundaries.
	findings = s.deduplicateFindings(findings)

	// Sort by rule, then match text, then byte offset. The final Offset
	// tiebreak makes the order fully deterministic even when the same
	// rule/match pair occurs several times in a file; sort.Slice is not
	// stable, so without it the relative order of such pairs could vary
	// between runs. Existing orderings (decided by RuleID/Match) are preserved.
	sort.Slice(findings, func(i, j int) bool {
		if findings[i].RuleID != findings[j].RuleID {
			return findings[i].RuleID < findings[j].RuleID
		}
		if findings[i].Match != findings[j].Match {
			return findings[i].Match < findings[j].Match
		}
		return findings[i].Offset < findings[j].Offset
	})
	return findings
}
func (s *Scanner) scanChunk(filePath string, content []byte, binary bool) types.Secret {
logger := s.logger.With(log.FilePath(filePath))
logger.Debug("scanChunk called", log.Int("content_len", len(content)), log.Int("num_rules", len(s.Rules)))
var censored []byte
var copyCensored sync.Once
var matched []Match
var findings []types.SecretFinding
globalExcludedBlocks := newBlocks(args.Content, s.ExcludeBlock.Regexes)
globalExcludedBlocks := newBlocks(content, s.ExcludeBlock.Regexes)
// Convert content to lowercase once for all keyword matching
contentLower := bytes.ToLower(content)
for _, rule := range s.Rules {
ruleLogger := logger.With("rule_id", rule.ID)
// Check if the file path should be scanned by this rule
if !rule.MatchPath(args.FilePath) {
if !rule.MatchPath(filePath) {
ruleLogger.Debug("Skipped secret scanning as non-compliant to the rule")
continue
}
// Check if the file path should be allowed
if rule.AllowPath(args.FilePath) {
if rule.AllowPath(filePath) {
ruleLogger.Debug("Skipped secret scanning as allowed")
continue
}
// Check if the file content contains keywords and should be scanned
if !rule.MatchKeywords(args.Content) {
if !rule.MatchKeywords(contentLower) {
continue
}
// Detect secrets
locs := s.FindLocations(rule, args.Content)
locs := s.FindLocations(rule, content)
if len(locs) == 0 {
continue
}
ruleLogger.Debug("Found locations", log.Int("count", len(locs)))
localExcludedBlocks := newBlocks(args.Content, rule.ExcludeBlock.Regexes)
localExcludedBlocks := newBlocks(content, rule.ExcludeBlock.Regexes)
for _, loc := range locs {
// Skip the secret if it is within excluded blocks.
@@ -434,18 +711,20 @@ func (s *Scanner) Scan(args ScanArgs) types.Secret {
Rule: rule,
Location: loc,
})
logger.Debug("Found secret in chunk", log.String("rule_id", rule.ID), log.Int("start", loc.Start), log.Int("end", loc.End))
copyCensored.Do(func() {
censored = make([]byte, len(args.Content))
copy(censored, args.Content)
censored = make([]byte, len(content))
copy(censored, content)
})
censored = censorLocation(loc, censored)
}
}
for _, match := range matched {
finding := toFinding(match.Rule, match.Location, censored)
// Rewrite unreadable fields for binary files
if args.Binary {
finding.Match = fmt.Sprintf("Binary file %q matches a rule %q", args.FilePath, match.Rule.Title)
if binary {
finding.Match = fmt.Sprintf("Binary file %q matches a rule %q", filePath, match.Rule.Title)
finding.Code = types.Code{}
}
findings = append(findings, finding)
@@ -463,11 +742,21 @@ func (s *Scanner) Scan(args ScanArgs) types.Secret {
})
return types.Secret{
FilePath: args.FilePath,
FilePath: filePath,
Findings: findings,
}
}
// deduplicateFindings removes duplicate secret findings that can occur when
// the same secret is re-detected inside a chunk's overlap region.
// Two findings are considered identical when their rule ID, start/end lines,
// and global byte offset all match; the first occurrence is kept and the
// original order is preserved.
func (s *Scanner) deduplicateFindings(findings []types.SecretFinding) []types.SecretFinding {
	seen := make(map[string]struct{}, len(findings))
	unique := findings[:0:0]
	for _, f := range findings {
		key := fmt.Sprintf("%s:%d-%d-%d", f.RuleID, f.StartLine, f.EndLine, f.Offset)
		if _, dup := seen[key]; dup {
			continue
		}
		seen[key] = struct{}{}
		unique = append(unique, f)
	}
	return unique
}
func censorLocation(loc Location, input []byte) []byte {
for i := loc.Start; i < loc.End; i++ {
if input[i] != '\n' {
@@ -489,6 +778,7 @@ func toFinding(rule Rule, loc Location, content []byte) types.SecretFinding {
StartLine: startLine,
EndLine: endLine,
Code: code,
Offset: loc.Start,
}
}
@@ -518,7 +808,7 @@ func findLocation(start, end int, content []byte) (int, int, types.Code, string)
lineStart = lo.Ternary(start-lineStart-30 < 0, lineStart, start-30)
lineEnd = lo.Ternary(end+20 > lineEnd, lineEnd, end+20)
}
matchLine := sanitizeUTF8String(content[lineStart:lineEnd])
matchLine := sanitizeString(content[lineStart:lineEnd])
endLineNum := startLineNum + bytes.Count(content[start:end], lineSep)
var code types.Code
@@ -535,9 +825,9 @@ func findLocation(start, end int, content []byte) (int, int, types.Code, string)
var strRawLine string
if len(rawLine) > maxLineLength {
strRawLine = lo.Ternary(inCause, matchLine, sanitizeUTF8String(rawLine[:maxLineLength]))
strRawLine = lo.Ternary(inCause, matchLine, sanitizeString(rawLine[:maxLineLength]))
} else {
strRawLine = sanitizeUTF8String(rawLine)
strRawLine = sanitizeString(rawLine)
}
code.Lines = append(code.Lines, types.Line{
@@ -562,8 +852,13 @@ func findLocation(start, end int, content []byte) (int, int, types.Code, string)
return startLineNum + 1, endLineNum + 1, code, matchLine
}
// sanitizeUTF8String converts bytes to a valid UTF-8 string, logging a warning once if invalid sequences are found
func sanitizeUTF8String(data []byte) string {
// sanitizeString converts bytes to a valid string for display
// It removes carriage return characters for consistent output across different line ending styles
// and ensures valid UTF-8 encoding, logging a warning once if invalid sequences are found
func sanitizeString(data []byte) string {
// Remove carriage returns for consistent display
data = bytes.ReplaceAll(data, []byte("\r"), []byte(""))
if utf8.Valid(data) {
return string(data)
}

View File

@@ -4,6 +4,7 @@ import (
"bytes"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/assert"
@@ -50,6 +51,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 55,
}
wantFinding2 := types.SecretFinding{
RuleID: "rule1",
@@ -86,6 +88,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 100,
}
wantFindingRegexDisabled := types.SecretFinding{
RuleID: "rule1",
@@ -122,8 +125,9 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 100,
}
wantFinding3 := types.SecretFinding{
wantFindingMultipleGroupsUsername := types.SecretFinding{
RuleID: "rule1",
Category: "general",
Title: "Generic Rule",
@@ -153,8 +157,9 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 134,
}
wantFinding4 := types.SecretFinding{
wantFindingMultipleGroupsPassword := types.SecretFinding{
RuleID: "rule1",
Category: "general",
Title: "Generic Rule",
@@ -184,6 +189,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 155,
}
wantFinding5 := types.SecretFinding{
RuleID: "aws-access-key-id",
@@ -215,6 +221,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 78,
}
wantFinding5a := types.SecretFinding{
RuleID: "aws-access-key-id",
@@ -241,6 +248,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 70,
}
wantFindingPATDisabled := types.SecretFinding{
RuleID: "aws-access-key-id",
@@ -267,6 +275,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 70,
}
wantFinding6 := types.SecretFinding{
RuleID: "github-pat",
@@ -293,6 +302,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 11,
}
wantFindingGitHubPAT := types.SecretFinding{
RuleID: "github-fine-grained-pat",
@@ -314,6 +324,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 13,
}
wantFindingMyAwsAccessKey := types.SecretFinding{
RuleID: "aws-secret-access-key",
@@ -340,8 +351,8 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 18,
}
wantFindingMyGitHubPAT := types.SecretFinding{
RuleID: "github-fine-grained-pat",
Category: secret.CategoryGitHub,
@@ -367,6 +378,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 63,
}
wantFindingGHButDisableAWS := types.SecretFinding{
RuleID: "github-pat",
@@ -393,6 +405,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 11,
}
wantFinding7 := types.SecretFinding{
RuleID: "github-pat",
@@ -414,6 +427,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 67,
}
wantFinding8 := types.SecretFinding{
RuleID: "rule1",
@@ -445,6 +459,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 55,
}
wantFinding9 := types.SecretFinding{
RuleID: "aws-secret-access-key",
@@ -471,8 +486,8 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 18,
}
wantFinding10 := types.SecretFinding{
RuleID: "aws-secret-access-key",
Category: secret.CategoryAWS,
@@ -503,6 +518,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 200,
}
wantFindingAsymmetricPrivateKeyJson := types.SecretFinding{
RuleID: "private-key",
@@ -524,6 +540,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 40,
}
wantFindingAsymmetricPrivateKey := types.SecretFinding{
RuleID: "private-key",
@@ -571,6 +588,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 32,
}
wantFindingAsymmSecretKey := types.SecretFinding{
RuleID: "private-key",
@@ -616,6 +634,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 32,
}
wantFindingMinimumAsymmSecretKey := types.SecretFinding{
RuleID: "private-key",
@@ -653,6 +672,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 1842,
}
wantFindingAlibabaAccessKeyId := types.SecretFinding{
RuleID: "alibaba-access-key-id",
@@ -684,6 +704,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 40,
}
wantFindingDockerKey1 := types.SecretFinding{
RuleID: "dockerconfig-secret",
@@ -715,6 +736,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 62,
}
wantFindingDockerKey2 := types.SecretFinding{
RuleID: "dockerconfig-secret",
@@ -746,6 +768,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 28,
}
wantFindingPrivatePackagistOrgReadToken := types.SecretFinding{
RuleID: "private-packagist-token",
@@ -775,6 +798,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 15,
}
wantFindingPrivatePackagistOrgUpdateToken := types.SecretFinding{
RuleID: "private-packagist-token",
@@ -812,6 +836,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 114,
}
wantFindingPrivatePackagistUserToken := types.SecretFinding{
RuleID: "private-packagist-token",
@@ -857,6 +882,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 208,
}
wantFindingHuggingFace := types.SecretFinding{
RuleID: "hugging-face-access-token",
@@ -878,8 +904,8 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 18,
}
wantFindingGrafanaQuoted := types.SecretFinding{
RuleID: "grafana-api-token",
Category: secret.CategoryGrafana,
@@ -908,8 +934,8 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 14,
}
wantFindingGrafanaUnquoted := types.SecretFinding{
RuleID: "grafana-api-token",
Category: secret.CategoryGrafana,
@@ -943,8 +969,8 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 123,
}
wantMultiLine := types.SecretFinding{
RuleID: "multi-line-secret",
Category: "general",
@@ -975,6 +1001,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 4,
}
wantFindingTokenInsideJs := types.SecretFinding{
RuleID: "stripe-publishable-token",
@@ -996,6 +1023,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 4016,
}
wantFindingJWT := types.SecretFinding{
RuleID: "jwt-token",
@@ -1032,6 +1060,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 14,
}
tests := []struct {
@@ -1065,6 +1094,100 @@ func TestSecretScanner(t *testing.T) {
},
},
},
{
name: "find secrets in CRLF line endings file",
configPath: filepath.Join("testdata", "skip-test.yaml"),
inputFilePath: filepath.Join("testdata", "crlf-line-endings.txt"),
want: types.Secret{
FilePath: filepath.Join("testdata", "crlf-line-endings.txt"),
Findings: []types.SecretFinding{
{
RuleID: "aws-access-key-id",
Category: secret.CategoryAWS,
Title: "AWS Access Key ID",
Severity: "CRITICAL",
StartLine: 2,
EndLine: 2,
Code: types.Code{
Lines: []types.Line{
{
Number: 1,
Content: "This is a test file with CRLF line endings.",
IsCause: false,
Highlighted: "This is a test file with CRLF line endings.",
FirstCause: false,
LastCause: false,
},
{
Number: 2,
Content: "AWS Access Key ID: ********************",
IsCause: true,
Highlighted: "AWS Access Key ID: ********************",
FirstCause: true,
LastCause: true,
},
{
Number: 3,
Content: "This line has no secrets.",
IsCause: false,
Highlighted: "This line has no secrets.",
FirstCause: false,
LastCause: false,
},
},
},
Match: "AWS Access Key ID: ********************",
Offset: 63,
},
{
RuleID: "github-pat",
Category: secret.CategoryGitHub,
Title: "GitHub Personal Access Token",
Severity: "CRITICAL",
StartLine: 4,
EndLine: 4,
Code: types.Code{
Lines: []types.Line{
{
Number: 2,
Content: "AWS Access Key ID: ********************",
IsCause: false,
Highlighted: "AWS Access Key ID: ********************",
FirstCause: false,
LastCause: false,
},
{
Number: 3,
Content: "This line has no secrets.",
IsCause: false,
Highlighted: "This line has no secrets.",
FirstCause: false,
LastCause: false,
},
{
Number: 4,
Content: "GitHub PAT: ****************************************12",
IsCause: true,
Highlighted: "GitHub PAT: ****************************************12",
FirstCause: true,
LastCause: true,
},
{
Number: 5,
Content: "End of file.",
IsCause: false,
Highlighted: "End of file.",
FirstCause: false,
LastCause: false,
},
},
},
Match: "GitHub PAT: ****************************************12",
Offset: 122,
},
},
},
},
{
name: "find Asymmetric Private Key secrets",
configPath: filepath.Join("testdata", "skip-test.yaml"),
@@ -1119,7 +1242,10 @@ func TestSecretScanner(t *testing.T) {
inputFilePath: filepath.Join("testdata", "grafana-env.txt"),
want: types.Secret{
FilePath: filepath.Join("testdata", "grafana-env.txt"),
Findings: []types.SecretFinding{wantFindingGrafanaUnquoted, wantFindingGrafanaQuoted},
Findings: []types.SecretFinding{
wantFindingGrafanaUnquoted,
wantFindingGrafanaQuoted,
},
},
},
{
@@ -1299,8 +1425,8 @@ func TestSecretScanner(t *testing.T) {
want: types.Secret{
FilePath: filepath.Join("testdata", "secret.txt"),
Findings: []types.SecretFinding{
wantFinding3,
wantFinding4,
wantFindingMultipleGroupsUsername,
wantFindingMultipleGroupsPassword,
},
},
},
@@ -1379,7 +1505,10 @@ func TestSecretScanner(t *testing.T) {
inputFilePath: "testdata/asymmetric-private-key.txt",
want: types.Secret{
FilePath: "testdata/asymmetric-private-key.txt",
Findings: []types.SecretFinding{wantFindingAsymmSecretKey, wantFindingMinimumAsymmSecretKey},
Findings: []types.SecretFinding{
wantFindingAsymmSecretKey,
wantFindingMinimumAsymmSecretKey,
},
},
},
{
@@ -1438,6 +1567,7 @@ func TestSecretScanner(t *testing.T) {
},
},
},
Offset: 6,
},
},
},
@@ -1457,10 +1587,343 @@ func TestSecretScanner(t *testing.T) {
s := secret.NewScanner(c)
got := s.Scan(secret.ScanArgs{
FilePath: tt.inputFilePath,
Content: content,
},
)
Content: bytes.NewReader(content),
})
assert.Equal(t, tt.want, got)
})
}
}
// TestSecretScannerWithStreaming exercises the chunked (streaming) scan path:
// the input is consumed in bufferSize-byte chunks, and overlapSize bytes from
// the end of each chunk are re-scanned together with the next one so that
// secrets crossing a chunk boundary can still be detected. Every case also
// verifies that the reported Offset is a byte position relative to the start
// of the whole input, not the current chunk, and that findings in the overlap
// region are not reported twice.
func TestSecretScannerWithStreaming(t *testing.T) {
	tests := []struct {
		name        string
		input       string // raw content handed to the scanner through a strings.Reader
		bufferSize  int    // chunk size used for streaming reads
		overlapSize int    // bytes carried over between consecutive chunks
		configPath  string // scanner rule configuration to load
		want        types.Secret
	}{
		{
			name: "secret in second chunk",
			input: strings.Repeat("x", 520) + "\n" + // 520 bytes to push secret to second chunk
				"AWS_ACCESS_KEY_ID=AKIA0123456789ABCDEF\n" + // at offset 521
				strings.Repeat("y", 100), // padding
			bufferSize:  512,
			overlapSize: 128,
			configPath:  filepath.Join("testdata", "skip-test.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "aws-access-key-id",
						Category:  secret.CategoryAWS,
						Title:     "AWS Access Key ID",
						Severity:  "CRITICAL",
						StartLine: 2,
						EndLine:   2,
						Match:     "AWS_ACCESS_KEY_ID=********************",
						Offset:    539, // 521 + 18 (position of the actual key value)
					},
				},
			},
		},
		{
			name: "secret spanning chunk boundary",
			input: strings.Repeat("x", 480) + "\n" + // 480 bytes
				"AWS_ACCESS_KEY_ID=AKIA0123456789ABCDEF\n" + // at offset 481, spans chunk boundary at 512
				strings.Repeat("y", 200), // padding
			bufferSize:  512, // Boundary will be in the middle of the secret
			overlapSize: 128,
			configPath:  filepath.Join("testdata", "skip-test.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "aws-access-key-id",
						Category:  secret.CategoryAWS,
						Title:     "AWS Access Key ID",
						Severity:  "CRITICAL",
						StartLine: 2,
						EndLine:   2,
						Match:     "AWS_ACCESS_KEY_ID=********************",
						Offset:    499, // 481 + 18
					},
				},
			},
		},
		{
			// Three different rules firing in three different chunks; expected
			// findings are listed in the order the scanner returns them, which
			// is not input order.
			name: "multiple secrets across chunks",
			input: "GITHUB_PAT=ghp_012345678901234567890123456789abcdef\n" + // at offset 0, 52 bytes
				strings.Repeat("x", 1200) + "\n" + // 1200 bytes padding
				"AWS_ACCESS_KEY_ID=AKIA0123456789ABCDEF\n" + // at offset 1253
				strings.Repeat("y", 1400) + "\n" + // 1400 bytes padding
				"stripe_key=sk_test_51H5Z3jGXvP5CVwYOLLllllllllllllllllllllllll\n" + // at offset 2693
				strings.Repeat("z", 200), // final padding
			bufferSize:  1024,
			overlapSize: 256,
			configPath:  filepath.Join("testdata", "skip-test.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "aws-access-key-id",
						Category:  secret.CategoryAWS,
						Title:     "AWS Access Key ID",
						Severity:  "CRITICAL",
						StartLine: 3,
						EndLine:   3,
						Match:     "AWS_ACCESS_KEY_ID=********************",
						Offset:    1271, // 1253 + 18
					},
					{
						RuleID:    "github-pat",
						Category:  secret.CategoryGitHub,
						Title:     "GitHub Personal Access Token",
						Severity:  "CRITICAL",
						StartLine: 1,
						EndLine:   1,
						Match:     "GITHUB_PAT=****************************************", // 40 asterisks for 40-char token
						Offset:    11,                                                    // position of the token value
					},
					{
						RuleID:    "stripe-secret-token",
						Category:  secret.CategoryStripe,
						Title:     "Stripe Secret Key",
						Severity:  "CRITICAL",
						StartLine: 5,
						EndLine:   5,
						Match:     "stripe_key=****************************************lllllllllll", // Stripe key pattern includes literal 'l' chars
						Offset:    2704,                                                             // 2693 + 11
					},
				},
			},
		},
		{
			name: "secret at exact chunk boundary",
			input: strings.Repeat("x", 383) + "\n" + // 383 bytes + newline = 384
				"AWS_ACCESS_KEY_ID=AKIA0123456789ABCDEF", // Starts exactly at chunk boundary (384)
			bufferSize:  384, // Boundary right after the newline
			overlapSize: 96,
			configPath:  filepath.Join("testdata", "skip-test.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "aws-access-key-id",
						Category:  secret.CategoryAWS,
						Title:     "AWS Access Key ID",
						Severity:  "CRITICAL",
						StartLine: 2,
						EndLine:   2,
						Match:     "AWS_ACCESS_KEY_ID=********************",
						Offset:    402, // 384 + 18
					},
				},
			},
		},
		{
			name: "very small buffer and overlap",
			input: strings.Repeat("x", 150) + "\n" + // 150 bytes padding
				"secret=\"mysecret123\"\n" + // at offset 151
				strings.Repeat("y", 345) + "\n" + // 345 bytes padding
				"secret=\"anothersecret456\"\n" + // at offset 517
				strings.Repeat("z", 150), // suffix padding
			bufferSize:  128,
			overlapSize: 32,
			configPath:  filepath.Join("testdata", "config.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "rule1",
						Category:  "general",
						Title:     "Generic Rule",
						Severity:  "HIGH",
						StartLine: 2,
						EndLine:   2,
						Match:     "secret=\"***********\"",
						Offset:    159, // 151 + 8 (position of "mysecret123")
					},
					{
						RuleID:    "rule1",
						Category:  "general",
						Title:     "Generic Rule",
						Severity:  "HIGH",
						StartLine: 4,
						EndLine:   4,
						Match:     "secret=\"****************\"",
						Offset:    526, // 517 + 9
					},
				},
			},
		},
		{
			name: "multi-line secret with small chunks",
			input: strings.Repeat("x", 720) + "\n" + // 720 bytes padding
				"-----BEGIN RSA PRIVATE KEY-----\n" + // at offset 721
				"MIIEpAIBAAKCAQEA1234567890abcdefghijklmnopqrstuvwxyz\n" +
				"ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890abcdefghijklmnop\n" +
				"qrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ12345678\n" +
				"-----END RSA PRIVATE KEY-----\n" +
				strings.Repeat("z", 460), // suffix padding
			bufferSize:  1024, // Ensure the entire key fits in one chunk
			overlapSize: 256,
			configPath:  filepath.Join("testdata", "skip-test.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "private-key",
						Category:  secret.CategoryAsymmetricPrivateKey,
						Title:     "Asymmetric Private Key",
						Severity:  "HIGH",
						StartLine: 3,
						EndLine:   5,
						Match:     "****************************************************", // Multi-line secret content
						Offset:    753,                                                    // 721 + 32 (after BEGIN line)
					},
				},
			},
		},
		{
			// Documents the known limitation: a multi-line secret longer than
			// the overlap region cannot be reassembled across the boundary, so
			// no finding is expected.
			name: "multi-line secret exceeding overlap size (known limitation)",
			input: strings.Repeat("x", 920) + "\n" + // Position key at chunk boundary (1024 - ~100 bytes for key)
				"-----BEGIN RSA PRIVATE KEY-----\n" + // at offset 921
				"MIIEpAIBAAKCAQEA1234567890abcdefghijklmnopqrstuvwxyz\n" +
				"ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890abcdefghijklmnop\n" +
				"qrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ12345678\n" +
				"-----END RSA PRIVATE KEY-----\n" +
				strings.Repeat("z", 460), // suffix padding
			bufferSize:  1024,
			overlapSize: 100, // Too small to capture the entire key
			configPath:  filepath.Join("testdata", "skip-test.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: nil, // Key won't be detected as it spans beyond the overlap
			},
		},
		{
			name:        "no secrets in any chunk",
			input:       strings.Repeat("this is just normal content without any secrets in it at all\n", 50),
			bufferSize:  512,
			overlapSize: 128, // 1/4 of buffer
			configPath:  filepath.Join("testdata", "config.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: nil,
			},
		},
		{
			name: "secret in last chunk with EOF",
			input: strings.Repeat("x", 1500) + "\n" + // 1500 bytes to push secret to end
				"final_secret=\"supersecretvalue\"", // at offset 1501, no newline at end
			bufferSize:  640,
			overlapSize: 160, // 1/4 of buffer
			configPath:  filepath.Join("testdata", "config.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "rule1",
						Category:  "general",
						Title:     "Generic Rule",
						Severity:  "HIGH",
						StartLine: 2,
						EndLine:   2,
						Match:     "final_secret=\"****************\"",
						Offset:    1515, // 1501 + 14 (position of "supersecretvalue")
					},
				},
			},
		},
		{
			// The secret sits entirely inside the overlap region shared by two
			// chunks; exactly one finding must survive deduplication.
			name: "deduplicate findings at chunk boundaries",
			input: strings.Repeat("x", 330) + "\n" + // 330 bytes
				"secret=\"duplicatetest123\"\n" + // at offset 331, ends at 356
				strings.Repeat("y", 200), // padding
			bufferSize:  400, // First chunk: 0-399, overlap: 300-399
			overlapSize: 100, // Secret (331-356) is within overlap region
			configPath:  filepath.Join("testdata", "config.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "rule1",
						Category:  "general",
						Title:     "Generic Rule",
						Severity:  "HIGH",
						StartLine: 2,
						EndLine:   2,
						Match:     "secret=\"****************\"",
						Offset:    339, // 331 + 8 (position of "duplicatetest123")
					},
				},
			},
		},
		{
			name: "multiple secrets on same line",
			input: strings.Repeat("x", 480) + "\n" + // 480 bytes padding
				"secret=\"first123\" and secret=\"second456\" on same line\n" + // at offset 481
				strings.Repeat("y", 1300), // large padding to force multiple chunks
			bufferSize:  512,
			overlapSize: 128, // 1/4 of buffer
			configPath:  filepath.Join("testdata", "config.yaml"),
			want: types.Secret{
				FilePath: "test.txt",
				Findings: []types.SecretFinding{
					{
						RuleID:    "rule1",
						Category:  "general",
						Title:     "Generic Rule",
						Severity:  "HIGH",
						StartLine: 2,
						EndLine:   2,
						Match:     "secret=\"********\" and secret=\"*********\" on same line", // Full line match for second secret
						Offset:    511,                                                      // 481 + 30 (position of "second456")
					},
					{
						RuleID:    "rule1",
						Category:  "general",
						Title:     "Generic Rule",
						Severity:  "HIGH",
						StartLine: 2,
						EndLine:   2,
						Match:     "secret=\"********\" and secret=\"s", // Truncated match for first secret
						Offset:    489,                                // 481 + 8 (position of "first123")
					},
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Parse config
			c, err := secret.ParseConfig(tt.configPath)
			require.NoError(t, err)

			// Create scanner with custom buffer and overlap sizes
			s := secret.NewScanner(c,
				secret.WithBufferSize(tt.bufferSize),
				secret.WithOverlapSize(tt.overlapSize))

			// Scan with streaming
			reader := strings.NewReader(tt.input)
			got := s.Scan(secret.ScanArgs{
				FilePath: "test.txt",
				Content:  reader,
			})

			// Clear Code field as it's too verbose to specify in test expectations
			for i := range got.Findings {
				got.Findings[i].Code = types.Code{}
			}
			for i := range tt.want.Findings {
				tt.want.Findings[i].Code = types.Code{}
			}

			// Compare all findings at once
			assert.Equal(t, tt.want.Findings, got.Findings, "unexpected findings")
		})
	}
}

View File

@@ -0,0 +1,5 @@
This is a test file with CRLF line endings.
AWS Access Key ID: AKIA0123456789ABCDEF
This line has no secrets.
GitHub PAT: ghp_1234567890abcdefghijklmnopqrstuvwxyz12
End of file.

View File

@@ -17,4 +17,5 @@ type SecretFinding struct {
Code Code
Match string
Layer Layer `json:",omitzero"`
Offset int `json:",omitempty"` // Byte offset in the original file
}

View File

@@ -120,6 +120,12 @@ func (r *secretRenderer) renderCode(target string, secret types.DetectedSecret)
}
}
// Add offset information
var offsetInfo string
if secret.Offset > 0 {
offsetInfo = tml.Sprintf(" <dim>(offset: </dim><cyan>%d<dim> bytes)</dim>", secret.Offset)
}
var note string
if c := secret.Layer.CreatedBy; c != "" {
if len(c) > 40 {
@@ -130,7 +136,7 @@ func (r *secretRenderer) renderCode(target string, secret types.DetectedSecret)
} else if secret.Layer.DiffID != "" {
note = fmt.Sprintf(" (added in layer '%s')", strings.TrimPrefix(secret.Layer.DiffID, "sha256:")[:12])
}
r.printf(" <blue>%s%s<magenta>%s\r\n", target, lineInfo, note)
r.printf(" <blue>%s%s%s<magenta>%s\r\n", target, lineInfo, offsetInfo, note)
r.printSingleDivider()
for i, line := range lines {

View File

@@ -166,6 +166,7 @@ func ConvertToRPCSecretFindings(findings []ftypes.SecretFinding) []*common.Secre
Code: ConvertToRPCCode(f.Code),
Match: f.Match,
Layer: ConvertToRPCLayer(f.Layer),
Offset: int32(f.Offset),
})
}
return rpcFindings
@@ -520,6 +521,7 @@ func ConvertFromRPCSecretFindings(rpcFindings []*common.SecretFinding) []ftypes.
EndLine: int(finding.EndLine),
Code: ConvertFromRPCCode(finding.Code),
Match: finding.Match,
Offset: int(finding.Offset),
Layer: ftypes.Layer{
Digest: finding.Layer.Digest,
DiffID: finding.Layer.DiffId,

View File

@@ -298,7 +298,7 @@ func (tt *traceTransport) redactBody(body []byte, contentType string) []byte {
// First, use Trivy's secret scanner for detection
scanResult := tt.secretScanner.Scan(secret.ScanArgs{
FilePath: "http-body.txt",
Content: body,
Content: bytes.NewReader(body),
Binary: false,
})

View File

@@ -1,4 +1,4 @@
// Code generated by protoc-gen-twirp v8.1.0, DO NOT EDIT.
// Code generated by protoc-gen-twirp v8.1.3, DO NOT EDIT.
// source: rpc/cache/service.proto
package cache
@@ -6,7 +6,7 @@ package cache
import context "context"
import fmt "fmt"
import http "net/http"
import ioutil "io/ioutil"
import io "io"
import json "encoding/json"
import strconv "strconv"
import strings "strings"
@@ -20,7 +20,6 @@ import google_protobuf "google.golang.org/protobuf/types/known/emptypb"
import bytes "bytes"
import errors "errors"
import io "io"
import path "path"
import url "net/url"
@@ -67,7 +66,7 @@ func NewCacheProtobufClient(baseURL string, client HTTPClient, opts ...twirp.Cli
o(&clientOpts)
}
// Using ReadOpt allows backwards and forwads compatibility with new options in the future
// Using ReadOpt allows backwards and forwards compatibility with new options in the future
literalURLs := false
_ = clientOpts.ReadOpt("literalURLs", &literalURLs)
var pathPrefix string
@@ -300,7 +299,7 @@ func NewCacheJSONClient(baseURL string, client HTTPClient, opts ...twirp.ClientO
o(&clientOpts)
}
// Using ReadOpt allows backwards and forwads compatibility with new options in the future
// Using ReadOpt allows backwards and forwards compatibility with new options in the future
literalURLs := false
_ = clientOpts.ReadOpt("literalURLs", &literalURLs)
var pathPrefix string
@@ -529,7 +528,7 @@ type cacheServer struct {
func NewCacheServer(svc Cache, opts ...interface{}) TwirpServer {
serverOpts := newServerOpts(opts)
// Using ReadOpt allows backwards and forwads compatibility with new options in the future
// Using ReadOpt allows backwards and forwards compatibility with new options in the future
jsonSkipDefaults := false
_ = serverOpts.ReadOpt("jsonSkipDefaults", &jsonSkipDefaults)
jsonCamelCase := false
@@ -736,7 +735,7 @@ func (s *cacheServer) servePutArtifactProtobuf(ctx context.Context, resp http.Re
return
}
buf, err := ioutil.ReadAll(req.Body)
buf, err := io.ReadAll(req.Body)
if err != nil {
s.handleRequestBodyError(ctx, resp, "failed to read request body", err)
return
@@ -916,7 +915,7 @@ func (s *cacheServer) servePutBlobProtobuf(ctx context.Context, resp http.Respon
return
}
buf, err := ioutil.ReadAll(req.Body)
buf, err := io.ReadAll(req.Body)
if err != nil {
s.handleRequestBodyError(ctx, resp, "failed to read request body", err)
return
@@ -1096,7 +1095,7 @@ func (s *cacheServer) serveMissingBlobsProtobuf(ctx context.Context, resp http.R
return
}
buf, err := ioutil.ReadAll(req.Body)
buf, err := io.ReadAll(req.Body)
if err != nil {
s.handleRequestBodyError(ctx, resp, "failed to read request body", err)
return
@@ -1276,7 +1275,7 @@ func (s *cacheServer) serveDeleteBlobsProtobuf(ctx context.Context, resp http.Re
return
}
buf, err := ioutil.ReadAll(req.Body)
buf, err := io.ReadAll(req.Body)
if err != nil {
s.handleRequestBodyError(ctx, resp, "failed to read request body", err)
return
@@ -1351,7 +1350,7 @@ func (s *cacheServer) ServiceDescriptor() ([]byte, int) {
}
func (s *cacheServer) ProtocGenTwirpVersion() string {
return "v8.1.0"
return "v8.1.3"
}
// PathPrefix returns the base service path, in the form: "/<prefix>/<package>.<Service>/"
@@ -1470,7 +1469,7 @@ func writeError(ctx context.Context, resp http.ResponseWriter, err error, hooks
}
// sanitizeBaseURL parses the the baseURL, and adds the "http" scheme if needed.
// If the URL is unparsable, the baseURL is returned unchaged.
// If the URL is unparsable, the baseURL is returned unchanged.
func sanitizeBaseURL(baseURL string) string {
u, err := url.Parse(baseURL)
if err != nil {
@@ -1544,7 +1543,7 @@ func newRequest(ctx context.Context, url string, reqBody io.Reader, contentType
}
req.Header.Set("Accept", contentType)
req.Header.Set("Content-Type", contentType)
req.Header.Set("Twirp-Version", "v8.1.0")
req.Header.Set("Twirp-Version", "v8.1.3")
return req, nil
}
@@ -1595,7 +1594,7 @@ func errorFromResponse(resp *http.Response) twirp.Error {
return twirpErrorFromIntermediary(statusCode, msg, location)
}
respBodyBytes, err := ioutil.ReadAll(resp.Body)
respBodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
return wrapInternal(err, "failed to read server error response body")
}
@@ -1785,13 +1784,7 @@ func doProtobufRequest(ctx context.Context, client HTTPClient, hooks *twirp.Clie
if err != nil {
return ctx, wrapInternal(err, "failed to do request")
}
defer func() {
cerr := resp.Body.Close()
if err == nil && cerr != nil {
err = wrapInternal(cerr, "failed to close response body")
}
}()
defer func() { _ = resp.Body.Close() }()
if err = ctx.Err(); err != nil {
return ctx, wrapInternal(err, "aborted because context was done")
@@ -1801,7 +1794,7 @@ func doProtobufRequest(ctx context.Context, client HTTPClient, hooks *twirp.Clie
return ctx, errorFromResponse(resp)
}
respBodyBytes, err := ioutil.ReadAll(resp.Body)
respBodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
return ctx, wrapInternal(err, "failed to read response body")
}

View File

@@ -2015,6 +2015,7 @@ type SecretFinding struct {
Code *Code `protobuf:"bytes,7,opt,name=code,proto3" json:"code,omitempty"`
Match string `protobuf:"bytes,8,opt,name=match,proto3" json:"match,omitempty"`
Layer *Layer `protobuf:"bytes,10,opt,name=layer,proto3" json:"layer,omitempty"`
Offset int32 `protobuf:"varint,11,opt,name=offset,proto3" json:"offset,omitempty"` // Byte offset in the original file
}
func (x *SecretFinding) Reset() {
@@ -2112,6 +2113,13 @@ func (x *SecretFinding) GetLayer() *Layer {
return nil
}
func (x *SecretFinding) GetOffset() int32 {
if x != nil {
return x.Offset
}
return 0
}
type Secret struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
@@ -2808,7 +2816,7 @@ var file_rpc_common_service_proto_rawDesc = []byte{
0x61, 0x75, 0x73, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x72, 0x61, 0x77, 0x18, 0x01, 0x20, 0x01, 0x28,
0x09, 0x52, 0x03, 0x72, 0x61, 0x77, 0x12, 0x20, 0x0a, 0x0b, 0x68, 0x69, 0x67, 0x68, 0x6c, 0x69,
0x67, 0x68, 0x74, 0x65, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x68, 0x69, 0x67,
0x68, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x22, 0x9f, 0x02, 0x0a, 0x0d, 0x53, 0x65, 0x63,
0x68, 0x6c, 0x69, 0x67, 0x68, 0x74, 0x65, 0x64, 0x22, 0xb7, 0x02, 0x0a, 0x0d, 0x53, 0x65, 0x63,
0x72, 0x65, 0x74, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x17, 0x0a, 0x07, 0x72, 0x75,
0x6c, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x72, 0x75, 0x6c,
0x65, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18,
@@ -2826,78 +2834,80 @@ var file_rpc_common_service_proto_rawDesc = []byte{
0x28, 0x09, 0x52, 0x05, 0x6d, 0x61, 0x74, 0x63, 0x68, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79,
0x65, 0x72, 0x18, 0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79,
0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c,
0x61, 0x79, 0x65, 0x72, 0x4a, 0x04, 0x08, 0x09, 0x10, 0x0a, 0x22, 0x5d, 0x0a, 0x06, 0x53, 0x65,
0x63, 0x72, 0x65, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68,
0x12, 0x37, 0x0a, 0x08, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03,
0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f,
0x6e, 0x2e, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52,
0x08, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x73, 0x22, 0x99, 0x02, 0x0a, 0x0f, 0x44, 0x65,
0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a,
0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32,
0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53,
0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74,
0x79, 0x12, 0x3e, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20,
0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d,
0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f,
0x72, 0x79, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72,
0x79, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20,
0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09,
0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52,
0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d,
0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a,
0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28,
0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a,
0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e,
0x6b, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78, 0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52,
0x04, 0x74, 0x65, 0x78, 0x74, 0x22, 0xed, 0x01, 0x0a, 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73,
0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x41, 0x0a, 0x0c, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65,
0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x74, 0x72,
0x61, 0x79, 0x65, 0x72, 0x12, 0x16, 0x0a, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x18, 0x0b,
0x20, 0x01, 0x28, 0x05, 0x52, 0x06, 0x6f, 0x66, 0x66, 0x73, 0x65, 0x74, 0x4a, 0x04, 0x08, 0x09,
0x10, 0x0a, 0x22, 0x5d, 0x0a, 0x06, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x1a, 0x0a, 0x08,
0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
0x66, 0x69, 0x6c, 0x65, 0x70, 0x61, 0x74, 0x68, 0x12, 0x37, 0x0a, 0x08, 0x66, 0x69, 0x6e, 0x64,
0x69, 0x6e, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x74, 0x72, 0x69,
0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74,
0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67,
0x73, 0x22, 0x99, 0x02, 0x0a, 0x0f, 0x44, 0x65, 0x74, 0x65, 0x63, 0x74, 0x65, 0x64, 0x4c, 0x69,
0x63, 0x65, 0x6e, 0x73, 0x65, 0x12, 0x32, 0x0a, 0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74,
0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x16, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e,
0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x52,
0x08, 0x73, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x3e, 0x0a, 0x08, 0x63, 0x61, 0x74,
0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x74, 0x72,
0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e,
0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x0b, 0x6c, 0x69, 0x63,
0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65,
0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c,
0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d,
0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65,
0x12, 0x38, 0x0a, 0x08, 0x66, 0x69, 0x6e, 0x67, 0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03,
0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f,
0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67,
0x52, 0x08, 0x66, 0x69, 0x6e, 0x67, 0x69, 0x6e, 0x67, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61,
0x79, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76,
0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05,
0x6c, 0x61, 0x79, 0x65, 0x72, 0x22, 0x98, 0x01, 0x0a, 0x0e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73,
0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x12, 0x3e, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65,
0x67, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x22, 0x2e, 0x74, 0x72, 0x69,
0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52,
0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x19, 0x0a, 0x08, 0x70, 0x6b, 0x67,
0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x70, 0x6b, 0x67,
0x4e, 0x61, 0x6d, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74,
0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74,
0x68, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52,
0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65,
0x6e, 0x63, 0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69,
0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x07, 0x20,
0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x65, 0x78,
0x74, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x65, 0x78, 0x74, 0x22, 0xed, 0x01,
0x0a, 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x41, 0x0a,
0x0c, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20,
0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d,
0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x2e, 0x45,
0x6e, 0x75, 0x6d, 0x52, 0x0b, 0x6c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65,
0x12, 0x1b, 0x0a, 0x09, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x02, 0x20,
0x01, 0x28, 0x09, 0x52, 0x08, 0x66, 0x69, 0x6c, 0x65, 0x50, 0x61, 0x74, 0x68, 0x12, 0x19, 0x0a,
0x08, 0x70, 0x6b, 0x67, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52,
0x07, 0x70, 0x6b, 0x67, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x38, 0x0a, 0x08, 0x66, 0x69, 0x6e, 0x67,
0x69, 0x6e, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x74, 0x72, 0x69,
0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73,
0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x08,
0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65,
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a,
0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02,
0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04,
0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b,
0x22, 0x95, 0x01, 0x0a, 0x0f, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65,
0x67, 0x6f, 0x72, 0x79, 0x22, 0x81, 0x01, 0x0a, 0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a,
0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d,
0x0a, 0x09, 0x46, 0x4f, 0x52, 0x42, 0x49, 0x44, 0x44, 0x45, 0x4e, 0x10, 0x01, 0x12, 0x0e, 0x0a,
0x0a, 0x52, 0x45, 0x53, 0x54, 0x52, 0x49, 0x43, 0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x0e, 0x0a,
0x0a, 0x52, 0x45, 0x43, 0x49, 0x50, 0x52, 0x4f, 0x43, 0x41, 0x4c, 0x10, 0x03, 0x12, 0x0a, 0x0a,
0x06, 0x4e, 0x4f, 0x54, 0x49, 0x43, 0x45, 0x10, 0x04, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x45, 0x52,
0x4d, 0x49, 0x53, 0x53, 0x49, 0x56, 0x45, 0x10, 0x05, 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x4e, 0x45,
0x4e, 0x43, 0x55, 0x4d, 0x42, 0x45, 0x52, 0x45, 0x44, 0x10, 0x06, 0x12, 0x0b, 0x0a, 0x07, 0x55,
0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x07, 0x22, 0x4e, 0x0a, 0x0b, 0x4c, 0x69, 0x63, 0x65,
0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22, 0x3f, 0x0a, 0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12,
0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00,
0x12, 0x08, 0x0a, 0x04, 0x44, 0x50, 0x4b, 0x47, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x48, 0x45,
0x41, 0x44, 0x45, 0x52, 0x10, 0x02, 0x12, 0x10, 0x0a, 0x0c, 0x4c, 0x49, 0x43, 0x45, 0x4e, 0x53,
0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, 0x03, 0x2a, 0x44, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65,
0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10,
0x00, 0x12, 0x07, 0x0a, 0x03, 0x4c, 0x4f, 0x57, 0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x45,
0x44, 0x49, 0x55, 0x4d, 0x10, 0x02, 0x12, 0x08, 0x0a, 0x04, 0x48, 0x49, 0x47, 0x48, 0x10, 0x03,
0x12, 0x0c, 0x0a, 0x08, 0x43, 0x52, 0x49, 0x54, 0x49, 0x43, 0x41, 0x4c, 0x10, 0x04, 0x42, 0x31,
0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x71, 0x75,
0x61, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74, 0x79, 0x2f, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2f,
0x72, 0x70, 0x63, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x3b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f,
0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67, 0x52, 0x08, 0x66, 0x69, 0x6e, 0x67, 0x69, 0x6e,
0x67, 0x73, 0x12, 0x29, 0x0a, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x18, 0x05, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x13, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e,
0x2e, 0x4c, 0x61, 0x79, 0x65, 0x72, 0x52, 0x05, 0x6c, 0x61, 0x79, 0x65, 0x72, 0x22, 0x98, 0x01,
0x0a, 0x0e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x46, 0x69, 0x6e, 0x64, 0x69, 0x6e, 0x67,
0x12, 0x3e, 0x0a, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x18, 0x01, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x22, 0x2e, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f,
0x6e, 0x2e, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72,
0x79, 0x2e, 0x45, 0x6e, 0x75, 0x6d, 0x52, 0x08, 0x63, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79,
0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04,
0x6e, 0x61, 0x6d, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64, 0x65, 0x6e,
0x63, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0a, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x64,
0x65, 0x6e, 0x63, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x18, 0x04, 0x20, 0x01,
0x28, 0x09, 0x52, 0x04, 0x6c, 0x69, 0x6e, 0x6b, 0x22, 0x95, 0x01, 0x0a, 0x0f, 0x4c, 0x69, 0x63,
0x65, 0x6e, 0x73, 0x65, 0x43, 0x61, 0x74, 0x65, 0x67, 0x6f, 0x72, 0x79, 0x22, 0x81, 0x01, 0x0a,
0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49,
0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0d, 0x0a, 0x09, 0x46, 0x4f, 0x52, 0x42, 0x49, 0x44,
0x44, 0x45, 0x4e, 0x10, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x53, 0x54, 0x52, 0x49, 0x43,
0x54, 0x45, 0x44, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x52, 0x45, 0x43, 0x49, 0x50, 0x52, 0x4f,
0x43, 0x41, 0x4c, 0x10, 0x03, 0x12, 0x0a, 0x0a, 0x06, 0x4e, 0x4f, 0x54, 0x49, 0x43, 0x45, 0x10,
0x04, 0x12, 0x0e, 0x0a, 0x0a, 0x50, 0x45, 0x52, 0x4d, 0x49, 0x53, 0x53, 0x49, 0x56, 0x45, 0x10,
0x05, 0x12, 0x10, 0x0a, 0x0c, 0x55, 0x4e, 0x45, 0x4e, 0x43, 0x55, 0x4d, 0x42, 0x45, 0x52, 0x45,
0x44, 0x10, 0x06, 0x12, 0x0b, 0x0a, 0x07, 0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x07,
0x22, 0x4e, 0x0a, 0x0b, 0x4c, 0x69, 0x63, 0x65, 0x6e, 0x73, 0x65, 0x54, 0x79, 0x70, 0x65, 0x22,
0x3f, 0x0a, 0x04, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x0f, 0x0a, 0x0b, 0x55, 0x4e, 0x53, 0x50, 0x45,
0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x08, 0x0a, 0x04, 0x44, 0x50, 0x4b, 0x47,
0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x48, 0x45, 0x41, 0x44, 0x45, 0x52, 0x10, 0x02, 0x12, 0x10,
0x0a, 0x0c, 0x4c, 0x49, 0x43, 0x45, 0x4e, 0x53, 0x45, 0x5f, 0x46, 0x49, 0x4c, 0x45, 0x10, 0x03,
0x2a, 0x44, 0x0a, 0x08, 0x53, 0x65, 0x76, 0x65, 0x72, 0x69, 0x74, 0x79, 0x12, 0x0b, 0x0a, 0x07,
0x55, 0x4e, 0x4b, 0x4e, 0x4f, 0x57, 0x4e, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x4c, 0x4f, 0x57,
0x10, 0x01, 0x12, 0x0a, 0x0a, 0x06, 0x4d, 0x45, 0x44, 0x49, 0x55, 0x4d, 0x10, 0x02, 0x12, 0x08,
0x0a, 0x04, 0x48, 0x49, 0x47, 0x48, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x52, 0x49, 0x54,
0x49, 0x43, 0x41, 0x4c, 0x10, 0x04, 0x42, 0x31, 0x5a, 0x2f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x61, 0x71, 0x75, 0x61, 0x73, 0x65, 0x63, 0x75, 0x72, 0x69, 0x74,
0x79, 0x2f, 0x74, 0x72, 0x69, 0x76, 0x79, 0x2f, 0x72, 0x70, 0x63, 0x2f, 0x63, 0x6f, 0x6d, 0x6d,
0x6f, 0x6e, 0x3b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x33,
}
var (

View File

@@ -221,6 +221,7 @@ message SecretFinding {
Code code = 7;
string match = 8;
Layer layer = 10;
int32 offset = 11;
reserved 9; // deprecated 'deleted'
}

View File

@@ -1,4 +1,4 @@
// Code generated by protoc-gen-twirp v8.1.0, DO NOT EDIT.
// Code generated by protoc-gen-twirp v8.1.3, DO NOT EDIT.
// source: rpc/scanner/service.proto
package scanner
@@ -6,7 +6,7 @@ package scanner
import context "context"
import fmt "fmt"
import http "net/http"
import ioutil "io/ioutil"
import io "io"
import json "encoding/json"
import strconv "strconv"
import strings "strings"
@@ -18,7 +18,6 @@ import ctxsetters "github.com/twitchtv/twirp/ctxsetters"
import bytes "bytes"
import errors "errors"
import io "io"
import path "path"
import url "net/url"
@@ -59,7 +58,7 @@ func NewScannerProtobufClient(baseURL string, client HTTPClient, opts ...twirp.C
o(&clientOpts)
}
// Using ReadOpt allows backwards and forwads compatibility with new options in the future
// Using ReadOpt allows backwards and forwards compatibility with new options in the future
literalURLs := false
_ = clientOpts.ReadOpt("literalURLs", &literalURLs)
var pathPrefix string
@@ -151,7 +150,7 @@ func NewScannerJSONClient(baseURL string, client HTTPClient, opts ...twirp.Clien
o(&clientOpts)
}
// Using ReadOpt allows backwards and forwads compatibility with new options in the future
// Using ReadOpt allows backwards and forwards compatibility with new options in the future
literalURLs := false
_ = clientOpts.ReadOpt("literalURLs", &literalURLs)
var pathPrefix string
@@ -239,7 +238,7 @@ type scannerServer struct {
func NewScannerServer(svc Scanner, opts ...interface{}) TwirpServer {
serverOpts := newServerOpts(opts)
// Using ReadOpt allows backwards and forwads compatibility with new options in the future
// Using ReadOpt allows backwards and forwards compatibility with new options in the future
jsonSkipDefaults := false
_ = serverOpts.ReadOpt("jsonSkipDefaults", &jsonSkipDefaults)
jsonCamelCase := false
@@ -437,7 +436,7 @@ func (s *scannerServer) serveScanProtobuf(ctx context.Context, resp http.Respons
return
}
buf, err := ioutil.ReadAll(req.Body)
buf, err := io.ReadAll(req.Body)
if err != nil {
s.handleRequestBodyError(ctx, resp, "failed to read request body", err)
return
@@ -512,7 +511,7 @@ func (s *scannerServer) ServiceDescriptor() ([]byte, int) {
}
func (s *scannerServer) ProtocGenTwirpVersion() string {
return "v8.1.0"
return "v8.1.3"
}
// PathPrefix returns the base service path, in the form: "/<prefix>/<package>.<Service>/"
@@ -631,7 +630,7 @@ func writeError(ctx context.Context, resp http.ResponseWriter, err error, hooks
}
// sanitizeBaseURL parses the the baseURL, and adds the "http" scheme if needed.
// If the URL is unparsable, the baseURL is returned unchaged.
// If the URL is unparsable, the baseURL is returned unchanged.
func sanitizeBaseURL(baseURL string) string {
u, err := url.Parse(baseURL)
if err != nil {
@@ -705,7 +704,7 @@ func newRequest(ctx context.Context, url string, reqBody io.Reader, contentType
}
req.Header.Set("Accept", contentType)
req.Header.Set("Content-Type", contentType)
req.Header.Set("Twirp-Version", "v8.1.0")
req.Header.Set("Twirp-Version", "v8.1.3")
return req, nil
}
@@ -756,7 +755,7 @@ func errorFromResponse(resp *http.Response) twirp.Error {
return twirpErrorFromIntermediary(statusCode, msg, location)
}
respBodyBytes, err := ioutil.ReadAll(resp.Body)
respBodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
return wrapInternal(err, "failed to read server error response body")
}
@@ -946,13 +945,7 @@ func doProtobufRequest(ctx context.Context, client HTTPClient, hooks *twirp.Clie
if err != nil {
return ctx, wrapInternal(err, "failed to do request")
}
defer func() {
cerr := resp.Body.Close()
if err == nil && cerr != nil {
err = wrapInternal(cerr, "failed to close response body")
}
}()
defer func() { _ = resp.Body.Close() }()
if err = ctx.Err(); err != nil {
return ctx, wrapInternal(err, "aborted because context was done")
@@ -962,7 +955,7 @@ func doProtobufRequest(ctx context.Context, client HTTPClient, hooks *twirp.Clie
return ctx, errorFromResponse(resp)
}
respBodyBytes, err := ioutil.ReadAll(resp.Body)
respBodyBytes, err := io.ReadAll(resp.Body)
if err != nil {
return ctx, wrapInternal(err, "failed to read response body")
}