fix: early-return, indent-error-flow and superfluous-else rules from revive (#8796)

Signed-off-by: Matthieu MOREL <matthieu.morel35@gmail.com>
This commit is contained in:
Matthieu MOREL
2025-04-30 08:24:09 +02:00
committed by GitHub
parent 7a58ccbc7f
commit 43350dd9b4
34 changed files with 111 additions and 136 deletions

View File

@@ -1,3 +1,7 @@
issues:
max-issues-per-linter: 0
max-same-issues: 0
linters:
settings:
depguard:
@@ -83,6 +87,19 @@ linters:
sprintf1: false
# Optimizes into strings concatenation.
strconcat: false
revive:
max-open-files: 2048
# https://github.com/mgechev/revive/blob/HEAD/RULES_DESCRIPTIONS.md
rules:
- name: early-return
arguments:
- preserve-scope
- name: indent-error-flow
arguments:
- preserve-scope
- name: superfluous-else
arguments:
- preserve-scope
testifylint:
enable-all: true
@@ -100,7 +117,7 @@ linters:
- ineffassign
- misspell
- perfsprint
# - revive # FIXME more than 50 new issues
- revive
- testifylint
- unconvert
- unused
@@ -151,9 +168,6 @@ run:
go: '1.24'
timeout: 30m
issues:
max-same-issues: 0
formatters:
enable:
- gci

View File

@@ -43,12 +43,12 @@ func init() {
}
func version() (string, error) {
if ver, err := sh.Output("git", "describe", "--tags", "--always"); err != nil {
ver, err := sh.Output("git", "describe", "--tags", "--always")
if err != nil {
return "", err
} else {
// Strips the v prefix from the tag
return strings.TrimPrefix(ver, "v"), nil
}
// Strips the v prefix from the tag
return strings.TrimPrefix(ver, "v"), nil
}
func buildLdflags() (string, error) {

View File

@@ -294,9 +294,8 @@ func TestFSCache_PutBlob(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr, tt.name)
return
} else {
require.NoError(t, err, tt.name)
}
require.NoError(t, err, tt.name)
fs.db.View(func(tx *bolt.Tx) error {
layerBucket := tx.Bucket([]byte(blobBucket))
@@ -373,9 +372,8 @@ func TestFSCache_PutArtifact(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr, tt.name)
return
} else {
require.NoError(t, err, tt.name)
}
require.NoError(t, err, tt.name)
err = fs.db.View(func(tx *bolt.Tx) error {
// check decompressedDigestBucket

View File

@@ -154,9 +154,8 @@ func TestRemoteCache_PutArtifact(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr, tt.name)
return
} else {
require.NoError(t, err, tt.name)
}
require.NoError(t, err, tt.name)
})
}
}
@@ -218,9 +217,8 @@ func TestRemoteCache_PutBlob(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr, tt.name)
return
} else {
require.NoError(t, err, tt.name)
}
require.NoError(t, err, tt.name)
})
}
}
@@ -299,10 +297,8 @@ func TestRemoteCache_MissingBlobs(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr, tt.name)
return
} else {
require.NoError(t, err, tt.name)
}
require.NoError(t, err, tt.name)
assert.Equal(t, tt.wantMissingImage, gotMissingImage)
assert.Equal(t, tt.wantMissingLayerIDs, gotMissingLayerIDs)
})

View File

@@ -231,9 +231,8 @@ func NewRootCommand(globalFlags *flag.GlobalFlagGroup) *cobra.Command {
if opts.ShowVersion {
// Customize version output
return showVersion(opts.CacheDir, versionFormat, cmd.OutOrStdout())
} else {
return cmd.Help()
}
return cmd.Help()
},
}

View File

@@ -78,11 +78,11 @@ func compat(r *types.Report) {
if vuln.PkgIdentifier.UID != "" {
continue
}
if pkg, ok := pkgs[vuln.PkgID+vuln.PkgPath]; !ok {
pkg, ok := pkgs[vuln.PkgID+vuln.PkgPath]
if !ok {
continue
} else {
r.Results[i].Vulnerabilities[j].PkgIdentifier = pkg.Identifier
}
r.Results[i].Vulnerabilities[j].PkgIdentifier = pkg.Identifier
}
}
}

View File

@@ -128,11 +128,10 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
if lock.GraphLock.Nodes != nil {
p.logger.Debug("Handling conan lockfile as v1.x")
return p.parseV1(lock)
} else {
// try to parse requirements as conan v2.x
p.logger.Debug("Handling conan lockfile as v2.x")
return p.parseV2(lock)
}
// try to parse requirements as conan v2.x
p.logger.Debug("Handling conan lockfile as v2.x")
return p.parseV2(lock)
}
func parsePackage(text string) (string, string, error) {

View File

@@ -428,7 +428,8 @@ func (p *Parser) mergeDependencyManagements(depManagements ...[]pomDependency) [
}
func (p *Parser) parseDependencies(deps []pomDependency, props map[string]string, depManagement []pomDependency,
opts analysisOptions) []artifact {
opts analysisOptions,
) []artifact {
// Imported POMs often have no dependencies, so dependencyManagement resolution can be skipped.
if len(deps) == 0 {
return nil
@@ -549,28 +550,25 @@ func (p *Parser) retrieveParent(currentPath, relativePath string, target artifac
// Try relativePath
if relativePath != "" {
pom, err := p.tryRelativePath(target, currentPath, relativePath)
if err != nil {
errs = multierror.Append(errs, err)
} else {
if err == nil {
return pom, nil
}
errs = multierror.Append(errs, err)
}
// If not found, search the parent directory
pom, err := p.tryRelativePath(target, currentPath, "../pom.xml")
if err != nil {
errs = multierror.Append(errs, err)
} else {
if err == nil {
return pom, nil
}
errs = multierror.Append(errs, err)
// If not found, search local/remote remoteRepositories
pom, err = p.tryRepository(target.GroupID, target.ArtifactID, target.Version.String())
if err != nil {
errs = multierror.Append(errs, err)
} else {
if err == nil {
return pom, nil
}
errs = multierror.Append(errs, err)
// Reaching here means the POM wasn't found
return nil, errs
@@ -640,6 +638,7 @@ func (p *Parser) openPom(filePath string) (*pom, error) {
content: content,
}, nil
}
func (p *Parser) tryRepository(groupID, artifactID, version string) (*pom, error) {
if version == "" {
return nil, xerrors.Errorf("Version missing for %s:%s", groupID, artifactID)

View File

@@ -255,12 +255,12 @@ func (p *Parser) parseLockfileVersion(lockFile LockFile) float64 {
return v
// v6+
case string:
if lockVer, err := strconv.ParseFloat(v, 64); err != nil {
lockVer, err := strconv.ParseFloat(v, 64)
if err != nil {
p.logger.Debug("Unable to convert the lock file version to float", log.Err(err))
return -1
} else {
return lockVer
}
return lockVer
default:
p.logger.Debug("Unknown type for the lock file version",
log.Any("version", lockFile.LockfileVersion))

View File

@@ -221,11 +221,10 @@ func (p *Parser) parseBlock(block []byte, lineNum int) (lib Library, deps []stri
continue
}
continue
} else {
lib.Patterns = patterns
lib.Name = name
continue
}
lib.Patterns = patterns
lib.Name = name
continue
}
}
@@ -251,23 +250,23 @@ func (p *Parser) parseBlock(block []byte, lineNum int) (lib Library, deps []stri
func parseDependencies(scanner *LineScanner) (deps []string) {
for scanner.Scan() {
line := scanner.Text()
if dep, err := parseDependency(line); err != nil {
dep, err := parseDependency(line)
if err != nil {
// finished dependencies block
return deps
} else {
deps = append(deps, dep)
}
deps = append(deps, dep)
}
return
}
func parseDependency(line string) (string, error) {
if name, version, err := getDependency(line); err != nil {
name, version, err := getDependency(line)
if err != nil {
return "", err
} else {
return packageID(name, version), nil
}
return packageID(name, version), nil
}
func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependency, map[string][]string, error) {

View File

@@ -111,11 +111,10 @@ func (p *Parser) Parse(r xio.ReadSeekerAt) ([]ftypes.Package, []ftypes.Dependenc
func countLeadingSpace(line string) int {
i := 0
for _, runeValue := range line {
if runeValue == ' ' {
i++
} else {
if runeValue != ' ' {
break
}
i++
}
return i
}

View File

@@ -121,9 +121,8 @@ func (p *Parser) parseDependencies(pkgId string, pkg cargoPkg, pkgs map[string]c
ID: pkgId,
DependsOn: dependOn,
}
} else {
return nil
}
return nil
}
func packageID(name, version string) string {

View File

@@ -344,10 +344,8 @@ func TestScanner_Detect(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr)
return
} else {
require.NoError(t, err)
}
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -235,9 +235,8 @@ func (r *AnalysisResult) Sort() {
sort.Slice(r.Misconfigurations, func(i, j int) bool {
if r.Misconfigurations[i].FileType != r.Misconfigurations[j].FileType {
return r.Misconfigurations[i].FileType < r.Misconfigurations[j].FileType
} else {
return r.Misconfigurations[i].FilePath < r.Misconfigurations[j].FilePath
}
return r.Misconfigurations[i].FilePath < r.Misconfigurations[j].FilePath
})
// Secrets
@@ -258,9 +257,8 @@ func (r *AnalysisResult) Sort() {
if r.Licenses[i].Type == r.Licenses[j].Type {
if r.Licenses[i].FilePath == r.Licenses[j].FilePath {
return r.Licenses[i].Layer.DiffID < r.Licenses[j].Layer.DiffID
} else {
return r.Licenses[i].FilePath < r.Licenses[j].FilePath
}
return r.Licenses[i].FilePath < r.Licenses[j].FilePath
}
return r.Licenses[i].Type < r.Licenses[j].Type

View File

@@ -165,21 +165,21 @@ func (a *gomodAnalyzer) fillAdditionalData(apps []types.Application) error {
}
// Collect dependencies of the direct dependency
if dep, err := a.collectDeps(modDir, lib.ID); err != nil {
dep, err := a.collectDeps(modDir, lib.ID)
if err != nil {
return xerrors.Errorf("dependency graph error: %w", err)
} else if dep.ID == "" {
// go.mod not found
continue
} else {
// Filter out unused dependencies and convert module names to module IDs
apps[i].Packages[j].DependsOn = lo.FilterMap(dep.DependsOn, func(modName string, _ int) (string, bool) {
if m, ok := usedPkgs[modName]; !ok {
return "", false
} else {
return m.ID, true
}
})
}
// Filter out unused dependencies and convert module names to module IDs
apps[i].Packages[j].DependsOn = lo.FilterMap(dep.DependsOn, func(modName string, _ int) (string, bool) {
m, ok := usedPkgs[modName]
if !ok {
return "", false
}
return m.ID, true
})
}
}
return nil

View File

@@ -96,10 +96,8 @@ func Test_amazonlinuxOSAnalyzer_Analyze(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr)
return
} else {
require.NoError(t, err)
}
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -60,9 +60,8 @@ func Test_debianOSAnalyzer_Analyze(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr)
return
} else {
require.NoError(t, err)
}
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -46,9 +46,8 @@ func Test_centosOSAnalyzer_Analyze(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr)
return
} else {
require.NoError(t, err)
}
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -46,9 +46,8 @@ func Test_fedoraOSAnalyzer_Analyze(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr)
return
} else {
require.NoError(t, err)
}
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -46,9 +46,8 @@ func Test_oracleOSAnalyzer_Analyze(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr)
return
} else {
require.NoError(t, err)
}
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -46,9 +46,8 @@ func Test_redhatOSAnalyzer_Analyze(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr)
return
} else {
require.NoError(t, err)
}
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -46,9 +46,8 @@ func Test_ubuntuOSAnalyzer_Analyze(t *testing.T) {
if tt.wantErr != "" {
require.ErrorContains(t, err, tt.wantErr)
return
} else {
require.NoError(t, err)
}
require.NoError(t, err)
assert.Equal(t, tt.want, got)
})
}

View File

@@ -99,7 +99,6 @@ func hasVersioning(r *parser.Resource) iacTypes.BoolValue {
versioningEnabled := false
if versioningProp.EqualTo("Enabled") {
versioningEnabled = true
}
return iacTypes.Bool(versioningEnabled, versioningProp.Metadata())
}
@@ -151,17 +150,16 @@ func getLifecycle(resource *parser.Resource) []s3.Rules {
}
func getWebsite(r *parser.Resource) *s3.Website {
if block := r.GetProperty("WebsiteConfiguration"); block.IsNil() {
block := r.GetProperty("WebsiteConfiguration")
if block.IsNil() {
return nil
} else {
return &s3.Website{
Metadata: block.Metadata(),
}
}
return &s3.Website{
Metadata: block.Metadata(),
}
}
func getBucketPolicies(fctx parser.FileContext, r *parser.Resource) []iam.Policy {
var policies []iam.Policy
for _, bucketPolicy := range fctx.GetResourcesByType("AWS::S3::BucketPolicy") {
bucket := bucketPolicy.GetStringProperty("Bucket")

View File

@@ -63,11 +63,10 @@ func parseISO8601(from string) (Iso8601Duration, error) {
var match []string
var d Iso8601Duration
if pattern.MatchString(from) {
match = pattern.FindStringSubmatch(from)
} else {
if !pattern.MatchString(from) {
return d, errors.New("could not parse duration string")
}
match = pattern.FindStringSubmatch(from)
for i, name := range pattern.SubexpNames() {
part := match[i]

View File

@@ -37,9 +37,9 @@ func ResolveFindInMap(property *Property) (resolved *Property, success bool) {
mapValues := k.(map[string]any)
if prop, ok := mapValues[secondaryLevelKey]; !ok {
prop, ok := mapValues[secondaryLevelKey]
if !ok {
return abortIntrinsic(property, "could not find a value for %s in %s, returning original Property", secondaryLevelKey, topLevelKey)
} else {
return property.deriveResolved(cftypes.String, prop), true
}
return property.deriveResolved(cftypes.String, prop), true
}

View File

@@ -34,7 +34,6 @@ func ResolveIf(property *Property) (resolved *Property, success bool) {
if conditionMet {
return trueState, true
} else {
return falseState, true
}
return falseState, true
}

View File

@@ -19,12 +19,10 @@ func ResolveSelect(property *Property) (resolved *Property, success bool) {
list := refValue[1]
if index.IsNotInt() {
if index.IsConvertableTo(cftypes.Int) {
//
index = index.ConvertTo(cftypes.Int)
} else {
if !index.IsConvertableTo(cftypes.Int) {
return abortIntrinsic(property, "index on property [%s] should be an int, returning original Property", property.name)
}
index = index.ConvertTo(cftypes.Int)
}
if list.IsNotList() {

View File

@@ -178,7 +178,6 @@ func (p *Property) RawValue() any {
}
func (p *Property) AsRawStrings() ([]string, error) {
if len(p.ctx.lines) < p.rng.GetEndLine() {
return p.ctx.lines, nil
}
@@ -269,7 +268,6 @@ func (p *Property) IntDefault(defaultValue int) iacTypes.IntValue {
}
func (p *Property) GetProperty(path string) *Property {
pathParts := strings.Split(path, ".")
first := pathParts[0]
@@ -298,9 +296,8 @@ func (p *Property) GetProperty(path string) *Property {
if nestedProperty.isFunction() {
resolved, _ := nestedProperty.resolveValue()
return resolved
} else {
return nestedProperty
}
return nestedProperty
}
return &Property{}
@@ -390,7 +387,6 @@ func (p *Property) setLogicalResource(id string) {
subProp.setLogicalResource(id)
}
}
}
func (p *Property) GetJsonBytes(squashList ...bool) []byte {

View File

@@ -108,7 +108,6 @@ func readFile(filePath string) ([]byte, error) {
return nil, err
}
return data.Bytes(), err
} else {
return os.ReadFile(filePath)
}
return os.ReadFile(filePath)
}

View File

@@ -35,7 +35,8 @@ type Block struct {
}
func NewBlock(hclBlock *hcl.Block, ctx *context.Context, moduleBlock *Block, parentBlock *Block, moduleSource string,
moduleFS fs.FS, index ...cty.Value) *Block {
moduleFS fs.FS, index ...cty.Value,
) *Block {
if ctx == nil {
ctx = context.NewContext(&hcl.EvalContext{}, nil)
}
@@ -317,7 +318,6 @@ func (b *Block) GetAttribute(name string) *Attribute {
// Supports special paths like "count.index," "each.key," and "each.value."
// The path may contain indices, keys and dots (used as separators).
func (b *Block) GetValueByPath(path string) cty.Value {
if path == "count.index" || path == "each.key" || path == "each.value" {
return b.Context().GetByDot(path)
}
@@ -428,18 +428,17 @@ func getValueByPath(val cty.Value, path []string) (cty.Value, error) {
}
func (b *Block) GetNestedAttribute(name string) (*Attribute, *Block) {
parts := strings.Split(name, ".")
blocks := parts[:len(parts)-1]
attrName := parts[len(parts)-1]
working := b
for _, subBlock := range blocks {
if checkBlock := working.GetBlock(subBlock); checkBlock == nil {
checkBlock := working.GetBlock(subBlock)
if checkBlock == nil {
return nil, working
} else {
working = checkBlock
}
working = checkBlock
}
if working != nil {
@@ -472,7 +471,6 @@ func (b *Block) FullLocalName() string {
}
func (b *Block) FullName() string {
if b.moduleBlock != nil {
return fmt.Sprintf(
"%s.%s",

View File

@@ -131,11 +131,11 @@ func (a *Artifact) Download(ctx context.Context, dir string, opt DownloadOption)
// Take the file name of the first layer if not specified
fileName := opt.Filename
if fileName == "" {
if v, ok := manifest.Layers[0].Annotations[titleAnnotation]; !ok {
v, ok := manifest.Layers[0].Annotations[titleAnnotation]
if !ok {
return xerrors.Errorf("annotation %s is missing", titleAnnotation)
} else {
fileName = v
}
fileName = v
}
layerMediaType, err := layer.MediaType()

View File

@@ -332,12 +332,12 @@ func NewServer(t *testing.T) *Server {
resEntries := models.LogEntry{}
for _, uuid := range params.EntryUUIDs {
if e, ok := entries[uuid]; !ok {
e, ok := entries[uuid]
if !ok {
http.Error(w, "no such uuid", http.StatusNotFound)
return
} else {
resEntries[uuid] = e
}
resEntries[uuid] = e
}
w.Header().Set("Content-Type", "application/json")
err = json.NewEncoder(w).Encode([]models.LogEntry{resEntries})

View File

@@ -114,13 +114,13 @@ func (sw *SarifWriter) addSarifResult(data *sarifData) {
}
func getRuleIndex(id string, indexes map[string]int) int {
if i, ok := indexes[id]; ok {
i, ok := indexes[id]
if ok {
return i
} else {
l := len(indexes)
indexes[id] = l
return l
}
l := len(indexes)
indexes[id] = l
return l
}
func (sw *SarifWriter) Write(ctx context.Context, report types.Report) error {
@@ -332,7 +332,7 @@ func ToPathUri(input string, resultClass types.ResultClass) string {
if resultClass != types.ClassOSPkg {
return input
}
var matches = pathRegex.FindStringSubmatch(input)
matches := pathRegex.FindStringSubmatch(input)
if matches != nil {
input = matches[pathRegex.SubexpIndex("path")]
}

View File

@@ -30,13 +30,11 @@ func NewDocument(filePath string, report *types.Report) (VEX, error) {
v, errs := decodeVEX(f, filePath, report)
if errs != nil {
return nil, xerrors.Errorf("unable to load VEX from file: %w", errs)
} else {
return v, nil
}
return v, nil
}
func decodeVEX(r io.ReadSeeker, source string, report *types.Report) (VEX, error) {
var errs error
// Try CycloneDX JSON
if ok, err := sbom.IsCycloneDXJSON(r); err != nil {