https://github.com/mandiant/capa.git
mypy

@@ -55,7 +55,7 @@ def extract_file_arch(pe: dnfile.dnPE, **kwargs) -> Iterator[Tuple[Feature, Addr


 def extract_file_features(pe: dnfile.dnPE) -> Iterator[Tuple[Feature, Address]]:
     for file_handler in FILE_HANDLERS:
-        for feature, address in file_handler(pe=pe):  # type: ignore
+        for feature, address in file_handler(pe=pe):
             yield feature, address
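
Note: the only change above is dropping a `# type: ignore` that is no longer needed, presumably because the handler call now type-checks on its own. A minimal sketch of that idea, using stand-in names rather than capa's actual definitions: once the handler list carries a precise callable type, mypy can check both the keyword call and the unpacking without an ignore comment.

from typing import Callable, Iterator, List, Tuple

Feature = str  # stand-ins for capa's Feature / Address types
Address = int

# a precisely typed handler list lets mypy verify the call and the iteration below
FileHandler = Callable[..., Iterator[Tuple[Feature, Address]]]
FILE_HANDLERS: List[FileHandler] = []


def extract_file_features(pe: object) -> Iterator[Tuple[Feature, Address]]:
    for file_handler in FILE_HANDLERS:
        for feature, address in file_handler(pe=pe):
            yield feature, address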

@@ -11,6 +11,7 @@ from pathlib import Path

 import dnfile
 import pefile
+from dnfile.types import DnType

 import capa.features.extractors.helpers
 from capa.features.file import Import, FunctionName

@@ -33,7 +34,6 @@ from capa.features.common import (
 from capa.features.address import NO_ADDRESS, Address, DNTokenAddress
 from capa.features.extractors.base_extractor import SampleHashes, StaticFeatureExtractor
 from capa.features.extractors.dnfile.helpers import (
-    DnType,
     iter_dotnet_table,
     is_dotnet_mixed_mode,
     get_dotnet_managed_imports,

@@ -34,7 +34,7 @@ def find_embedded_pe(block_bytez: bytes, mz_xor: List[Tuple[bytes, bytes, int]])
         for match in re.finditer(re.escape(mzx), block_bytez):
             todo.append((match.start(), mzx, pex, i))

-    seg_max = len(block_bytez)  # type: ignore [name-defined] # noqa: F821
+    seg_max = len(block_bytez)  # noqa: F821
     while len(todo):
         off, mzx, pex, i = todo.pop()

@@ -140,7 +140,7 @@ def is_printable_ascii(chars: bytes) -> bool:


 def is_printable_utf16le(chars: bytes) -> bool:
-    if all(c == b"\x00" for c in chars[1::2]):
+    if all(c == 0x0 for c in chars[1::2]):
         return is_printable_ascii(chars[::2])
     return False
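
Note: this hunk is a behavior fix that strict type checking surfaces. Iterating over a `bytes` object yields `int` values, so comparing each element against the bytes literal `b"\x00"` can never be true. A small standalone illustration:

# iterating bytes yields ints, not length-1 bytes objects
data = "ab".encode("utf-16le")                # b"a\x00b\x00"
print(list(data))                             # [97, 0, 98, 0]
print(all(c == b"\x00" for c in data[1::2]))  # False: an int never equals a bytes object
print(all(c == 0x0 for c in data[1::2]))      # True: int compared with int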

@@ -823,7 +823,7 @@ def get_file_extractors(sample: Path, format_: str) -> List[FeatureExtractor]:
         file_extractors.append(capa.features.extractors.pefile.PefileFeatureExtractor(sample))
         file_extractors.append(capa.features.extractors.dnfile_.DnfileFeatureExtractor(sample))

-    elif format_ == capa.features.extractors.common.FORMAT_ELF:
+    elif format_ == capa.features.common.FORMAT_ELF:
         file_extractors.append(capa.features.extractors.elffile.ElfFeatureExtractor(sample))

     elif format_ == FORMAT_CAPE:

@@ -1462,7 +1462,7 @@ def main(argv: Optional[List[str]] = None):
         # during the load of the RuleSet, we extract subscope statements into their own rules
         # that are subsequently `match`ed upon. this inflates the total rule count.
         # so, filter out the subscope rules when reporting total number of loaded rules.
-        len(list(filter(lambda r: not r.is_subscope_rule(), rules.rules.values()))),
+        len(list(filter(lambda r: not (r.is_subscope_rule()), rules.rules.values()))),
     )

     if args.tag:
         rules = rules.filter_rules_by_meta(args.tag)
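
Note: the surrounding comment carries the reasoning here: subscope statements are split out into their own rules at load time, so the raw rule count overstates what the user actually loaded. A toy version of the reported count, with a stand-in Rule class and a made-up subscope rule name:

class Rule:
    def __init__(self, name: str, is_subscope: bool):
        self.name = name
        self._is_subscope = is_subscope

    def is_subscope_rule(self) -> bool:
        return self._is_subscope


rules = {
    "create process": Rule("create process", False),
    "create process/subscope-0": Rule("create process/subscope-0", True),
}
# only non-subscope rules are counted when reporting how many rules were loaded
print(len(list(filter(lambda r: not (r.is_subscope_rule()), rules.values()))))  # 1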

@@ -215,7 +215,7 @@ def statement_from_capa(node: capa.engine.Statement) -> Statement:
             description=node.description,
             min=node.min,
             max=node.max,
-            child=frz.feature_from_capa(node.child),
+            child=frzf.feature_from_capa(node.child),
         )

     elif isinstance(node, capa.engine.Subscope):

@@ -241,7 +241,7 @@ def node_from_capa(node: Union[capa.engine.Statement, capa.engine.Feature]) -> N
         return StatementNode(statement=statement_from_capa(node))

     elif isinstance(node, capa.engine.Feature):
-        return FeatureNode(feature=frz.feature_from_capa(node))
+        return FeatureNode(feature=frzf.feature_from_capa(node))

     else:
         assert_never(node)
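
Note: both freeze hunks swap `frz.feature_from_capa` for `frzf.feature_from_capa`. Judging from the test change at the end of this commit, `feature_from_capa` is defined in `capa.features.freeze.features`, so `frzf` is presumably the alias for that submodule and the old name only resolved through a re-export. A hedged sketch of the aliasing this implies; the exact import lines are assumptions, not copied from capa:

# assumed import aliases; capa's own import statements may differ
import capa.features.freeze as frz            # parent package
import capa.features.freeze.features as frzf  # submodule that defines feature_from_capa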

@@ -322,7 +322,7 @@ def ensure_feature_valid_for_scopes(scopes: Scopes, feature: Union[Feature, Stat
     # features of this scope that are not Characteristics will be Type instances.
     # check that the given feature is one of these types.
     types_for_scope = filter(lambda t: isinstance(t, type), supported_features)
-    if not isinstance(feature, tuple(types_for_scope)):  # type: ignore
+    if not isinstance(feature, tuple(types_for_scope)):
         raise InvalidRule(f"feature {feature} not supported for scopes {scopes}")
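
Note: as the in-line comments say, the supported-feature set mixes classes with concrete Characteristic values, and isinstance() accepts a tuple of classes, so the non-classes are filtered out first. A minimal illustration with stand-in values in place of capa's feature types:

# stand-ins: two classes plus one instance mixed into the supported set
supported_features = [str, int, "a-characteristic-value"]

types_for_scope = filter(lambda t: isinstance(t, type), supported_features)
print(isinstance(42, tuple(types_for_scope)))  # True: 42 is an int, one of the supported classes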

@@ -990,7 +990,7 @@ class Rule:

         # leave quotes unchanged.
         # manually verified this property exists, even if mypy complains.
-        y.preserve_quotes = True  # type: ignore
+        y.preserve_quotes = True

         # indent lists by two spaces below their parent
         #

@@ -1002,7 +1002,7 @@ class Rule:

         # avoid word wrapping
         # manually verified this property exists, even if mypy complains.
-        y.width = 4096  # type: ignore
+        y.width = 4096

         return y
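
Note: these two hunks drop ignores on YAML round-trip settings. Given the attribute names, the serializer is presumably ruamel.yaml, whose YAML object really does expose `preserve_quotes` and `width`. A small sketch of the same two settings outside of capa's Rule class, under that assumption:

import sys

from ruamel.yaml import YAML  # assumption: capa formats rules with ruamel.yaml

y = YAML()
y.preserve_quotes = True  # keep the quoting style of loaded scalars on round-trip
y.width = 4096            # very wide lines, effectively disabling word wrapping

y.dump({"rule": {"meta": {"name": "example"}}}, sys.stdout)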

@@ -112,7 +112,7 @@ def get_capa_results(args):
         extractor = capa.main.get_extractor(
             path, format, os_, capa.main.BACKEND_VIV, sigpaths, should_save_workspace, disable_progress=True
         )
-    except capa.main.UnsupportedFormatError:
+    except capa.exceptions.UnsupportedFormatError:
         # i'm not 100% sure if multiprocessing will reliably raise exceptions across process boundaries.
         # so instead, return an object with explicit success/failure status.
         #

@@ -123,7 +123,7 @@ def get_capa_results(args):
             "status": "error",
             "error": f"input file does not appear to be a PE file: {path}",
         }
-    except capa.main.UnsupportedRuntimeError:
+    except capa.exceptions.UnsupportedRuntimeError:
         return {
             "path": path,
             "status": "error",
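
Note: both exception hunks repoint UnsupportedFormatError and UnsupportedRuntimeError at `capa.exceptions` instead of `capa.main`. The comment also explains why errors are returned as dictionaries rather than raised: the author is not certain exceptions propagate reliably across multiprocessing process boundaries. A toy version of that explicit success/failure pattern, in generic Python rather than capa's code:

import multiprocessing


def analyze(path: str) -> dict:
    # return an explicit status object instead of letting exceptions
    # cross the process boundary
    try:
        if not path.endswith(".exe"):
            raise ValueError("does not appear to be a PE file")
        return {"path": path, "status": "ok"}
    except ValueError as e:
        return {"path": path, "status": "error", "error": str(e)}


if __name__ == "__main__":
    with multiprocessing.Pool(2) as pool:
        print(pool.map(analyze, ["a.exe", "b.txt"]))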

@@ -359,7 +359,7 @@ def get_sample_capabilities(ctx: Context, path: Path) -> Set[str]:
     elif nice_path.name.endswith(capa.helpers.EXTENSIONS_SHELLCODE_64):
         format_ = "sc64"
     else:
-        format_ = capa.main.get_auto_format(nice_path)
+        format_ = capa.helpers.get_auto_format(nice_path)

     logger.debug("analyzing sample: %s", nice_path)
     extractor = capa.main.get_extractor(

@@ -47,7 +47,7 @@ from typing import Dict, List
 from pathlib import Path

 import requests
-from stix2 import Filter, MemoryStore, AttackPattern  # type: ignore
+from stix2 import Filter, MemoryStore, AttackPattern

 logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s")

@@ -140,7 +140,7 @@ def test_freeze_bytes_roundtrip():


 def roundtrip_feature(feature):
-    assert feature == capa.features.freeze.feature_from_capa(feature).to_capa()
+    assert feature == capa.features.freeze.features.feature_from_capa(feature).to_capa()


 def test_serialize_features():
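
Note: the test exercises the freeze round-trip property: converting a capa feature to its frozen (serializable) form and back should yield an equal feature. A self-contained toy version of that property, using stand-in classes instead of capa's:

from dataclasses import dataclass


@dataclass
class ToyFeature:
    value: str


@dataclass
class FrozenToyFeature:
    value: str

    def to_capa(self) -> ToyFeature:
        return ToyFeature(self.value)


def feature_from_capa(feature: ToyFeature) -> FrozenToyFeature:
    return FrozenToyFeature(feature.value)


f = ToyFeature("embedded pe")
assert f == feature_from_capa(f).to_capa()  # freeze/thaw round-trip preserves equality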