Compare commits


2 Commits

Author        SHA1        Message                         Date
Mike Hunhoff  7a66dfc025  update CHANGELOG                2025-02-19 15:43:07 -07:00
Mike Hunhoff  f5db5fd5cf  be2: improve number extraction  2025-02-19 15:38:42 -07:00
38 changed files with 897 additions and 1543 deletions


@@ -1,27 +0,0 @@
[tool.bumpversion]
current_version = "9.3.1"
[[tool.bumpversion.files]]
filename = "capa/version.py"
search = '__version__ = "{current_version}"'
replace = '__version__ = "{new_version}"'
[[tool.bumpversion.files]]
filename = "capa/ida/plugin/ida-plugin.json"
search = '"version": "{current_version}"'
replace = '"version": "{new_version}"'
[[tool.bumpversion.files]]
filename = "capa/ida/plugin/ida-plugin.json"
search = '"flare-capa=={current_version}"'
replace = '"flare-capa=={new_version}"'
[[tool.bumpversion.files]]
filename = "CHANGELOG.md"
search = "v{current_version}...master"
replace = "v{current_version}...{new_version}"
[[tool.bumpversion.files]]
filename = "CHANGELOG.md"
search = "master (unreleased)"
replace = "v{new_version}"


@@ -74,9 +74,6 @@ a = Analysis(
# only be installed locally.
"binaryninja",
"ida",
# remove once https://github.com/mandiant/capa/issues/2681 has
# been addressed by PyInstaller
"pkg_resources",
],
)


@@ -9,7 +9,6 @@ on:
- '**.md'
release:
types: [edited, published]
workflow_dispatch: # manual trigger for testing
permissions:
contents: write
@@ -23,38 +22,24 @@ jobs:
fail-fast: true
matrix:
include:
- os: ubuntu-22.04
- os: ubuntu-20.04
# use old linux so that the shared library versioning is more portable
artifact_name: capa
asset_name: linux
python_version: '3.10'
- os: ubuntu-22.04-arm
artifact_name: capa
asset_name: linux-arm64
python_version: '3.10'
- os: ubuntu-22.04
- os: ubuntu-20.04
artifact_name: capa
asset_name: linux-py312
python_version: '3.12'
- os: windows-2022
- os: windows-2019
artifact_name: capa.exe
asset_name: windows
python_version: '3.10'
# Windows 11 ARM64 complains of conflicting package version
# Additionally, there is no ARM64 build of Python for Python 3.10 on Windows 11 ARM: https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
#- os: windows-11-arm
# artifact_name: capa.exe
# asset_name: windows-arm64
# python_version: '3.12'
- os: macos-15-intel
# macos-15-intel is the lowest native intel build
- os: macos-13
# use older macOS for assumed better portability
artifact_name: capa
asset_name: macos
python_version: '3.10'
- os: macos-14
artifact_name: capa
asset_name: macos-arm64
python_version: '3.10'
steps:
- name: Checkout capa
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
@@ -64,7 +49,7 @@ jobs:
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with:
python-version: ${{ matrix.python_version }}
- if: matrix.os == 'ubuntu-22.04' || matrix.os == 'ubuntu-22.04-arm'
- if: matrix.os == 'ubuntu-20.04'
run: sudo apt-get install -y libyaml-dev
- name: Upgrade pip, setuptools
run: python -m pip install --upgrade pip setuptools
@@ -74,28 +59,6 @@ jobs:
pip install -e .[build]
- name: Build standalone executable
run: pyinstaller --log-level DEBUG .github/pyinstaller/pyinstaller.spec
- name: Does it run without warnings or errors?
shell: bash
run: |
if [[ "${{ matrix.os }}" == "windows-2022" ]] || [[ "${{ matrix.os }}" == "windows-11-arm" ]]; then
EXECUTABLE=".\\dist\\capa"
else
EXECUTABLE="./dist/capa"
fi
output=$(${EXECUTABLE} --version 2>&1)
exit_code=$?
echo "${output}"
echo "${exit_code}"
if echo "${output}" | grep -iE 'error|warning'; then
exit 1
fi
if [[ "${exit_code}" -ne 0 ]]; then
exit 1
fi
- name: Does it run (PE)?
run: dist/capa -d "tests/data/Practical Malware Analysis Lab 01-01.dll_"
- name: Does it run (Shellcode)?
@@ -111,6 +74,34 @@ jobs:
name: ${{ matrix.asset_name }}
path: dist/${{ matrix.artifact_name }}
test_run:
name: Test run on ${{ matrix.os }} / ${{ matrix.asset_name }}
runs-on: ${{ matrix.os }}
needs: [build]
strategy:
matrix:
include:
# OSs not already tested above
- os: ubuntu-22.04
artifact_name: capa
asset_name: linux
- os: ubuntu-22.04
artifact_name: capa
asset_name: linux-py312
- os: windows-2022
artifact_name: capa.exe
asset_name: windows
steps:
- name: Download ${{ matrix.asset_name }}
uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with:
name: ${{ matrix.asset_name }}
- name: Set executable flag
if: matrix.os != 'windows-2022'
run: chmod +x ${{ matrix.artifact_name }}
- name: Run capa
run: ./${{ matrix.artifact_name }} -h
zip_and_upload:
# upload zipped binaries to Release page
if: github.event_name == 'release'
@@ -122,18 +113,12 @@ jobs:
include:
- asset_name: linux
artifact_name: capa
- asset_name: linux-arm64
artifact_name: capa
- asset_name: linux-py312
artifact_name: capa
- asset_name: windows
artifact_name: capa.exe
#- asset_name: windows-arm64
# artifact_name: capa.exe
- asset_name: macos
artifact_name: capa
- asset_name: macos-arm64
artifact_name: capa
steps:
- name: Download ${{ matrix.asset_name }}
uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2


@@ -35,7 +35,7 @@ jobs:
with:
path: dist/*
- name: publish package
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # release/v1.12.4
uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 # release/v1
with:
skip-existing: true
verbose: true


@@ -42,10 +42,10 @@ jobs:
- name: Checkout capa
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
# use latest available python to take advantage of best performance
- name: Set up Python 3.13
- name: Set up Python 3.12
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with:
python-version: "3.13"
python-version: "3.12"
- name: Install dependencies
run: |
pip install -r requirements.txt
@@ -70,10 +70,10 @@ jobs:
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with:
submodules: recursive
- name: Set up Python 3.13
- name: Set up Python 3.12
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with:
python-version: "3.13"
python-version: "3.12"
- name: Install capa
run: |
pip install -r requirements.txt
@@ -88,14 +88,16 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-22.04, ubuntu-22.04-arm, windows-2022, macos-15-intel, macos-14]
os: [ubuntu-20.04, windows-2019, macos-13]
# across all operating systems
python-version: ["3.10", "3.13"]
python-version: ["3.10", "3.11"]
include:
# on Ubuntu run these as well
- os: ubuntu-22.04
- os: ubuntu-20.04
python-version: "3.10"
- os: ubuntu-20.04
python-version: "3.11"
- os: ubuntu-22.04
- os: ubuntu-20.04
python-version: "3.12"
steps:
- name: Checkout capa with submodules
@@ -107,7 +109,7 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
- name: Install pyyaml
if: matrix.os == 'ubuntu-22.04'
if: matrix.os == 'ubuntu-20.04'
run: sudo apt-get install -y libyaml-dev
- name: Install capa
run: |
@@ -129,7 +131,7 @@ jobs:
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.13"]
python-version: ["3.10", "3.11"]
steps:
- name: Checkout capa with submodules
# do only run if BN_SERIAL is available, have to do this in every step, see https://github.com/orgs/community/discussions/26726#discussioncomment-3253118
@@ -166,12 +168,12 @@ jobs:
ghidra-tests:
name: Ghidra tests for ${{ matrix.python-version }}
runs-on: ubuntu-22.04
runs-on: ubuntu-20.04
needs: [tests]
strategy:
fail-fast: false
matrix:
python-version: ["3.10", "3.13"]
python-version: ["3.10", "3.11"]
java-version: ["17"]
ghidra-version: ["11.0.1"]
public-version: ["PUBLIC_20240130"] # for ghidra releases

.gitignore

@@ -122,7 +122,6 @@ scripts/perf/*.zip
*/.DS_Store
Pipfile
Pipfile.lock
uv.lock
/cache/
.github/binja/binaryninja
.github/binja/download_headless.py


@@ -6,187 +6,23 @@
### Breaking Changes
### New Rules (4)
### New Rules (0)
- nursery/run-as-nodejs-native-module mehunhoff@google.com
- nursery/inject-shellcode-using-thread-pool-work-insertion-with-tp_io still@teamt5.org
- nursery/inject-shellcode-using-thread-pool-work-insertion-with-tp_timer still@teamt5.org
- nursery/inject-shellcode-using-thread-pool-work-insertion-with-tp_work still@teamt5.org
### Bug Fixes
- Fixed insecure deserialization vulnerability in YAML loading @0x1622 (#2770)
### capa Explorer Web
### capa Explorer IDA Pro plugin
### Development
- ci: deprecate macos-13 runner and use Python v3.13 for testing @mike-hunhoff #2777
### Raw diffs
- [capa v9.3.1...master](https://github.com/mandiant/capa/compare/v9.3.1...master)
- [capa-rules v9.3.1...master](https://github.com/mandiant/capa-rules/compare/v9.3.1...master)
## v9.3.1
This patch release fixes a missing import for the capa explorer plugin for IDA Pro.
### Bug Fixes
- add missing ida-netnode dependency to project.toml @mike-hunhoff #2765
### Development
- ci: bump binja min version @mike-hunhoff #2763
### Raw diffs
- [capa v9.3.0...master](https://github.com/mandiant/capa/compare/v9.3.0...master)
- [capa-rules v9.3.0...master](https://github.com/mandiant/capa-rules/compare/v9.3.0...master)
## v9.3.0
capa v9.3.0 comes with over 20 new and/or improved rules.
For IDA users, the capa explorer plugin is now available via the IDA Pro plugin repository and contains a Qt compatibility layer for PyQt5 and PySide6 support.
Additionally, a Binary Ninja bug has been fixed. Releases now include ARM64 binaries (Linux and macOS).
### New Features
- ci: add support for arm64 binary releases
### Breaking Changes
### New Rules (24)
- anti-analysis/anti-vm/vm-detection/detect-mouse-movement-via-activity-checks-on-windows tevajdr@gmail.com
- nursery/create-executable-heap moritz.raabe@mandiant.com
- anti-analysis/packer/dxpack/packed-with-dxpack jakubjozwiak@google.com
- anti-analysis/anti-av/patch-bitdefender-hooking-dll-function jakubjozwiak@google.com
- nursery/acquire-load-driver-privileges mehunhoff@google.com
- nursery/communicate-using-ftp mehunhoff@google.com
- linking/static/eclipse-paho-mqtt-c/linked-against-eclipse-paho-mqtt-c jakubjozwiak@google.com
- linking/static/qmqtt/linked-against-qmqtt jakubjozwiak@google.com
- anti-analysis/anti-forensic/disable-powershell-transcription jakubjozwiak@google.com
- host-interaction/powershell/bypass-powershell-constrained-language-mode-via-getsystemlockdownpolicy-patch jakubjozwiak@google.com
- linking/static/grpc/linked-against-grpc jakubjozwiak@google.com
- linking/static/hp-socket/linked-against-hp-socket jakubjozwiak@google.com
- load-code/execute-jscript-via-vsaengine-in-dotnet jakubjozwiak@google.com
- linking/static/funchook/linked-against-funchook jakubjozwiak@google.com
- linking/static/plthook/linked-against-plthook jakubjozwiak@google.com
- host-interaction/network/enumerate-tcp-connections-via-wmi-com-api jakubjozwiak@google.com
- host-interaction/network/routing-table/create-routing-table-entry jakubjozwiak@google.com
- host-interaction/network/routing-table/get-routing-table michael.hunhoff@mandiant.com
- host-interaction/file-system/use-io_uring-io-interface-on-linux jakubjozwiak@google.com
- collection/keylog/log-keystrokes-via-direct-input zeze-zeze
- nursery/compiled-from-fsharp mehunhoff@google.com
- nursery/decrypt-data-using-aes-via-dotnet mehunhoff@google.com
- nursery/get-dotnet-assembly-entry-point mehunhoff@google.com
### Bug Fixes
- binja: fix a crash during feature extraction when the MLIL is unavailable @xusheng6 #2714
### capa Explorer Web
### capa Explorer IDA Pro plugin
- add `ida-plugin.json` for inclusion in the IDA Pro plugin repository @williballenthin
- ida plugin: add Qt compatibility layer for PyQt5 and PySide6 support @williballenthin #2707
- delay import to not load Qt* when running under idalib @mr-tz #2752
### Development
- ci: remove redundant "test_run" action from build workflow @mike-hunhoff #2692
- dev: add bumpmyversion to bump and sync versions across the project @mr-tz
### Raw diffs
- [capa v9.2.1...9.3.0](https://github.com/mandiant/capa/compare/v9.2.1...9.3.0)
- [capa-rules v9.2.1...9.3.0](https://github.com/mandiant/capa-rules/compare/v9.2.1...9.3.0)
## v9.2.1
This point release fixes bugs, including removing an unnecessary PyInstaller warning message and enabling the standalone binary to execute on systems running older versions of glibc.
### Bug Fixes
- ci: exclude pkg_resources from PyInstaller build @mike-hunhoff #2684
- ci: downgrade Ubuntu version to accommodate older glibc versions @mike-hunhoff #2684
### Development
- ci: upgrade Windows version to avoid deprecation @mike-hunhoff #2684
- ci: check if build runs without warnings or errors @mike-hunhoff #2684
### Raw diffs
- [capa v9.2.0...v9.2.1](https://github.com/mandiant/capa/compare/v9.2.0...v9.2.1)
- [capa-rules v9.2.0...v9.2.1](https://github.com/mandiant/capa-rules/compare/v9.2.0...v9.2.1)
## v9.2.0
This release improves a few aspects of dynamic analysis, including relaxing our validation on fields across many CAPE versions and processing additional VMRay submission file types.
It also includes an updated rule pack containing new rules and rule fixes.
### New Features
- vmray: do not restrict analysis to PE and ELF files, e.g. docx @mike-hunhoff #2672
### Breaking Changes
### New Rules (22)
- communication/socket/connect-socket moritz.raabe@mandiant.com joakim@intezer.com mrhafizfarhad@gmail.com
- communication/socket/udp/connect-udp-socket mrhafizfarhad@gmail.com
- nursery/enter-debug-mode-in-dotnet @v1bh475u
- nursery/decrypt-data-using-tripledes-in-dotnet 0xRavenspar
- nursery/encrypt-data-using-tripledes-in-dotnet 0xRavenspar
- nursery/disable-system-features-via-registry-on-windows mehunhoff@google.com
- data-manipulation/encryption/chaskey/encrypt-data-using-chaskey still@teamt5.org
- data-manipulation/encryption/speck/encrypt-data-using-speck still@teamt5.org
- load-code/dotnet/load-assembly-via-iassembly still@teamt5.org
- malware-family/donut-loader/load-shellcode-via-donut still@teamt5.org
- nursery/disable-device-guard-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-firewall-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-system-restore-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-windows-defender-features-via-registry-on-windows mehunhoff@google.com
- host-interaction/file-system/write/clear-file-content jakeperalta7
- host-interaction/filter/unload-minifilter-driver JakePeralta7
- exploitation/enumeration/make-suspicious-ntquerysysteminformation-call zdw@google.com
- exploitation/gadgets/load-ntoskrnl zdw@google.com
- exploitation/gadgets/resolve-ntoskrnl-gadgets zdw@google.com
- exploitation/spraying/make-suspicious-ntfscontrolfile-call zdw@google.com
- anti-analysis/anti-forensic/unload-sysmon JakePeralta7
### Bug Fixes
- cape: make some fields optional @williballenthin #2631 #2632
- lint: add WARN for regex features that contain unescaped dot #2635
- lint: add ERROR for incomplete registry control set regex #2643
- binja: update unit test core version #2670
### Raw diffs
- [capa v9.1.0...v9.2.0](https://github.com/mandiant/capa/compare/v9.1.0...v9.2.0)
- [capa-rules v9.1.0...v9.2.0](https://github.com/mandiant/capa-rules/compare/v9.1.0...v9.2.0)
## v9.1.0
This release improves a few aspects of dynamic analysis, relaxing our validation on fields across many CAPE versions, for example.
It also includes an updated rule pack in which many dynamic rules make better use of the "span of calls" scope.
### New Rules (3)
- host-interaction/registry/change-registry-key-timestamp wballenthin@google.com
- host-interaction/mutex/check-mutex-and-terminate-process-on-windows @_re_fox moritz.raabe@mandiant.com mehunhoff@google.com
- anti-analysis/anti-forensic/clear-logs/clear-windows-event-logs-remotely 99.elad.levi@gmail.com
### Bug Fixes
- only parse CAPE fields required for analysis @mike-hunhoff #2607
- main: render result document without needing associated rules @williballenthin #2610
- vmray: only verify process OS and monitor IDs match @mike-hunhoff #2613
- render: don't assume prior matches exist within a thread @mike-hunhoff #2612
- improve _number_ feature extraction for BinExport @mike-hunhoff #2609
### capa Explorer Web
### capa Explorer IDA Pro plugin
### Development
### Raw diffs
- [capa v9.0.0...v9.1.0](https://github.com/mandiant/capa/compare/v9.0.0...v9.1.0)
- [capa-rules v9.0.0...v9.1.0](https://github.com/mandiant/capa-rules/compare/v9.0.0...v9.1.0)
- [capa v9.0.0...master](https://github.com/mandiant/capa/compare/v9.0.0...master)
- [capa-rules v9.0.0...master](https://github.com/mandiant/capa-rules/compare/v9.0.0...master)
## v9.0.0


@@ -315,6 +315,3 @@ If you use Ghidra, then you can use the [capa + Ghidra integration](/capa/ghidra
## capa testfiles
The [capa-testfiles repository](https://github.com/mandiant/capa-testfiles) contains the data we use to test capa's code and rules
## mailing list
Subscribe to the FLARE mailing list for community announcements! Email "subscribe" to [flare-external@google.com](mailto:flare-external@google.com?subject=subscribe).


@@ -349,30 +349,9 @@ def get_operand_register_expression(be2: BinExport2, operand: BinExport2.Operand
def get_operand_immediate_expression(be2: BinExport2, operand: BinExport2.Operand) -> Optional[BinExport2.Expression]:
if len(operand.expression_index) == 1:
# - type: IMMEDIATE_INT
# immediate: 20588728364
# parent_index: 0
expression: BinExport2.Expression = be2.expression[operand.expression_index[0]]
for expression in get_operand_expressions(be2, operand):
if expression.type == BinExport2.Expression.IMMEDIATE_INT:
return expression
elif len(operand.expression_index) == 2:
# from IDA, which provides a size hint for every operand,
# we get the following pattern for immediate constants:
#
# - type: SIZE_PREFIX
# symbol: "b8"
# - type: IMMEDIATE_INT
# immediate: 20588728364
# parent_index: 0
expression0: BinExport2.Expression = be2.expression[operand.expression_index[0]]
expression1: BinExport2.Expression = be2.expression[operand.expression_index[1]]
if expression0.type == BinExport2.Expression.SIZE_PREFIX:
if expression1.type == BinExport2.Expression.IMMEDIATE_INT:
return expression1
return None
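
This hunk replaces the one- and two-expression special cases with a single scan for the first IMMEDIATE_INT among an operand's expressions. A minimal sketch of why the scan covers both layouts described in the comments (a lone IMMEDIATE_INT, and IDA's SIZE_PREFIX followed by IMMEDIATE_INT), using a hypothetical Expr stand-in rather than the real BinExport2 protobuf types:

```python
# sketch only: Expr is a hypothetical stand-in for BinExport2.Expression,
# not the protobuf type capa actually uses
from dataclasses import dataclass
from typing import Optional

SIZE_PREFIX = "SIZE_PREFIX"
IMMEDIATE_INT = "IMMEDIATE_INT"


@dataclass
class Expr:
    type: str
    immediate: Optional[int] = None


def first_immediate(exprs: list[Expr]) -> Optional[Expr]:
    # mirrors the loop over get_operand_expressions(be2, operand):
    # return the first immediate expression, regardless of how many
    # expressions the disassembler emitted for the operand
    for e in exprs:
        if e.type == IMMEDIATE_INT:
            return e
    return None


# layout 1: a lone IMMEDIATE_INT expression
assert first_immediate([Expr(IMMEDIATE_INT, 20588728364)]).immediate == 20588728364

# layout 2: IDA prefixes the immediate with a SIZE_PREFIX ("b8") expression
assert first_immediate([Expr(SIZE_PREFIX), Expr(IMMEDIATE_INT, 20588728364)]).immediate == 20588728364
```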


@@ -19,6 +19,7 @@ from binaryninja import (
Function,
BinaryView,
SymbolType,
ILException,
RegisterValueType,
VariableSourceType,
LowLevelILOperation,
@@ -191,8 +192,9 @@ def extract_stackstring(fh: FunctionHandle):
if bv is None:
return
mlil = func.mlil
if mlil is None:
try:
mlil = func.mlil
except ILException:
return
for block in mlil.basic_blocks:


@@ -54,8 +54,7 @@ class CapeExtractor(DynamicFeatureExtractor):
def get_base_address(self) -> Union[AbsoluteVirtualAddress, _NoAddress, None]:
# value according to the PE header, the actual trace may use a different imagebase
assert self.report.static is not None
assert self.report.static.pe is not None
assert self.report.static is not None and self.report.static.pe is not None
return AbsoluteVirtualAddress(self.report.static.pe.imagebase)
def extract_global_features(self) -> Iterator[tuple[Feature, Address]]:


@@ -88,49 +88,31 @@ def extract_file_strings(report: CapeReport) -> Iterator[tuple[Feature, Address]
def extract_used_regkeys(report: CapeReport) -> Iterator[tuple[Feature, Address]]:
if not report.behavior.summary:
return
for regkey in report.behavior.summary.keys:
yield String(regkey), NO_ADDRESS
def extract_used_files(report: CapeReport) -> Iterator[tuple[Feature, Address]]:
if not report.behavior.summary:
return
for file in report.behavior.summary.files:
yield String(file), NO_ADDRESS
def extract_used_mutexes(report: CapeReport) -> Iterator[tuple[Feature, Address]]:
if not report.behavior.summary:
return
for mutex in report.behavior.summary.mutexes:
yield String(mutex), NO_ADDRESS
def extract_used_commands(report: CapeReport) -> Iterator[tuple[Feature, Address]]:
if not report.behavior.summary:
return
for cmd in report.behavior.summary.executed_commands:
yield String(cmd), NO_ADDRESS
def extract_used_apis(report: CapeReport) -> Iterator[tuple[Feature, Address]]:
if not report.behavior.summary:
return
for symbol in report.behavior.summary.resolved_apis:
yield String(symbol), NO_ADDRESS
def extract_used_services(report: CapeReport) -> Iterator[tuple[Feature, Address]]:
if not report.behavior.summary:
return
for svc in report.behavior.summary.created_services:
yield String(svc), NO_ADDRESS
for svc in report.behavior.summary.started_services:


@@ -188,15 +188,15 @@ class PE(FlexibleModel):
# timestamp: str
# list[ImportedDll], or dict[basename(dll), ImportedDll]
imports: list[ImportedDll] | dict[str, ImportedDll] = Field(default_factory=list) # type: ignore
imports: Union[list[ImportedDll], dict[str, ImportedDll]]
# imported_dll_count: Optional[int] = None
# imphash: str
# exported_dll_name: Optional[str] = None
exports: list[ExportedSymbol] = Field(default_factory=list)
exports: list[ExportedSymbol]
# dirents: list[DirectoryEntry]
sections: list[Section] = Field(default_factory=list)
sections: list[Section]
# ep_bytes: Optional[HexBytes] = None
@@ -364,7 +364,7 @@ class EncryptedBuffer(FlexibleModel):
class Behavior(FlexibleModel):
summary: Summary | None = None
summary: Summary
# list of processes, of threads, of calls
processes: list[Process]


@@ -96,7 +96,14 @@ class VMRayAnalysis:
% (self.submission_name, self.submission_type)
)
if self.submission_static is None:
if self.submission_static is not None:
if self.submission_static.pe is None and self.submission_static.elf is None:
# we only support static analysis for PE and ELF files for now
raise UnsupportedFormatError(
"archive does not contain a supported file format (submission_name: %s, submission_type: %s)"
% (self.submission_name, self.submission_type)
)
else:
# VMRay may not record static analysis for certain file types, e.g. MSI, but we'd still like to match dynamic
# execution so we continue without and accept that the results may be incomplete
logger.warning(
@@ -216,15 +223,16 @@ class VMRayAnalysis:
# we expect monitor processes recorded in both SummaryV2.json and flog.xml to equal
# to ensure this, we compare the pid, monitor_id, and origin_monitor_id
# for the other fields we've observed cases with slight deviations, e.g.,
# the ppid, origin monitor id, etc. for a process in flog.xml is not set correctly, all other
# data is equal
# the ppid for a process in flog.xml is not set correctly, all other data is equal
sv2p = self.monitor_processes[monitor_process.process_id]
if self.monitor_processes[monitor_process.process_id] != vmray_monitor_process:
logger.debug("processes differ: %s (sv2) vs. %s (flog)", sv2p, vmray_monitor_process)
# we need, at a minimum, for the process id and monitor id to match, otherwise there is likely a bug
# in the way that VMRay tracked one of the processes
assert (sv2p.pid, sv2p.monitor_id) == (vmray_monitor_process.pid, vmray_monitor_process.monitor_id)
assert (sv2p.pid, sv2p.monitor_id, sv2p.origin_monitor_id) == (
vmray_monitor_process.pid,
vmray_monitor_process.monitor_id,
vmray_monitor_process.origin_monitor_id,
)
def _compute_monitor_threads(self):
for monitor_thread in self.flog.analysis.monitor_threads:


@@ -17,6 +17,7 @@ import logging
import idaapi
import ida_kernwin
from capa.ida.plugin.form import CapaExplorerForm
from capa.ida.plugin.icon import ICON
logger = logging.getLogger(__name__)
@@ -73,9 +74,6 @@ class CapaExplorerPlugin(idaapi.plugin_t):
arg (int): bitflag. Setting LSB enables automatic analysis upon
loading. The other bits are currently undefined. See `form.Options`.
"""
# delay import to not trigger load of Qt components when not running in idaq, i.e., in idalib
from capa.ida.plugin.form import CapaExplorerForm
if not self.form:
self.form = CapaExplorerForm(self.PLUGIN_NAME, arg)
else:


@@ -14,9 +14,9 @@
import ida_kernwin
from PyQt5 import QtCore
from capa.ida.plugin.error import UserCancelledError
from capa.ida.plugin.qt_compat import QtCore, Signal
from capa.features.extractors.ida.extractor import IdaFeatureExtractor
from capa.features.extractors.base_extractor import FunctionHandle
@@ -24,7 +24,7 @@ from capa.features.extractors.base_extractor import FunctionHandle
class CapaExplorerProgressIndicator(QtCore.QObject):
"""implement progress signal, used during feature extraction"""
progress = Signal(str)
progress = QtCore.pyqtSignal(str)
def update(self, text):
"""emit progress update


@@ -23,6 +23,7 @@ from pathlib import Path
import idaapi
import ida_kernwin
import ida_settings
from PyQt5 import QtGui, QtCore, QtWidgets
import capa.main
import capa.rules
@@ -50,7 +51,6 @@ from capa.ida.plugin.hooks import CapaExplorerIdaHooks
from capa.ida.plugin.model import CapaExplorerDataModel
from capa.ida.plugin.proxy import CapaExplorerRangeProxyModel, CapaExplorerSearchProxyModel
from capa.ida.plugin.extractor import CapaExplorerFeatureExtractor
from capa.ida.plugin.qt_compat import QtGui, QtCore, QtWidgets
from capa.features.extractors.base_extractor import FunctionHandle
logger = logging.getLogger(__name__)
@@ -1358,7 +1358,7 @@ class CapaExplorerForm(idaapi.PluginForm):
@param state: checked state
"""
if state:
if state == QtCore.Qt.Checked:
self.limit_results_to_function(idaapi.get_func(idaapi.get_screen_ea()))
else:
self.range_model_proxy.reset_address_range_filter()
@@ -1367,7 +1367,7 @@ class CapaExplorerForm(idaapi.PluginForm):
def slot_checkbox_limit_features_by_ea(self, state):
""" """
if state:
if state == QtCore.Qt.Checked:
self.view_rulegen_features.filter_items_by_ea(idaapi.get_screen_ea())
else:
self.view_rulegen_features.show_all_items()


@@ -1,38 +0,0 @@
{
"IDAMetadataDescriptorVersion": 1,
"plugin": {
"name": "capa",
"entryPoint": "capa_explorer.py",
"version": "9.3.1",
"idaVersions": ">=7.4",
"description": "Identify capabilities in executable files using FLARE's capa framework",
"license": "Apache-2.0",
"categories": [
"malware-analysis",
"api-scripting-and-automation",
"ui-ux-and-visualization"
],
"pythonDependencies": ["flare-capa==9.3.1"],
"urls": {
"repository": "https://github.com/mandiant/capa"
},
"authors": [
{"name": "Willi Ballenthin", "email": "wballenthin@hex-rays.com"},
{"name": "Moritz Raabe", "email": "moritzraabe@google.com"},
{"name": "Mike Hunhoff", "email": "mike.hunhoff@gmail.com"},
{"name": "Yacine Elhamer", "email": "elhamer.yacine@gmail.com"}
],
"keywords": [
"capability-detection",
"malware-analysis",
"behavior-analysis",
"reverse-engineering",
"att&ck",
"rule-engine",
"feature-extraction",
"yara-like-rules",
"static-analysis",
"dynamic-analysis"
]
}
}


@@ -18,10 +18,10 @@ from typing import Iterator, Optional
import idc
import idaapi
from PyQt5 import QtCore
import capa.ida.helpers
from capa.features.address import Address, FileOffsetAddress, AbsoluteVirtualAddress
from capa.ida.plugin.qt_compat import QtCore, qt_get_item_flag_tristate
def info_to_name(display):
@@ -55,7 +55,7 @@ class CapaExplorerDataItem:
self.flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
if self._can_check:
self.flags = self.flags | QtCore.Qt.ItemIsUserCheckable | qt_get_item_flag_tristate()
self.flags = self.flags | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsTristate
if self.pred:
self.pred.appendChild(self)


@@ -18,6 +18,7 @@ from collections import deque
import idc
import idaapi
from PyQt5 import QtGui, QtCore
import capa.rules
import capa.ida.helpers
@@ -41,7 +42,6 @@ from capa.ida.plugin.item import (
CapaExplorerInstructionViewItem,
)
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.ida.plugin.qt_compat import QtGui, QtCore
# default highlight color used in IDA window
DEFAULT_HIGHLIGHT = 0xE6C700
@@ -269,7 +269,7 @@ class CapaExplorerDataModel(QtCore.QAbstractItemModel):
visited.add(child_index)
for idx in range(self.rowCount(child_index)):
stack.append(self.index(idx, 0, child_index))
stack.append(child_index.child(idx, 0))
def reset_ida_highlighting(self, item, checked):
"""reset IDA highlight for item


@@ -12,8 +12,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from PyQt5 import QtCore
from PyQt5.QtCore import Qt
from capa.ida.plugin.model import CapaExplorerDataModel
from capa.ida.plugin.qt_compat import Qt, QtCore
class CapaExplorerRangeProxyModel(QtCore.QSortFilterProxyModel):


@@ -1,79 +0,0 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Qt compatibility layer for capa IDA Pro plugin.
Handles PyQt5 (IDA < 9.2) vs PySide6 (IDA >= 9.2) differences.
This module provides a unified import interface for Qt modules and handles
API changes between Qt5 and Qt6.
"""
try:
# IDA 9.2+ uses PySide6
from PySide6 import QtGui, QtCore, QtWidgets
from PySide6.QtGui import QAction
QT_LIBRARY = "PySide6"
Signal = QtCore.Signal
except ImportError:
# Older IDA versions use PyQt5
try:
from PyQt5 import QtGui, QtCore, QtWidgets
from PyQt5.QtWidgets import QAction
QT_LIBRARY = "PyQt5"
Signal = QtCore.pyqtSignal
except ImportError:
raise ImportError("Neither PySide6 nor PyQt5 is available. Cannot initialize capa IDA plugin.")
Qt = QtCore.Qt
def qt_get_item_flag_tristate():
"""
Get the tristate item flag compatible with Qt5 and Qt6.
Qt5 (PyQt5): Uses Qt.ItemIsTristate
Qt6 (PySide6): Qt.ItemIsTristate was removed, uses Qt.ItemIsAutoTristate
ItemIsAutoTristate automatically manages tristate based on child checkboxes,
matching the original ItemIsTristate behavior where parent checkboxes reflect
the check state of their children.
Returns:
int: The appropriate flag value for the Qt version
Raises:
AttributeError: If the tristate flag cannot be found in the Qt library
"""
if QT_LIBRARY == "PySide6":
# Qt6: ItemIsTristate was removed, replaced with ItemIsAutoTristate
# Try different possible locations (API varies slightly across PySide6 versions)
if hasattr(Qt, "ItemIsAutoTristate"):
return Qt.ItemIsAutoTristate
elif hasattr(Qt, "ItemFlag") and hasattr(Qt.ItemFlag, "ItemIsAutoTristate"):
return Qt.ItemFlag.ItemIsAutoTristate
else:
raise AttributeError(
"Cannot find ItemIsAutoTristate in PySide6. "
+ "Your PySide6 version may be incompatible with capa. "
+ f"Available Qt attributes: {[attr for attr in dir(Qt) if 'Item' in attr]}"
)
else:
# Qt5: Use the original ItemIsTristate flag
return Qt.ItemIsTristate
__all__ = ["qt_get_item_flag_tristate", "Signal", "QAction", "QtGui", "QtCore", "QtWidgets"]


@@ -18,6 +18,7 @@ from collections import Counter
import idc
import idaapi
from PyQt5 import QtGui, QtCore, QtWidgets
import capa.rules
import capa.engine
@@ -27,7 +28,6 @@ import capa.features.basicblock
from capa.ida.plugin.item import CapaExplorerFunctionItem
from capa.features.address import AbsoluteVirtualAddress, _NoAddress
from capa.ida.plugin.model import CapaExplorerDataModel
from capa.ida.plugin.qt_compat import QtGui, QtCore, Signal, QAction, QtWidgets
MAX_SECTION_SIZE = 750
@@ -147,7 +147,7 @@ def calc_item_depth(o):
def build_action(o, display, data, slot):
""" """
action = QAction(display, o)
action = QtWidgets.QAction(display, o)
action.setData(data)
action.triggered.connect(lambda checked: slot(action))
@@ -312,7 +312,7 @@ class CapaExplorerRulegenPreview(QtWidgets.QTextEdit):
class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):
updated = Signal()
updated = QtCore.pyqtSignal()
def __init__(self, preview, parent=None):
""" """


@@ -392,7 +392,6 @@ class ShouldExitError(Exception):
"""raised when a main-related routine indicates the program should exit."""
def __init__(self, status_code: int):
super().__init__(status_code)
self.status_code = status_code
@@ -996,27 +995,7 @@ def main(argv: Optional[list[str]] = None):
handle_common_args(args)
ensure_input_exists_from_cli(args)
input_format = get_input_format_from_cli(args)
except ShouldExitError as e:
return e.status_code
if input_format == FORMAT_RESULT:
# render the result document immediately,
# no need to load the rules or do other processing.
result_doc = capa.render.result_document.ResultDocument.from_file(args.input_file)
if args.json:
print(result_doc.model_dump_json(exclude_none=True))
elif args.vverbose:
print(capa.render.vverbose.render_vverbose(result_doc))
elif args.verbose:
print(capa.render.verbose.render_verbose(result_doc))
else:
print(capa.render.default.render_default(result_doc))
return 0
try:
rules: RuleSet = get_rules_from_cli(args)
rules = get_rules_from_cli(args)
found_limitation = False
file_extractors = get_file_extractors_from_cli(args, input_format)
if input_format in STATIC_FORMATS:
@@ -1024,30 +1003,45 @@ def main(argv: Optional[list[str]] = None):
found_limitation = find_static_limitations_from_cli(args, rules, file_extractors)
if input_format in DYNAMIC_FORMATS:
found_limitation = find_dynamic_limitations_from_cli(args, rules, file_extractors)
backend = get_backend_from_cli(args, input_format)
sample_path = get_sample_path_from_cli(args, backend)
if sample_path is None:
os_ = "unknown"
else:
os_ = capa.loader.get_os(sample_path)
extractor: FeatureExtractor = get_extractor_from_cli(args, input_format, backend)
except ShouldExitError as e:
return e.status_code
capabilities: Capabilities = find_capabilities(rules, extractor, disable_progress=args.quiet)
meta: rdoc.Metadata
capabilities: Capabilities
meta: rdoc.Metadata = capa.loader.collect_metadata(
argv, args.input_file, input_format, os_, args.rules, extractor, capabilities
)
meta.analysis.layout = capa.loader.compute_layout(rules, extractor, capabilities.matches)
if input_format == FORMAT_RESULT:
# result document directly parses into meta, capabilities
result_doc = capa.render.result_document.ResultDocument.from_file(args.input_file)
meta, capabilities = result_doc.to_capa()
if found_limitation:
# bail if capa's static feature extractor encountered file limitation e.g. a packed binary
# or capa's dynamic feature extractor encountered some limitation e.g. a dotnet sample
# do show the output in verbose mode, though.
if not (args.verbose or args.vverbose or args.json):
return E_FILE_LIMITATION
else:
# all other formats we must create an extractor
# and use that to extract meta and capabilities
try:
backend = get_backend_from_cli(args, input_format)
sample_path = get_sample_path_from_cli(args, backend)
if sample_path is None:
os_ = "unknown"
else:
os_ = capa.loader.get_os(sample_path)
extractor = get_extractor_from_cli(args, input_format, backend)
except ShouldExitError as e:
return e.status_code
capabilities = find_capabilities(rules, extractor, disable_progress=args.quiet)
meta = capa.loader.collect_metadata(
argv, args.input_file, input_format, os_, args.rules, extractor, capabilities
)
meta.analysis.layout = capa.loader.compute_layout(rules, extractor, capabilities.matches)
if found_limitation:
# bail if capa's static feature extractor encountered file limitation e.g. a packed binary
# or capa's dynamic feature extractor encountered some limitation e.g. a dotnet sample
# do show the output in verbose mode, though.
if not (args.verbose or args.vverbose or args.json):
return E_FILE_LIMITATION
if args.json:
print(capa.render.json.render(meta, rules, capabilities.matches))


@@ -418,9 +418,8 @@ class Match(FrozenModel):
and a.id <= location.id
]
)
if matches_in_thread:
_, most_recent_match = matches_in_thread[-1]
children.append(Match.from_capa(rules, capabilities, most_recent_match))
_, most_recent_match = matches_in_thread[-1]
children.append(Match.from_capa(rules, capabilities, most_recent_match))
else:
children.append(Match.from_capa(rules, capabilities, rule_matches[location]))
@@ -479,11 +478,8 @@ class Match(FrozenModel):
and a.id <= location.id
]
)
# namespace matches may not occur within the same thread as the result, so only
# proceed if a match within the same thread is found
if matches_in_thread:
_, most_recent_match = matches_in_thread[-1]
children.append(Match.from_capa(rules, capabilities, most_recent_match))
_, most_recent_match = matches_in_thread[-1]
children.append(Match.from_capa(rules, capabilities, most_recent_match))
else:
if location in rule_matches:
children.append(Match.from_capa(rules, capabilities, rule_matches[location]))
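
The `if matches_in_thread:` guard in this hunk matters because a namespace or rule match may have occurred in a different thread than the current result, in which case the filtered list is empty and indexing it with `[-1]` raises `IndexError`. A minimal sketch of the "most recent match in this thread" selection, with plain (thread_id, call_id) tuples standing in for capa's dynamic addresses and strings standing in for match objects:

```python
# sketch only: tuples and strings stand in for capa's address and match types
from typing import Optional


def most_recent_match_in_thread(
    rule_matches: dict[tuple[int, int], str], thread_id: int, call_id: int
) -> Optional[str]:
    # collect matches from the same thread that occurred at or before this call,
    # ordered by call id (mirrors the sorted(...) comprehension in the hunk)
    matches_in_thread = sorted(
        (addr, match)
        for addr, match in rule_matches.items()
        if addr[0] == thread_id and addr[1] <= call_id
    )
    if not matches_in_thread:
        # the match may live in another thread entirely;
        # without this guard, matches_in_thread[-1] raises IndexError
        return None
    _, most_recent = matches_in_thread[-1]
    return most_recent


rule_matches = {(1, 5): "create TCP socket", (2, 3): "connect TCP socket"}
assert most_recent_match_in_thread(rule_matches, thread_id=2, call_id=9) == "connect TCP socket"
assert most_recent_match_in_thread(rule_matches, thread_id=3, call_id=9) is None
```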


@@ -274,8 +274,12 @@ SUPPORTED_FEATURES[Scope.FUNCTION].update(SUPPORTED_FEATURES[Scope.BASIC_BLOCK])
class InvalidRule(ValueError):
def __init__(self, msg):
super().__init__()
self.msg = msg
def __str__(self):
return f"invalid rule: {super().__str__()}"
return f"invalid rule: {self.msg}"
def __repr__(self):
return str(self)
@@ -285,15 +289,20 @@ class InvalidRuleWithPath(InvalidRule):
def __init__(self, path, msg):
super().__init__(msg)
self.path = path
self.msg = msg
self.__cause__ = None
def __str__(self):
return f"invalid rule: {self.path}: {super(InvalidRule, self).__str__()}"
return f"invalid rule: {self.path}: {self.msg}"
class InvalidRuleSet(ValueError):
def __init__(self, msg):
super().__init__()
self.msg = msg
def __str__(self):
return f"invalid rule set: {super().__str__()}"
return f"invalid rule set: {self.msg}"
def __repr__(self):
return str(self)
@@ -1093,15 +1102,15 @@ class Rule:
@lru_cache()
def _get_yaml_loader():
try:
# prefer to use CLoader to be fast, see #306 / CSafeLoader is the same as CLoader but with safe loading
# prefer to use CLoader to be fast, see #306
# on Linux, make sure you install libyaml-dev or similar
# on Windows, get WHLs from pyyaml.org/pypi
logger.debug("using libyaml CSafeLoader.")
return yaml.CSafeLoader
logger.debug("using libyaml CLoader.")
return yaml.CLoader
except Exception:
logger.debug("unable to import libyaml CSafeLoader, falling back to Python yaml parser.")
logger.debug("unable to import libyaml CLoader, falling back to Python yaml parser.")
logger.debug("this will be slower to load rules.")
return yaml.SafeLoader
return yaml.Loader
@staticmethod
def _get_ruamel_yaml_parser():


@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "9.3.1"
__version__ = "9.0.0"
def get_major_version():


@@ -7,7 +7,6 @@
- [ ] Review changes
- capa https://github.com/mandiant/capa/compare/\<last-release\>...master
- capa-rules https://github.com/mandiant/capa-rules/compare/\<last-release\>...master
- [ ] Run `$ bump-my-version bump {patch/minor/major} [--allow-dirty]` to update [capa/version.py](https://github.com/mandiant/capa/blob/master/capa/version.py) and other version files
- [ ] Update [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md)
- Do not forget to add a nice introduction thanking contributors
- Remember that we need a major release if we introduce breaking changes
@@ -37,6 +36,7 @@
- [capa <release>...master](https://github.com/mandiant/capa/compare/<release>...master)
- [capa-rules <release>...master](https://github.com/mandiant/capa-rules/compare/<release>...master)
```
- [ ] Update [capa/version.py](https://github.com/mandiant/capa/blob/master/capa/version.py)
- [ ] Create a PR with the updated [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md) and [capa/version.py](https://github.com/mandiant/capa/blob/master/capa/version.py). Copy this checklist in the PR description.
- [ ] Update the [homepage](https://github.com/mandiant/capa/blob/master/web/public/index.html) (i.e. What's New section)
- [ ] After PR review, merge the PR and [create the release in GH](https://github.com/mandiant/capa/releases/new) using text from the [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md).


@@ -74,8 +74,7 @@ dependencies = [
# comments and context.
"pyyaml>=6",
"colorama>=0.4",
"ida-netnode>=3.0",
"ida-settings>=3.1.0",
"ida-settings>=2",
"ruamel.yaml>=0.18",
"pefile>=2023.2.7",
"pyelftools>=0.31",
@@ -105,7 +104,7 @@ dependencies = [
"networkx>=3",
"dnfile>=0.17.0",
"dnfile>=0.15.0",
]
dynamic = ["version"]
@@ -122,35 +121,34 @@ dev = [
# we want all developer environments to be consistent.
# These dependencies are not used in production environments
# and should not conflict with other libraries/tooling.
"pre-commit==4.5.0",
"pre-commit==4.1.0",
"pytest==8.0.0",
"pytest-sugar==1.1.1",
"pytest-sugar==1.0.0",
"pytest-instafail==0.5.0",
"flake8==7.3.0",
"flake8-bugbear==25.10.21",
"flake8==7.1.1",
"flake8-bugbear==24.12.12",
"flake8-encodings==0.5.1",
"flake8-comprehensions==3.17.0",
"flake8-comprehensions==3.16.0",
"flake8-logging-format==0.9.0",
"flake8-no-implicit-concat==0.3.5",
"flake8-print==5.0.0",
"flake8-todos==0.3.1",
"flake8-simplify==0.22.0",
"flake8-simplify==0.21.0",
"flake8-use-pathlib==0.3.0",
"flake8-copyright==0.2.4",
"ruff==0.14.7",
"black==25.11.0",
"ruff==0.9.2",
"black==25.1.0",
"isort==6.0.0",
"mypy==1.17.1",
"mypy==1.15.0",
"mypy-protobuf==3.6.0",
"PyGithub==2.6.0",
"bump-my-version==1.2.4",
"PyGithub==2.5.0",
# type stubs for mypy
"types-backports==0.1.3",
"types-colorama==0.4.15.11",
"types-PyYAML==6.0.8",
"types-psutil==7.0.0.20250218",
"types-psutil==6.1.0.20241102",
"types_requests==2.32.0.20240712",
"types-protobuf==6.32.1.20250918",
"types-protobuf==5.29.1.20241207",
"deptry==0.23.0"
]
build = [
@@ -158,18 +156,16 @@ build = [
# we want all developer environments to be consistent.
# These dependencies are not used in production environments
# and should not conflict with other libraries/tooling.
"pyinstaller==6.16.0",
"setuptools==80.9.0",
"build==1.3.0"
"pyinstaller==6.12.0",
"setuptools==75.8.0",
"build==1.2.2"
]
scripts = [
# can (optionally) be more lenient on dependencies here
# see comment on dependencies for more context
"jschema_to_python==1.2.3",
"psutil==7.1.2",
"psutil==6.1.0",
"stix2==3.0.1",
"sarif_om==1.0.4",
"requests>=2.32.4",
"requests==2.32.3",
]
[tool.deptry]
@@ -201,8 +197,7 @@ known_first_party = [
"idc",
"java",
"netnode",
"PyQt5",
"PySide6"
"PyQt5"
]
[tool.deptry.per_rule_ignores]
@@ -210,7 +205,6 @@ known_first_party = [
DEP002 = [
"black",
"build",
"bump-my-version",
"deptry",
"flake8",
"flake8-bugbear",


@@ -10,39 +10,38 @@ annotated-types==0.7.0
colorama==0.4.6
cxxfilt==0.3.0
dncil==1.0.2
dnfile==0.17.0
dnfile==0.15.0
funcy==2.0
humanize==4.14.0
humanize==4.10.0
ida-netnode==3.0
ida-settings==3.2.2
ida-settings==2.1.0
intervaltree==3.1.0
markdown-it-py==4.0.0
markdown-it-py==3.0.0
mdurl==0.1.2
msgpack==1.0.8
networkx==3.4.2
pefile==2024.8.26
pip==25.3
protobuf==6.33.1
pip==25.0
protobuf==5.29.3
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.23
pydantic==2.12.4
pycparser==2.22
pydantic==2.10.1
# pydantic pins pydantic-core,
# but dependabot updates these separately (which is broken) and is annoying,
# so we rely on pydantic to pull in the right version of pydantic-core.
# pydantic-core==2.23.4
xmltodict==1.0.2
pyelftools==0.32
xmltodict==0.14.2
pyelftools==0.31
pygments==2.19.1
python-flirt==0.9.2
pyyaml==6.0.2
rich==14.2.0
rich==13.9.2
ruamel-yaml==0.18.6
ruamel-yaml-clib==0.2.14
setuptools==80.9.0
ruamel-yaml-clib==0.2.8
setuptools==75.8.0
six==1.17.0
sortedcontainers==2.4.0
viv-utils==0.8.0
vivisect==1.2.1
msgspec==0.19.0
bump-my-version==1.2.4

rules

Submodule rules updated: 6120dfb6e0...c0aa922f20


@@ -175,6 +175,8 @@ def convert_rule(rule, rulename, cround, depth):
depth += 1
logger.info("recursion depth: %d", depth)
global var_names
def do_statement(s_type, kid):
yara_strings = ""
yara_condition = ""


@@ -49,7 +49,7 @@ import capa.helpers
import capa.features.insn
import capa.capabilities.common
from capa.rules import Rule, RuleSet
from capa.features.common import OS_AUTO, Regex, String, Feature, Substring
from capa.features.common import OS_AUTO, String, Feature, Substring
from capa.render.result_document import RuleMetadata
logger = logging.getLogger("lint")
@@ -406,7 +406,6 @@ class DoesntMatchExample(Lint):
return True
if rule.name not in capabilities:
logger.info('rule "%s" does not match for sample %s', rule.name, example_id)
return True
@@ -722,76 +721,6 @@ class FeatureStringTooShort(Lint):
return False
class FeatureRegexRegistryControlSetMatchIncomplete(Lint):
name = "feature regex registry control set match incomplete"
recommendation = (
'use "(ControlSet\\d{3}|CurrentControlSet)" to match both indirect references '
+ 'via "CurrentControlSet" and direct references via "ControlSetXXX"'
)
def check_features(self, ctx: Context, features: list[Feature]):
for feature in features:
if not isinstance(feature, (Regex,)):
continue
assert isinstance(feature.value, str)
pat = feature.value.lower()
if "system\\\\" in pat and "controlset" in pat or "currentcontrolset" in pat:
if "system\\\\(controlset\\d{3}|currentcontrolset)" not in pat:
return True
return False
class FeatureRegexContainsUnescapedPeriod(Lint):
name = "feature regex contains unescaped period"
recommendation_template = 'escape the period in "{:s}" unless it should be treated as a regex dot operator'
level = Lint.WARN
def check_features(self, ctx: Context, features: list[Feature]):
for feature in features:
if isinstance(feature, (Regex,)):
assert isinstance(feature.value, str)
pat = feature.value.removeprefix("/")
pat = pat.removesuffix("/i").removesuffix("/")
index = pat.find(".")
if index == -1:
return False
if index < len(pat) - 1:
if pat[index + 1] in ("*", "+", "?", "{"):
# like "/VB5!.*/"
return False
if index == 0:
# like "/.exe/" which should be "/\.exe/"
self.recommendation = self.recommendation_template.format(feature.value)
return True
if pat[index - 1] != "\\":
# like "/test.exe/" which should be "/test\.exe/"
self.recommendation = self.recommendation_template.format(feature.value)
return True
if pat[index - 1] == "\\":
for i, char in enumerate(pat[0:index][::-1]):
if char == "\\":
continue
if i % 2 == 0:
# like "/\\\\.\\pipe\\VBoxTrayIPC/"
self.recommendation = self.recommendation_template.format(feature.value)
return True
break
return False
class FeatureNegativeNumber(Lint):
name = "feature value is negative"
recommendation = "specify the number's two's complement representation"
@@ -1002,13 +931,7 @@ def lint_meta(ctx: Context, rule: Rule):
return run_lints(META_LINTS, ctx, rule)
FEATURE_LINTS = (
FeatureStringTooShort(),
FeatureNegativeNumber(),
FeatureNtdllNtoskrnlApi(),
FeatureRegexContainsUnescapedPeriod(),
FeatureRegexRegistryControlSetMatchIncomplete(),
)
FEATURE_LINTS = (FeatureStringTooShort(), FeatureNegativeNumber(), FeatureNtdllNtoskrnlApi())
def lint_features(ctx: Context, rule: Rule):


@@ -70,4 +70,4 @@ def test_standalone_binja_backend():
@pytest.mark.skipif(binja_present is False, reason="Skip binja tests if the binaryninja Python API is not installed")
def test_binja_version():
version = binaryninja.core_version_info()
assert version.major == 5 and version.minor == 2
assert version.major == 4 and version.minor == 2

File diff suppressed because it is too large


@@ -26,15 +26,15 @@
},
"devDependencies": {
"@rushstack/eslint-patch": "^1.8.0",
"@vitejs/plugin-vue": "^5.2.3",
"@vitejs/plugin-vue": "^5.0.5",
"@vue/eslint-config-prettier": "^9.0.0",
"@vue/test-utils": "^2.4.6",
"eslint": "^8.57.0",
"eslint-plugin-vue": "^9.23.0",
"jsdom": "^24.1.0",
"prettier": "^3.2.5",
"vite": "^6.4.1",
"vite-plugin-singlefile": "^2.2.0",
"vitest": "^3.0.9"
"vite": "^5.4.14",
"vite-plugin-singlefile": "^2.0.2",
"vitest": "^1.6.0"
}
}


@@ -210,37 +210,27 @@
<div class="row flex-lg-row-reverse align-items-center g-5">
<h1>What's New</h1>
<h2 class="mt-3">Rule Updates</h2>
<ul class="mt-2 ps-5">
<!-- TODO(williballenthin): add date -->
<li>
added:
<a href="./rules/use bigint function/">
use bigint function
</a>
</li>
<li>
added:
<a href="./rules/encrypt data using RSA via embedded library/">
encrypt data using RSA via embedded library
</a>
</li>
</ul>
<h2 class="mt-3">Tool Updates</h2>
<h3 class="mt-2">v9.3.1 (<em>2025-11-19</em>)</h3>
<p class="mt-0">
This patch release fixes a missing import for the capa explorer plugin for IDA Pro.
</p>
<h3 class="mt-2">v9.3.0 (<em>2025-11-12</em>)</h3>
<p class="mt-0">
capa v9.3.0 comes with over 20 new and/or improved rules.
For IDA users, the capa explorer plugin is now available via the IDA Pro plugin repository and contains a Qt compatibility layer for PyQt5 and PySide6 support.
Additionally, a Binary Ninja bug has been fixed. Releases now include ARM64 binaries (Linux and macOS).
</p>
<h3 class="mt-2">v9.2.1 (<em>2025-06-06</em>)</h3>
<p class="mt-0">
This point release fixes bugs, including removing an unnecessary PyInstaller warning message and enabling the standalone binary to execute on systems running older versions of glibc.
</p>
<h3 class="mt-2">v9.2.0 (<em>2025-06-03</em>)</h3>
<p class="mt-0">
This release improves a few aspects of dynamic analysis, including relaxing our validation on fields across many CAPE versions and processing additional VMRay submission file types.
It also includes an updated rule pack containing new rules and rule fixes.
</p>
<h3 class="mt-2">v9.1.0 (<em>2025-03-02</em>)</h3>
<p class="mt-0">
This release improves a few aspects of dynamic analysis, relaxing our validation on fields across many CAPE versions, for example.
It also includes an updated rule pack in which many dynamic rules make better use of the "span of calls" scope.
</p>
<h3 class="mt-2">v9.0.0 (<em>2025-02-05</em>)</h3>
<p class="mt-0">
This release introduces a new scope for dynamic analysis, "span of calls",