Compare commits

..

1 Commits

Author SHA1 Message Date
Willi Ballenthin
d0bafd6ab7 add codemap script 2025-04-25 20:52:13 +02:00
40 changed files with 700 additions and 904 deletions

View File

@@ -1,27 +0,0 @@
[tool.bumpversion]
current_version = "9.3.1"
[[tool.bumpversion.files]]
filename = "capa/version.py"
search = '__version__ = "{current_version}"'
replace = '__version__ = "{new_version}"'
[[tool.bumpversion.files]]
filename = "capa/ida/plugin/ida-plugin.json"
search = '"version": "{current_version}"'
replace = '"version": "{new_version}"'
[[tool.bumpversion.files]]
filename = "capa/ida/plugin/ida-plugin.json"
search = '"flare-capa=={current_version}"'
replace = '"flare-capa=={new_version}"'
[[tool.bumpversion.files]]
filename = "CHANGELOG.md"
search = "v{current_version}...master"
replace = "v{current_version}...{new_version}"
[[tool.bumpversion.files]]
filename = "CHANGELOG.md"
search = "master (unreleased)"
replace = "v{new_version}"

View File

@@ -74,9 +74,6 @@ a = Analysis(
# only be installed locally. # only be installed locally.
"binaryninja", "binaryninja",
"ida", "ida",
# remove once https://github.com/mandiant/capa/issues/2681 has
# been addressed by PyInstaller
"pkg_resources",
], ],
) )

View File

@@ -9,7 +9,6 @@ on:
- '**.md' - '**.md'
release: release:
types: [edited, published] types: [edited, published]
workflow_dispatch: # manual trigger for testing
permissions: permissions:
contents: write contents: write
@@ -23,38 +22,24 @@ jobs:
fail-fast: true fail-fast: true
matrix: matrix:
include: include:
- os: ubuntu-22.04 - os: ubuntu-20.04
# use old linux so that the shared library versioning is more portable # use old linux so that the shared library versioning is more portable
artifact_name: capa artifact_name: capa
asset_name: linux asset_name: linux
python_version: '3.10' python_version: '3.10'
- os: ubuntu-22.04-arm - os: ubuntu-20.04
artifact_name: capa
asset_name: linux-arm64
python_version: '3.10'
- os: ubuntu-22.04
artifact_name: capa artifact_name: capa
asset_name: linux-py312 asset_name: linux-py312
python_version: '3.12' python_version: '3.12'
- os: windows-2022 - os: windows-2019
artifact_name: capa.exe artifact_name: capa.exe
asset_name: windows asset_name: windows
python_version: '3.10' python_version: '3.10'
# Windows 11 ARM64 complains of conflicting package version - os: macos-13
# Additionally, there is no ARM64 build of Python for Python 3.10 on Windows 11 ARM: https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json # use older macOS for assumed better portability
#- os: windows-11-arm
# artifact_name: capa.exe
# asset_name: windows-arm64
# python_version: '3.12'
- os: macos-15-intel
# macos-15-intel is the lowest native intel build
artifact_name: capa artifact_name: capa
asset_name: macos asset_name: macos
python_version: '3.10' python_version: '3.10'
- os: macos-14
artifact_name: capa
asset_name: macos-arm64
python_version: '3.10'
steps: steps:
- name: Checkout capa - name: Checkout capa
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
@@ -64,7 +49,7 @@ jobs:
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with: with:
python-version: ${{ matrix.python_version }} python-version: ${{ matrix.python_version }}
- if: matrix.os == 'ubuntu-22.04' || matrix.os == 'ubuntu-22.04-arm' - if: matrix.os == 'ubuntu-20.04'
run: sudo apt-get install -y libyaml-dev run: sudo apt-get install -y libyaml-dev
- name: Upgrade pip, setuptools - name: Upgrade pip, setuptools
run: python -m pip install --upgrade pip setuptools run: python -m pip install --upgrade pip setuptools
@@ -74,28 +59,6 @@ jobs:
pip install -e .[build] pip install -e .[build]
- name: Build standalone executable - name: Build standalone executable
run: pyinstaller --log-level DEBUG .github/pyinstaller/pyinstaller.spec run: pyinstaller --log-level DEBUG .github/pyinstaller/pyinstaller.spec
- name: Does it run without warnings or errors?
shell: bash
run: |
if [[ "${{ matrix.os }}" == "windows-2022" ]] || [[ "${{ matrix.os }}" == "windows-11-arm" ]]; then
EXECUTABLE=".\\dist\\capa"
else
EXECUTABLE="./dist/capa"
fi
output=$(${EXECUTABLE} --version 2>&1)
exit_code=$?
echo "${output}"
echo "${exit_code}"
if echo "${output}" | grep -iE 'error|warning'; then
exit 1
fi
if [[ "${exit_code}" -ne 0 ]]; then
exit 1
fi
- name: Does it run (PE)? - name: Does it run (PE)?
run: dist/capa -d "tests/data/Practical Malware Analysis Lab 01-01.dll_" run: dist/capa -d "tests/data/Practical Malware Analysis Lab 01-01.dll_"
- name: Does it run (Shellcode)? - name: Does it run (Shellcode)?
@@ -111,6 +74,34 @@ jobs:
name: ${{ matrix.asset_name }} name: ${{ matrix.asset_name }}
path: dist/${{ matrix.artifact_name }} path: dist/${{ matrix.artifact_name }}
test_run:
name: Test run on ${{ matrix.os }} / ${{ matrix.asset_name }}
runs-on: ${{ matrix.os }}
needs: [build]
strategy:
matrix:
include:
# OSs not already tested above
- os: ubuntu-22.04
artifact_name: capa
asset_name: linux
- os: ubuntu-22.04
artifact_name: capa
asset_name: linux-py312
- os: windows-2022
artifact_name: capa.exe
asset_name: windows
steps:
- name: Download ${{ matrix.asset_name }}
uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
with:
name: ${{ matrix.asset_name }}
- name: Set executable flag
if: matrix.os != 'windows-2022'
run: chmod +x ${{ matrix.artifact_name }}
- name: Run capa
run: ./${{ matrix.artifact_name }} -h
zip_and_upload: zip_and_upload:
# upload zipped binaries to Release page # upload zipped binaries to Release page
if: github.event_name == 'release' if: github.event_name == 'release'
@@ -122,18 +113,12 @@ jobs:
include: include:
- asset_name: linux - asset_name: linux
artifact_name: capa artifact_name: capa
- asset_name: linux-arm64
artifact_name: capa
- asset_name: linux-py312 - asset_name: linux-py312
artifact_name: capa artifact_name: capa
- asset_name: windows - asset_name: windows
artifact_name: capa.exe artifact_name: capa.exe
#- asset_name: windows-arm64
# artifact_name: capa.exe
- asset_name: macos - asset_name: macos
artifact_name: capa artifact_name: capa
- asset_name: macos-arm64
artifact_name: capa
steps: steps:
- name: Download ${{ matrix.asset_name }} - name: Download ${{ matrix.asset_name }}
uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2 uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2

View File

@@ -35,7 +35,7 @@ jobs:
with: with:
path: dist/* path: dist/*
- name: publish package - name: publish package
uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # release/v1.12.4 uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 # release/v1
with: with:
skip-existing: true skip-existing: true
verbose: true verbose: true

View File

@@ -42,10 +42,10 @@ jobs:
- name: Checkout capa - name: Checkout capa
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
# use latest available python to take advantage of best performance # use latest available python to take advantage of best performance
- name: Set up Python 3.13 - name: Set up Python 3.12
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with: with:
python-version: "3.13" python-version: "3.12"
- name: Install dependencies - name: Install dependencies
run: | run: |
pip install -r requirements.txt pip install -r requirements.txt
@@ -70,10 +70,10 @@ jobs:
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
with: with:
submodules: recursive submodules: recursive
- name: Set up Python 3.13 - name: Set up Python 3.12
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0 uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with: with:
python-version: "3.13" python-version: "3.12"
- name: Install capa - name: Install capa
run: | run: |
pip install -r requirements.txt pip install -r requirements.txt
@@ -88,14 +88,16 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
os: [ubuntu-22.04, ubuntu-22.04-arm, windows-2022, macos-15-intel, macos-14] os: [ubuntu-20.04, windows-2019, macos-13]
# across all operating systems # across all operating systems
python-version: ["3.10", "3.13"] python-version: ["3.10", "3.11"]
include: include:
# on Ubuntu run these as well # on Ubuntu run these as well
- os: ubuntu-22.04 - os: ubuntu-20.04
python-version: "3.10"
- os: ubuntu-20.04
python-version: "3.11" python-version: "3.11"
- os: ubuntu-22.04 - os: ubuntu-20.04
python-version: "3.12" python-version: "3.12"
steps: steps:
- name: Checkout capa with submodules - name: Checkout capa with submodules
@@ -107,7 +109,7 @@ jobs:
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
- name: Install pyyaml - name: Install pyyaml
if: matrix.os == 'ubuntu-22.04' if: matrix.os == 'ubuntu-20.04'
run: sudo apt-get install -y libyaml-dev run: sudo apt-get install -y libyaml-dev
- name: Install capa - name: Install capa
run: | run: |
@@ -129,7 +131,7 @@ jobs:
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
python-version: ["3.10", "3.13"] python-version: ["3.10", "3.11"]
steps: steps:
- name: Checkout capa with submodules - name: Checkout capa with submodules
# do only run if BN_SERIAL is available, have to do this in every step, see https://github.com/orgs/community/discussions/26726#discussioncomment-3253118 # do only run if BN_SERIAL is available, have to do this in every step, see https://github.com/orgs/community/discussions/26726#discussioncomment-3253118
@@ -166,12 +168,12 @@ jobs:
ghidra-tests: ghidra-tests:
name: Ghidra tests for ${{ matrix.python-version }} name: Ghidra tests for ${{ matrix.python-version }}
runs-on: ubuntu-22.04 runs-on: ubuntu-20.04
needs: [tests] needs: [tests]
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
python-version: ["3.10", "3.13"] python-version: ["3.10", "3.11"]
java-version: ["17"] java-version: ["17"]
ghidra-version: ["11.0.1"] ghidra-version: ["11.0.1"]
public-version: ["PUBLIC_20240130"] # for ghidra releases public-version: ["PUBLIC_20240130"] # for ghidra releases

1
.gitignore vendored
View File

@@ -122,7 +122,6 @@ scripts/perf/*.zip
*/.DS_Store */.DS_Store
Pipfile Pipfile
Pipfile.lock Pipfile.lock
uv.lock
/cache/ /cache/
.github/binja/binaryninja .github/binja/binaryninja
.github/binja/download_headless.py .github/binja/download_headless.py

View File

@@ -138,7 +138,6 @@ repos:
- "--ignore=tests/test_ghidra_features.py" - "--ignore=tests/test_ghidra_features.py"
- "--ignore=tests/test_ida_features.py" - "--ignore=tests/test_ida_features.py"
- "--ignore=tests/test_viv_features.py" - "--ignore=tests/test_viv_features.py"
- "--ignore=tests/test_idalib_features.py"
- "--ignore=tests/test_main.py" - "--ignore=tests/test_main.py"
- "--ignore=tests/test_scripts.py" - "--ignore=tests/test_scripts.py"
always_run: true always_run: true

View File

@@ -6,134 +6,7 @@
### Breaking Changes ### Breaking Changes
### New Rules (4) ### New Rules (15)
- nursery/run-as-nodejs-native-module mehunhoff@google.com
- nursery/inject-shellcode-using-thread-pool-work-insertion-with-tp_io still@teamt5.org
- nursery/inject-shellcode-using-thread-pool-work-insertion-with-tp_timer still@teamt5.org
- nursery/inject-shellcode-using-thread-pool-work-insertion-with-tp_work still@teamt5.org
-
### Bug Fixes
- Fixed insecure deserialization vulnerability in YAML loading @0x1622 (#2770)
### capa Explorer Web
### capa Explorer IDA Pro plugin
### Development
- ci: deprecate macos-13 runner and use Python v3.13 for testing @mike-hunhoff #2777
### Raw diffs
- [capa v9.3.1...master](https://github.com/mandiant/capa/compare/v9.3.1...master)
- [capa-rules v9.3.1...master](https://github.com/mandiant/capa-rules/compare/v9.3.1...master)
## v9.3.1
This patch release fixes a missing import for the capa explorer plugin for IDA Pro.
### Bug Fixes
- add missing ida-netnode dependency to project.toml @mike-hunhoff #2765
### Development
- ci: bump binja min version @mike-hunhoff #2763
### Raw diffs
- [capa v9.3.0...master](https://github.com/mandiant/capa/compare/v9.3.0...master)
- [capa-rules v9.3.0...master](https://github.com/mandiant/capa-rules/compare/v9.3.0...master)
## v9.3.0
capa v9.3.0 comes with over 20 new and/or impoved rules.
For IDA users the capa explorer plugin is now available via the IDA Pro plugin repository and contains Qt compatibility layer for PyQt5 and PySide6 support.
Additionally a Binary Ninja bug has been fixed. Released binaries now include ARM64 binaries (Linux and macOS).
### New Features
- ci: add support for arm64 binary releases
- tests: run tests against IDA via idalib @williballenthin #2742
### Breaking Changes
### New Rules (24)
- anti-analysis/anti-vm/vm-detection/detect-mouse-movement-via-activity-checks-on-windows tevajdr@gmail.com
- nursery/create-executable-heap moritz.raabe@mandiant.com
- anti-analysis/packer/dxpack/packed-with-dxpack jakubjozwiak@google.com
- anti-analysis/anti-av/patch-bitdefender-hooking-dll-function jakubjozwiak@google.com
- nursery/acquire-load-driver-privileges mehunhoff@google.com
- nursery/communicate-using-ftp mehunhoff@google.com
- linking/static/eclipse-paho-mqtt-c/linked-against-eclipse-paho-mqtt-c jakubjozwiak@google.com
- linking/static/qmqtt/linked-against-qmqtt jakubjozwiak@google.com
- anti-analysis/anti-forensic/disable-powershell-transcription jakubjozwiak@google.com
- host-interaction/powershell/bypass-powershell-constrained-language-mode-via-getsystemlockdownpolicy-patch jakubjozwiak@google.com
- linking/static/grpc/linked-against-grpc jakubjozwiak@google.com
- linking/static/hp-socket/linked-against-hp-socket jakubjozwiak@google.com
- load-code/execute-jscript-via-vsaengine-in-dotnet jakubjozwiak@google.com
- linking/static/funchook/linked-against-funchook jakubjozwiak@google.com
- linking/static/plthook/linked-against-plthook jakubjozwiak@google.com
- host-interaction/network/enumerate-tcp-connections-via-wmi-com-api jakubjozwiak@google.com
- host-interaction/network/routing-table/create-routing-table-entry jakubjozwiak@google.com
- host-interaction/network/routing-table/get-routing-table michael.hunhoff@mandiant.com
- host-interaction/file-system/use-io_uring-io-interface-on-linux jakubjozwiak@google.com
- collection/keylog/log-keystrokes-via-direct-input zeze-zeze
- nursery/compiled-from-fsharp mehunhoff@google.com
- nursery/decrypt-data-using-aes-via-dotnet mehunhoff@google.com
- nursery/get-dotnet-assembly-entry-point mehunhoff@google.com
### Bug Fixes
- binja: fix a crash during feature extraction when the MLIL is unavailable @xusheng6 #2714
### capa Explorer Web
### capa Explorer IDA Pro plugin
- add `ida-plugin.json` for inclusion in the IDA Pro plugin repository @williballenthin
- ida plugin: add Qt compatibility layer for PyQt5 and PySide6 support @williballenthin #2707
- delay import to not load Qt* when running under idalib @mr-tz #2752
### Development
- ci: remove redundant "test_run" action from build workflow @mike-hunhoff #2692
- dev: add bumpmyversion to bump and sync versions across the project @mr-tz
### Raw diffs
- [capa v9.2.1...9.3.0](https://github.com/mandiant/capa/compare/v9.2.1...9.3.0)
- [capa-rules v9.2.1...9.3.0](https://github.com/mandiant/capa-rules/compare/v9.2.1...9.3.0)
## v9.2.1
This point release fixes bugs including removing an unnecessary PyInstaller warning message and enabling the standalone binary to execute on systems running older versions of glibc.
### Bug Fixes
- ci: exclude pkg_resources from PyInstaller build @mike-hunhoff #2684
- ci: downgrade Ubuntu version to accommodate older glibc versions @mike-hunhoff #2684
### Development
- ci: upgrade Windows version to avoid deprecation @mike-hunhoff #2684
- ci: check if build runs without warnings or errors @mike-hunhoff #2684
### Raw diffs
- [capa v9.2.0...v9.2.1](https://github.com/mandiant/capa/compare/v9.2.0...v9.2.1)
- [capa-rules v9.2.0...v9.2.1](https://github.com/mandiant/capa-rules/compare/v9.2.0...v9.2.1)
## v9.2.0
This release improves a few aspects of dynamic analysis, including relaxing our validation on fields across many CAPE versions and processing additional VMRay submission file types, for example.
It also includes an updated rule pack containing new rules and rule fixes.
### New Features
- vmray: do not restrict analysis to PE and ELF files, e.g. docx @mike-hunhoff #2672
### Breaking Changes
### New Rules (22)
- communication/socket/connect-socket moritz.raabe@mandiant.com joakim@intezer.com mrhafizfarhad@gmail.com - communication/socket/connect-socket moritz.raabe@mandiant.com joakim@intezer.com mrhafizfarhad@gmail.com
- communication/socket/udp/connect-udp-socket mrhafizfarhad@gmail.com - communication/socket/udp/connect-udp-socket mrhafizfarhad@gmail.com
@@ -149,23 +22,22 @@ It also includes an updated rule pack containing new rules and rule fixes.
- nursery/disable-firewall-features-via-registry-on-windows mehunhoff@google.com - nursery/disable-firewall-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-system-restore-features-via-registry-on-windows mehunhoff@google.com - nursery/disable-system-restore-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-windows-defender-features-via-registry-on-windows mehunhoff@google.com - nursery/disable-windows-defender-features-via-registry-on-windows mehunhoff@google.com
- host-interaction/file-system/write/clear-file-content jakeperalta7 -
- host-interaction/filter/unload-minifilter-driver JakePeralta7
- exploitation/enumeration/make-suspicious-ntquerysysteminformation-call zdw@google.com
- exploitation/gadgets/load-ntoskrnl zdw@google.com
- exploitation/gadgets/resolve-ntoskrnl-gadgets zdw@google.com
- exploitation/spraying/make-suspicious-ntfscontrolfile-call zdw@google.com
- anti-analysis/anti-forensic/unload-sysmon JakePeralta7
### Bug Fixes ### Bug Fixes
- cape: make some fields optional @williballenthin #2631 #2632 - cape: make some fields optional @williballenthin #2631 #2632
- lint: add WARN for regex features that contain unescaped dot #2635 - lint: add WARN for regex features that contain unescaped dot #2635
- lint: add ERROR for incomplete registry control set regex #2643 - lint: add ERROR for incomplete registry control set regex #2643
- binja: update unit test core version #2670
### capa Explorer Web
### capa Explorer IDA Pro plugin
### Development
### Raw diffs ### Raw diffs
- [capa v9.1.0...v9.2.0](https://github.com/mandiant/capa/compare/v9.1.0...v9.2.0) - [capa v9.1.0...master](https://github.com/mandiant/capa/compare/v9.1.0...master)
- [capa-rules v9.1.0...v9.2.0](https://github.com/mandiant/capa-rules/compare/v9.1.0...v9.2.0) - [capa-rules v9.1.0...master](https://github.com/mandiant/capa-rules/compare/v9.1.0...master)
## v9.1.0 ## v9.1.0

View File

@@ -315,6 +315,3 @@ If you use Ghidra, then you can use the [capa + Ghidra integration](/capa/ghidra
## capa testfiles ## capa testfiles
The [capa-testfiles repository](https://github.com/mandiant/capa-testfiles) contains the data we use to test capa's code and rules The [capa-testfiles repository](https://github.com/mandiant/capa-testfiles) contains the data we use to test capa's code and rules
## mailing list
Subscribe to the FLARE mailing list for community announcements! Email "subscribe" to [flare-external@google.com](mailto:flare-external@google.com?subject=subscribe).

View File

@@ -19,6 +19,7 @@ from binaryninja import (
Function, Function,
BinaryView, BinaryView,
SymbolType, SymbolType,
ILException,
RegisterValueType, RegisterValueType,
VariableSourceType, VariableSourceType,
LowLevelILOperation, LowLevelILOperation,
@@ -191,8 +192,9 @@ def extract_stackstring(fh: FunctionHandle):
if bv is None: if bv is None:
return return
mlil = func.mlil try:
if mlil is None: mlil = func.mlil
except ILException:
return return
for block in mlil.basic_blocks: for block in mlil.basic_blocks:

View File

@@ -18,7 +18,6 @@ import idaapi
import idautils import idautils
import capa.features.extractors.ida.helpers import capa.features.extractors.ida.helpers
from capa.features.file import FunctionName
from capa.features.common import Feature, Characteristic from capa.features.common import Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors import loops from capa.features.extractors import loops
@@ -51,22 +50,10 @@ def extract_recursive_call(fh: FunctionHandle):
yield Characteristic("recursive call"), fh.address yield Characteristic("recursive call"), fh.address
def extract_function_alternative_names(fh: FunctionHandle):
"""Get all alternative names for an address."""
for aname in capa.features.extractors.ida.helpers.get_function_alternative_names(fh.inner.start_ea):
yield FunctionName(aname), fh.address
def extract_features(fh: FunctionHandle) -> Iterator[tuple[Feature, Address]]: def extract_features(fh: FunctionHandle) -> Iterator[tuple[Feature, Address]]:
for func_handler in FUNCTION_HANDLERS: for func_handler in FUNCTION_HANDLERS:
for feature, addr in func_handler(fh): for feature, addr in func_handler(fh):
yield feature, addr yield feature, addr
FUNCTION_HANDLERS = ( FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call)
extract_function_calls_to,
extract_function_loop,
extract_recursive_call,
extract_function_alternative_names,
)

View File

@@ -20,7 +20,6 @@ import idaapi
import ida_nalt import ida_nalt
import idautils import idautils
import ida_bytes import ida_bytes
import ida_funcs
import ida_segment import ida_segment
from capa.features.address import AbsoluteVirtualAddress from capa.features.address import AbsoluteVirtualAddress
@@ -437,23 +436,3 @@ def is_basic_block_return(bb: idaapi.BasicBlock) -> bool:
def has_sib(oper: idaapi.op_t) -> bool: def has_sib(oper: idaapi.op_t) -> bool:
# via: https://reverseengineering.stackexchange.com/a/14300 # via: https://reverseengineering.stackexchange.com/a/14300
return oper.specflag1 == 1 return oper.specflag1 == 1
def get_function_alternative_names(fva: int):
"""Get all alternative names for an address."""
# Check indented comment
cmt = ida_bytes.get_cmt(fva, False) # False = non-repeatable
if cmt:
for line in cmt.split("\n"):
if line.startswith("Alternative name is '") and line.endswith("'"):
name = line[len("Alternative name is '") : -1] # Extract name between quotes
yield name
# Check function comment
func_cmt = ida_funcs.get_func_cmt(idaapi.get_func(fva), False)
if func_cmt:
for line in func_cmt.split("\n"):
if line.startswith("Alternative name is '") and line.endswith("'"):
name = line[len("Alternative name is '") : -1]
yield name

View File

@@ -22,7 +22,6 @@ import idautils
import capa.features.extractors.helpers import capa.features.extractors.helpers
import capa.features.extractors.ida.helpers import capa.features.extractors.ida.helpers
from capa.features.file import FunctionName
from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
from capa.features.common import MAX_BYTES_FEATURE_SIZE, THUNK_CHAIN_DEPTH_DELTA, Bytes, String, Feature, Characteristic from capa.features.common import MAX_BYTES_FEATURE_SIZE, THUNK_CHAIN_DEPTH_DELTA, Bytes, String, Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress from capa.features.address import Address, AbsoluteVirtualAddress
@@ -130,8 +129,8 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle)
# not a function (start) # not a function (start)
return return
name = idaapi.get_name(target_func.start_ea) if target_func.flags & idaapi.FUNC_LIB:
if target_func.flags & idaapi.FUNC_LIB or not name.startswith("sub_"): name = idaapi.get_name(target_func.start_ea)
yield API(name), ih.address yield API(name), ih.address
if name.startswith("_"): if name.startswith("_"):
# some linkers may prefix linked routines with a `_` to avoid name collisions. # some linkers may prefix linked routines with a `_` to avoid name collisions.
@@ -140,10 +139,6 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle)
# see: https://stackoverflow.com/a/2628384/87207 # see: https://stackoverflow.com/a/2628384/87207
yield API(name[1:]), ih.address yield API(name[1:]), ih.address
for altname in capa.features.extractors.ida.helpers.get_function_alternative_names(target_func.start_ea):
yield FunctionName(altname), ih.address
yield API(altname), ih.address
def extract_insn_number_features( def extract_insn_number_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle

View File

@@ -96,7 +96,14 @@ class VMRayAnalysis:
% (self.submission_name, self.submission_type) % (self.submission_name, self.submission_type)
) )
if self.submission_static is None: if self.submission_static is not None:
if self.submission_static.pe is None and self.submission_static.elf is None:
# we only support static analysis for PE and ELF files for now
raise UnsupportedFormatError(
"archive does not contain a supported file format (submission_name: %s, submission_type: %s)"
% (self.submission_name, self.submission_type)
)
else:
# VMRay may not record static analysis for certain file types, e.g. MSI, but we'd still like to match dynamic # VMRay may not record static analysis for certain file types, e.g. MSI, but we'd still like to match dynamic
# execution so we continue without and accept that the results may be incomplete # execution so we continue without and accept that the results may be incomplete
logger.warning( logger.warning(

View File

@@ -17,6 +17,7 @@ import logging
import idaapi import idaapi
import ida_kernwin import ida_kernwin
from capa.ida.plugin.form import CapaExplorerForm
from capa.ida.plugin.icon import ICON from capa.ida.plugin.icon import ICON
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -73,9 +74,6 @@ class CapaExplorerPlugin(idaapi.plugin_t):
arg (int): bitflag. Setting LSB enables automatic analysis upon arg (int): bitflag. Setting LSB enables automatic analysis upon
loading. The other bits are currently undefined. See `form.Options`. loading. The other bits are currently undefined. See `form.Options`.
""" """
# delay import to not trigger load of Qt components when not running in idaq, i.e., in idalib
from capa.ida.plugin.form import CapaExplorerForm
if not self.form: if not self.form:
self.form = CapaExplorerForm(self.PLUGIN_NAME, arg) self.form = CapaExplorerForm(self.PLUGIN_NAME, arg)
else: else:

View File

@@ -14,9 +14,9 @@
import ida_kernwin import ida_kernwin
from PyQt5 import QtCore
from capa.ida.plugin.error import UserCancelledError from capa.ida.plugin.error import UserCancelledError
from capa.ida.plugin.qt_compat import QtCore, Signal
from capa.features.extractors.ida.extractor import IdaFeatureExtractor from capa.features.extractors.ida.extractor import IdaFeatureExtractor
from capa.features.extractors.base_extractor import FunctionHandle from capa.features.extractors.base_extractor import FunctionHandle
@@ -24,7 +24,7 @@ from capa.features.extractors.base_extractor import FunctionHandle
class CapaExplorerProgressIndicator(QtCore.QObject): class CapaExplorerProgressIndicator(QtCore.QObject):
"""implement progress signal, used during feature extraction""" """implement progress signal, used during feature extraction"""
progress = Signal(str) progress = QtCore.pyqtSignal(str)
def update(self, text): def update(self, text):
"""emit progress update """emit progress update

View File

@@ -23,6 +23,7 @@ from pathlib import Path
import idaapi import idaapi
import ida_kernwin import ida_kernwin
import ida_settings import ida_settings
from PyQt5 import QtGui, QtCore, QtWidgets
import capa.main import capa.main
import capa.rules import capa.rules
@@ -50,7 +51,6 @@ from capa.ida.plugin.hooks import CapaExplorerIdaHooks
from capa.ida.plugin.model import CapaExplorerDataModel from capa.ida.plugin.model import CapaExplorerDataModel
from capa.ida.plugin.proxy import CapaExplorerRangeProxyModel, CapaExplorerSearchProxyModel from capa.ida.plugin.proxy import CapaExplorerRangeProxyModel, CapaExplorerSearchProxyModel
from capa.ida.plugin.extractor import CapaExplorerFeatureExtractor from capa.ida.plugin.extractor import CapaExplorerFeatureExtractor
from capa.ida.plugin.qt_compat import QtGui, QtCore, QtWidgets
from capa.features.extractors.base_extractor import FunctionHandle from capa.features.extractors.base_extractor import FunctionHandle
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@@ -1358,7 +1358,7 @@ class CapaExplorerForm(idaapi.PluginForm):
@param state: checked state @param state: checked state
""" """
if state: if state == QtCore.Qt.Checked:
self.limit_results_to_function(idaapi.get_func(idaapi.get_screen_ea())) self.limit_results_to_function(idaapi.get_func(idaapi.get_screen_ea()))
else: else:
self.range_model_proxy.reset_address_range_filter() self.range_model_proxy.reset_address_range_filter()
@@ -1367,7 +1367,7 @@ class CapaExplorerForm(idaapi.PluginForm):
def slot_checkbox_limit_features_by_ea(self, state): def slot_checkbox_limit_features_by_ea(self, state):
""" """ """ """
if state: if state == QtCore.Qt.Checked:
self.view_rulegen_features.filter_items_by_ea(idaapi.get_screen_ea()) self.view_rulegen_features.filter_items_by_ea(idaapi.get_screen_ea())
else: else:
self.view_rulegen_features.show_all_items() self.view_rulegen_features.show_all_items()

View File

@@ -1,38 +0,0 @@
{
"IDAMetadataDescriptorVersion": 1,
"plugin": {
"name": "capa",
"entryPoint": "capa_explorer.py",
"version": "9.3.1",
"idaVersions": ">=7.4",
"description": "Identify capabilities in executable files using FLARE's capa framework",
"license": "Apache-2.0",
"categories": [
"malware-analysis",
"api-scripting-and-automation",
"ui-ux-and-visualization"
],
"pythonDependencies": ["flare-capa==9.3.1"],
"urls": {
"repository": "https://github.com/mandiant/capa"
},
"authors": [
{"name": "Willi Ballenthin", "email": "wballenthin@hex-rays.com"},
{"name": "Moritz Raabe", "email": "moritzraabe@google.com"},
{"name": "Mike Hunhoff", "email": "mike.hunhoff@gmail.com"},
{"name": "Yacine Elhamer", "email": "elhamer.yacine@gmail.com"}
],
"keywords": [
"capability-detection",
"malware-analysis",
"behavior-analysis",
"reverse-engineering",
"att&ck",
"rule-engine",
"feature-extraction",
"yara-like-rules",
"static-analysis",
"dynamic-analysis"
]
}
}

View File

@@ -18,10 +18,10 @@ from typing import Iterator, Optional
import idc import idc
import idaapi import idaapi
from PyQt5 import QtCore
import capa.ida.helpers import capa.ida.helpers
from capa.features.address import Address, FileOffsetAddress, AbsoluteVirtualAddress from capa.features.address import Address, FileOffsetAddress, AbsoluteVirtualAddress
from capa.ida.plugin.qt_compat import QtCore, qt_get_item_flag_tristate
def info_to_name(display): def info_to_name(display):
@@ -55,7 +55,7 @@ class CapaExplorerDataItem:
self.flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable self.flags = QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
if self._can_check: if self._can_check:
self.flags = self.flags | QtCore.Qt.ItemIsUserCheckable | qt_get_item_flag_tristate() self.flags = self.flags | QtCore.Qt.ItemIsUserCheckable | QtCore.Qt.ItemIsTristate
if self.pred: if self.pred:
self.pred.appendChild(self) self.pred.appendChild(self)

View File

@@ -18,6 +18,7 @@ from collections import deque
import idc import idc
import idaapi import idaapi
from PyQt5 import QtGui, QtCore
import capa.rules import capa.rules
import capa.ida.helpers import capa.ida.helpers
@@ -41,7 +42,6 @@ from capa.ida.plugin.item import (
CapaExplorerInstructionViewItem, CapaExplorerInstructionViewItem,
) )
from capa.features.address import Address, AbsoluteVirtualAddress from capa.features.address import Address, AbsoluteVirtualAddress
from capa.ida.plugin.qt_compat import QtGui, QtCore
# default highlight color used in IDA window # default highlight color used in IDA window
DEFAULT_HIGHLIGHT = 0xE6C700 DEFAULT_HIGHLIGHT = 0xE6C700
@@ -269,7 +269,7 @@ class CapaExplorerDataModel(QtCore.QAbstractItemModel):
visited.add(child_index) visited.add(child_index)
for idx in range(self.rowCount(child_index)): for idx in range(self.rowCount(child_index)):
stack.append(self.index(idx, 0, child_index)) stack.append(child_index.child(idx, 0))
def reset_ida_highlighting(self, item, checked): def reset_ida_highlighting(self, item, checked):
"""reset IDA highlight for item """reset IDA highlight for item

View File

@@ -12,8 +12,10 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from PyQt5 import QtCore
from PyQt5.QtCore import Qt
from capa.ida.plugin.model import CapaExplorerDataModel from capa.ida.plugin.model import CapaExplorerDataModel
from capa.ida.plugin.qt_compat import Qt, QtCore
class CapaExplorerRangeProxyModel(QtCore.QSortFilterProxyModel): class CapaExplorerRangeProxyModel(QtCore.QSortFilterProxyModel):

View File

@@ -1,79 +0,0 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Qt compatibility layer for capa IDA Pro plugin.
Handles PyQt5 (IDA < 9.2) vs PySide6 (IDA >= 9.2) differences.
This module provides a unified import interface for Qt modules and handles
API changes between Qt5 and Qt6.
"""
try:
# IDA 9.2+ uses PySide6
from PySide6 import QtGui, QtCore, QtWidgets
from PySide6.QtGui import QAction
QT_LIBRARY = "PySide6"
Signal = QtCore.Signal
except ImportError:
# Older IDA versions use PyQt5
try:
from PyQt5 import QtGui, QtCore, QtWidgets
from PyQt5.QtWidgets import QAction
QT_LIBRARY = "PyQt5"
Signal = QtCore.pyqtSignal
except ImportError:
raise ImportError("Neither PySide6 nor PyQt5 is available. Cannot initialize capa IDA plugin.")
Qt = QtCore.Qt
def qt_get_item_flag_tristate():
    """
    Return the tristate item flag appropriate for the active Qt binding.

    Qt5 (PyQt5) exposes Qt.ItemIsTristate; Qt6 (PySide6) removed it in
    favor of Qt.ItemIsAutoTristate, which automatically derives a parent
    checkbox's state from its children - matching the original
    ItemIsTristate behavior.

    Returns:
        int: the flag value for the detected Qt library.

    Raises:
        AttributeError: if the tristate flag cannot be found in PySide6.
    """
    if QT_LIBRARY != "PySide6":
        # Qt5: the original ItemIsTristate flag still exists.
        return Qt.ItemIsTristate

    # Qt6: ItemIsTristate was removed, replaced with ItemIsAutoTristate.
    # its location varies slightly across PySide6 versions, so probe both.
    if hasattr(Qt, "ItemIsAutoTristate"):
        return Qt.ItemIsAutoTristate
    if hasattr(Qt, "ItemFlag") and hasattr(Qt.ItemFlag, "ItemIsAutoTristate"):
        return Qt.ItemFlag.ItemIsAutoTristate

    raise AttributeError(
        "Cannot find ItemIsAutoTristate in PySide6. "
        + "Your PySide6 version may be incompatible with capa. "
        + f"Available Qt attributes: {[attr for attr in dir(Qt) if 'Item' in attr]}"
    )
__all__ = ["qt_get_item_flag_tristate", "Signal", "QAction", "QtGui", "QtCore", "QtWidgets"]

View File

@@ -18,6 +18,7 @@ from collections import Counter
import idc import idc
import idaapi import idaapi
from PyQt5 import QtGui, QtCore, QtWidgets
import capa.rules import capa.rules
import capa.engine import capa.engine
@@ -27,7 +28,6 @@ import capa.features.basicblock
from capa.ida.plugin.item import CapaExplorerFunctionItem from capa.ida.plugin.item import CapaExplorerFunctionItem
from capa.features.address import AbsoluteVirtualAddress, _NoAddress from capa.features.address import AbsoluteVirtualAddress, _NoAddress
from capa.ida.plugin.model import CapaExplorerDataModel from capa.ida.plugin.model import CapaExplorerDataModel
from capa.ida.plugin.qt_compat import QtGui, QtCore, Signal, QAction, QtWidgets
MAX_SECTION_SIZE = 750 MAX_SECTION_SIZE = 750
@@ -147,7 +147,7 @@ def calc_item_depth(o):
def build_action(o, display, data, slot): def build_action(o, display, data, slot):
""" """ """ """
action = QAction(display, o) action = QtWidgets.QAction(display, o)
action.setData(data) action.setData(data)
action.triggered.connect(lambda checked: slot(action)) action.triggered.connect(lambda checked: slot(action))
@@ -312,7 +312,7 @@ class CapaExplorerRulegenPreview(QtWidgets.QTextEdit):
class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget): class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):
updated = Signal() updated = QtCore.pyqtSignal()
def __init__(self, preview, parent=None): def __init__(self, preview, parent=None):
""" """ """ """

View File

@@ -392,7 +392,6 @@ class ShouldExitError(Exception):
"""raised when a main-related routine indicates the program should exit.""" """raised when a main-related routine indicates the program should exit."""
def __init__(self, status_code: int): def __init__(self, status_code: int):
super().__init__(status_code)
self.status_code = status_code self.status_code = status_code

View File

@@ -274,8 +274,12 @@ SUPPORTED_FEATURES[Scope.FUNCTION].update(SUPPORTED_FEATURES[Scope.BASIC_BLOCK])
class InvalidRule(ValueError): class InvalidRule(ValueError):
def __init__(self, msg):
super().__init__()
self.msg = msg
def __str__(self): def __str__(self):
return f"invalid rule: {super().__str__()}" return f"invalid rule: {self.msg}"
def __repr__(self): def __repr__(self):
return str(self) return str(self)
@@ -285,15 +289,20 @@ class InvalidRuleWithPath(InvalidRule):
def __init__(self, path, msg): def __init__(self, path, msg):
super().__init__(msg) super().__init__(msg)
self.path = path self.path = path
self.msg = msg
self.__cause__ = None self.__cause__ = None
def __str__(self): def __str__(self):
return f"invalid rule: {self.path}: {super(InvalidRule, self).__str__()}" return f"invalid rule: {self.path}: {self.msg}"
class InvalidRuleSet(ValueError): class InvalidRuleSet(ValueError):
def __init__(self, msg):
super().__init__()
self.msg = msg
def __str__(self): def __str__(self):
return f"invalid rule set: {super().__str__()}" return f"invalid rule set: {self.msg}"
def __repr__(self): def __repr__(self):
return str(self) return str(self)
@@ -1093,15 +1102,15 @@ class Rule:
@lru_cache() @lru_cache()
def _get_yaml_loader(): def _get_yaml_loader():
try: try:
# prefer to use CLoader to be fast, see #306 / CSafeLoader is the same as CLoader but with safe loading # prefer to use CLoader to be fast, see #306
# on Linux, make sure you install libyaml-dev or similar # on Linux, make sure you install libyaml-dev or similar
# on Windows, get WHLs from pyyaml.org/pypi # on Windows, get WHLs from pyyaml.org/pypi
logger.debug("using libyaml CSafeLoader.") logger.debug("using libyaml CLoader.")
return yaml.CSafeLoader return yaml.CLoader
except Exception: except Exception:
logger.debug("unable to import libyaml CSafeLoader, falling back to Python yaml parser.") logger.debug("unable to import libyaml CLoader, falling back to Python yaml parser.")
logger.debug("this will be slower to load rules.") logger.debug("this will be slower to load rules.")
return yaml.SafeLoader return yaml.Loader
@staticmethod @staticmethod
def _get_ruamel_yaml_parser(): def _get_ruamel_yaml_parser():

View File

@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
__version__ = "9.3.1" __version__ = "9.1.0"
def get_major_version(): def get_major_version():

View File

@@ -7,7 +7,6 @@
- [ ] Review changes - [ ] Review changes
- capa https://github.com/mandiant/capa/compare/\<last-release\>...master - capa https://github.com/mandiant/capa/compare/\<last-release\>...master
- capa-rules https://github.com/mandiant/capa-rules/compare/\<last-release>\...master - capa-rules https://github.com/mandiant/capa-rules/compare/\<last-release>\...master
- [ ] Run `$ bump-my-version bump {patch/minor/major} [--allow-dirty]` to update [capa/version.py](https://github.com/mandiant/capa/blob/master/capa/version.py) and other version files
- [ ] Update [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md) - [ ] Update [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md)
- Do not forget to add a nice introduction thanking contributors - Do not forget to add a nice introduction thanking contributors
- Remember that we need a major release if we introduce breaking changes - Remember that we need a major release if we introduce breaking changes
@@ -37,6 +36,7 @@
- [capa <release>...master](https://github.com/mandiant/capa/compare/<release>...master) - [capa <release>...master](https://github.com/mandiant/capa/compare/<release>...master)
- [capa-rules <release>...master](https://github.com/mandiant/capa-rules/compare/<release>...master) - [capa-rules <release>...master](https://github.com/mandiant/capa-rules/compare/<release>...master)
``` ```
- [ ] Update [capa/version.py](https://github.com/mandiant/capa/blob/master/capa/version.py)
- [ ] Create a PR with the updated [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md) and [capa/version.py](https://github.com/mandiant/capa/blob/master/capa/version.py). Copy this checklist in the PR description. - [ ] Create a PR with the updated [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md) and [capa/version.py](https://github.com/mandiant/capa/blob/master/capa/version.py). Copy this checklist in the PR description.
- [ ] Update the [homepage](https://github.com/mandiant/capa/blob/master/web/public/index.html) (i.e. What's New section) - [ ] Update the [homepage](https://github.com/mandiant/capa/blob/master/web/public/index.html) (i.e. What's New section)
- [ ] After PR review, merge the PR and [create the release in GH](https://github.com/mandiant/capa/releases/new) using text from the [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md). - [ ] After PR review, merge the PR and [create the release in GH](https://github.com/mandiant/capa/releases/new) using text from the [CHANGELOG.md](https://github.com/mandiant/capa/blob/master/CHANGELOG.md).

View File

@@ -74,8 +74,7 @@ dependencies = [
# comments and context. # comments and context.
"pyyaml>=6", "pyyaml>=6",
"colorama>=0.4", "colorama>=0.4",
"ida-netnode>=3.0", "ida-settings>=2",
"ida-settings>=3.1.0",
"ruamel.yaml>=0.18", "ruamel.yaml>=0.18",
"pefile>=2023.2.7", "pefile>=2023.2.7",
"pyelftools>=0.31", "pyelftools>=0.31",
@@ -105,7 +104,7 @@ dependencies = [
"networkx>=3", "networkx>=3",
"dnfile>=0.17.0", "dnfile>=0.15.0",
] ]
dynamic = ["version"] dynamic = ["version"]
@@ -122,35 +121,34 @@ dev = [
# we want all developer environments to be consistent. # we want all developer environments to be consistent.
# These dependencies are not used in production environments # These dependencies are not used in production environments
# and should not conflict with other libraries/tooling. # and should not conflict with other libraries/tooling.
"pre-commit==4.5.0", "pre-commit==4.1.0",
"pytest==8.0.0", "pytest==8.0.0",
"pytest-sugar==1.1.1", "pytest-sugar==1.0.0",
"pytest-instafail==0.5.0", "pytest-instafail==0.5.0",
"flake8==7.3.0", "flake8==7.1.1",
"flake8-bugbear==25.11.29", "flake8-bugbear==24.12.12",
"flake8-encodings==0.5.1", "flake8-encodings==0.5.1",
"flake8-comprehensions==3.17.0", "flake8-comprehensions==3.16.0",
"flake8-logging-format==0.9.0", "flake8-logging-format==0.9.0",
"flake8-no-implicit-concat==0.3.5", "flake8-no-implicit-concat==0.3.5",
"flake8-print==5.0.0", "flake8-print==5.0.0",
"flake8-todos==0.3.1", "flake8-todos==0.3.1",
"flake8-simplify==0.22.0", "flake8-simplify==0.21.0",
"flake8-use-pathlib==0.3.0", "flake8-use-pathlib==0.3.0",
"flake8-copyright==0.2.4", "flake8-copyright==0.2.4",
"ruff==0.14.7", "ruff==0.11.0",
"black==25.12.0", "black==25.1.0",
"isort==6.0.0", "isort==6.0.0",
"mypy==1.17.1", "mypy==1.15.0",
"mypy-protobuf==3.6.0", "mypy-protobuf==3.6.0",
"PyGithub==2.6.0", "PyGithub==2.6.0",
"bump-my-version==1.2.4",
# type stubs for mypy # type stubs for mypy
"types-backports==0.1.3", "types-backports==0.1.3",
"types-colorama==0.4.15.11", "types-colorama==0.4.15.11",
"types-PyYAML==6.0.8", "types-PyYAML==6.0.8",
"types-psutil==7.1.3.20251202", "types-psutil==7.0.0.20250218",
"types_requests==2.32.0.20240712", "types_requests==2.32.0.20240712",
"types-protobuf==6.32.1.20250918", "types-protobuf==5.29.1.20241207",
"deptry==0.23.0" "deptry==0.23.0"
] ]
build = [ build = [
@@ -158,18 +156,16 @@ build = [
# we want all developer environments to be consistent. # we want all developer environments to be consistent.
# These dependencies are not used in production environments # These dependencies are not used in production environments
# and should not conflict with other libraries/tooling. # and should not conflict with other libraries/tooling.
"pyinstaller==6.16.0", "pyinstaller==6.12.0",
"setuptools==80.9.0", "setuptools==76.0.0",
"build==1.3.0" "build==1.2.2"
] ]
scripts = [ scripts = [
# can (optionally) be more lenient on dependencies here
# see comment on dependencies for more context
"jschema_to_python==1.2.3", "jschema_to_python==1.2.3",
"psutil==7.1.2", "psutil==7.0.0",
"stix2==3.0.1", "stix2==3.0.1",
"sarif_om==1.0.4", "sarif_om==1.0.4",
"requests>=2.32.4", "requests==2.32.3",
] ]
[tool.deptry] [tool.deptry]
@@ -201,8 +197,7 @@ known_first_party = [
"idc", "idc",
"java", "java",
"netnode", "netnode",
"PyQt5", "PyQt5"
"PySide6"
] ]
[tool.deptry.per_rule_ignores] [tool.deptry.per_rule_ignores]
@@ -210,7 +205,6 @@ known_first_party = [
DEP002 = [ DEP002 = [
"black", "black",
"build", "build",
"bump-my-version",
"deptry", "deptry",
"flake8", "flake8",
"flake8-bugbear", "flake8-bugbear",

View File

@@ -10,39 +10,38 @@ annotated-types==0.7.0
colorama==0.4.6 colorama==0.4.6
cxxfilt==0.3.0 cxxfilt==0.3.0
dncil==1.0.2 dncil==1.0.2
dnfile==0.17.0 dnfile==0.15.0
funcy==2.0 funcy==2.0
humanize==4.14.0 humanize==4.12.0
ida-netnode==3.0 ida-netnode==3.0
ida-settings==3.2.2 ida-settings==2.1.0
intervaltree==3.1.0 intervaltree==3.1.0
markdown-it-py==4.0.0 markdown-it-py==3.0.0
mdurl==0.1.2 mdurl==0.1.2
msgpack==1.0.8 msgpack==1.0.8
networkx==3.4.2 networkx==3.4.2
pefile==2024.8.26 pefile==2024.8.26
pip==25.3 pip==25.0
protobuf==6.33.1 protobuf==6.30.1
pyasn1==0.5.1 pyasn1==0.5.1
pyasn1-modules==0.3.0 pyasn1-modules==0.3.0
pycparser==2.23 pycparser==2.22
pydantic==2.12.4 pydantic==2.10.1
# pydantic pins pydantic-core, # pydantic pins pydantic-core,
# but dependabot updates these separately (which is broken) and is annoying, # but dependabot updates these separately (which is broken) and is annoying,
# so we rely on pydantic to pull in the right version of pydantic-core. # so we rely on pydantic to pull in the right version of pydantic-core.
# pydantic-core==2.23.4 # pydantic-core==2.23.4
xmltodict==1.0.2 xmltodict==0.14.2
pyelftools==0.32 pyelftools==0.32
pygments==2.19.1 pygments==2.19.1
python-flirt==0.9.2 python-flirt==0.9.2
pyyaml==6.0.2 pyyaml==6.0.2
rich==14.2.0 rich==13.9.2
ruamel-yaml==0.18.6 ruamel-yaml==0.18.6
ruamel-yaml-clib==0.2.14 ruamel-yaml-clib==0.2.8
setuptools==80.9.0 setuptools==76.0.0
six==1.17.0 six==1.17.0
sortedcontainers==2.4.0 sortedcontainers==2.4.0
viv-utils==0.8.0 viv-utils==0.8.0
vivisect==1.2.1 vivisect==1.2.1
msgspec==0.20.0 msgspec==0.19.0
bump-my-version==1.2.4

2
rules

Submodule rules updated: 6120dfb6e0...d64c2c91ea

View File

@@ -175,6 +175,8 @@ def convert_rule(rule, rulename, cround, depth):
depth += 1 depth += 1
logger.info("recursion depth: %d", depth) logger.info("recursion depth: %d", depth)
global var_names
def do_statement(s_type, kid): def do_statement(s_type, kid):
yara_strings = "" yara_strings = ""
yara_condition = "" yara_condition = ""

490
scripts/codemap.py Normal file
View File

@@ -0,0 +1,490 @@
#!/usr/bin/env python
# /// script
# requires-python = ">=3.12"
# dependencies = [
# "protobuf",
# "python-lancelot",
# "rich",
# ]
# ///
#
# TODO:
# - ignore stack cookie check
import sys
import json
import time
import logging
import argparse
import contextlib
from typing import Any
from pathlib import Path
from collections import defaultdict
from dataclasses import dataclass
import lancelot
import rich.padding
import lancelot.be2utils
import google.protobuf.message
from rich.text import Text
from rich.theme import Theme
from rich.markup import escape
from rich.console import Console
from lancelot.be2utils.binexport2_pb2 import BinExport2
logger = logging.getLogger("codemap")
@contextlib.contextmanager
def timing(msg: str):
    """Context manager that logs the wall-clock duration of the wrapped work.

    Emits a debug-level "perf: <msg>: <seconds>s" line on normal completion.
    """
    started = time.time()
    yield
    elapsed = time.time() - started
    logger.debug("perf: %s: %0.2fs", msg, elapsed)
class Renderer:
    """Thin wrapper around a rich Console that tracks an indentation level."""

    def __init__(self, console: Console):
        self.console: Console = console
        # current indentation depth; each level renders as two spaces.
        self.indent: int = 0

    @contextlib.contextmanager
    def indenting(self):
        """Temporarily deepen the indentation by one level."""
        self.indent += 1
        try:
            yield
        finally:
            self.indent -= 1

    @staticmethod
    def markup(s: str, **kwargs) -> Text:
        """Format `s` with `kwargs` and parse the result as rich markup.

        String values are escaped first so user data is never interpreted
        as markup.
        """
        safe = {key: (escape(value) if isinstance(value, str) else value) for key, value in kwargs.items()}
        return Text.from_markup(s.format(**safe))

    def print(self, renderable, **kwargs):
        """Print at the current indentation.

        With keyword arguments, `renderable` must be a string and is first
        rendered via `markup()`.
        """
        if kwargs:
            assert isinstance(renderable, str)
            return self.print(self.markup(renderable, **kwargs))
        padded = rich.padding.Padding(renderable, (0, 0, 0, self.indent * 2))
        return self.console.print(padded)

    def writeln(self, s: str):
        """Print a single line at the current indentation."""
        self.print(s)

    @contextlib.contextmanager
    def section(self, name):
        """Print a section title styled with the "title" theme style, then
        indent all nested output by one level."""
        if isinstance(name, Text):
            title = name.copy()
            title.stylize_before(self.console.get_style("title"))
            self.print(title)
        elif isinstance(name, str):
            self.print("[title]{name}", name=name)
        else:
            raise ValueError("unexpected section name")
        with self.indenting():
            yield
@dataclass
class AssemblageLocation:
    """A function location record parsed from one Assemblage JSONL line."""

    name: str
    file: str
    prototype: str
    rva: int

    @property
    def path(self):
        # strip a trailing " (...)" qualifier from the file field, if present.
        return self.file.rpartition(" (")[0] if self.file.endswith(")") else self.file

    @classmethod
    def from_dict(cls, data: dict[str, Any]):
        """Build a location from a decoded Assemblage record."""
        return cls(
            name=data["name"],
            file=data["file"],
            prototype=data["prototype"],
            rva=data["function_start"],
        )

    @staticmethod
    def from_json(doc: str):
        """Build a location from a raw Assemblage JSON document."""
        data = json.loads(doc)
        return AssemblageLocation.from_dict(data)
def main(argv: list[str] | None = None):
    """Render a human-readable "code map" from a BinExport2 file.

    The input may be a serialized BinExport2 protobuf, or a raw executable
    that is first analyzed via lancelot. Output can optionally be enriched
    with capa match results (--capa) and Assemblage function metadata
    (--assemblage).

    Args:
        argv: command line arguments; defaults to sys.argv[1:].

    Returns:
        None on success (suitable for passing to sys.exit()).
    """
    if argv is None:
        argv = sys.argv[1:]

    parser = argparse.ArgumentParser(description="Inspect BinExport2 files")
    parser.add_argument("input_file", type=Path, help="path to input file")
    parser.add_argument("--capa", type=Path, help="path to capa JSON results file")
    parser.add_argument("--assemblage", type=Path, help="path to Assemblage JSONL file")
    parser.add_argument("-d", "--debug", action="store_true", help="enable debugging output on STDERR")
    parser.add_argument("-q", "--quiet", action="store_true", help="disable all output but errors")
    args = parser.parse_args(args=argv)

    logging.basicConfig()
    if args.quiet:
        logging.getLogger().setLevel(logging.WARNING)
    elif args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.getLogger().setLevel(logging.INFO)

    theme = Theme(
        {
            "decoration": "grey54",
            "title": "yellow",
            "key": "black",
            "value": "blue",
            "default": "black",
        },
        inherit=False,
    )
    console = Console(theme=theme, markup=False, emoji=False)
    o = Renderer(console)

    be2: BinExport2
    buf: bytes
    try:
        # easiest way to determine if this is a BinExport2 proto is...
        # to just try to decode it.
        buf = args.input_file.read_bytes()
        with timing("loading BinExport2"):
            be2 = BinExport2()
            be2.ParseFromString(buf)
    except google.protobuf.message.DecodeError:
        # not a serialized BinExport2: analyze the raw executable instead.
        with timing("analyzing file"):
            input_file: Path = args.input_file
            buf = lancelot.get_binexport2_bytes_from_bytes(input_file.read_bytes())
        with timing("loading BinExport2"):
            be2 = BinExport2()
            be2.ParseFromString(buf)

    with timing("indexing BinExport2"):
        idx = lancelot.be2utils.BinExport2Index(be2)

    # capa rule names matched, keyed by containing function address.
    matches_by_function: defaultdict[int, set[str]] = defaultdict(set)
    if args.capa:
        with timing("loading capa"):
            doc = json.loads(args.capa.read_text())

            # map basic block address -> containing function address,
            # derived from capa's layout metadata.
            functions_by_basic_block: dict[int, int] = {}
            for function in doc["meta"]["analysis"]["layout"]["functions"]:
                for basic_block in function["matched_basic_blocks"]:
                    functions_by_basic_block[basic_block["address"]["value"]] = function["address"]["value"]

            matches_by_address: defaultdict[int, set[str]] = defaultdict(set)
            for rule_name, results in doc["rules"].items():
                for location, _ in results["matches"]:
                    if location["type"] != "absolute":
                        continue
                    matches_by_address[location["value"]].add(rule_name)

            for address, matches in matches_by_address.items():
                # explicit None check: a falsy (zero) address must not be
                # confused with "not found".
                if (function := functions_by_basic_block.get(address)) is not None:
                    if function in idx.thunks:
                        # forward any capa for a thunk to its target
                        # since viv may not recognize the thunk as a separate function.
                        logger.debug("forwarding capa matches from thunk 0x%x to 0x%x", function, idx.thunks[function])
                        function = idx.thunks[function]

                    matches_by_function[function].update(matches)
                    for match in matches:
                        logger.info("capa: 0x%x: %s", function, match)
                else:
                    # we don't know which function this is.
                    # hopefully it's a function recognized in our BinExport analysis.
                    # *shrug*
                    #
                    # apparently viv doesn't emit function entries for thunks?
                    # or somehow our layout is messed up.
                    if address in idx.thunks:
                        # forward any capa for a thunk to its target
                        # since viv may not recognize the thunk as a separate function.
                        logger.debug("forwarding capa matches from thunk 0x%x to 0x%x", address, idx.thunks[address])
                        address = idx.thunks[address]
                        # since we found the thunk, we know this is a BinExport-recognized function.
                        # so that's nice.
                        for match in matches:
                            logger.info("capa: 0x%x: %s", address, match)
                    else:
                        logger.warning("unknown address: 0x%x: %s", address, matches)

                    matches_by_function[address].update(matches)

    # guess the base address (which BinExport2 does not track explicitly)
    # by assuming it is the lowest mapped page.
    # default=0 guards against a file with no sections at all.
    base_address = min((section.address for section in be2.section), default=0)
    # bug fix: use the module logger, not the root logging module.
    logger.info("guessed base address: 0x%x", base_address)

    assemblage_locations_by_va: dict[int, AssemblageLocation] = {}
    if args.assemblage:
        with timing("loading assemblage"):
            with args.assemblage.open("rt", encoding="utf-8") as f:
                for line in f:
                    # skip blank lines (lines from iteration retain their newline).
                    if not line.strip():
                        continue
                    location = AssemblageLocation.from_json(line)
                    assemblage_locations_by_va[base_address + location.rva] = location

    # update function names for the in-memory BinExport2 using Assemblage data.
    # this won't affect the be2 on disk, because we don't serialize it back out.
    for address, location in assemblage_locations_by_va.items():
        if not location.name:
            continue

        # explicit None check: vertex index 0 is valid but falsy.
        if (vertex_index := idx.vertex_index_by_address.get(address)) is not None:
            be2.call_graph.vertex[vertex_index].demangled_name = location.name

    # index all the callers of each function, resolving thunks.
    # idx.callers_by_vertex_id does not resolve thunks.
    resolved_callers_by_vertex_id = defaultdict(set)
    for edge in be2.call_graph.edge:
        source_index = edge.source_vertex_index

        if lancelot.be2utils.is_thunk_vertex(be2.call_graph.vertex[source_index]):
            # we don't care about the callers that are thunks.
            continue

        if lancelot.be2utils.is_thunk_vertex(be2.call_graph.vertex[edge.target_vertex_index]):
            thunk_vertex = be2.call_graph.vertex[edge.target_vertex_index]
            thunk_address = thunk_vertex.address
            target_address = idx.thunks[thunk_address]
            target_index = idx.vertex_index_by_address[target_address]

            logger.debug(
                "call %s -(thunk)-> %s",
                idx.get_function_name_by_vertex(source_index),
                idx.get_function_name_by_vertex(target_index),
            )
        else:
            target_index = edge.target_vertex_index

            logger.debug(
                "call %s -> %s",
                idx.get_function_name_by_vertex(source_index),
                idx.get_function_name_by_vertex(target_index),
            )

        resolved_callers_by_vertex_id[target_index].add(source_index)

    t0 = time.time()

    with o.section("meta"):
        o.writeln(f"name: {be2.meta_information.executable_name}")
        o.writeln(f"sha256: {be2.meta_information.executable_id}")
        o.writeln(f"arch: {be2.meta_information.architecture_name}")
        o.writeln(f"ts: {be2.meta_information.timestamp}")

    with o.section("modules"):
        for module in be2.module:
            o.writeln(f"- {module.name}")

        if not be2.module:
            o.writeln("(none)")

    with o.section("sections"):
        for section in be2.section:
            perms = ""
            perms += "r" if section.flag_r else "-"
            perms += "w" if section.flag_w else "-"
            perms += "x" if section.flag_x else "-"

            o.writeln(f"- {hex(section.address)} {perms} {hex(section.size)}")

    with o.section("libraries"):
        for library in be2.library:
            o.writeln(
                f"- {library.name:<12s} {'(static)' if library.is_static else ''}{(' at ' + hex(library.load_address)) if library.HasField('load_address') else ''}"
            )

        if not be2.library:
            o.writeln("(none)")

    # position of each function in address order, used to compute
    # relative offsets (deltas) between callers and callees.
    vertex_order_by_address = {address: i for (i, address) in enumerate(idx.vertex_index_by_address.keys())}

    with o.section("functions"):
        last_address = None
        for _, vertex_index in idx.vertex_index_by_address.items():
            vertex = be2.call_graph.vertex[vertex_index]
            vertex_order = vertex_order_by_address[vertex.address]

            if vertex.HasField("library_index"):
                continue

            if vertex.HasField("module_index"):
                continue

            function_name = idx.get_function_name_by_vertex(vertex_index)

            if last_address:
                # emit a file separator when crossing into a new source file
                # (only known when Assemblage data is provided).
                try:
                    last_path = assemblage_locations_by_va[last_address].path
                    path = assemblage_locations_by_va[vertex.address].path
                    if last_path != path:
                        o.print(o.markup("[blue]~~~~~~~~~~~~~~~~~~~~~~~~~~~~~[/] [title]file[/] {path}\n", path=path))
                except KeyError:
                    pass
            last_address = vertex.address

            if lancelot.be2utils.is_thunk_vertex(vertex):
                with o.section(
                    o.markup(
                        "thunk [default]{function_name}[/] [decoration]@ {function_address}[/]",
                        function_name=function_name,
                        function_address=hex(vertex.address),
                    )
                ):
                    continue

            with o.section(
                o.markup(
                    "function [default]{function_name}[/] [decoration]@ {function_address}[/]",
                    function_name=function_name,
                    function_address=hex(vertex.address),
                )
            ):
                if vertex.address in idx.thunks:
                    o.writeln("")
                    continue

                # keep the xrefs separate from the calls, since they're visually hard to distinguish.
                # use local index of callers that has resolved intermediate thunks,
                # since they are sometimes stored in a physically distant location.
                for caller_index in resolved_callers_by_vertex_id.get(vertex_index, []):
                    caller_vertex = be2.call_graph.vertex[caller_index]
                    caller_order = vertex_order_by_address[caller_vertex.address]
                    caller_delta = caller_order - vertex_order

                    # NOTE(review): both branches assign the same empty string;
                    # this looks like direction-arrow glyphs lost in transit — confirm.
                    if caller_delta < 0:
                        direction = ""
                    else:
                        direction = ""

                    o.print(
                        "xref: [decoration]{direction}[/] {name} [decoration]({delta:+})[/]",
                        direction=direction,
                        name=idx.get_function_name_by_vertex(caller_index),
                        delta=caller_delta,
                    )

                if vertex.address not in idx.flow_graph_index_by_address:
                    num_basic_blocks = 0
                    num_instructions = 0
                    num_edges = 0
                    total_instruction_size = 0
                else:
                    flow_graph_index = idx.flow_graph_index_by_address[vertex.address]
                    flow_graph = be2.flow_graph[flow_graph_index]

                    num_basic_blocks = len(flow_graph.basic_block_index)
                    num_instructions = sum(
                        len(list(idx.instruction_indices(be2.basic_block[bb_idx])))
                        for bb_idx in flow_graph.basic_block_index
                    )
                    num_edges = len(flow_graph.edge)

                    total_instruction_size = 0
                    for bb_idx in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[bb_idx]
                        for _, instruction, _ in idx.basic_block_instructions(basic_block):
                            total_instruction_size += len(instruction.raw_bytes)

                o.writeln(
                    f"B/E/I: {num_basic_blocks} / {num_edges} / {num_instructions} ({total_instruction_size} bytes)"
                )

                for match in matches_by_function.get(vertex.address, []):
                    o.writeln(f"capa: {match}")

                if vertex.address in idx.flow_graph_index_by_address:
                    flow_graph_index = idx.flow_graph_index_by_address[vertex.address]
                    flow_graph = be2.flow_graph[flow_graph_index]

                    # first pass: non-library callees, with thunks resolved to
                    # their targets.
                    seen_callees = set()
                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]
                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction.call_target:
                                for call_target_address in instruction.call_target:
                                    if call_target_address in idx.thunks:
                                        call_target_address = idx.thunks[call_target_address]

                                    call_target_index = idx.vertex_index_by_address[call_target_address]
                                    call_target_vertex = be2.call_graph.vertex[call_target_index]

                                    if call_target_vertex.HasField("library_index"):
                                        continue

                                    if call_target_vertex.address in seen_callees:
                                        continue
                                    seen_callees.add(call_target_vertex.address)

                                    call_target_order = vertex_order_by_address[call_target_address]
                                    call_target_delta = call_target_order - vertex_order
                                    call_target_name = idx.get_function_name_by_address(call_target_address)

                                    # NOTE(review): identical branches — likely lost
                                    # direction-arrow glyphs; confirm.
                                    if call_target_delta < 0:
                                        direction = ""
                                    else:
                                        direction = ""

                                    o.print(
                                        "calls: [decoration]{direction}[/] {name} [decoration]({delta:+})[/]",
                                        direction=direction,
                                        name=call_target_name,
                                        delta=call_target_delta,
                                    )

                    # second pass: library (API) callees.
                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]
                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction.call_target:
                                for call_target_address in instruction.call_target:
                                    call_target_index = idx.vertex_index_by_address[call_target_address]
                                    call_target_vertex = be2.call_graph.vertex[call_target_index]

                                    if not call_target_vertex.HasField("library_index"):
                                        continue

                                    if call_target_vertex.address in seen_callees:
                                        continue
                                    seen_callees.add(call_target_vertex.address)

                                    call_target_name = idx.get_function_name_by_address(call_target_address)

                                    o.print(
                                        "api: {name}",
                                        name=call_target_name,
                                    )

                    # referenced strings, deduplicated per function.
                    seen_strings = set()
                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]
                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction_index in idx.string_reference_index_by_source_instruction_index:
                                for string_reference_index in idx.string_reference_index_by_source_instruction_index[
                                    instruction_index
                                ]:
                                    string_reference = be2.string_reference[string_reference_index]
                                    string_index = string_reference.string_table_index
                                    string = be2.string_table[string_index]

                                    if string in seen_strings:
                                        continue
                                    seen_strings.add(string)

                                    o.print(
                                        'string: [decoration]"[/]{string}[decoration]"[/]',
                                        string=string.rstrip(),
                                    )

                o.print("")

    t1 = time.time()
    logger.debug("perf: rendering BinExport2: %0.2fs", t1 - t0)
if __name__ == "__main__":
    # propagate main()'s return value as the process exit status.
    sys.exit(main())

View File

@@ -406,7 +406,6 @@ class DoesntMatchExample(Lint):
return True return True
if rule.name not in capabilities: if rule.name not in capabilities:
logger.info('rule "%s" does not match for sample %s', rule.name, example_id)
return True return True

View File

@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import logging
import contextlib import contextlib
import collections import collections
from pathlib import Path from pathlib import Path
@@ -21,7 +21,6 @@ from functools import lru_cache
import pytest import pytest
import capa.main import capa.main
import capa.helpers
import capa.features.file import capa.features.file
import capa.features.insn import capa.features.insn
import capa.features.common import capa.features.common
@@ -54,7 +53,6 @@ from capa.features.extractors.base_extractor import (
) )
from capa.features.extractors.dnfile.extractor import DnfileFeatureExtractor from capa.features.extractors.dnfile.extractor import DnfileFeatureExtractor
logger = logging.getLogger(__name__)
CD = Path(__file__).resolve().parent CD = Path(__file__).resolve().parent
DOTNET_DIR = CD / "data" / "dotnet" DOTNET_DIR = CD / "data" / "dotnet"
DNFILE_TESTFILES = DOTNET_DIR / "dnfile-testfiles" DNFILE_TESTFILES = DOTNET_DIR / "dnfile-testfiles"
@@ -202,65 +200,6 @@ def get_binja_extractor(path: Path):
return extractor return extractor
# we can't easily cache this because the extractor relies on global state (the opened database)
# which also has to be closed elsewhere. so, the idalib tests will just take a little bit to run.
def get_idalib_extractor(path: Path):
import capa.features.extractors.ida.idalib as idalib
if not idalib.has_idalib():
raise RuntimeError("cannot find IDA idalib module.")
if not idalib.load_idalib():
raise RuntimeError("failed to load IDA idalib module.")
import idapro
import ida_auto
import capa.features.extractors.ida.extractor
logger.debug("idalib: opening database...")
idapro.enable_console_messages(False)
# - 0 - Success (database not packed)
# - 1 - Success (database was packed)
# - 2 - User cancelled or 32-64 bit conversion failed
# - 4 - Database initialization failed
# - -1 - Generic errors (database already open, auto-analysis failed, etc.)
# - -2 - User cancelled operation
ret = idapro.open_database(str(path), run_auto_analysis=True)
if ret not in (0, 1):
raise RuntimeError("failed to analyze input file")
logger.debug("idalib: waiting for analysis...")
ida_auto.auto_wait()
logger.debug("idalib: opened database.")
extractor = capa.features.extractors.ida.extractor.IdaFeatureExtractor()
fixup_idalib(path, extractor)
return extractor
def fixup_idalib(path: Path, extractor):
"""
IDA fixups to overcome differences between backends
"""
import idaapi
import ida_funcs
def remove_library_id_flag(fva):
f = idaapi.get_func(fva)
f.flags &= ~ida_funcs.FUNC_LIB
ida_funcs.update_func(f)
if "kernel32-64" in path.name:
# remove (correct) library function id, so we can test x64 thunk
remove_library_id_flag(0x1800202B0)
if "al-khaser_x64" in path.name:
# remove (correct) library function id, so we can test x64 nested thunk
remove_library_id_flag(0x14004B4F0)
@lru_cache(maxsize=1) @lru_cache(maxsize=1)
def get_cape_extractor(path): def get_cape_extractor(path):
from capa.helpers import load_json_from_path from capa.helpers import load_json_from_path
@@ -955,8 +894,20 @@ FEATURE_PRESENCE_TESTS = sorted(
("mimikatz", "function=0x4556E5", capa.features.insn.API("advapi32.LsaQueryInformationPolicy"), False), ("mimikatz", "function=0x4556E5", capa.features.insn.API("advapi32.LsaQueryInformationPolicy"), False),
("mimikatz", "function=0x4556E5", capa.features.insn.API("LsaQueryInformationPolicy"), True), ("mimikatz", "function=0x4556E5", capa.features.insn.API("LsaQueryInformationPolicy"), True),
# insn/api: x64 # insn/api: x64
(
"kernel32-64",
"function=0x180001010",
capa.features.insn.API("RtlVirtualUnwind"),
True,
),
("kernel32-64", "function=0x180001010", capa.features.insn.API("RtlVirtualUnwind"), True), ("kernel32-64", "function=0x180001010", capa.features.insn.API("RtlVirtualUnwind"), True),
# insn/api: x64 thunk # insn/api: x64 thunk
(
"kernel32-64",
"function=0x1800202B0",
capa.features.insn.API("RtlCaptureContext"),
True,
),
("kernel32-64", "function=0x1800202B0", capa.features.insn.API("RtlCaptureContext"), True), ("kernel32-64", "function=0x1800202B0", capa.features.insn.API("RtlCaptureContext"), True),
# insn/api: x64 nested thunk # insn/api: x64 nested thunk
("al-khaser x64", "function=0x14004B4F0", capa.features.insn.API("__vcrt_GetModuleHandle"), True), ("al-khaser x64", "function=0x14004B4F0", capa.features.insn.API("__vcrt_GetModuleHandle"), True),
@@ -1044,20 +995,20 @@ FEATURE_PRESENCE_TESTS = sorted(
("pma16-01", "file", OS(OS_WINDOWS), True), ("pma16-01", "file", OS(OS_WINDOWS), True),
("pma16-01", "file", OS(OS_LINUX), False), ("pma16-01", "file", OS(OS_LINUX), False),
("mimikatz", "file", OS(OS_WINDOWS), True), ("mimikatz", "file", OS(OS_WINDOWS), True),
("pma16-01", "function=0x401100", OS(OS_WINDOWS), True), ("pma16-01", "function=0x404356", OS(OS_WINDOWS), True),
("pma16-01", "function=0x401100,bb=0x401130", OS(OS_WINDOWS), True), ("pma16-01", "function=0x404356,bb=0x4043B9", OS(OS_WINDOWS), True),
("mimikatz", "function=0x40105D", OS(OS_WINDOWS), True), ("mimikatz", "function=0x40105D", OS(OS_WINDOWS), True),
("pma16-01", "file", Arch(ARCH_I386), True), ("pma16-01", "file", Arch(ARCH_I386), True),
("pma16-01", "file", Arch(ARCH_AMD64), False), ("pma16-01", "file", Arch(ARCH_AMD64), False),
("mimikatz", "file", Arch(ARCH_I386), True), ("mimikatz", "file", Arch(ARCH_I386), True),
("pma16-01", "function=0x401100", Arch(ARCH_I386), True), ("pma16-01", "function=0x404356", Arch(ARCH_I386), True),
("pma16-01", "function=0x401100,bb=0x401130", Arch(ARCH_I386), True), ("pma16-01", "function=0x404356,bb=0x4043B9", Arch(ARCH_I386), True),
("mimikatz", "function=0x40105D", Arch(ARCH_I386), True), ("mimikatz", "function=0x40105D", Arch(ARCH_I386), True),
("pma16-01", "file", Format(FORMAT_PE), True), ("pma16-01", "file", Format(FORMAT_PE), True),
("pma16-01", "file", Format(FORMAT_ELF), False), ("pma16-01", "file", Format(FORMAT_ELF), False),
("mimikatz", "file", Format(FORMAT_PE), True), ("mimikatz", "file", Format(FORMAT_PE), True),
# format is also a global feature # format is also a global feature
("pma16-01", "function=0x401100", Format(FORMAT_PE), True), ("pma16-01", "function=0x404356", Format(FORMAT_PE), True),
("mimikatz", "function=0x456BB9", Format(FORMAT_PE), True), ("mimikatz", "function=0x456BB9", Format(FORMAT_PE), True),
# elf support # elf support
("7351f.elf", "file", OS(OS_LINUX), True), ("7351f.elf", "file", OS(OS_LINUX), True),

View File

@@ -70,4 +70,4 @@ def test_standalone_binja_backend():
@pytest.mark.skipif(binja_present is False, reason="Skip binja tests if the binaryninja Python API is not installed") @pytest.mark.skipif(binja_present is False, reason="Skip binja tests if the binaryninja Python API is not installed")
def test_binja_version(): def test_binja_version():
version = binaryninja.core_version_info() version = binaryninja.core_version_info()
assert version.major == 5 and version.minor == 2 assert version.major == 4 and version.minor == 2

View File

@@ -1,58 +0,0 @@
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import pytest
import fixtures
import capa.features.extractors.ida.idalib
logger = logging.getLogger(__name__)
idalib_present = capa.features.extractors.ida.idalib.has_idalib()
@pytest.mark.skipif(idalib_present is False, reason="Skip idalib tests if the idalib Python API is not installed")
@fixtures.parametrize(
"sample,scope,feature,expected",
fixtures.FEATURE_PRESENCE_TESTS + fixtures.FEATURE_SYMTAB_FUNC_TESTS,
indirect=["sample", "scope"],
)
def test_idalib_features(sample, scope, feature, expected):
try:
fixtures.do_test_feature_presence(fixtures.get_idalib_extractor, sample, scope, feature, expected)
finally:
logger.debug("closing database...")
import idapro
idapro.close_database(save=False)
logger.debug("opened database.")
@pytest.mark.skipif(idalib_present is False, reason="Skip idalib tests if the idalib Python API is not installed")
@fixtures.parametrize(
"sample,scope,feature,expected",
fixtures.FEATURE_COUNT_TESTS,
indirect=["sample", "scope"],
)
def test_idalib_feature_counts(sample, scope, feature, expected):
try:
fixtures.do_test_feature_count(fixtures.get_idalib_extractor, sample, scope, feature, expected)
finally:
logger.debug("closing database...")
import idapro
idapro.close_database(save=False)
logger.debug("closed database.")

View File

@@ -27,7 +27,7 @@
"eslint-plugin-vue": "^9.23.0", "eslint-plugin-vue": "^9.23.0",
"jsdom": "^24.1.0", "jsdom": "^24.1.0",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"vite": "^6.4.1", "vite": "^6.2.3",
"vite-plugin-singlefile": "^2.2.0", "vite-plugin-singlefile": "^2.2.0",
"vitest": "^3.0.9" "vitest": "^3.0.9"
} }
@@ -1416,20 +1416,6 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/call-bind-apply-helpers": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/callsites": { "node_modules/callsites": {
"version": "3.1.0", "version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -1660,21 +1646,6 @@
"node": ">=6.0.0" "node": ">=6.0.0"
} }
}, },
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
"es-errors": "^1.3.0",
"gopd": "^1.2.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/eastasianwidth": { "node_modules/eastasianwidth": {
"version": "0.2.0", "version": "0.2.0",
"resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
@@ -1740,26 +1711,6 @@
"url": "https://github.com/fb55/entities?sponsor=1" "url": "https://github.com/fb55/entities?sponsor=1"
} }
}, },
"node_modules/es-define-property": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-module-lexer": { "node_modules/es-module-lexer": {
"version": "1.6.0", "version": "1.6.0",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz", "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz",
@@ -1767,35 +1718,6 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/es-object-atoms": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-set-tostringtag": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.6",
"has-tostringtag": "^1.0.2",
"hasown": "^2.0.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/esbuild": { "node_modules/esbuild": {
"version": "0.25.1", "version": "0.25.1",
"resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz",
@@ -2186,16 +2108,13 @@
} }
}, },
"node_modules/form-data": { "node_modules/form-data": {
"version": "4.0.4", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==", "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
"dev": true, "dev": true,
"license": "MIT",
"dependencies": { "dependencies": {
"asynckit": "^0.4.0", "asynckit": "^0.4.0",
"combined-stream": "^1.0.8", "combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12" "mime-types": "^2.1.12"
}, },
"engines": { "engines": {
@@ -2222,61 +2141,11 @@
"node": "^8.16.0 || ^10.6.0 || >=11.0.0" "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
} }
}, },
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
"es-object-atoms": "^1.1.1",
"function-bind": "^1.1.2",
"get-proto": "^1.0.1",
"gopd": "^1.2.0",
"has-symbols": "^1.1.0",
"hasown": "^2.0.2",
"math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"dev": true,
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
"es-object-atoms": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/glob": { "node_modules/glob": {
"version": "10.5.0", "version": "10.4.2",
"resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.2.tgz",
"integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", "integrity": "sha512-GwMlUF6PkPo3Gk21UxkCohOv0PLcIXVtKyLlpEI28R/cO/4eNOdmLk3CMW1wROV/WR/EsZOWAfBbBOqYvs88/w==",
"dev": true, "dev": true,
"license": "ISC",
"dependencies": { "dependencies": {
"foreground-child": "^3.1.0", "foreground-child": "^3.1.0",
"jackspeak": "^3.1.2", "jackspeak": "^3.1.2",
@@ -2288,6 +2157,9 @@
"bin": { "bin": {
"glob": "dist/esm/bin.mjs" "glob": "dist/esm/bin.mjs"
}, },
"engines": {
"node": ">=16 || 14 >=14.18"
},
"funding": { "funding": {
"url": "https://github.com/sponsors/isaacs" "url": "https://github.com/sponsors/isaacs"
} }
@@ -2343,19 +2215,6 @@
"url": "https://github.com/sponsors/sindresorhus" "url": "https://github.com/sponsors/sindresorhus"
} }
}, },
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/graphemer": { "node_modules/graphemer": {
"version": "1.4.0", "version": "1.4.0",
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz", "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
@@ -2371,48 +2230,6 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/has-symbols": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/has-tostringtag": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/highlight.js": { "node_modules/highlight.js": {
"version": "11.9.0", "version": "11.9.0",
"resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.9.0.tgz", "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.9.0.tgz",
@@ -2639,11 +2456,10 @@
} }
}, },
"node_modules/js-yaml": { "node_modules/js-yaml": {
"version": "4.1.1", "version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.1.tgz", "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-qQKT4zQxXl8lLwBtHMWwaTcGfFOZviOJet3Oy/xmGk2gZH677CJM9EvtfdSkgWcATZhj/55JZ0rmy3myCT5lsA==", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dev": true, "dev": true,
"license": "MIT",
"dependencies": { "dependencies": {
"argparse": "^2.0.1" "argparse": "^2.0.1"
}, },
@@ -2792,16 +2608,6 @@
"@jridgewell/sourcemap-codec": "^1.5.0" "@jridgewell/sourcemap-codec": "^1.5.0"
} }
}, },
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/micromatch": { "node_modules/micromatch": {
"version": "4.0.8", "version": "4.0.8",
"resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
@@ -3620,51 +3426,6 @@
"dev": true, "dev": true,
"license": "MIT" "license": "MIT"
}, },
"node_modules/tinyglobby": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz",
"integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==",
"dev": true,
"license": "MIT",
"dependencies": {
"fdir": "^6.4.4",
"picomatch": "^4.0.2"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://github.com/sponsors/SuperchupuDev"
}
},
"node_modules/tinyglobby/node_modules/fdir": {
"version": "6.4.4",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
"integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
"dev": true,
"license": "MIT",
"peerDependencies": {
"picomatch": "^3 || ^4"
},
"peerDependenciesMeta": {
"picomatch": {
"optional": true
}
}
},
"node_modules/tinyglobby/node_modules/picomatch": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/tinypool": { "node_modules/tinypool": {
"version": "1.0.2", "version": "1.0.2",
"resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz", "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz",
@@ -3800,18 +3561,15 @@
"dev": true "dev": true
}, },
"node_modules/vite": { "node_modules/vite": {
"version": "6.4.1", "version": "6.2.3",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.3.tgz",
"integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", "integrity": "sha512-IzwM54g4y9JA/xAeBPNaDXiBF8Jsgl3VBQ2YQ/wOY6fyW3xMdSoltIV3Bo59DErdqdE6RxUfv8W69DvUorE4Eg==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"esbuild": "^0.25.0", "esbuild": "^0.25.0",
"fdir": "^6.4.4",
"picomatch": "^4.0.2",
"postcss": "^8.5.3", "postcss": "^8.5.3",
"rollup": "^4.34.9", "rollup": "^4.30.1"
"tinyglobby": "^0.2.13"
}, },
"bin": { "bin": {
"vite": "bin/vite.js" "vite": "bin/vite.js"
@@ -3914,34 +3672,6 @@
"vite": "^5.4.11 || ^6.0.0" "vite": "^5.4.11 || ^6.0.0"
} }
}, },
"node_modules/vite/node_modules/fdir": {
"version": "6.4.4",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
"integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
"dev": true,
"license": "MIT",
"peerDependencies": {
"picomatch": "^3 || ^4"
},
"peerDependenciesMeta": {
"picomatch": {
"optional": true
}
}
},
"node_modules/vite/node_modules/picomatch": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/vitest": { "node_modules/vitest": {
"version": "3.0.9", "version": "3.0.9",
"resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz", "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz",

View File

@@ -33,7 +33,7 @@
"eslint-plugin-vue": "^9.23.0", "eslint-plugin-vue": "^9.23.0",
"jsdom": "^24.1.0", "jsdom": "^24.1.0",
"prettier": "^3.2.5", "prettier": "^3.2.5",
"vite": "^6.4.1", "vite": "^6.2.3",
"vite-plugin-singlefile": "^2.2.0", "vite-plugin-singlefile": "^2.2.0",
"vitest": "^3.0.9" "vitest": "^3.0.9"
} }

View File

@@ -210,31 +210,35 @@
<div class="row flex-lg-row-reverse align-items-center g-5"> <div class="row flex-lg-row-reverse align-items-center g-5">
<h1>What's New</h1> <h1>What's New</h1>
<h2 class="mt-3">Rule Updates</h2>
<ul class="mt-2 ps-5">
<!-- TODO(williballenthin): add date -->
<li>
added:
<a href="./rules/change registry key timestamp/">
change registry key timestamp
</a>
</li>
<li>
added:
<a href="./rules/check mutex and terminate process on windows/">
check mutex and terminate process on Windows
</a>
</li>
<li>
added:
<a href="./rules/clear windows event logs remotely/">
clear windows event logs remotely
</a>
</li>
</ul>
<h2 class="mt-3">Tool Updates</h2> <h2 class="mt-3">Tool Updates</h2>
<h3 class="mt-2">v9.3.1 (<em>2025-11-19</em>)</h3>
<p class="mt-0">
This patch release fixes a missing import for the capa explorer plugin for IDA Pro.
</p>
<h3 class="mt-2">v9.3.0 (<em>2025-11-12</em>)</h3>
<p class="mt-0">
capa v9.3.0 comes with over 20 new and/or impoved rules.
For IDA users the capa explorer plugin is now available via the IDA Pro plugin repository and contains Qt compatibility layer for PyQt5 and PySide6 support.
Additionally a Binary Ninja bug has been fixed. Released binaries now include ARM64 binaries (Linux and macOS).
</p>
<h3 class="mt-2">v9.2.1 (<em>2025-06-06</em>)</h3>
<p class="mt-0">
This point release fixes bugs including removing an unnecessary PyInstaller warning message and enabling the standalone binary to execute on systems running older versions of glibc.
</p>
<h3 class="mt-2">v9.2.0 (<em>2025-06-03</em>)</h3>
<p class="mt-0">
This release improves a few aspects of dynamic analysis, including relaxing our validation on fields across many CAPE versions and processing additional VMRay submission file types, for example.
It also includes an updated rule pack containing new rules and rule fixes.
</p>
<h3 class="mt-2">v9.1.0 (<em>2025-03-02</em>)</h3> <h3 class="mt-2">v9.1.0 (<em>2025-03-02</em>)</h3>
<p class="mt-0"> <p class="mt-0">
This release improves a few aspects of dynamic analysis, relaxing our validation on fields across many CAPE versions, for example. This release improves a few aspects of dynamic analysis, relaxing our validation on fields across many CAPE versions, for example.