mirror of
https://github.com/mandiant/capa.git
synced 2025-12-12 07:40:38 -08:00
Compare commits
62 Commits
add-codema ... copilot/fi
SHA1
eb6c0bd719
aa02a46f33
1a5f50195a
aafca2e00a
3a24fabeb6
2f81bb79f9
fc83b7b0a1
d430aea04e
1eb42599cf
618ae2111b
42b6d8106a
78a020e1ac
a80f85aab4
f94f554d15
d456d52e81
2a18b08a80
dd2e350a1a
164a7bdfb5
d7c896bbc6
8185ac4dde
92a6ddff99
af87fae036
c774db26f0
eb0afc806e
9a09f667bf
15a1dc3409
a18fe34d01
edcea18c52
92f0306f96
f2ed75c339
6e18657ca7
8ba48d11d0
d6f442b5bd
0da5d7c5b5
fa5d9a9302
30fb4751f6
a8eab7ddf0
5ad1dda918
eabb2cc809
a34c3ecc57
d22de5cf7f
8f78834cae
08dbb0e02d
98725c52dc
eb87153064
56aa7176b0
8b41671409
5dbbc2b468
96d1eb64c3
9234b33051
51f5114ad7
4b72f8a872
8206a97b0f
5a33b4b2a8
fcfdeec377
37a63a751c
3a9f2136bb
390e2a6315
6a43084915
6d7ca57fa9
d1090e8391
b07efe773b
3  .github/pyinstaller/pyinstaller.spec  vendored

@@ -74,6 +74,9 @@ a = Analysis(
        # only be installed locally.
        "binaryninja",
        "ida",
        # remove once https://github.com/mandiant/capa/issues/2681 has
        # been addressed by PyInstaller
        "pkg_resources",
    ],
)
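For context, the lines above extend the `excludes` parameter of PyInstaller's `Analysis` object, which names modules that must not be bundled into the standalone binary. A minimal spec sketch is shown below (the entry-point name and the PYZ/EXE wiring are illustrative, not capa's actual spec); PyInstaller injects `Analysis`, `PYZ`, and `EXE` into the spec's namespace when it executes the file.

```python
# minimal PyInstaller spec sketch (illustrative; run with `pyinstaller example.spec`)
a = Analysis(
    ["main.py"],  # entry point (hypothetical)
    excludes=[
        # optional backends that should only be installed locally
        "binaryninja",
        "ida",
        # work around the PyInstaller warning tracked in mandiant/capa#2681
        "pkg_resources",
    ],
)
pyz = PYZ(a.pure)
exe = EXE(pyz, a.scripts, a.binaries, a.datas, [], name="example")
```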
79  .github/workflows/build.yml  vendored

@@ -9,6 +9,7 @@ on:
      - '**.md'
  release:
    types: [edited, published]
  workflow_dispatch: # manual trigger for testing

permissions:
  contents: write
@@ -22,24 +23,38 @@ jobs:
      fail-fast: true
      matrix:
        include:
          - os: ubuntu-20.04
          - os: ubuntu-22.04
            # use old linux so that the shared library versioning is more portable
            artifact_name: capa
            asset_name: linux
            python_version: '3.10'
          - os: ubuntu-20.04
          - os: ubuntu-22.04-arm
            artifact_name: capa
            asset_name: linux-arm64
            python_version: '3.10'
          - os: ubuntu-22.04
            artifact_name: capa
            asset_name: linux-py312
            python_version: '3.12'
          - os: windows-2019
          - os: windows-2022
            artifact_name: capa.exe
            asset_name: windows
            python_version: '3.10'
            # Windows 11 ARM64 complains of conflicting package version
            # Additionally, there is no ARM64 build of Python for Python 3.10 on Windows 11 ARM: https://raw.githubusercontent.com/actions/python-versions/main/versions-manifest.json
          #- os: windows-11-arm
          #  artifact_name: capa.exe
          #  asset_name: windows-arm64
          #  python_version: '3.12'
          - os: macos-13
            # use older macOS for assumed better portability
            artifact_name: capa
            asset_name: macos
            python_version: '3.10'
          - os: macos-14
            artifact_name: capa
            asset_name: macos-arm64
            python_version: '3.10'
    steps:
      - name: Checkout capa
        uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
@@ -49,7 +64,7 @@ jobs:
        uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
        with:
          python-version: ${{ matrix.python_version }}
      - if: matrix.os == 'ubuntu-20.04'
      - if: matrix.os == 'ubuntu-22.04' || matrix.os == 'ubuntu-22.04-arm'
        run: sudo apt-get install -y libyaml-dev
      - name: Upgrade pip, setuptools
        run: python -m pip install --upgrade pip setuptools
@@ -59,6 +74,28 @@
          pip install -e .[build]
      - name: Build standalone executable
        run: pyinstaller --log-level DEBUG .github/pyinstaller/pyinstaller.spec
      - name: Does it run without warnings or errors?
        shell: bash
        run: |
          if [[ "${{ matrix.os }}" == "windows-2022" ]] || [[ "${{ matrix.os }}" == "windows-11-arm" ]]; then
            EXECUTABLE=".\\dist\\capa"
          else
            EXECUTABLE="./dist/capa"
          fi

          output=$(${EXECUTABLE} --version 2>&1)
          exit_code=$?

          echo "${output}"
          echo "${exit_code}"

          if echo "${output}" | grep -iE 'error|warning'; then
            exit 1
          fi

          if [[ "${exit_code}" -ne 0 ]]; then
            exit 1
          fi
      - name: Does it run (PE)?
        run: dist/capa -d "tests/data/Practical Malware Analysis Lab 01-01.dll_"
      - name: Does it run (Shellcode)?
@@ -74,34 +111,6 @@ jobs:
          name: ${{ matrix.asset_name }}
          path: dist/${{ matrix.artifact_name }}

  test_run:
    name: Test run on ${{ matrix.os }} / ${{ matrix.asset_name }}
    runs-on: ${{ matrix.os }}
    needs: [build]
    strategy:
      matrix:
        include:
          # OSs not already tested above
          - os: ubuntu-22.04
            artifact_name: capa
            asset_name: linux
          - os: ubuntu-22.04
            artifact_name: capa
            asset_name: linux-py312
          - os: windows-2022
            artifact_name: capa.exe
            asset_name: windows
    steps:
      - name: Download ${{ matrix.asset_name }}
        uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
        with:
          name: ${{ matrix.asset_name }}
      - name: Set executable flag
        if: matrix.os != 'windows-2022'
        run: chmod +x ${{ matrix.artifact_name }}
      - name: Run capa
        run: ./${{ matrix.artifact_name }} -h

  zip_and_upload:
    # upload zipped binaries to Release page
    if: github.event_name == 'release'
@@ -113,12 +122,18 @@ jobs:
        include:
          - asset_name: linux
            artifact_name: capa
          - asset_name: linux-arm64
            artifact_name: capa
          - asset_name: linux-py312
            artifact_name: capa
          - asset_name: windows
            artifact_name: capa.exe
          #- asset_name: windows-arm64
          #  artifact_name: capa.exe
          - asset_name: macos
            artifact_name: capa
          - asset_name: macos-arm64
            artifact_name: capa
    steps:
      - name: Download ${{ matrix.asset_name }}
        uses: actions/download-artifact@eaceaf801fd36c7dee90939fad912460b18a1ffe # v4.1.2
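The new "Does it run without warnings or errors?" step above captures the standalone binary's `--version` output and fails the job if that output mentions an error or warning, or if the exit code is non-zero. A rough local equivalent in Python, offered only as a sketch (the binary path is an assumption), would be:

```python
# rough local equivalent of the CI smoke-test step above (binary path is an assumption)
import re
import subprocess

proc = subprocess.run(["./dist/capa", "--version"], capture_output=True, text=True)
output = proc.stdout + proc.stderr

print(output)
print(proc.returncode)

# mirror the workflow's checks: any "error"/"warning" in the output, or a non-zero exit, fails
if re.search(r"error|warning", output, re.IGNORECASE) or proc.returncode != 0:
    raise SystemExit(1)
```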
2  .github/workflows/publish.yml  vendored

@@ -35,7 +35,7 @@ jobs:
        with:
          path: dist/*
      - name: publish package
        uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 # release/v1
        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # release/v1.12.4
        with:
          skip-existing: true
          verbose: true
12  .github/workflows/tests.yml  vendored

@@ -88,16 +88,16 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-20.04, windows-2019, macos-13]
        os: [ubuntu-22.04, windows-2022, macos-13]
        # across all operating systems
        python-version: ["3.10", "3.11"]
        include:
          # on Ubuntu run these as well
          - os: ubuntu-20.04
          - os: ubuntu-22.04
            python-version: "3.10"
          - os: ubuntu-20.04
          - os: ubuntu-22.04
            python-version: "3.11"
          - os: ubuntu-20.04
          - os: ubuntu-22.04
            python-version: "3.12"
    steps:
      - name: Checkout capa with submodules
@@ -109,7 +109,7 @@ jobs:
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install pyyaml
        if: matrix.os == 'ubuntu-20.04'
        if: matrix.os == 'ubuntu-22.04'
        run: sudo apt-get install -y libyaml-dev
      - name: Install capa
        run: |
@@ -168,7 +168,7 @@ jobs:

  ghidra-tests:
    name: Ghidra tests for ${{ matrix.python-version }}
    runs-on: ubuntu-20.04
    runs-on: ubuntu-22.04
    needs: [tests]
    strategy:
      fail-fast: false
81  CHANGELOG.md

@@ -3,10 +3,70 @@

## master (unreleased)

### New Features
- ci: add support for arm64 binary releases

### Breaking Changes

### New Rules (15)
### New Rules (14)

- anti-analysis/anti-vm/vm-detection/detect-mouse-movement-via-activity-checks-on-windows tevajdr@gmail.com
- nursery/create-executable-heap moritz.raabe@mandiant.com
- anti-analysis/packer/dxpack/packed-with-dxpack jakubjozwiak@google.com
- anti-analysis/anti-av/patch-bitdefender-hooking-dll-function jakubjozwiak@google.com
- nursery/acquire-load-driver-privileges mehunhoff@google.com
- nursery/communicate-using-ftp mehunhoff@google.com
- linking/static/eclipse-paho-mqtt-c/linked-against-eclipse-paho-mqtt-c jakubjozwiak@google.com
- linking/static/qmqtt/linked-against-qmqtt jakubjozwiak@google.com
- anti-analysis/anti-forensic/disable-powershell-transcription jakubjozwiak@google.com
- host-interaction/powershell/bypass-powershell-constrained-language-mode-via-getsystemlockdownpolicy-patch jakubjozwiak@google.com
- linking/static/grpc/linked-against-grpc jakubjozwiak@google.com
- linking/static/hp-socket/linked-against-hp-socket jakubjozwiak@google.com
- load-code/execute-jscript-via-vsaengine-in-dotnet jakubjozwiak@google.com
-

### Bug Fixes

### capa Explorer Web

### capa Explorer IDA Pro plugin

### Development

- ci: remove redundant "test_run" action from build workflow @mike-hunhoff #2692

### Raw diffs
- [capa v9.2.1...master](https://github.com/mandiant/capa/compare/v9.2.1...master)
- [capa-rules v9.2.1...master](https://github.com/mandiant/capa-rules/compare/v9.2.1...master)

## v9.2.1

This point release fixes bugs including removing an unnecessary PyInstaller warning message and enabling the standalone binary to execute on systems running older versions of glibc.

### Bug Fixes

- ci: exclude pkg_resources from PyInstaller build @mike-hunhoff #2684
- ci: downgrade Ubuntu version to accommodate older glibc versions @mike-hunhoff #2684

### Development

- ci: upgrade Windows version to avoid deprecation @mike-hunhoff #2684
- ci: check if build runs without warnings or errors @mike-hunhoff #2684

### Raw diffs

- [capa v9.2.0...v9.2.1](https://github.com/mandiant/capa/compare/v9.2.0...v9.2.1)
- [capa-rules v9.2.0...v9.2.1](https://github.com/mandiant/capa-rules/compare/v9.2.0...v9.2.1)

## v9.2.0

This release improves a few aspects of dynamic analysis, including relaxing our validation on fields across many CAPE versions and processing additional VMRay submission file types, for example.
It also includes an updated rule pack containing new rules and rule fixes.

### New Features
- vmray: do not restrict analysis to PE and ELF files, e.g. docx @mike-hunhoff #2672

### Breaking Changes

### New Rules (22)

- communication/socket/connect-socket moritz.raabe@mandiant.com joakim@intezer.com mrhafizfarhad@gmail.com
- communication/socket/udp/connect-udp-socket mrhafizfarhad@gmail.com
@@ -22,22 +82,23 @@
- nursery/disable-firewall-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-system-restore-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-windows-defender-features-via-registry-on-windows mehunhoff@google.com
-
- host-interaction/file-system/write/clear-file-content jakeperalta7
- host-interaction/filter/unload-minifilter-driver JakePeralta7
- exploitation/enumeration/make-suspicious-ntquerysysteminformation-call zdw@google.com
- exploitation/gadgets/load-ntoskrnl zdw@google.com
- exploitation/gadgets/resolve-ntoskrnl-gadgets zdw@google.com
- exploitation/spraying/make-suspicious-ntfscontrolfile-call zdw@google.com
- anti-analysis/anti-forensic/unload-sysmon JakePeralta7

### Bug Fixes
- cape: make some fields optional @williballenthin #2631 #2632
- lint: add WARN for regex features that contain unescaped dot #2635
- lint: add ERROR for incomplete registry control set regex #2643

### capa Explorer Web

### capa Explorer IDA Pro plugin

### Development
- binja: update unit test core version #2670

### Raw diffs
- [capa v9.1.0...master](https://github.com/mandiant/capa/compare/v9.1.0...master)
- [capa-rules v9.1.0...master](https://github.com/mandiant/capa-rules/compare/v9.1.0...master)
- [capa v9.1.0...v9.2.0](https://github.com/mandiant/capa/compare/v9.1.0...v9.2.0)
- [capa-rules v9.1.0...v9.2.0](https://github.com/mandiant/capa-rules/compare/v9.1.0...v9.2.0)

## v9.1.0
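One of the lint additions listed above (#2635) warns about regex features that contain an unescaped dot. The pitfall it guards against is that `.` matches any character, so a pattern meant to match a literal filename can silently over-match; a self-contained illustration (the strings below are hypothetical, not taken from a capa rule):

```python
import re

# an unescaped dot matches any character, so unrelated strings also match
assert re.search("capa.exe", "capaXexe")

# escaping the dot restricts the match to the literal filename
assert not re.search(r"capa\.exe", "capaXexe")
assert re.search(r"capa\.exe", "capa.exe")
```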
@@ -197,6 +197,9 @@ def extract_stackstring(fh: FunctionHandle):
    except ILException:
        return

    if mlil is None:
        return

    for block in mlil.basic_blocks:
        if bb_contains_stackstring(func, block):
            yield Characteristic("stack string"), block.source_block.start
@@ -96,14 +96,7 @@ class VMRayAnalysis:
                % (self.submission_name, self.submission_type)
            )

        if self.submission_static is not None:
            if self.submission_static.pe is None and self.submission_static.elf is None:
                # we only support static analysis for PE and ELF files for now
                raise UnsupportedFormatError(
                    "archive does not contain a supported file format (submission_name: %s, submission_type: %s)"
                    % (self.submission_name, self.submission_type)
                )
        else:
        if self.submission_static is None:
            # VMRay may not record static analysis for certain file types, e.g. MSI, but we'd still like to match dynamic
            # execution so we continue without and accept that the results may be incomplete
            logger.warning(
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "9.1.0"
__version__ = "9.2.1"


def get_major_version():
@@ -121,11 +121,11 @@ dev = [
    # we want all developer environments to be consistent.
    # These dependencies are not used in production environments
    # and should not conflict with other libraries/tooling.
    "pre-commit==4.1.0",
    "pre-commit==4.2.0",
    "pytest==8.0.0",
    "pytest-sugar==1.0.0",
    "pytest-instafail==0.5.0",
    "flake8==7.1.1",
    "flake8==7.3.0",
    "flake8-bugbear==24.12.12",
    "flake8-encodings==0.5.1",
    "flake8-comprehensions==3.16.0",
@@ -133,13 +133,13 @@ dev = [
    "flake8-no-implicit-concat==0.3.5",
    "flake8-print==5.0.0",
    "flake8-todos==0.3.1",
    "flake8-simplify==0.21.0",
    "flake8-simplify==0.22.0",
    "flake8-use-pathlib==0.3.0",
    "flake8-copyright==0.2.4",
    "ruff==0.11.0",
    "ruff==0.12.0",
    "black==25.1.0",
    "isort==6.0.0",
    "mypy==1.15.0",
    "mypy==1.17.1",
    "mypy-protobuf==3.6.0",
    "PyGithub==2.6.0",
    # type stubs for mypy
@@ -148,7 +148,7 @@ dev = [
    "types-PyYAML==6.0.8",
    "types-psutil==7.0.0.20250218",
    "types_requests==2.32.0.20240712",
    "types-protobuf==5.29.1.20241207",
    "types-protobuf==6.30.2.20250516",
    "deptry==0.23.0"
]
build = [
@@ -156,8 +156,8 @@ build = [
    # we want all developer environments to be consistent.
    # These dependencies are not used in production environments
    # and should not conflict with other libraries/tooling.
    "pyinstaller==6.12.0",
    "setuptools==76.0.0",
    "pyinstaller==6.14.1",
    "setuptools==80.9.0",
    "build==1.2.2"
]
scripts = [
@@ -10,7 +10,7 @@ annotated-types==0.7.0
colorama==0.4.6
cxxfilt==0.3.0
dncil==1.0.2
dnfile==0.15.0
dnfile==0.16.4
funcy==2.0
humanize==4.12.0
ida-netnode==3.0
@@ -21,12 +21,12 @@ mdurl==0.1.2
msgpack==1.0.8
networkx==3.4.2
pefile==2024.8.26
pip==25.0
protobuf==6.30.1
pip==25.1.1
protobuf==6.31.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.22
pydantic==2.10.1
pydantic==2.11.4
# pydantic pins pydantic-core,
# but dependabot updates these separately (which is broken) and is annoying,
# so we rely on pydantic to pull in the right version of pydantic-core.
@@ -36,10 +36,10 @@ pyelftools==0.32
pygments==2.19.1
python-flirt==0.9.2
pyyaml==6.0.2
rich==13.9.2
rich==14.0.0
ruamel-yaml==0.18.6
ruamel-yaml-clib==0.2.8
setuptools==76.0.0
setuptools==80.9.0
six==1.17.0
sortedcontainers==2.4.0
viv-utils==0.8.0
2  rules

Submodule rules updated: d64c2c91ea...13e8622f8a
@@ -175,8 +175,6 @@ def convert_rule(rule, rulename, cround, depth):
    depth += 1
    logger.info("recursion depth: %d", depth)

    global var_names

    def do_statement(s_type, kid):
        yara_strings = ""
        yara_condition = ""
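The hunk above removes a `global var_names` declaration. In Python, `global` is only required when a function rebinds a module-level name; merely mutating a module-level container works without it, as this self-contained illustration shows (the names are illustrative, not capa2yara's actual data):

```python
# illustration: mutating a module-level container does not require `global`
var_names: dict[str, int] = {}


def record(name: str) -> None:
    # no `global var_names` needed: the dict is mutated, the name is never rebound
    var_names[name] = len(var_names)


record("rule_0")
record("rule_1")
assert var_names == {"rule_0": 0, "rule_1": 1}
```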
@@ -1,490 +0,0 @@
#!/usr/bin/env python
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "protobuf",
#     "python-lancelot",
#     "rich",
# ]
# ///
#
# TODO:
#   - ignore stack cookie check
import sys
import json
import time
import logging
import argparse
import contextlib
from typing import Any
from pathlib import Path
from collections import defaultdict
from dataclasses import dataclass

import lancelot
import rich.padding
import lancelot.be2utils
import google.protobuf.message
from rich.text import Text
from rich.theme import Theme
from rich.markup import escape
from rich.console import Console
from lancelot.be2utils.binexport2_pb2 import BinExport2

logger = logging.getLogger("codemap")


@contextlib.contextmanager
def timing(msg: str):
    t0 = time.time()
    yield
    t1 = time.time()
    logger.debug("perf: %s: %0.2fs", msg, t1 - t0)


class Renderer:
    def __init__(self, console: Console):
        self.console: Console = console
        self.indent: int = 0

    @contextlib.contextmanager
    def indenting(self):
        self.indent += 1
        try:
            yield
        finally:
            self.indent -= 1

    @staticmethod
    def markup(s: str, **kwargs) -> Text:
        escaped_args = {k: (escape(v) if isinstance(v, str) else v) for k, v in kwargs.items()}
        return Text.from_markup(s.format(**escaped_args))

    def print(self, renderable, **kwargs):
        if not kwargs:
            return self.console.print(rich.padding.Padding(renderable, (0, 0, 0, self.indent * 2)))

        assert isinstance(renderable, str)
        return self.print(self.markup(renderable, **kwargs))

    def writeln(self, s: str):
        self.print(s)

    @contextlib.contextmanager
    def section(self, name):
        if isinstance(name, str):
            self.print("[title]{name}", name=name)
        elif isinstance(name, Text):
            name = name.copy()
            name.stylize_before(self.console.get_style("title"))
            self.print(name)
        else:
            raise ValueError("unexpected section name")

        with self.indenting():
            yield


@dataclass
class AssemblageLocation:
    name: str
    file: str
    prototype: str
    rva: int

    @property
    def path(self):
        if not self.file.endswith(")"):
            return self.file

        return self.file.rpartition(" (")[0]

    @classmethod
    def from_dict(cls, data: dict[str, Any]):
        return cls(
            name=data["name"],
            file=data["file"],
            prototype=data["prototype"],
            rva=data["function_start"],
        )

    @staticmethod
    def from_json(doc: str):
        return AssemblageLocation.from_dict(json.loads(doc))


def main(argv: list[str] | None = None):
    if argv is None:
        argv = sys.argv[1:]

    parser = argparse.ArgumentParser(description="Inspect BinExport2 files")
    parser.add_argument("input_file", type=Path, help="path to input file")
    parser.add_argument("--capa", type=Path, help="path to capa JSON results file")
    parser.add_argument("--assemblage", type=Path, help="path to Assemblage JSONL file")
    parser.add_argument("-d", "--debug", action="store_true", help="enable debugging output on STDERR")
    parser.add_argument("-q", "--quiet", action="store_true", help="disable all output but errors")
    args = parser.parse_args(args=argv)

    logging.basicConfig()
    if args.quiet:
        logging.getLogger().setLevel(logging.WARNING)
    elif args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.getLogger().setLevel(logging.INFO)

    theme = Theme(
        {
            "decoration": "grey54",
            "title": "yellow",
            "key": "black",
            "value": "blue",
            "default": "black",
        },
        inherit=False,
    )
    console = Console(theme=theme, markup=False, emoji=False)
    o = Renderer(console)

    be2: BinExport2
    buf: bytes
    try:
        # easiest way to determine if this is a BinExport2 proto is...
        # to just try to decode it.
        buf = args.input_file.read_bytes()
        with timing("loading BinExport2"):
            be2 = BinExport2()
            be2.ParseFromString(buf)

    except google.protobuf.message.DecodeError:
        with timing("analyzing file"):
            input_file: Path = args.input_file
            buf = lancelot.get_binexport2_bytes_from_bytes(input_file.read_bytes())

        with timing("loading BinExport2"):
            be2 = BinExport2()
            be2.ParseFromString(buf)

    with timing("indexing BinExport2"):
        idx = lancelot.be2utils.BinExport2Index(be2)

    matches_by_function: defaultdict[int, set[str]] = defaultdict(set)
    if args.capa:
        with timing("loading capa"):
            doc = json.loads(args.capa.read_text())

        functions_by_basic_block: dict[int, int] = {}
        for function in doc["meta"]["analysis"]["layout"]["functions"]:
            for basic_block in function["matched_basic_blocks"]:
                functions_by_basic_block[basic_block["address"]["value"]] = function["address"]["value"]

        matches_by_address: defaultdict[int, set[str]] = defaultdict(set)
        for rule_name, results in doc["rules"].items():
            for location, _ in results["matches"]:
                if location["type"] != "absolute":
                    continue
                address = location["value"]
                matches_by_address[location["value"]].add(rule_name)

        for address, matches in matches_by_address.items():
            if function := functions_by_basic_block.get(address):
                if function in idx.thunks:
                    # forward any capa for a thunk to its target
                    # since viv may not recognize the thunk as a separate function.
                    logger.debug("forwarding capa matches from thunk 0x%x to 0x%x", function, idx.thunks[function])
                    function = idx.thunks[function]

                matches_by_function[function].update(matches)
                for match in matches:
                    logger.info("capa: 0x%x: %s", function, match)
            else:
                # we don't know which function this is.
                # hopefully its a function recognized in our BinExport analysis.
                # *shrug*
                #
                # apparently viv doesn't emit function entries for thunks?
                # or somehow our layout is messed up.

                if address in idx.thunks:
                    # forward any capa for a thunk to its target
                    # since viv may not recognize the thunk as a separate function.
                    logger.debug("forwarding capa matches from thunk 0x%x to 0x%x", address, idx.thunks[address])
                    address = idx.thunks[address]
                    # since we found the thunk, we know this is a BinExport-recognized function.
                    # so thats nice.
                    for match in matches:
                        logger.info("capa: 0x%x: %s", address, match)
                else:
                    logger.warning("unknown address: 0x%x: %s", address, matches)

                matches_by_function[address].update(matches)

    # guess the base address (which BinExport2) does not track explicitly,
    # by assuming it is the lowest mapped page.
    base_address = min(map(lambda section: section.address, be2.section))
    logging.info("guessed base address: 0x%x", base_address)

    assemblage_locations_by_va: dict[int, AssemblageLocation] = {}
    if args.assemblage:
        with timing("loading assemblage"):
            with args.assemblage.open("rt", encoding="utf-8") as f:
                for line in f:
                    if not line:
                        continue
                    location = AssemblageLocation.from_json(line)
                    assemblage_locations_by_va[base_address + location.rva] = location

    # update function names for the in-memory BinExport2 using Assemblage data.
    # this won't affect the be2 on disk, because we don't serialize it back out.
    for address, location in assemblage_locations_by_va.items():
        if not location.name:
            continue

        if vertex_index := idx.vertex_index_by_address.get(address):
            vertex = be2.call_graph.vertex[vertex_index].demangled_name = location.name

    # index all the callers of each function, resolving thunks.
    # idx.callers_by_vertex_id does not resolve thunks.
    resolved_callers_by_vertex_id = defaultdict(set)
    for edge in be2.call_graph.edge:
        source_index = edge.source_vertex_index

        if lancelot.be2utils.is_thunk_vertex(be2.call_graph.vertex[source_index]):
            # we don't care about the callers that are thunks.
            continue

        if lancelot.be2utils.is_thunk_vertex(be2.call_graph.vertex[edge.target_vertex_index]):
            thunk_vertex = be2.call_graph.vertex[edge.target_vertex_index]
            thunk_address = thunk_vertex.address

            target_address = idx.thunks[thunk_address]
            target_index = idx.vertex_index_by_address[target_address]
            logger.debug(
                "call %s -(thunk)-> %s",
                idx.get_function_name_by_vertex(source_index),
                idx.get_function_name_by_vertex(target_index),
            )
        else:
            target_index = edge.target_vertex_index
            logger.debug(
                "call %s -> %s",
                idx.get_function_name_by_vertex(source_index),
                idx.get_function_name_by_vertex(target_index),
            )
        resolved_callers_by_vertex_id[target_index].add(source_index)

    t0 = time.time()

    with o.section("meta"):
        o.writeln(f"name: {be2.meta_information.executable_name}")
        o.writeln(f"sha256: {be2.meta_information.executable_id}")
        o.writeln(f"arch: {be2.meta_information.architecture_name}")
        o.writeln(f"ts: {be2.meta_information.timestamp}")

    with o.section("modules"):
        for module in be2.module:
            o.writeln(f"- {module.name}")
        if not be2.module:
            o.writeln("(none)")

    with o.section("sections"):
        for section in be2.section:
            perms = ""
            perms += "r" if section.flag_r else "-"
            perms += "w" if section.flag_w else "-"
            perms += "x" if section.flag_x else "-"
            o.writeln(f"- {hex(section.address)} {perms} {hex(section.size)}")

    with o.section("libraries"):
        for library in be2.library:
            o.writeln(
                f"- {library.name:<12s} {'(static)' if library.is_static else ''}{(' at ' + hex(library.load_address)) if library.HasField('load_address') else ''}"
            )
        if not be2.library:
            o.writeln("(none)")

    vertex_order_by_address = {address: i for (i, address) in enumerate(idx.vertex_index_by_address.keys())}

    with o.section("functions"):
        last_address = None
        for _, vertex_index in idx.vertex_index_by_address.items():
            vertex = be2.call_graph.vertex[vertex_index]
            vertex_order = vertex_order_by_address[vertex.address]

            if vertex.HasField("library_index"):
                continue

            if vertex.HasField("module_index"):
                continue

            function_name = idx.get_function_name_by_vertex(vertex_index)

            if last_address:
                try:
                    last_path = assemblage_locations_by_va[last_address].path
                    path = assemblage_locations_by_va[vertex.address].path
                    if last_path != path:
                        o.print(o.markup("[blue]~~~~~~~~~~~~~~~~~~~~~~~~~~~~~[/] [title]file[/] {path}\n", path=path))
                except KeyError:
                    pass
            last_address = vertex.address

            if lancelot.be2utils.is_thunk_vertex(vertex):
                with o.section(
                    o.markup(
                        "thunk [default]{function_name}[/] [decoration]@ {function_address}[/]",
                        function_name=function_name,
                        function_address=hex(vertex.address),
                    )
                ):
                    continue

            with o.section(
                o.markup(
                    "function [default]{function_name}[/] [decoration]@ {function_address}[/]",
                    function_name=function_name,
                    function_address=hex(vertex.address),
                )
            ):
                if vertex.address in idx.thunks:
                    o.writeln("")
                    continue

                # keep the xrefs separate from the calls, since they're visually hard to distinguish.
                # use local index of callers that has resolved intermediate thunks,
                # since they are sometimes stored in a physically distant location.
                for caller_index in resolved_callers_by_vertex_id.get(vertex_index, []):
                    caller_vertex = be2.call_graph.vertex[caller_index]
                    caller_order = vertex_order_by_address[caller_vertex.address]
                    caller_delta = caller_order - vertex_order
                    if caller_delta < 0:
                        direction = "↑"
                    else:
                        direction = "↓"

                    o.print(
                        "xref: [decoration]{direction}[/] {name} [decoration]({delta:+})[/]",
                        direction=direction,
                        name=idx.get_function_name_by_vertex(caller_index),
                        delta=caller_delta,
                    )

                if vertex.address not in idx.flow_graph_index_by_address:
                    num_basic_blocks = 0
                    num_instructions = 0
                    num_edges = 0
                    total_instruction_size = 0
                else:
                    flow_graph_index = idx.flow_graph_index_by_address[vertex.address]
                    flow_graph = be2.flow_graph[flow_graph_index]
                    num_basic_blocks = len(flow_graph.basic_block_index)
                    num_instructions = sum(
                        len(list(idx.instruction_indices(be2.basic_block[bb_idx])))
                        for bb_idx in flow_graph.basic_block_index
                    )
                    num_edges = len(flow_graph.edge)
                    total_instruction_size = 0
                    for bb_idx in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[bb_idx]
                        for _, instruction, _ in idx.basic_block_instructions(basic_block):
                            total_instruction_size += len(instruction.raw_bytes)

                o.writeln(
                    f"B/E/I: {num_basic_blocks} / {num_edges} / {num_instructions} ({total_instruction_size} bytes)"
                )

                for match in matches_by_function.get(vertex.address, []):
                    o.writeln(f"capa: {match}")

                if vertex.address in idx.flow_graph_index_by_address:
                    flow_graph_index = idx.flow_graph_index_by_address[vertex.address]
                    flow_graph = be2.flow_graph[flow_graph_index]

                    seen_callees = set()

                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]

                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction.call_target:
                                for call_target_address in instruction.call_target:
                                    if call_target_address in idx.thunks:
                                        call_target_address = idx.thunks[call_target_address]

                                    call_target_index = idx.vertex_index_by_address[call_target_address]
                                    call_target_vertex = be2.call_graph.vertex[call_target_index]

                                    if call_target_vertex.HasField("library_index"):
                                        continue

                                    if call_target_vertex.address in seen_callees:
                                        continue
                                    seen_callees.add(call_target_vertex.address)

                                    call_target_order = vertex_order_by_address[call_target_address]
                                    call_target_delta = call_target_order - vertex_order
                                    call_target_name = idx.get_function_name_by_address(call_target_address)
                                    if call_target_delta < 0:
                                        direction = "↑"
                                    else:
                                        direction = "↓"

                                    o.print(
                                        "calls: [decoration]{direction}[/] {name} [decoration]({delta:+})[/]",
                                        direction=direction,
                                        name=call_target_name,
                                        delta=call_target_delta,
                                    )

                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]

                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction.call_target:
                                for call_target_address in instruction.call_target:
                                    call_target_index = idx.vertex_index_by_address[call_target_address]
                                    call_target_vertex = be2.call_graph.vertex[call_target_index]

                                    if not call_target_vertex.HasField("library_index"):
                                        continue

                                    if call_target_vertex.address in seen_callees:
                                        continue
                                    seen_callees.add(call_target_vertex.address)

                                    call_target_name = idx.get_function_name_by_address(call_target_address)
                                    o.print(
                                        "api: {name}",
                                        name=call_target_name,
                                    )

                    seen_strings = set()
                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]

                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction_index in idx.string_reference_index_by_source_instruction_index:
                                for string_reference_index in idx.string_reference_index_by_source_instruction_index[
                                    instruction_index
                                ]:
                                    string_reference = be2.string_reference[string_reference_index]
                                    string_index = string_reference.string_table_index
                                    string = be2.string_table[string_index]

                                    if string in seen_strings:
                                        continue
                                    seen_strings.add(string)

                                    o.print(
                                        'string: [decoration]"[/]{string}[decoration]"[/]',
                                        string=string.rstrip(),
                                    )

            o.print("")

    t1 = time.time()
    logger.debug("perf: rendering BinExport2: %0.2fs", t1 - t0)


if __name__ == "__main__":
    sys.exit(main())
@@ -406,6 +406,7 @@ class DoesntMatchExample(Lint):
            return True

        if rule.name not in capabilities:
            logger.info('rule "%s" does not match for sample %s', rule.name, example_id)
            return True
Submodule tests/data updated: 6cb0838954...1723908479
@@ -70,4 +70,4 @@ def test_standalone_binja_backend():
@pytest.mark.skipif(binja_present is False, reason="Skip binja tests if the binaryninja Python API is not installed")
def test_binja_version():
    version = binaryninja.core_version_info()
    assert version.major == 4 and version.minor == 2
    assert version.major == 5 and version.minor == 1
287  web/explorer/package-lock.json  generated

@@ -27,7 +27,7 @@
        "eslint-plugin-vue": "^9.23.0",
        "jsdom": "^24.1.0",
        "prettier": "^3.2.5",
        "vite": "^6.2.3",
        "vite": "^6.3.4",
        "vite-plugin-singlefile": "^2.2.0",
        "vitest": "^3.0.9"
      }
@@ -1416,6 +1416,20 @@
        "node": ">=8"
      }
    },
    "node_modules/call-bind-apply-helpers": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
      "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "es-errors": "^1.3.0",
        "function-bind": "^1.1.2"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/callsites": {
      "version": "3.1.0",
      "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -1646,6 +1660,21 @@
        "node": ">=6.0.0"
      }
    },
    "node_modules/dunder-proto": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
      "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "call-bind-apply-helpers": "^1.0.1",
        "es-errors": "^1.3.0",
        "gopd": "^1.2.0"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/eastasianwidth": {
      "version": "0.2.0",
      "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
@@ -1711,6 +1740,26 @@
        "url": "https://github.com/fb55/entities?sponsor=1"
      }
    },
    "node_modules/es-define-property": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
      "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/es-errors": {
      "version": "1.3.0",
      "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
      "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/es-module-lexer": {
      "version": "1.6.0",
      "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.6.0.tgz",
@@ -1718,6 +1767,35 @@
      "dev": true,
      "license": "MIT"
    },
    "node_modules/es-object-atoms": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
      "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "es-errors": "^1.3.0"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/es-set-tostringtag": {
      "version": "2.1.0",
      "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
      "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "es-errors": "^1.3.0",
        "get-intrinsic": "^1.2.6",
        "has-tostringtag": "^1.0.2",
        "hasown": "^2.0.2"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/esbuild": {
      "version": "0.25.1",
      "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.25.1.tgz",
@@ -2108,13 +2186,16 @@
      }
    },
    "node_modules/form-data": {
      "version": "4.0.0",
      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
      "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
      "version": "4.0.4",
      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
      "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "asynckit": "^0.4.0",
        "combined-stream": "^1.0.8",
        "es-set-tostringtag": "^2.1.0",
        "hasown": "^2.0.2",
        "mime-types": "^2.1.12"
      },
      "engines": {
@@ -2141,6 +2222,55 @@
        "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
      }
    },
    "node_modules/function-bind": {
      "version": "1.1.2",
      "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
      "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
      "dev": true,
      "license": "MIT",
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/get-intrinsic": {
      "version": "1.3.0",
      "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
      "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "call-bind-apply-helpers": "^1.0.2",
        "es-define-property": "^1.0.1",
        "es-errors": "^1.3.0",
        "es-object-atoms": "^1.1.1",
        "function-bind": "^1.1.2",
        "get-proto": "^1.0.1",
        "gopd": "^1.2.0",
        "has-symbols": "^1.1.0",
        "hasown": "^2.0.2",
        "math-intrinsics": "^1.1.0"
      },
      "engines": {
        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/get-proto": {
      "version": "1.0.1",
      "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
      "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "dunder-proto": "^1.0.1",
        "es-object-atoms": "^1.0.0"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/glob": {
      "version": "10.4.2",
      "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.2.tgz",
@@ -2215,6 +2345,19 @@
        "url": "https://github.com/sponsors/sindresorhus"
      }
    },
    "node_modules/gopd": {
      "version": "1.2.0",
      "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
      "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/graphemer": {
      "version": "1.4.0",
      "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
@@ -2230,6 +2373,48 @@
        "node": ">=8"
      }
    },
    "node_modules/has-symbols": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
      "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/has-tostringtag": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
      "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "has-symbols": "^1.0.3"
      },
      "engines": {
        "node": ">= 0.4"
      },
      "funding": {
        "url": "https://github.com/sponsors/ljharb"
      }
    },
    "node_modules/hasown": {
      "version": "2.0.2",
      "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
      "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "function-bind": "^1.1.2"
      },
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/highlight.js": {
      "version": "11.9.0",
      "resolved": "https://registry.npmjs.org/highlight.js/-/highlight.js-11.9.0.tgz",
@@ -2608,6 +2793,16 @@
        "@jridgewell/sourcemap-codec": "^1.5.0"
      }
    },
    "node_modules/math-intrinsics": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
      "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">= 0.4"
      }
    },
    "node_modules/micromatch": {
      "version": "4.0.8",
      "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz",
@@ -3426,6 +3621,51 @@
      "dev": true,
      "license": "MIT"
    },
    "node_modules/tinyglobby": {
      "version": "0.2.13",
      "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz",
      "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "fdir": "^6.4.4",
        "picomatch": "^4.0.2"
      },
      "engines": {
        "node": ">=12.0.0"
      },
      "funding": {
        "url": "https://github.com/sponsors/SuperchupuDev"
      }
    },
    "node_modules/tinyglobby/node_modules/fdir": {
      "version": "6.4.4",
      "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
      "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
      "dev": true,
      "license": "MIT",
      "peerDependencies": {
        "picomatch": "^3 || ^4"
      },
      "peerDependenciesMeta": {
        "picomatch": {
          "optional": true
        }
      }
    },
    "node_modules/tinyglobby/node_modules/picomatch": {
      "version": "4.0.2",
      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
      "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">=12"
      },
      "funding": {
        "url": "https://github.com/sponsors/jonschlinkert"
      }
    },
    "node_modules/tinypool": {
      "version": "1.0.2",
      "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz",
@@ -3561,15 +3801,18 @@
      "dev": true
    },
    "node_modules/vite": {
      "version": "6.2.3",
      "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.3.tgz",
      "integrity": "sha512-IzwM54g4y9JA/xAeBPNaDXiBF8Jsgl3VBQ2YQ/wOY6fyW3xMdSoltIV3Bo59DErdqdE6RxUfv8W69DvUorE4Eg==",
      "version": "6.3.4",
      "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz",
      "integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==",
      "dev": true,
      "license": "MIT",
      "dependencies": {
        "esbuild": "^0.25.0",
        "fdir": "^6.4.4",
        "picomatch": "^4.0.2",
        "postcss": "^8.5.3",
        "rollup": "^4.30.1"
        "rollup": "^4.34.9",
        "tinyglobby": "^0.2.13"
      },
      "bin": {
        "vite": "bin/vite.js"
@@ -3672,6 +3915,34 @@
        "vite": "^5.4.11 || ^6.0.0"
      }
    },
    "node_modules/vite/node_modules/fdir": {
      "version": "6.4.4",
      "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
      "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
      "dev": true,
      "license": "MIT",
      "peerDependencies": {
        "picomatch": "^3 || ^4"
      },
      "peerDependenciesMeta": {
        "picomatch": {
          "optional": true
        }
      }
    },
    "node_modules/vite/node_modules/picomatch": {
      "version": "4.0.2",
      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
      "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
      "dev": true,
      "license": "MIT",
      "engines": {
        "node": ">=12"
      },
      "funding": {
        "url": "https://github.com/sponsors/jonschlinkert"
      }
    },
    "node_modules/vitest": {
      "version": "3.0.9",
      "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz",
@@ -33,7 +33,7 @@
    "eslint-plugin-vue": "^9.23.0",
    "jsdom": "^24.1.0",
    "prettier": "^3.2.5",
    "vite": "^6.2.3",
    "vite": "^6.3.4",
    "vite-plugin-singlefile": "^2.2.0",
    "vitest": "^3.0.9"
  }
@@ -210,35 +210,19 @@
<div class="row flex-lg-row-reverse align-items-center g-5">
  <h1>What's New</h1>

  <h2 class="mt-3">Rule Updates</h2>

  <ul class="mt-2 ps-5">
    <!-- TODO(williballenthin): add date -->

    <li>
      added:
      <a href="./rules/change registry key timestamp/">
        change registry key timestamp
      </a>
    </li>

    <li>
      added:
      <a href="./rules/check mutex and terminate process on windows/">
        check mutex and terminate process on Windows
      </a>
    </li>

    <li>
      added:
      <a href="./rules/clear windows event logs remotely/">
        clear windows event logs remotely
      </a>
    </li>
  </ul>

  <h2 class="mt-3">Tool Updates</h2>

  <h3 class="mt-2">v9.2.1 (<em>2025-06-06</em>)</h3>
  <p class="mt-0">
    This point release fixes bugs including removing an unnecessary PyInstaller warning message and enabling the standalone binary to execute on systems running older versions of glibc.
  </p>

  <h3 class="mt-2">v9.2.0 (<em>2025-06-03</em>)</h3>
  <p class="mt-0">
    This release improves a few aspects of dynamic analysis, including relaxing our validation on fields across many CAPE versions and processing additional VMRay submission file types, for example.
    It also includes an updated rule pack containing new rules and rule fixes.
  </p>

  <h3 class="mt-2">v9.1.0 (<em>2025-03-02</em>)</h3>
  <p class="mt-0">
    This release improves a few aspects of dynamic analysis, relaxing our validation on fields across many CAPE versions, for example.