Mirror of https://github.com/mandiant/capa.git (synced 2025-12-14 00:20:42 -08:00)

Compare commits: add-codema...v9.2.1 (28 commits)
Commits (28):

- fa5d9a9302
- 30fb4751f6
- a8eab7ddf0
- 5ad1dda918
- eabb2cc809
- a34c3ecc57
- d22de5cf7f
- 8f78834cae
- 08dbb0e02d
- 98725c52dc
- eb87153064
- 56aa7176b0
- 8b41671409
- 5dbbc2b468
- 96d1eb64c3
- 9234b33051
- 51f5114ad7
- 4b72f8a872
- 8206a97b0f
- 5a33b4b2a8
- fcfdeec377
- 37a63a751c
- 3a9f2136bb
- 390e2a6315
- 6a43084915
- 6d7ca57fa9
- d1090e8391
- b07efe773b
.github/pyinstaller/pyinstaller.spec (vendored, 3 lines changed)

@@ -74,6 +74,9 @@ a = Analysis(
         # only be installed locally.
         "binaryninja",
         "ida",
+        # remove once https://github.com/mandiant/capa/issues/2681 has
+        # been addressed by PyInstaller
+        "pkg_resources",
     ],
 )
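For context, the exclusion added above lives in the spec's Analysis block. Below is a minimal, illustrative PyInstaller spec fragment showing how such an excludes list is declared; the entry-point path and the surrounding arguments are assumptions for the sketch, not capa's actual spec, and spec files are evaluated by PyInstaller itself rather than run as standalone scripts.

# illustrative spec fragment only; "capa/main.py" is an assumed entry point
a = Analysis(
    ["capa/main.py"],
    excludes=[
        # backends that can only be installed locally are not bundled
        "binaryninja",
        "ida",
        # keep pkg_resources out of the frozen build until the upstream
        # PyInstaller issue (#2681, referenced in the diff above) is addressed
        "pkg_resources",
    ],
)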
.github/workflows/build.yml (vendored, 30 lines changed)

@@ -22,16 +22,16 @@ jobs:
       fail-fast: true
       matrix:
         include:
-          - os: ubuntu-20.04
+          - os: ubuntu-22.04
            # use old linux so that the shared library versioning is more portable
            artifact_name: capa
            asset_name: linux
            python_version: '3.10'
-          - os: ubuntu-20.04
+          - os: ubuntu-22.04
            artifact_name: capa
            asset_name: linux-py312
            python_version: '3.12'
-          - os: windows-2019
+          - os: windows-2022
            artifact_name: capa.exe
            asset_name: windows
            python_version: '3.10'
@@ -49,7 +49,7 @@ jobs:
        uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
        with:
          python-version: ${{ matrix.python_version }}
-      - if: matrix.os == 'ubuntu-20.04'
+      - if: matrix.os == 'ubuntu-22.04'
        run: sudo apt-get install -y libyaml-dev
      - name: Upgrade pip, setuptools
        run: python -m pip install --upgrade pip setuptools
@@ -59,6 +59,28 @@ jobs:
          pip install -e .[build]
      - name: Build standalone executable
        run: pyinstaller --log-level DEBUG .github/pyinstaller/pyinstaller.spec
+      - name: Does it run without warnings or errors?
+        shell: bash
+        run: |
+          if [[ "${{ matrix.os }}" == "windows-2022" ]]; then
+            EXECUTABLE=".\\dist\\capa"
+          else
+            EXECUTABLE="./dist/capa"
+          fi
+
+          output=$(${EXECUTABLE} --version 2>&1)
+          exit_code=$?
+
+          echo "${output}"
+          echo "${exit_code}"
+
+          if echo "${output}" | grep -iE 'error|warning'; then
+            exit 1
+          fi
+
+          if [[ "${exit_code}" -ne 0 ]]; then
+            exit 1
+          fi
      - name: Does it run (PE)?
        run: dist/capa -d "tests/data/Practical Malware Analysis Lab 01-01.dll_"
      - name: Does it run (Shellcode)?
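The new step above fails the build if the frozen binary exits non-zero or prints anything matching "error" or "warning" when asked for its version. A rough local equivalent, sketched in Python (the executable path is an assumption; this script is not part of the workflow):

import re
import subprocess
import sys

def smoke_test(executable: str = "./dist/capa") -> int:
    # run the frozen binary, echo its output, and fail on warnings, errors,
    # or a non-zero exit code, mirroring the CI check above
    proc = subprocess.run([executable, "--version"], capture_output=True, text=True)
    output = proc.stdout + proc.stderr
    print(output)
    print(proc.returncode)

    if re.search(r"error|warning", output, re.IGNORECASE):
        return 1
    if proc.returncode != 0:
        return 1
    return 0

if __name__ == "__main__":
    sys.exit(smoke_test())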
.github/workflows/publish.yml (vendored, 2 lines changed)

@@ -35,7 +35,7 @@ jobs:
        with:
          path: dist/*
      - name: publish package
-        uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 # release/v1
+        uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # release/v1.12.4
        with:
          skip-existing: true
          verbose: true
.github/workflows/tests.yml (vendored, 12 lines changed)

@@ -88,16 +88,16 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
-        os: [ubuntu-20.04, windows-2019, macos-13]
+        os: [ubuntu-22.04, windows-2022, macos-13]
        # across all operating systems
        python-version: ["3.10", "3.11"]
        include:
          # on Ubuntu run these as well
-          - os: ubuntu-20.04
+          - os: ubuntu-22.04
            python-version: "3.10"
-          - os: ubuntu-20.04
+          - os: ubuntu-22.04
            python-version: "3.11"
-          - os: ubuntu-20.04
+          - os: ubuntu-22.04
            python-version: "3.12"
    steps:
      - name: Checkout capa with submodules
@@ -109,7 +109,7 @@ jobs:
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install pyyaml
-        if: matrix.os == 'ubuntu-20.04'
+        if: matrix.os == 'ubuntu-22.04'
        run: sudo apt-get install -y libyaml-dev
      - name: Install capa
        run: |
@@ -168,7 +168,7 @@ jobs:

  ghidra-tests:
    name: Ghidra tests for ${{ matrix.python-version }}
-    runs-on: ubuntu-20.04
+    runs-on: ubuntu-22.04
    needs: [tests]
    strategy:
      fail-fast: false
CHANGELOG.md (65 lines changed)

@@ -6,7 +6,51 @@
 ### Breaking Changes
 
-### New Rules (15)
+### New Rules (0)
+
+-
+
+### Bug Fixes
+
+### capa Explorer Web
+
+### capa Explorer IDA Pro plugin
+
+### Development
+
+### Raw diffs
+- [capa v9.2.1...master](https://github.com/mandiant/capa/compare/v9.2.1...master)
+- [capa-rules v9.2.1...master](https://github.com/mandiant/capa-rules/compare/v9.2.1...master)
+
+## v9.2.1
+
+This point release fixes bugs including removing an unnecessary PyInstaller warning message and enabling the standalone binary to execute on systems running older versions of glibc.
+
+### Bug Fixes
+
+- ci: exclude pkg_resources from PyInstaller build @mike-hunhoff #2684
+- ci: downgrade Ubuntu version to accommodate older glibc versions @mike-hunhoff #2684
+
+### Development
+
+- ci: upgrade Windows version to avoid deprecation @mike-hunhoff #2684
+- ci: check if build runs without warnings or errors @mike-hunhoff #2684
+
+### Raw diffs
+- [capa v9.2.0...v9.2.1](https://github.com/mandiant/capa/compare/v9.2.0...v9.2.1)
+- [capa-rules v9.2.0...v9.2.1](https://github.com/mandiant/capa-rules/compare/v9.2.0...v9.2.1)
+
+## v9.2.0
+
+This release improves a few aspects of dynamic analysis, including relaxing our validation on fields across many CAPE versions and processing additional VMRay submission file types, for example.
+It also includes an updated rule pack containing new rules and rule fixes.
+
+### New Features
+- vmray: do not restrict analysis to PE and ELF files, e.g. docx @mike-hunhoff #2672
+
+### Breaking Changes
+
+### New Rules (22)
+
 - communication/socket/connect-socket moritz.raabe@mandiant.com joakim@intezer.com mrhafizfarhad@gmail.com
 - communication/socket/udp/connect-udp-socket mrhafizfarhad@gmail.com
@@ -22,22 +66,23 @@
 - nursery/disable-firewall-features-via-registry-on-windows mehunhoff@google.com
 - nursery/disable-system-restore-features-via-registry-on-windows mehunhoff@google.com
 - nursery/disable-windows-defender-features-via-registry-on-windows mehunhoff@google.com
--
+- host-interaction/file-system/write/clear-file-content jakeperalta7
+- host-interaction/filter/unload-minifilter-driver JakePeralta7
+- exploitation/enumeration/make-suspicious-ntquerysysteminformation-call zdw@google.com
+- exploitation/gadgets/load-ntoskrnl zdw@google.com
+- exploitation/gadgets/resolve-ntoskrnl-gadgets zdw@google.com
+- exploitation/spraying/make-suspicious-ntfscontrolfile-call zdw@google.com
+- anti-analysis/anti-forensic/unload-sysmon JakePeralta7
 
 ### Bug Fixes
 - cape: make some fields optional @williballenthin #2631 #2632
 - lint: add WARN for regex features that contain unescaped dot #2635
 - lint: add ERROR for incomplete registry control set regex #2643
+- binja: update unit test core version #2670
-### capa Explorer Web
 
-### capa Explorer IDA Pro plugin
 
-### Development
 
 ### Raw diffs
-- [capa v9.1.0...master](https://github.com/mandiant/capa/compare/v9.1.0...master)
+- [capa v9.1.0...v9.2.0](https://github.com/mandiant/capa/compare/v9.1.0...v9.2.0)
-- [capa-rules v9.1.0...master](https://github.com/mandiant/capa-rules/compare/v9.1.0...master)
+- [capa-rules v9.1.0...v9.2.0](https://github.com/mandiant/capa-rules/compare/v9.1.0...v9.2.0)
 
 ## v9.1.0
 
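The "cape: make some fields optional" fix referenced in this changelog boils down to loosening a pydantic model so that fields missing in some CAPE versions no longer fail validation. A hedged sketch of the idea, using hypothetical field names rather than capa's actual CAPE schema:

from typing import Optional

from pydantic import BaseModel

class CapeReportSketch(BaseModel):
    # a field that is always expected
    target: str
    # a field that only some CAPE versions emit; Optional with a default
    # means validation succeeds when it is absent
    distributed: Optional[dict] = None

# parsing a report that omits the optional field now succeeds
report = CapeReportSketch.model_validate({"target": "sample.exe"})
print(report.distributed)  # None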
@@ -96,14 +96,7 @@ class VMRayAnalysis:
                 % (self.submission_name, self.submission_type)
             )
 
-        if self.submission_static is not None:
-            if self.submission_static.pe is None and self.submission_static.elf is None:
-                # we only support static analysis for PE and ELF files for now
-                raise UnsupportedFormatError(
-                    "archive does not contain a supported file format (submission_name: %s, submission_type: %s)"
-                    % (self.submission_name, self.submission_type)
-                )
-        else:
+        if self.submission_static is None:
             # VMRay may not record static analysis for certain file types, e.g. MSI, but we'd still like to match dynamic
             # execution so we continue without and accept that the results may be incomplete
             logger.warning(
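The hunk above drops the PE/ELF-only gate: a missing static-analysis record now produces a warning and analysis continues, instead of raising UnsupportedFormatError. A simplified sketch of the resulting behavior (names abbreviated and hypothetical; this is not the actual VMRayAnalysis class):

import logging

logger = logging.getLogger(__name__)

class AnalysisSketch:
    def __init__(self, submission_name: str, submission_static: object | None):
        self.submission_name = submission_name
        self.submission_static = submission_static

        if self.submission_static is None:
            # VMRay may not record static analysis for some file types (e.g. MSI);
            # continue and accept that results may be incomplete
            logger.warning("no static analysis for %s; results may be incomplete", self.submission_name)

AnalysisSketch("report.msi", None)  # logs a warning, does not raise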
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__version__ = "9.1.0"
+__version__ = "9.2.1"
 
 
 def get_major_version():
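The bumped string above is what helpers such as get_major_version() read. Purely as an illustration of the intent (a guess, not capa's actual implementation), deriving the major component could look like this:

__version__ = "9.2.1"

def get_major_version() -> int:
    # take everything before the first dot and interpret it as an integer
    return int(__version__.partition(".")[0])

assert get_major_version() == 9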
@@ -121,11 +121,11 @@ dev = [
     # we want all developer environments to be consistent.
     # These dependencies are not used in production environments
     # and should not conflict with other libraries/tooling.
-    "pre-commit==4.1.0",
+    "pre-commit==4.2.0",
     "pytest==8.0.0",
     "pytest-sugar==1.0.0",
     "pytest-instafail==0.5.0",
-    "flake8==7.1.1",
+    "flake8==7.2.0",
     "flake8-bugbear==24.12.12",
     "flake8-encodings==0.5.1",
     "flake8-comprehensions==3.16.0",
@@ -133,7 +133,7 @@ dev = [
     "flake8-no-implicit-concat==0.3.5",
     "flake8-print==5.0.0",
     "flake8-todos==0.3.1",
-    "flake8-simplify==0.21.0",
+    "flake8-simplify==0.22.0",
     "flake8-use-pathlib==0.3.0",
     "flake8-copyright==0.2.4",
     "ruff==0.11.0",
@@ -157,7 +157,7 @@ build = [
     # These dependencies are not used in production environments
     # and should not conflict with other libraries/tooling.
     "pyinstaller==6.12.0",
-    "setuptools==76.0.0",
+    "setuptools==80.9.0",
     "build==1.2.2"
 ]
 scripts = [
@@ -21,12 +21,12 @@ mdurl==0.1.2
 msgpack==1.0.8
 networkx==3.4.2
 pefile==2024.8.26
-pip==25.0
+pip==25.1.1
 protobuf==6.30.1
 pyasn1==0.5.1
 pyasn1-modules==0.3.0
 pycparser==2.22
-pydantic==2.10.1
+pydantic==2.11.4
 # pydantic pins pydantic-core,
 # but dependabot updates these separately (which is broken) and is annoying,
 # so we rely on pydantic to pull in the right version of pydantic-core.
@@ -36,10 +36,10 @@ pyelftools==0.32
 pygments==2.19.1
 python-flirt==0.9.2
 pyyaml==6.0.2
-rich==13.9.2
+rich==14.0.0
 ruamel-yaml==0.18.6
 ruamel-yaml-clib==0.2.8
-setuptools==76.0.0
+setuptools==80.9.0
 six==1.17.0
 sortedcontainers==2.4.0
 viv-utils==0.8.0
rules (submodule, 2 lines changed)
Submodule rules updated: d64c2c91ea...edabdffa8c
@@ -175,8 +175,6 @@ def convert_rule(rule, rulename, cround, depth):
     depth += 1
     logger.info("recursion depth: %d", depth)
 
-    global var_names
-
     def do_statement(s_type, kid):
         yara_strings = ""
         yara_condition = ""
@@ -1,490 +0,0 @@
(entire file removed; its content follows)

#!/usr/bin/env python
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "protobuf",
#     "python-lancelot",
#     "rich",
# ]
# ///
#
# TODO:
# - ignore stack cookie check
import sys
import json
import time
import logging
import argparse
import contextlib
from typing import Any
from pathlib import Path
from collections import defaultdict
from dataclasses import dataclass

import lancelot
import rich.padding
import lancelot.be2utils
import google.protobuf.message
from rich.text import Text
from rich.theme import Theme
from rich.markup import escape
from rich.console import Console
from lancelot.be2utils.binexport2_pb2 import BinExport2

logger = logging.getLogger("codemap")


@contextlib.contextmanager
def timing(msg: str):
    t0 = time.time()
    yield
    t1 = time.time()
    logger.debug("perf: %s: %0.2fs", msg, t1 - t0)


class Renderer:
    def __init__(self, console: Console):
        self.console: Console = console
        self.indent: int = 0

    @contextlib.contextmanager
    def indenting(self):
        self.indent += 1
        try:
            yield
        finally:
            self.indent -= 1

    @staticmethod
    def markup(s: str, **kwargs) -> Text:
        escaped_args = {k: (escape(v) if isinstance(v, str) else v) for k, v in kwargs.items()}
        return Text.from_markup(s.format(**escaped_args))

    def print(self, renderable, **kwargs):
        if not kwargs:
            return self.console.print(rich.padding.Padding(renderable, (0, 0, 0, self.indent * 2)))

        assert isinstance(renderable, str)
        return self.print(self.markup(renderable, **kwargs))

    def writeln(self, s: str):
        self.print(s)

    @contextlib.contextmanager
    def section(self, name):
        if isinstance(name, str):
            self.print("[title]{name}", name=name)
        elif isinstance(name, Text):
            name = name.copy()
            name.stylize_before(self.console.get_style("title"))
            self.print(name)
        else:
            raise ValueError("unexpected section name")

        with self.indenting():
            yield


@dataclass
class AssemblageLocation:
    name: str
    file: str
    prototype: str
    rva: int

    @property
    def path(self):
        if not self.file.endswith(")"):
            return self.file

        return self.file.rpartition(" (")[0]

    @classmethod
    def from_dict(cls, data: dict[str, Any]):
        return cls(
            name=data["name"],
            file=data["file"],
            prototype=data["prototype"],
            rva=data["function_start"],
        )

    @staticmethod
    def from_json(doc: str):
        return AssemblageLocation.from_dict(json.loads(doc))


def main(argv: list[str] | None = None):
    if argv is None:
        argv = sys.argv[1:]

    parser = argparse.ArgumentParser(description="Inspect BinExport2 files")
    parser.add_argument("input_file", type=Path, help="path to input file")
    parser.add_argument("--capa", type=Path, help="path to capa JSON results file")
    parser.add_argument("--assemblage", type=Path, help="path to Assemblage JSONL file")
    parser.add_argument("-d", "--debug", action="store_true", help="enable debugging output on STDERR")
    parser.add_argument("-q", "--quiet", action="store_true", help="disable all output but errors")
    args = parser.parse_args(args=argv)

    logging.basicConfig()
    if args.quiet:
        logging.getLogger().setLevel(logging.WARNING)
    elif args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.getLogger().setLevel(logging.INFO)

    theme = Theme(
        {
            "decoration": "grey54",
            "title": "yellow",
            "key": "black",
            "value": "blue",
            "default": "black",
        },
        inherit=False,
    )
    console = Console(theme=theme, markup=False, emoji=False)
    o = Renderer(console)

    be2: BinExport2
    buf: bytes
    try:
        # easiest way to determine if this is a BinExport2 proto is...
        # to just try to decode it.
        buf = args.input_file.read_bytes()
        with timing("loading BinExport2"):
            be2 = BinExport2()
            be2.ParseFromString(buf)

    except google.protobuf.message.DecodeError:
        with timing("analyzing file"):
            input_file: Path = args.input_file
            buf = lancelot.get_binexport2_bytes_from_bytes(input_file.read_bytes())

        with timing("loading BinExport2"):
            be2 = BinExport2()
            be2.ParseFromString(buf)

    with timing("indexing BinExport2"):
        idx = lancelot.be2utils.BinExport2Index(be2)

    matches_by_function: defaultdict[int, set[str]] = defaultdict(set)
    if args.capa:
        with timing("loading capa"):
            doc = json.loads(args.capa.read_text())

        functions_by_basic_block: dict[int, int] = {}
        for function in doc["meta"]["analysis"]["layout"]["functions"]:
            for basic_block in function["matched_basic_blocks"]:
                functions_by_basic_block[basic_block["address"]["value"]] = function["address"]["value"]

        matches_by_address: defaultdict[int, set[str]] = defaultdict(set)
        for rule_name, results in doc["rules"].items():
            for location, _ in results["matches"]:
                if location["type"] != "absolute":
                    continue
                address = location["value"]
                matches_by_address[location["value"]].add(rule_name)

        for address, matches in matches_by_address.items():
            if function := functions_by_basic_block.get(address):
                if function in idx.thunks:
                    # forward any capa for a thunk to its target
                    # since viv may not recognize the thunk as a separate function.
                    logger.debug("forwarding capa matches from thunk 0x%x to 0x%x", function, idx.thunks[function])
                    function = idx.thunks[function]

                matches_by_function[function].update(matches)
                for match in matches:
                    logger.info("capa: 0x%x: %s", function, match)
            else:
                # we don't know which function this is.
                # hopefully its a function recognized in our BinExport analysis.
                # *shrug*
                #
                # apparently viv doesn't emit function entries for thunks?
                # or somehow our layout is messed up.

                if address in idx.thunks:
                    # forward any capa for a thunk to its target
                    # since viv may not recognize the thunk as a separate function.
                    logger.debug("forwarding capa matches from thunk 0x%x to 0x%x", address, idx.thunks[address])
                    address = idx.thunks[address]
                    # since we found the thunk, we know this is a BinExport-recognized function.
                    # so thats nice.
                    for match in matches:
                        logger.info("capa: 0x%x: %s", address, match)
                else:
                    logger.warning("unknown address: 0x%x: %s", address, matches)

                matches_by_function[address].update(matches)

    # guess the base address (which BinExport2) does not track explicitly,
    # by assuming it is the lowest mapped page.
    base_address = min(map(lambda section: section.address, be2.section))
    logging.info("guessed base address: 0x%x", base_address)

    assemblage_locations_by_va: dict[int, AssemblageLocation] = {}
    if args.assemblage:
        with timing("loading assemblage"):
            with args.assemblage.open("rt", encoding="utf-8") as f:
                for line in f:
                    if not line:
                        continue
                    location = AssemblageLocation.from_json(line)
                    assemblage_locations_by_va[base_address + location.rva] = location

        # update function names for the in-memory BinExport2 using Assemblage data.
        # this won't affect the be2 on disk, because we don't serialize it back out.
        for address, location in assemblage_locations_by_va.items():
            if not location.name:
                continue

            if vertex_index := idx.vertex_index_by_address.get(address):
                vertex = be2.call_graph.vertex[vertex_index].demangled_name = location.name

    # index all the callers of each function, resolving thunks.
    # idx.callers_by_vertex_id does not resolve thunks.
    resolved_callers_by_vertex_id = defaultdict(set)
    for edge in be2.call_graph.edge:
        source_index = edge.source_vertex_index

        if lancelot.be2utils.is_thunk_vertex(be2.call_graph.vertex[source_index]):
            # we don't care about the callers that are thunks.
            continue

        if lancelot.be2utils.is_thunk_vertex(be2.call_graph.vertex[edge.target_vertex_index]):
            thunk_vertex = be2.call_graph.vertex[edge.target_vertex_index]
            thunk_address = thunk_vertex.address

            target_address = idx.thunks[thunk_address]
            target_index = idx.vertex_index_by_address[target_address]
            logger.debug(
                "call %s -(thunk)-> %s",
                idx.get_function_name_by_vertex(source_index),
                idx.get_function_name_by_vertex(target_index),
            )
        else:
            target_index = edge.target_vertex_index
            logger.debug(
                "call %s -> %s",
                idx.get_function_name_by_vertex(source_index),
                idx.get_function_name_by_vertex(target_index),
            )
        resolved_callers_by_vertex_id[target_index].add(source_index)

    t0 = time.time()

    with o.section("meta"):
        o.writeln(f"name: {be2.meta_information.executable_name}")
        o.writeln(f"sha256: {be2.meta_information.executable_id}")
        o.writeln(f"arch: {be2.meta_information.architecture_name}")
        o.writeln(f"ts: {be2.meta_information.timestamp}")

    with o.section("modules"):
        for module in be2.module:
            o.writeln(f"- {module.name}")
        if not be2.module:
            o.writeln("(none)")

    with o.section("sections"):
        for section in be2.section:
            perms = ""
            perms += "r" if section.flag_r else "-"
            perms += "w" if section.flag_w else "-"
            perms += "x" if section.flag_x else "-"
            o.writeln(f"- {hex(section.address)} {perms} {hex(section.size)}")

    with o.section("libraries"):
        for library in be2.library:
            o.writeln(
                f"- {library.name:<12s} {'(static)' if library.is_static else ''}{(' at ' + hex(library.load_address)) if library.HasField('load_address') else ''}"
            )
        if not be2.library:
            o.writeln("(none)")

    vertex_order_by_address = {address: i for (i, address) in enumerate(idx.vertex_index_by_address.keys())}

    with o.section("functions"):
        last_address = None
        for _, vertex_index in idx.vertex_index_by_address.items():
            vertex = be2.call_graph.vertex[vertex_index]
            vertex_order = vertex_order_by_address[vertex.address]

            if vertex.HasField("library_index"):
                continue

            if vertex.HasField("module_index"):
                continue

            function_name = idx.get_function_name_by_vertex(vertex_index)

            if last_address:
                try:
                    last_path = assemblage_locations_by_va[last_address].path
                    path = assemblage_locations_by_va[vertex.address].path
                    if last_path != path:
                        o.print(o.markup("[blue]~~~~~~~~~~~~~~~~~~~~~~~~~~~~~[/] [title]file[/] {path}\n", path=path))
                except KeyError:
                    pass
            last_address = vertex.address

            if lancelot.be2utils.is_thunk_vertex(vertex):
                with o.section(
                    o.markup(
                        "thunk [default]{function_name}[/] [decoration]@ {function_address}[/]",
                        function_name=function_name,
                        function_address=hex(vertex.address),
                    )
                ):
                    continue

            with o.section(
                o.markup(
                    "function [default]{function_name}[/] [decoration]@ {function_address}[/]",
                    function_name=function_name,
                    function_address=hex(vertex.address),
                )
            ):
                if vertex.address in idx.thunks:
                    o.writeln("")
                    continue

                # keep the xrefs separate from the calls, since they're visually hard to distinguish.
                # use local index of callers that has resolved intermediate thunks,
                # since they are sometimes stored in a physically distant location.
                for caller_index in resolved_callers_by_vertex_id.get(vertex_index, []):
                    caller_vertex = be2.call_graph.vertex[caller_index]
                    caller_order = vertex_order_by_address[caller_vertex.address]
                    caller_delta = caller_order - vertex_order
                    if caller_delta < 0:
                        direction = "↑"
                    else:
                        direction = "↓"

                    o.print(
                        "xref: [decoration]{direction}[/] {name} [decoration]({delta:+})[/]",
                        direction=direction,
                        name=idx.get_function_name_by_vertex(caller_index),
                        delta=caller_delta,
                    )

                if vertex.address not in idx.flow_graph_index_by_address:
                    num_basic_blocks = 0
                    num_instructions = 0
                    num_edges = 0
                    total_instruction_size = 0
                else:
                    flow_graph_index = idx.flow_graph_index_by_address[vertex.address]
                    flow_graph = be2.flow_graph[flow_graph_index]
                    num_basic_blocks = len(flow_graph.basic_block_index)
                    num_instructions = sum(
                        len(list(idx.instruction_indices(be2.basic_block[bb_idx])))
                        for bb_idx in flow_graph.basic_block_index
                    )
                    num_edges = len(flow_graph.edge)
                    total_instruction_size = 0
                    for bb_idx in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[bb_idx]
                        for _, instruction, _ in idx.basic_block_instructions(basic_block):
                            total_instruction_size += len(instruction.raw_bytes)

                o.writeln(
                    f"B/E/I: {num_basic_blocks} / {num_edges} / {num_instructions} ({total_instruction_size} bytes)"
                )

                for match in matches_by_function.get(vertex.address, []):
                    o.writeln(f"capa: {match}")

                if vertex.address in idx.flow_graph_index_by_address:
                    flow_graph_index = idx.flow_graph_index_by_address[vertex.address]
                    flow_graph = be2.flow_graph[flow_graph_index]

                    seen_callees = set()

                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]

                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction.call_target:
                                for call_target_address in instruction.call_target:
                                    if call_target_address in idx.thunks:
                                        call_target_address = idx.thunks[call_target_address]

                                    call_target_index = idx.vertex_index_by_address[call_target_address]
                                    call_target_vertex = be2.call_graph.vertex[call_target_index]

                                    if call_target_vertex.HasField("library_index"):
                                        continue

                                    if call_target_vertex.address in seen_callees:
                                        continue
                                    seen_callees.add(call_target_vertex.address)

                                    call_target_order = vertex_order_by_address[call_target_address]
                                    call_target_delta = call_target_order - vertex_order
                                    call_target_name = idx.get_function_name_by_address(call_target_address)
                                    if call_target_delta < 0:
                                        direction = "↑"
                                    else:
                                        direction = "↓"

                                    o.print(
                                        "calls: [decoration]{direction}[/] {name} [decoration]({delta:+})[/]",
                                        direction=direction,
                                        name=call_target_name,
                                        delta=call_target_delta,
                                    )

                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]

                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction.call_target:
                                for call_target_address in instruction.call_target:
                                    call_target_index = idx.vertex_index_by_address[call_target_address]
                                    call_target_vertex = be2.call_graph.vertex[call_target_index]

                                    if not call_target_vertex.HasField("library_index"):
                                        continue

                                    if call_target_vertex.address in seen_callees:
                                        continue
                                    seen_callees.add(call_target_vertex.address)

                                    call_target_name = idx.get_function_name_by_address(call_target_address)
                                    o.print(
                                        "api: {name}",
                                        name=call_target_name,
                                    )

                    seen_strings = set()
                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]

                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction_index in idx.string_reference_index_by_source_instruction_index:
                                for string_reference_index in idx.string_reference_index_by_source_instruction_index[
                                    instruction_index
                                ]:
                                    string_reference = be2.string_reference[string_reference_index]
                                    string_index = string_reference.string_table_index
                                    string = be2.string_table[string_index]

                                    if string in seen_strings:
                                        continue
                                    seen_strings.add(string)

                                    o.print(
                                        'string: [decoration]"[/]{string}[decoration]"[/]',
                                        string=string.rstrip(),
                                    )

                o.print("")

    t1 = time.time()
    logger.debug("perf: rendering BinExport2: %0.2fs", t1 - t0)


if __name__ == "__main__":
    sys.exit(main())
@@ -406,6 +406,7 @@ class DoesntMatchExample(Lint):
             return True
 
         if rule.name not in capabilities:
+            logger.info('rule "%s" does not match for sample %s', rule.name, example_id)
             return True
 
 
Submodule tests/data updated: 6cb0838954...5ae1804c85
@@ -70,4 +70,4 @@ def test_standalone_binja_backend():
 @pytest.mark.skipif(binja_present is False, reason="Skip binja tests if the binaryninja Python API is not installed")
 def test_binja_version():
     version = binaryninja.core_version_info()
-    assert version.major == 4 and version.minor == 2
+    assert version.major == 5 and version.minor == 0
web/explorer/package-lock.json (generated, 86 lines changed)

@@ -27,7 +27,7 @@
         "eslint-plugin-vue": "^9.23.0",
         "jsdom": "^24.1.0",
         "prettier": "^3.2.5",
-        "vite": "^6.2.3",
+        "vite": "^6.3.4",
         "vite-plugin-singlefile": "^2.2.0",
         "vitest": "^3.0.9"
       }
@@ -3426,6 +3426,51 @@
       "dev": true,
       "license": "MIT"
     },
+    "node_modules/tinyglobby": {
+      "version": "0.2.13",
+      "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz",
+      "integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "fdir": "^6.4.4",
+        "picomatch": "^4.0.2"
+      },
+      "engines": {
+        "node": ">=12.0.0"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/SuperchupuDev"
+      }
+    },
+    "node_modules/tinyglobby/node_modules/fdir": {
+      "version": "6.4.4",
+      "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
+      "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
+      "dev": true,
+      "license": "MIT",
+      "peerDependencies": {
+        "picomatch": "^3 || ^4"
+      },
+      "peerDependenciesMeta": {
+        "picomatch": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/tinyglobby/node_modules/picomatch": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
+      "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/jonschlinkert"
+      }
+    },
     "node_modules/tinypool": {
       "version": "1.0.2",
       "resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz",
@@ -3561,15 +3606,18 @@
       "dev": true
     },
     "node_modules/vite": {
-      "version": "6.2.3",
-      "resolved": "https://registry.npmjs.org/vite/-/vite-6.2.3.tgz",
-      "integrity": "sha512-IzwM54g4y9JA/xAeBPNaDXiBF8Jsgl3VBQ2YQ/wOY6fyW3xMdSoltIV3Bo59DErdqdE6RxUfv8W69DvUorE4Eg==",
+      "version": "6.3.4",
+      "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz",
+      "integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "esbuild": "^0.25.0",
+        "fdir": "^6.4.4",
+        "picomatch": "^4.0.2",
         "postcss": "^8.5.3",
-        "rollup": "^4.30.1"
+        "rollup": "^4.34.9",
+        "tinyglobby": "^0.2.13"
       },
       "bin": {
         "vite": "bin/vite.js"
@@ -3672,6 +3720,34 @@
         "vite": "^5.4.11 || ^6.0.0"
       }
     },
+    "node_modules/vite/node_modules/fdir": {
+      "version": "6.4.4",
+      "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
+      "integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
+      "dev": true,
+      "license": "MIT",
+      "peerDependencies": {
+        "picomatch": "^3 || ^4"
+      },
+      "peerDependenciesMeta": {
+        "picomatch": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/vite/node_modules/picomatch": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
+      "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/jonschlinkert"
+      }
+    },
     "node_modules/vitest": {
       "version": "3.0.9",
       "resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz",
@@ -33,7 +33,7 @@
     "eslint-plugin-vue": "^9.23.0",
     "jsdom": "^24.1.0",
     "prettier": "^3.2.5",
-    "vite": "^6.2.3",
+    "vite": "^6.3.4",
     "vite-plugin-singlefile": "^2.2.0",
     "vitest": "^3.0.9"
   }
@@ -210,35 +210,19 @@
         <div class="row flex-lg-row-reverse align-items-center g-5">
           <h1>What's New</h1>
 
-          <h2 class="mt-3">Rule Updates</h2>
-
-          <ul class="mt-2 ps-5">
-            <!-- TODO(williballenthin): add date -->
-
-            <li>
-              added:
-              <a href="./rules/change registry key timestamp/">
-                change registry key timestamp
-              </a>
-            </li>
-
-            <li>
-              added:
-              <a href="./rules/check mutex and terminate process on windows/">
-                check mutex and terminate process on Windows
-              </a>
-            </li>
-
-            <li>
-              added:
-              <a href="./rules/clear windows event logs remotely/">
-                clear windows event logs remotely
-              </a>
-            </li>
-          </ul>
-
           <h2 class="mt-3">Tool Updates</h2>
 
+          <h3 class="mt-2">v9.2.1 (<em>2025-06-06</em>)</h3>
+          <p class="mt-0">
+            This point release fixes bugs including removing an unnecessary PyInstaller warning message and enabling the standalone binary to execute on systems running older versions of glibc.
+          </p>
+
+          <h3 class="mt-2">v9.2.0 (<em>2025-06-03</em>)</h3>
+          <p class="mt-0">
+            This release improves a few aspects of dynamic analysis, including relaxing our validation on fields across many CAPE versions and processing additional VMRay submission file types, for example.
+            It also includes an updated rule pack containing new rules and rule fixes.
+          </p>
+
           <h3 class="mt-2">v9.1.0 (<em>2025-03-02</em>)</h3>
           <p class="mt-0">
             This release improves a few aspects of dynamic analysis, relaxing our validation on fields across many CAPE versions, for example.