Mirror of https://github.com/mandiant/capa.git (synced 2025-12-06 12:51:03 -08:00)
Compare commits
407 Commits
The commit table lists the 407 commits in this comparison, c49385e681 through 84052c3ac5; only the abbreviated SHA1 column survived the mirroring (the author, date, and message cells are empty), so the per-commit rows are omitted here.
.github/CONTRIBUTING.md (vendored, 13 changed lines)

@@ -159,12 +159,25 @@ The process described here has several goals:

 Please follow these steps to have your contribution considered by the maintainers:
 
+0. Sign the [Contributor License Agreement](#contributor-license-agreement)
 1. Follow the [styleguides](#styleguides)
 2. Update the CHANGELOG and add tests and documentation. In case they are not needed, indicate it in [the PR template](pull_request_template.md).
 3. After you submit your pull request, verify that all [status checks](https://help.github.com/articles/about-status-checks/) are passing <details><summary>What if the status checks are failing?</summary>If a status check is failing, and you believe that the failure is unrelated to your change, please leave a comment on the pull request explaining why you believe the failure is unrelated. A maintainer will re-run the status check for you. If we conclude that the failure was a false positive, then we will open an issue to track that problem with our status check suite.</details>
 
 While the prerequisites above must be satisfied prior to having your pull request reviewed, the reviewer(s) may ask you to complete additional design work, tests, or other changes before your pull request can be ultimately accepted.
 
+### Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution,
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
 ## Styleguides
 
 ### Git Commit Messages
.github/flake8.ini (vendored, new file, 41 lines)

@@ -0,0 +1,41 @@
+[flake8]
+max-line-length = 120
+
+extend-ignore =
+    # E203: whitespace before ':' (black does this)
+    E203,
+    # F401: `foo` imported but unused (prefer ruff)
+    F401,
+    # F811 Redefinition of unused `foo` (prefer ruff)
+    F811,
+    # E501 line too long (prefer black)
+    E501,
+    # B010 Do not call setattr with a constant attribute value
+    B010,
+    # G200 Logging statement uses exception in arguments
+    G200,
+    # SIM102 Use a single if-statement instead of nested if-statements
+    # doesn't provide a space for commenting or logical separation of conditions
+    SIM102,
+    # SIM114 Use logical or and a single body
+    # makes logic trees too complex
+    SIM114,
+    # SIM117 Use 'with Foo, Bar:' instead of multiple with statements
+    # makes lines too long
+    SIM117
+
+per-file-ignores =
+    # T201 print found.
+    #
+    # scripts are meant to print output
+    scripts/*: T201
+    # capa.exe is meant to print output
+    capa/main.py: T201
+    # IDA tests emit results to output window so need to print
+    tests/test_ida_features.py: T201
+    # utility used to find the Binary Ninja API via invoking python.exe
+    capa/features/extractors/binja/find_binja_api.py: T201
+
+copyright-check = True
+copyright-min-file-size = 1
+copyright-regexp = Copyright \(C\) 2023 Mandiant, Inc. All Rights Reserved.
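The three copyright-* settings at the bottom come from the flake8-copyright plugin: any non-trivial file whose header does not match the regexp fails linting. A quick standalone sanity check of the pattern (a sketch, not part of the repo):

```python
# check_copyright.py -- hypothetical helper; the real check runs inside flake8.
import re

# the same pattern as copyright-regexp in .github/flake8.ini
PATTERN = r"Copyright \(C\) 2023 Mandiant, Inc. All Rights Reserved."

new_header = "# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved."
old_header = "# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved."

assert re.search(PATTERN, new_header) is not None  # updated header passes
assert re.search(PATTERN, old_header) is None      # stale year is rejected
```

This check is why so many hunks later in this compare bump "2020" to "2023" in file headers.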
.github/mypy/mypy.ini (vendored, 3 changed lines)

@@ -42,6 +42,9 @@ ignore_missing_imports = True
 [mypy-idautils.*]
 ignore_missing_imports = True
 
+[mypy-ida_auto.*]
+ignore_missing_imports = True
+
 [mypy-ida_bytes.*]
 ignore_missing_imports = True
.github/pyinstaller/hooks/hook-vivisect.py (vendored, 43 changed lines)

@@ -38,39 +38,36 @@ hiddenimports = [
     "vivisect",
     "vivisect.analysis",
-    "vivisect.analysis.amd64",
     "vivisect.analysis.amd64",
     "vivisect.analysis.amd64.emulation",
     "vivisect.analysis.amd64.golang",
-    "vivisect.analysis.crypto",
     "vivisect.analysis.crypto",
     "vivisect.analysis.crypto.constants",
     "vivisect.analysis.elf",
     "vivisect.analysis.elf.elfplt",
+    "vivisect.analysis.elf.elfplt_late",
     "vivisect.analysis.elf.libc_start_main",
-    "vivisect.analysis.generic",
     "vivisect.analysis.generic",
     "vivisect.analysis.generic.codeblocks",
     "vivisect.analysis.generic.emucode",
     "vivisect.analysis.generic.entrypoints",
     "vivisect.analysis.generic.funcentries",
     "vivisect.analysis.generic.impapi",
     "vivisect.analysis.generic.linker",
     "vivisect.analysis.generic.mkpointers",
     "vivisect.analysis.generic.noret",
     "vivisect.analysis.generic.pointers",
     "vivisect.analysis.generic.pointertables",
     "vivisect.analysis.generic.relocations",
     "vivisect.analysis.generic.strconst",
     "vivisect.analysis.generic.switchcase",
     "vivisect.analysis.generic.symswitchcase",
     "vivisect.analysis.generic.thunks",
-    "vivisect.analysis.generic.noret",
-    "vivisect.analysis.i386",
     "vivisect.analysis.i386",
     "vivisect.analysis.i386.calling",
     "vivisect.analysis.i386.golang",
     "vivisect.analysis.i386.importcalls",
     "vivisect.analysis.i386.instrhook",
     "vivisect.analysis.i386.thunk_bx",
-    "vivisect.analysis.ms",
+    "vivisect.analysis.i386.thunk_reg",
     "vivisect.analysis.ms",
     "vivisect.analysis.ms.hotpatch",
     "vivisect.analysis.ms.localhints",
@@ -81,8 +78,40 @@ hiddenimports = [
     "vivisect.impapi.posix.amd64",
     "vivisect.impapi.posix.i386",
     "vivisect.impapi.windows",
+    "vivisect.impapi.windows.advapi_32",
+    "vivisect.impapi.windows.advapi_64",
     "vivisect.impapi.windows.amd64",
+    "vivisect.impapi.windows.gdi_32",
+    "vivisect.impapi.windows.gdi_64",
     "vivisect.impapi.windows.i386",
+    "vivisect.impapi.windows.kernel_32",
+    "vivisect.impapi.windows.kernel_64",
+    "vivisect.impapi.windows.msvcr100_32",
+    "vivisect.impapi.windows.msvcr100_64",
+    "vivisect.impapi.windows.msvcr110_32",
+    "vivisect.impapi.windows.msvcr110_64",
+    "vivisect.impapi.windows.msvcr120_32",
+    "vivisect.impapi.windows.msvcr120_64",
+    "vivisect.impapi.windows.msvcr71_32",
+    "vivisect.impapi.windows.msvcr80_32",
+    "vivisect.impapi.windows.msvcr80_64",
+    "vivisect.impapi.windows.msvcr90_32",
+    "vivisect.impapi.windows.msvcr90_64",
+    "vivisect.impapi.windows.msvcrt_32",
+    "vivisect.impapi.windows.msvcrt_64",
+    "vivisect.impapi.windows.ntdll_32",
+    "vivisect.impapi.windows.ntdll_64",
+    "vivisect.impapi.windows.ole_32",
+    "vivisect.impapi.windows.ole_64",
+    "vivisect.impapi.windows.rpcrt4_32",
+    "vivisect.impapi.windows.rpcrt4_64",
+    "vivisect.impapi.windows.shell_32",
+    "vivisect.impapi.windows.shell_64",
+    "vivisect.impapi.windows.user_32",
+    "vivisect.impapi.windows.user_64",
+    "vivisect.impapi.windows.ws2plus_32",
+    "vivisect.impapi.windows.ws2plus_64",
     "vivisect.impapi.winkern",
     "vivisect.impapi.winkern.i386",
+    "vivisect.impapi.winkern.amd64",
     "vivisect.parsers.blob",
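PyInstaller only follows imports it can discover statically, and vivisect loads its analysis modules dynamically, so the hook must enumerate them as hiddenimports. A hedged sketch of how such a list could be generated automatically rather than maintained by hand (pkgutil-based; capa's hook spells the modules out explicitly):

```python
# a sketch; requires vivisect to be installed when the hook is evaluated.
import pkgutil
import importlib
from typing import List


def walk_hiddenimports(root: str) -> List[str]:
    """enumerate `root` and every submodule beneath it, e.g. "vivisect.analysis"."""
    pkg = importlib.import_module(root)
    names = [root]
    for info in pkgutil.walk_packages(pkg.__path__, prefix=root + "."):
        names.append(info.name)
    return sorted(names)


# hiddenimports = walk_hiddenimports("vivisect.analysis") + walk_hiddenimports("vivisect.impapi")
```

The hand-written list trades this automation for precision: it pins exactly the modules known to be needed, which keeps the frozen binary smaller.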
.github/ruff.toml (vendored, new file, 43 lines)

@@ -0,0 +1,43 @@
+# Enable the pycodestyle (`E`) and Pyflakes (`F`) rules by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E", "F"]
+
+# Allow autofix for all enabled rules (when `--fix` is provided).
+fixable = ["ALL"]
+unfixable = []
+
+# E402 module level import not at top of file
+# E722 do not use bare 'except'
+# E501 line too long
+ignore = ["E402", "E722", "E501"]
+
+line-length = 120
+
+exclude = [
+    # Exclude a variety of commonly ignored directories.
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "venv",
+    # protobuf generated files
+    "*_pb2.py",
+    "*_pb2.pyi"
+]
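For illustration, a made-up module showing what this configuration does and does not report (the file name and code below are hypothetical):

```python
# sample.py -- running `ruff check --config .github/ruff.toml sample.py`
# reports the F821 below; E722 and E501 are in the ignore list above.


def parse(data):
    try:
        return int(data)
    except:              # bare except: E722, explicitly ignored by this config
        return fallback  # F821 undefined name `fallback`: still reported
```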
.github/tox.ini (vendored, 10 lines deleted)

@@ -1,10 +0,0 @@
-[pycodestyle]
-; E402: module level import not at top of file
-; W503: line break before binary operator
-; E231 missing whitespace after ',' (emitted by black)
-; E203 whitespace before ':' (emitted by black)
-ignore = E402,W503,E203,E231
-max-line-length = 160
-statistics = True
-count = True
-exclude = .*
.github/workflows/build.yml (vendored, 3 changed lines)

@@ -6,6 +6,9 @@ on:
   release:
     types: [edited, published]
 
+permissions:
+  contents: write
+
 jobs:
   build:
     name: PyInstaller for ${{ matrix.os }}
.github/workflows/changelog.yml (vendored, 2 changed lines)

@@ -7,6 +7,8 @@ on:
   pull_request_target:
     types: [opened, edited, synchronize]
 
+permissions: read-all
+
 jobs:
   check_changelog:
     # no need to check for dependency updates via dependabot
.github/workflows/publish.yml (vendored, 46 changed lines)

@@ -1,29 +1,49 @@
-# This workflows will upload a Python Package using Twine when a release is created
-# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
+# use PyPI trusted publishing, as described here:
+# https://blog.trailofbits.com/2023/05/23/trusted-publishing-a-new-benchmark-for-packaging-security/
 name: publish to pypi
 
 on:
   release:
     types: [published]
 
 permissions:
   contents: write
 
 jobs:
-  deploy:
-    runs-on: ubuntu-20.04
+  pypi-publish:
+    runs-on: ubuntu-latest
+    environment:
+      name: release
+    permissions:
+      id-token: write
     steps:
     - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
     - name: Set up Python
       uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
       with:
-        python-version: '3.7'
+        python-version: '3.8'
    - name: Install dependencies
       run: |
         python -m pip install --upgrade pip
-        pip install setuptools wheel twine
-    - name: Build and publish
-      env:
-        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
-        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
+        pip install -e .[build]
+    - name: build package
       run: |
-        python setup.py sdist bdist_wheel
-        twine upload --skip-existing dist/*
+        python -m build
+    - name: upload package artifacts
+      uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
+      with:
+        path: dist/*
+    - name: upload package to GitHub release
+      uses: svenstaro/upload-release-action@2728235f7dc9ff598bd86ce3c274b74f802d2208 # v2
+      with:
+        repo_token: ${{ secrets.GITHUB_TOKEN}}
+        file: dist/*
+        tag: ${{ github.ref }}
+        overwrite: true
+        file_glob: true
+    - name: publish package
+      uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 # release/v1
+      with:
+        skip-existing: true
+        verbose: true
+        print-hash: true
.github/workflows/tag.yml (vendored, 2 changed lines)

@@ -4,6 +4,8 @@ on:
   release:
     types: [published]
 
+permissions: read-all
+
 jobs:
   tag:
     name: Tag capa rules
.github/workflows/tests.yml (vendored, 44 changed lines)

@@ -6,6 +6,8 @@ on:
   pull_request:
     branches: [ master ]
 
+permissions: read-all
+
 # save workspaces to speed up testing
 env:
   CAPA_SAVE_WORKSPACE: "True"
@@ -27,20 +29,23 @@ jobs:
     steps:
     - name: Checkout capa
       uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
-    - name: Set up Python 3.8
+    # use latest available python to take advantage of best performance
+    - name: Set up Python 3.11
       uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
       with:
-        python-version: "3.8"
+        python-version: "3.11"
     - name: Install dependencies
      run: pip install -e .[dev]
+    - name: Lint with ruff
+      run: pre-commit run ruff
     - name: Lint with isort
-      run: isort --profile black --length-sort --line-width 120 --skip-glob "*_pb2.py" -c .
+      run: pre-commit run isort
     - name: Lint with black
-      run: black -l 120 --extend-exclude ".*_pb2.py" --check .
-    - name: Lint with pycodestyle
-      run: pycodestyle --exclude="*_pb2.py" --show-source capa/ scripts/ tests/
+      run: pre-commit run black
+    - name: Lint with flake8
+      run: pre-commit run flake8
     - name: Check types with mypy
-      run: mypy --config-file .github/mypy/mypy.ini --check-untyped-defs capa/ scripts/ tests/
+      run: pre-commit run mypy
 
   rule_linter:
     runs-on: ubuntu-20.04
@@ -49,12 +54,12 @@ jobs:
       uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
       with:
         submodules: recursive
-    - name: Set up Python 3.8
+    - name: Set up Python 3.11
       uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
       with:
-        python-version: "3.8"
+        python-version: "3.11"
     - name: Install capa
-      run: pip install -e .
+      run: pip install -e .[dev]
     - name: Run rule linter
       run: python scripts/lint.py rules/
@@ -67,7 +72,7 @@ jobs:
       matrix:
         os: [ubuntu-20.04, windows-2019, macos-11]
         # across all operating systems
-        python-version: ["3.7", "3.11"]
+        python-version: ["3.8", "3.11"]
         include:
           # on Ubuntu run these as well
           - os: ubuntu-20.04
@@ -94,36 +99,43 @@ jobs:
       run: pytest -v tests/
 
   binja-tests:
-    name: Binary Ninja tests for ${{ matrix.python-version }} on ${{ matrix.os }}
+    name: Binary Ninja tests for ${{ matrix.python-version }}
+    env:
+      BN_SERIAL: ${{ secrets.BN_SERIAL }}
     runs-on: ubuntu-20.04
     needs: [code_style, rule_linter]
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7", "3.11"]
+        python-version: ["3.8", "3.11"]
     steps:
     - name: Checkout capa with submodules
+      # do only run if BN_SERIAL is available, have to do this in every step, see https://github.com/orgs/community/discussions/26726#discussioncomment-3253118
+      if: ${{ env.BN_SERIAL != 0 }}
      uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
       with:
         submodules: recursive
     - name: Set up Python ${{ matrix.python-version }}
+      if: ${{ env.BN_SERIAL != 0 }}
       uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install pyyaml
+      if: ${{ env.BN_SERIAL != 0 }}
       run: sudo apt-get install -y libyaml-dev
     - name: Install capa
+      if: ${{ env.BN_SERIAL != 0 }}
       run: pip install -e .[dev]
     - name: install Binary Ninja
-      env:
-        BN_SERIAL: ${{ secrets.BN_SERIAL }}
+      if: ${{ env.BN_SERIAL != 0 }}
       run: |
         mkdir ./.github/binja
         curl "https://raw.githubusercontent.com/Vector35/binaryninja-api/6812c97/scripts/download_headless.py" -o ./.github/binja/download_headless.py
-        python ./.github/binja/download_headless.py --serial $BN_SERIAL --output .github/binja/BinaryNinja-headless.zip
+        python ./.github/binja/download_headless.py --serial ${{ env.BN_SERIAL }} --output .github/binja/BinaryNinja-headless.zip
         unzip .github/binja/BinaryNinja-headless.zip -d .github/binja/
         python .github/binja/binaryninja/scripts/install_api.py --install-on-root --silent
     - name: Run tests
+      if: ${{ env.BN_SERIAL != 0 }}
       env:
         BN_LICENSE: ${{ secrets.BN_LICENSE }}
       run: pytest -v tests/test_binja_features.py # explicitly refer to the binja tests for performance. other tests run above.
.gitignore (vendored, 9 changed lines)

@@ -108,13 +108,10 @@ venv.bak/
 *.viv
 *.idb
 *.i64
-.vscode
-
 !rules/lib
 
-# hooks/ci.sh output
-isort-output.log
-black-output.log
-rule-linter-output.log
+.vscode
 scripts/perf/*.txt
 scripts/perf/*.svg
 scripts/perf/*.zip
@@ -127,3 +124,5 @@ Pipfile
 Pipfile.lock
 /cache/
+.github/binja/binaryninja
+.github/binja/download_headless.py
+.github/binja/BinaryNinja-headless.zip
.pre-commit-config.yaml (new file, 111 lines)

@@ -0,0 +1,111 @@
+# install the pre-commit hooks:
+#
+#     ❯ pre-commit install --hook-type pre-commit
+#     pre-commit installed at .git/hooks/pre-commit
+#
+#     ❯ pre-commit install --hook-type pre-push
+#     pre-commit installed at .git/hooks/pre-push
+#
+# run all linters like:
+#
+#     ❯ pre-commit run --all-files
+#     isort....................................................................Passed
+#     black....................................................................Passed
+#     ruff.....................................................................Passed
+#     flake8...................................................................Passed
+#     mypy.....................................................................Passed
+#
+# run a single linter like:
+#
+#     ❯ pre-commit run --all-files isort
+#     isort....................................................................Passed
+
+repos:
+- repo: local
+  hooks:
+  - id: isort
+    name: isort
+    stages: [commit, push]
+    language: system
+    entry: isort
+    args:
+    - "--length-sort"
+    - "--profile"
+    - "black"
+    - "--line-length=120"
+    - "--skip-glob"
+    - "*_pb2.py"
+    - "capa/"
+    - "scripts/"
+    - "tests/"
+    always_run: true
+    pass_filenames: false
+
+- repo: local
+  hooks:
+  - id: black
+    name: black
+    stages: [commit, push]
+    language: system
+    entry: black
+    args:
+    - "--line-length=120"
+    - "--extend-exclude"
+    - ".*_pb2.py"
+    - "capa/"
+    - "scripts/"
+    - "tests/"
+    always_run: true
+    pass_filenames: false
+
+- repo: local
+  hooks:
+  - id: ruff
+    name: ruff
+    stages: [commit, push]
+    language: system
+    entry: ruff
+    args:
+    - "check"
+    - "--config"
+    - ".github/ruff.toml"
+    - "capa/"
+    - "scripts/"
+    - "tests/"
+    always_run: true
+    pass_filenames: false
+
+- repo: local
+  hooks:
+  - id: flake8
+    name: flake8
+    stages: [commit, push]
+    language: system
+    entry: flake8
+    args:
+    - "--config"
+    - ".github/flake8.ini"
+    - "--extend-exclude"
+    - "capa/render/proto/capa_pb2.py"
+    - "capa/"
+    - "scripts/"
+    - "tests/"
+    always_run: true
+    pass_filenames: false
+
+- repo: local
+  hooks:
+  - id: mypy
+    name: mypy
+    stages: [commit, push]
+    language: system
+    entry: mypy
+    args:
+    - "--check-untyped-defs"
+    - "--ignore-missing-imports"
+    - "--config-file=.github/mypy/mypy.ini"
+    - "capa/"
+    - "scripts/"
+    - "tests/"
+    always_run: true
+    pass_filenames: false
CHANGELOG.md (86 changed lines)

@@ -17,8 +17,86 @@
 ### Development
 
 ### Raw diffs
-- [capa v5.1.0...master](https://github.com/mandiant/capa/compare/v5.1.0...master)
-- [capa-rules v5.1.0...master](https://github.com/mandiant/capa-rules/compare/v5.1.0...master)
+- [capa v6.0.0...master](https://github.com/mandiant/capa/compare/v6.0.0...master)
+- [capa-rules v6.0.0...master](https://github.com/mandiant/capa-rules/compare/v6.0.0...master)
+
+## v6.0.0
+
+capa v6.0 brings many bug fixes and quality improvements, including 64 rule updates and 26 new rules. We're now publishing to PyPI via [Trusted Publishing](https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/) and have migrated to using a `pyproject.toml` file. @Aayush-Goel-04 contributed a lot of new code across many files, so please welcome them to the project, along with @anders-v @crowface28 @dkelly2e @RonnieSalomonsen and @ejfocampo as first-time rule contributors!
+
+For those that use capa as a library, we've introduced some limited breaking changes that better represent data types (versus less-structured data like dictionaries and strings). With the recent deprecation, we've also dropped support for Python 3.7.
+
+### New Features
+- add script to detect feature overlap between new and existing capa rules [#1451](https://github.com/mandiant/capa/issues/1451) [@Aayush-Goel-04](https://github.com/aayush-goel-04)
+- extract forwarded exports from PE files #1624 @williballenthin
+- extract function and API names from ELF symtab entries @yelhamer https://github.com/mandiant/capa-rules/issues/736
+- use fancy box drawing characters for default output #1586 @williballenthin
+
+### Breaking Changes
+- use a class to represent Metadata (not dict) #1411 @Aayush-Goel-04 @manasghandat
+- use pathlib.Path to represent file paths #1534 @Aayush-Goel-04
+- Python 3.8 is now the minimum supported Python version #1578 @williballenthin
+- Require a Contributor License Agreement (CLA) for PRs going forward #1642 @williballenthin
+
+### New Rules (26)
+
+- load-code/shellcode/execute-shellcode-via-windows-callback-function ervin.ocampo@mandiant.com jakub.jozwiak@mandiant.com
+- nursery/execute-shellcode-via-indirect-call ronnie.salomonsen@mandiant.com
+- data-manipulation/encryption/aes/encrypt-data-using-aes-mixcolumns-step @mr-tz
+- linking/static/aplib/linked-against-aplib still@teamt5.org
+- communication/mailslot/read-from-mailslot nick.simonian@mandiant.com
+- nursery/hash-data-using-sha512managed-in-dotnet jonathanlepore@google.com
+- nursery/compiled-with-exescript jonathanlepore@google.com
+- nursery/check-for-sandbox-via-mac-address-ouis-in-dotnet jonathanlepore@google.com
+- host-interaction/hardware/enumerate-devices-by-category @mr-tz
+- host-interaction/service/continue-service @mr-tz
+- host-interaction/service/pause-service @mr-tz
+- persistence/exchange/act-as-exchange-transport-agent jakub.jozwiak@mandiant.com
+- host-interaction/file-system/create-virtual-file-system-in-dotnet jakub.jozwiak@mandiant.com
+- compiler/cx_freeze/compiled-with-cx_freeze @mr-tz jakub.jozwiak@mandiant.com
+- communication/socket/create-vmci-socket jakub.jozwiak@mandiant.com
+- persistence/office/act-as-excel-xll-add-in jakub.jozwiak@mandiant.com
+- persistence/office/act-as-office-com-add-in jakub.jozwiak@mandiant.com
+- persistence/office/act-as-word-wll-add-in jakub.jozwiak@mandiant.com
+- anti-analysis/anti-debugging/debugger-evasion/hide-thread-from-debugger michael.hunhoff@mandiant.com jakub.jozwiak@mandiant.com
+- host-interaction/memory/create-new-application-domain-in-dotnet jakub.jozwiak@mandiant.com
+- host-interaction/gui/switch-active-desktop jakub.jozwiak@mandiant.com
+- host-interaction/service/query-service-configuration @mr-tz
+- anti-analysis/anti-av/patch-event-tracing-for-windows-function jakub.jozwiak@mandiant.com
+- data-manipulation/encoding/xor/covertly-decode-and-write-data-to-windows-directory-using-indirect-calls dan.kelly@mandiant.com
+- linking/runtime-linking/resolve-function-by-brute-ratel-badger-hash jakub.jozwiak@mandiant.com
+
+### Bug Fixes
+- extractor: add a Binary Ninja test that asserts its version #1487 @xusheng6
+- extractor: update Binary Ninja stack string detection after the new constant outlining feature #1473 @xusheng6
+- extractor: update vivisect Arch extraction #1334 @mr-tz
+- extractor: avoid Binary Ninja exception when analyzing certain files #1441 @xusheng6
+- symtab: fix struct.unpack() format for 64-bit ELF files @yelhamer
+- symtab: safeguard against ZeroDivisionError for files containing a symtab with a null entry size @yelhamer
+- improve ELF strtab and needed parsing @mr-tz
+- better handle exceptional cases when parsing ELF files #1458 @Aayush-Goel-04
+- improved testing coverage for Binary Ninja backend #1446 @Aayush-Goel-04
+- add logging and print redirect to tqdm for capa main #749 @Aayush-Goel-04
+- extractor: fix binja installation path detection does not work with Python 3.11
+- tests: refine the IDA test runner script #1513 @williballenthin
+- output: don't leave behind traces of progress bar @williballenthin
+- import-to-ida: fix bug introduced with JSON report changes in v5 #1584 @williballenthin
+- main: don't show spinner when emitting debug messages #1636 @williballenthin
+
+### capa explorer IDA Pro plugin
+
+### Development
+- update ATT&CK/MBC data for linting #1568 @mr-tz
+- log time taken to analyze each function #1290 @williballenthin
+- tests: make fixture available via conftest.py #1592 @williballenthin
+- publish via PyPI trusted publishing #1491 @williballenthin
+- migrate to pyproject.toml #1301 @williballenthin
+- use [pre-commit](https://pre-commit.com/) to invoke linters #1579 @williballenthin
+
+### Raw diffs
+- [capa v5.1.0...v6.0.0](https://github.com/mandiant/capa/compare/v5.1.0...v6.0.0a1)
+- [capa-rules v5.1.0...v6.0.0](https://github.com/mandiant/capa-rules/compare/v5.1.0...v6.0.0a1)
 
 ## v5.1.0
 capa version 5.1.0 adds a Protocol Buffers (protobuf) format for result documents. Additionally, the [Vector35](https://vector35.com/) team contributed a new feature extractor using Binary Ninja. Other new features are a new CLI flag to override the detected operating system, functionality to read and render existing result documents, and an output color format that's easier to read.
@@ -65,12 +143,14 @@ Thanks for all the support, especially to @xusheng6, @captainGeech42, @ggold7046
 - nursery/contain-a-thread-local-storage-tls-section-in-dotnet michael.hunhoff@mandiant.com
 
 ### Bug Fixes
+- extractor: interface of cache modified to prevent extracting file and global features multiple times @stevemk14ebr
 - extractor: removed '.dynsym' as the library name for ELF imports #1318 @stevemk14ebr
 - extractor: fix vivisect loop detection corner case #1310 @mr-tz
 - match: extend OS characteristic to match OS_ANY to all supported OSes #1324 @mike-hunhoff
 - extractor: fix IDA and vivisect string and bytes features overlap and tests #1327 #1336 @xusheng6
-- extractor: fix IDA and vivisect string and bytes features overlap and tests #1327 #1336 @xusheng6
 
 ### capa explorer IDA Pro plugin
 - rule generator plugin now loads faster when jumping between functions @stevemk14ebr
+- fix exception when plugin loaded in IDA hosted under idat #1341 @mike-hunhoff
 - improve embedded PE detection performance and reduce FP potential #1344 @mike-hunhoff
(The per-file headers for the hunks below were not captured by the mirror; they cover the license text, README badges, and Python sources.)

@@ -187,7 +187,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.
 
-   Copyright (C) 2020 Mandiant, Inc.
+   Copyright (C) 2023 Mandiant, Inc.
 
    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
@@ -2,7 +2,7 @@
(Badge image URLs were not captured; only the link targets remain. The changed pair points at capa-rules, likely the rule-count badge.)
 
 [](https://pypi.org/project/flare-capa)
 [](https://github.com/mandiant/capa/releases)
 [](https://github.com/mandiant/capa-rules)
 [](https://github.com/mandiant/capa-rules)
 [](https://github.com/mandiant/capa/actions?query=workflow%3ACI+event%3Apush+branch%3Amaster)
 [](https://github.com/mandiant/capa/releases)
 [](LICENSE.txt)
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,7 +8,7 @@
 
 import copy
 import collections
-from typing import TYPE_CHECKING, Set, Dict, List, Tuple, Union, Mapping, Iterable, Iterator, cast
+from typing import TYPE_CHECKING, Set, Dict, List, Tuple, Union, Mapping, Iterable, Iterator
 
 import capa.perf
 import capa.features.common
@@ -71,7 +71,7 @@ class Statement:
             yield child
 
         if hasattr(self, "children"):
-            for child in getattr(self, "children"):
+            for child in self.children:
                 assert isinstance(child, (Statement, Feature))
                 yield child
@@ -83,7 +83,7 @@ class Statement:
             self.child = new
 
         if hasattr(self, "children"):
-            children = getattr(self, "children")
+            children = self.children
             for i, child in enumerate(children):
                 if child is existing:
                     children[i] = new
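Both hunks swap reflective getattr() access for plain attribute access; the hasattr() checks already guard each branch, so behavior is unchanged while the code becomes clearer to readers and type checkers. A minimal standalone sketch of the pattern (simplified; the And subclass is a stand-in, not capa's full class):

```python
from typing import Iterator


class Statement:
    """simplified sketch of capa's rule-logic node."""

    def get_children(self) -> Iterator["Statement"]:
        if hasattr(self, "child"):
            yield self.child
        if hasattr(self, "children"):
            # direct attribute access; the hasattr() above already guards it
            for child in self.children:
                yield child

    def replace_child(self, existing: "Statement", new: "Statement") -> None:
        if hasattr(self, "child") and self.child is existing:
            self.child = new
        if hasattr(self, "children"):
            children = self.children
            for i, child in enumerate(children):
                if child is existing:
                    children[i] = new


class And(Statement):
    def __init__(self, children):
        self.children = children
```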
@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 class UnsupportedRuntimeError(RuntimeError):
     pass
@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 import abc
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -100,7 +100,10 @@ class Result:
         return self.success
 
 
-class Feature(abc.ABC):
+class Feature(abc.ABC):  # noqa: B024
+    # this is an abstract class, since we don't want anyone to instantiate it directly,
+    # but it doesn't have any abstract methods.
+
     def __init__(
         self,
         value: Union[str, int, float, bytes],
@@ -124,7 +127,12 @@ class Feature(abc.ABC):
         return self.name == other.name and self.value == other.value
 
     def __lt__(self, other):
-        # TODO: this is a huge hack!
+        # implementing sorting by serializing to JSON is a huge hack.
+        # its slow, inelegant, and probably doesn't work intuitively;
+        # however, we only use it for deterministic output, so it's good enough for now.
+
+        # circular import
+        # we should fix if this wasn't already a huge hack.
         import capa.features.freeze.features
 
         return (
@@ -267,7 +275,7 @@ class _MatchedSubstring(Substring):
         self.matches = matches
 
     def __str__(self):
-        matches = ", ".join(map(lambda s: '"' + s + '"', (self.matches or {}).keys()))
+        matches = ", ".join(f'"{s}"' for s in (self.matches or {}).keys())
         assert isinstance(self.value, str)
         return f'substring("{self.value}", matches = {matches})'
@@ -359,7 +367,7 @@ class _MatchedRegex(Regex):
         self.matches = matches
 
     def __str__(self):
-        matches = ", ".join(map(lambda s: '"' + s + '"', (self.matches or {}).keys()))
+        matches = ", ".join(f'"{s}"' for s in (self.matches or {}).keys())
         assert isinstance(self.value, str)
         return f"regex(string =~ {self.value}, matches = {matches})"
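The expanded comment in __lt__ documents the trade-off: ordering features by a canonical serialized form is slow and unintuitive, but deterministic, which is all the renderers need. A tiny self-contained illustration of the idea (the Feat class is hypothetical, not capa's type):

```python
import json


class Feat:
    """hypothetical stand-in for a feature with a name and a value."""

    def __init__(self, name, value):
        self.name = name
        self.value = value

    def __lt__(self, other):
        # deterministic (if slow) ordering: compare canonical JSON forms.
        a = json.dumps({"name": self.name, "value": self.value}, sort_keys=True)
        b = json.dumps({"name": other.name, "value": other.value}, sort_keys=True)
        return a < b


feats = [Feat("number", 16), Feat("api", "CreateFile"), Feat("number", 2)]
print([f"{f.name}={f.value}" for f in sorted(feats)])  # same order on every run
```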
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,15 +6,16 @@
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
 
-import sys
+import string
 import struct
 from typing import Tuple, Iterator
 
-from binaryninja import Function
+from binaryninja import Function, Settings
 from binaryninja import BasicBlock as BinjaBasicBlock
 from binaryninja import (
     BinaryView,
+    SymbolType,
     RegisterValueType,
     VariableSourceType,
     MediumLevelILSetVar,
     MediumLevelILOperation,
@@ -23,11 +24,71 @@ from binaryninja import (
 )
 
 from capa.features.common import Feature, Characteristic
-from capa.features.address import Address, AbsoluteVirtualAddress
+from capa.features.address import Address
 from capa.features.basicblock import BasicBlock
 from capa.features.extractors.helpers import MIN_STACKSTRING_LEN
 from capa.features.extractors.base_extractor import BBHandle, FunctionHandle
 
+use_const_outline: bool = False
+settings: Settings = Settings()
+if settings.contains("analysis.outlining.builtins") and settings.get_bool("analysis.outlining.builtins"):
+    use_const_outline = True
+
+
+def get_printable_len_ascii(s: bytes) -> int:
+    """Return string length if all operand bytes are ascii or utf16-le printable"""
+    count = 0
+    for c in s:
+        if c == 0:
+            return count
+        if c < 127 and chr(c) in string.printable:
+            count += 1
+    return count
+
+
+def get_printable_len_wide(s: bytes) -> int:
+    """Return string length if all operand bytes are ascii or utf16-le printable"""
+    if all(c == 0x00 for c in s[1::2]):
+        return get_printable_len_ascii(s[::2])
+    return 0
+
+
+def get_stack_string_len(f: Function, il: MediumLevelILInstruction) -> int:
+    bv: BinaryView = f.view
+
+    if il.operation != MediumLevelILOperation.MLIL_CALL:
+        return 0
+
+    target = il.dest
+    if target.operation not in [MediumLevelILOperation.MLIL_CONST, MediumLevelILOperation.MLIL_CONST_PTR]:
+        return 0
+
+    addr = target.value.value
+    sym = bv.get_symbol_at(addr)
+    if not sym or sym.type != SymbolType.LibraryFunctionSymbol:
+        return 0
+
+    if sym.name not in ["__builtin_strncpy", "__builtin_strcpy", "__builtin_wcscpy"]:
+        return 0
+
+    if len(il.params) < 2:
+        return 0
+
+    dest = il.params[0]
+    if dest.operation != MediumLevelILOperation.MLIL_ADDRESS_OF:
+        return 0
+
+    var = dest.src
+    if var.source_type != VariableSourceType.StackVariableSourceType:
+        return 0
+
+    src = il.params[1]
+    if src.value.type != RegisterValueType.ConstantDataAggregateValue:
+        return 0
+
+    s = f.get_constant_data(RegisterValueType.ConstantDataAggregateValue, src.value.value)
+    return max(get_printable_len_ascii(bytes(s)), get_printable_len_wide(bytes(s)))
+
+
 def get_printable_len(il: MediumLevelILSetVar) -> int:
     """Return string length if all operand bytes are ascii or utf16-le printable"""
@@ -69,7 +130,7 @@ def is_mov_imm_to_stack(il: MediumLevelILInstruction) -> bool:
     if il.src.operation != MediumLevelILOperation.MLIL_CONST:
         return False
 
-    if not il.dest.source_type == VariableSourceType.StackVariableSourceType:
+    if il.dest.source_type != VariableSourceType.StackVariableSourceType:
         return False
 
     return True
@@ -82,8 +143,11 @@ def bb_contains_stackstring(f: Function, bb: MediumLevelILBasicBlock) -> bool:
     """
     count = 0
     for il in bb:
-        if is_mov_imm_to_stack(il):
-            count += get_printable_len(il)
+        if use_const_outline:
+            count += get_stack_string_len(f, il)
+        else:
+            if is_mov_imm_to_stack(il):
+                count += get_printable_len(il)
 
     if count > MIN_STACKSTRING_LEN:
         return True
@@ -117,30 +181,3 @@ BASIC_BLOCK_HANDLERS = (
     extract_bb_tight_loop,
     extract_bb_stackstring,
 )
-
-
-def main():
-    if len(sys.argv) < 2:
-        return
-
-    from binaryninja import BinaryViewType
-
-    from capa.features.extractors.binja.extractor import BinjaFeatureExtractor
-
-    bv: BinaryView = BinaryViewType.get_view_of_file(sys.argv[1])
-    if bv is None:
-        return
-
-    features = []
-    extractor = BinjaFeatureExtractor(bv)
-    for fh in extractor.get_functions():
-        for bbh in extractor.get_basic_blocks(fh):
-            features.extend(list(extract_features(fh, bbh)))
-
-    import pprint
-
-    pprint.pprint(features)
-
-
-if __name__ == "__main__":
-    main()
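To make the intent of the two printable-length helpers concrete, a standalone check (both functions are copied verbatim from the hunk above; the sample byte strings are invented):

```python
import string


def get_printable_len_ascii(s: bytes) -> int:
    count = 0
    for c in s:
        if c == 0:
            return count
        if c < 127 and chr(c) in string.printable:
            count += 1
    return count


def get_printable_len_wide(s: bytes) -> int:
    if all(c == 0x00 for c in s[1::2]):
        return get_printable_len_ascii(s[::2])
    return 0


assert get_printable_len_ascii(b"cmd.exe\x00garbage") == 7     # stops at the NUL
assert get_printable_len_wide("cmd".encode("utf-16-le")) == 3  # every other byte is 0x00
assert get_printable_len_wide(b"\x01\x02\x03\x04") == 0        # not UTF-16LE text
```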
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -53,9 +53,7 @@ class BinjaFeatureExtractor(FeatureExtractor):
             mlil_lookup[mlil_bb.source_block.start] = mlil_bb
 
         for bb in f.basic_blocks:
-            mlil_bb = None
-            if bb.start in mlil_lookup:
-                mlil_bb = mlil_lookup[bb.start]
+            mlil_bb = mlil_lookup.get(bb.start)
 
             yield BBHandle(address=AbsoluteVirtualAddress(bb.start), inner=(bb, mlil_bb))
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,11 +6,10 @@
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
 
-import sys
 import struct
 from typing import Tuple, Iterator
 
-from binaryninja import Symbol, Segment, BinaryView, SymbolType, SymbolBinding
+from binaryninja import Segment, BinaryView, SymbolType, SymbolBinding
 
 import capa.features.extractors.common
 import capa.features.extractors.helpers
@@ -166,23 +165,3 @@ FILE_HANDLERS = (
     extract_file_function_names,
     extract_file_format,
 )
-
-
-def main():
-    """ """
-    if len(sys.argv) < 2:
-        return
-
-    from binaryninja import BinaryViewType
-
-    bv: BinaryView = BinaryViewType.get_view_of_file(sys.argv[1])
-    if bv is None:
-        return
-
-    import pprint
-
-    pprint.pprint(list(extract_features(bv)))
-
-
-if __name__ == "__main__":
-    main()
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,6 +6,7 @@
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
 import subprocess
+from pathlib import Path
 
 # When the script gets executed as a standalone executable (via PyInstaller), `import binaryninja` does not work because
 # we have excluded the binaryninja module in `pyinstaller.spec`. The trick here is to call the system Python and try
@@ -15,8 +16,8 @@ import subprocess
 # binaryninja module is extracted by the PyInstaller.
 code = r"""
 from pathlib import Path
-import importlib
-spec = importlib.util.find_spec('binaryninja')
+from importlib import util
+spec = util.find_spec('binaryninja')
 if spec is not None:
     if len(spec.submodule_search_locations) > 0:
         path = Path(spec.submodule_search_locations[0])
@@ -25,9 +26,9 @@ if spec is not None:
 """
 
 
-def find_binja_path() -> str:
+def find_binja_path() -> Path:
     raw_output = subprocess.check_output(["python", "-c", code]).decode("ascii").strip()
-    return bytes.fromhex(raw_output).decode("utf8")
+    return Path(bytes.fromhex(raw_output).decode("utf8"))
 
 
 if __name__ == "__main__":
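The hunk above keeps a neat trick intact: the frozen executable excludes binaryninja, so it shells out to the system Python and has it print the module's location hex-encoded, which survives the pipe regardless of path characters. A self-contained sketch of the same pattern against an arbitrary module (probing json here; using sys.executable is my choice, while capa invokes plain "python"):

```python
import sys
import subprocess
from pathlib import Path

# ask another interpreter where a module lives; hex-encode the answer
# so it round-trips cleanly through stdout.
PROBE = r"""
from importlib import util
spec = util.find_spec("json")
if spec is not None and spec.origin is not None:
    print(spec.origin.encode("utf-8").hex())
"""


def find_module_path() -> Path:
    raw = subprocess.check_output([sys.executable, "-c", PROBE]).decode("ascii").strip()
    return Path(bytes.fromhex(raw).decode("utf-8"))


print(find_module_path())  # e.g. .../lib/python3.11/json/__init__.py
```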
@@ -1,11 +1,10 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
 # Unless required by applicable law or agreed to in writing, software distributed under the License
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
-import sys
 from typing import Tuple, Iterator
 
 from binaryninja import Function, BinaryView, LowLevelILOperation
@@ -19,13 +18,12 @@ from capa.features.extractors.base_extractor import FunctionHandle
 def extract_function_calls_to(fh: FunctionHandle):
     """extract callers to a function"""
     func: Function = fh.inner
-    bv: BinaryView = func.view
 
     for caller in func.caller_sites:
         # Everything that is a code reference to the current function is considered a caller, which actually includes
         # many other references that are NOT a caller. For example, an instruction `push function_start` will also be
         # considered a caller to the function
-        if caller.llil.operation in [
+        if caller.llil is not None and caller.llil.operation in [
             LowLevelILOperation.LLIL_CALL,
             LowLevelILOperation.LLIL_CALL_STACK_ADJUST,
             LowLevelILOperation.LLIL_JUMP,
@@ -68,30 +66,3 @@ def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
 
 
 FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call)
-
-
-def main():
-    """ """
-    if len(sys.argv) < 2:
-        return
-
-    from binaryninja import BinaryViewType
-
-    from capa.features.extractors.binja.extractor import BinjaFeatureExtractor
-
-    bv: BinaryView = BinaryViewType.get_view_of_file(sys.argv[1])
-    if bv is None:
-        return
-
-    features = []
-    extractor = BinjaFeatureExtractor(bv)
-    for fh in extractor.get_functions():
-        features.extend(list(extract_features(fh)))
-
-    import pprint
-
-    pprint.pprint(features)
-
-
-if __name__ == "__main__":
-    main()
@@ -1,10 +1,15 @@
 # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
 # Unless required by applicable law or agreed to in writing, software distributed under the License
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
 import logging
 import contextlib
 from typing import Tuple, Iterator
 
 from binaryninja import BinaryView
 
 import capa.features.extractors.elf
 from capa.features.common import OS, OS_MACOS, ARCH_I386, ARCH_AMD64, OS_WINDOWS, Arch, Feature
 from capa.features.address import NO_ADDRESS, Address
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -41,7 +41,10 @@ def unmangle_c_name(name: str) -> str:
|
||||
# _lstrlenWStub@4
|
||||
|
||||
# A small optimization to avoid running the regex too many times
|
||||
# TODO: this still increases the unit test execution time from 170s to 200s, should be able to accelerate it
|
||||
# this still increases the unit test execution time from 170s to 200s, should be able to accelerate it
|
||||
#
|
||||
# TODO(xusheng): performance optimizations to improve test execution time
|
||||
# https://github.com/mandiant/capa/issues/1610
|
||||
if name[0] in ["@", "_"]:
|
||||
match = re.match(r"^[@|_](.*?)(Stub)?(@\d+)?$", name)
|
||||
if match:
|
||||
|
||||
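
To make the unmangling concrete: applied to the `_lstrlenWStub@4` example from the comment above, the regex strips the leading `_`/`@`, the optional `Stub` suffix, and the optional `@<stack-bytes>` suffix, leaving the plain symbol name. A quick standalone check (not capa code):

```python
import re

# the pattern from unmangle_c_name above
pattern = r"^[@|_](.*?)(Stub)?(@\d+)?$"

match = re.match(pattern, "_lstrlenWStub@4")
assert match is not None
# the lazy (.*?) stops as soon as "Stub" and "@4" can match the optional groups
assert match.group(1) == "lstrlenW"
```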
@@ -1,12 +1,11 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import sys
-from typing import Any, Dict, List, Tuple, Iterator, Optional
+from typing import Any, List, Tuple, Iterator, Optional

from binaryninja import Function
from binaryninja import BasicBlock as BinjaBasicBlock
@@ -18,12 +17,11 @@ from binaryninja import (
    RegisterValueType,
    LowLevelILOperation,
    LowLevelILInstruction,
    InstructionTextTokenType,
)

import capa.features.extractors.helpers
from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
-from capa.features.common import MAX_BYTES_FEATURE_SIZE, THUNK_CHAIN_DEPTH_DELTA, Bytes, String, Feature, Characteristic
+from capa.features.common import MAX_BYTES_FEATURE_SIZE, Bytes, String, Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.binja.helpers import DisassemblyInstruction, visit_llil_exprs
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
@@ -73,7 +71,6 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle)
    example:
        call dword [0x00473038]
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner
    bv: BinaryView = func.view

@@ -128,12 +125,9 @@ def extract_insn_number_features(
    example:
        push 3136B0h ; dwControlCode
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner
    bv: BinaryView = func.view

    results: List[Tuple[Any[Number, OperandNumber], Address]] = []
    address_size = func.view.arch.address_size * 8

    def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
        if il.operation == LowLevelILOperation.LLIL_LOAD:
@@ -161,8 +155,7 @@ def extract_insn_number_features(
    for llil in func.get_llils_at(ih.address):
        visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results

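The repeated `for result in results: yield result` loops collapsed in this file are exactly equivalent to `yield from results` (PEP 380) for plain lists; a standalone illustration:

```python
from typing import Iterator, List, Tuple

def pairs_v1(results: List[Tuple[str, int]]) -> Iterator[Tuple[str, int]]:
    # the old spelling: an explicit loop that re-yields each item
    for result in results:
        yield result

def pairs_v2(results: List[Tuple[str, int]]) -> Iterator[Tuple[str, int]]:
    # the new spelling: delegate to the iterable directly
    yield from results

data = [("number(0x3136B0)", 0x401000)]
assert list(pairs_v1(data)) == list(pairs_v2(data))
```
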
def extract_insn_bytes_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
@@ -171,7 +164,6 @@ def extract_insn_bytes_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandl
    example:
        push offset iid_004118d4_IShellLinkA ; riid
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner
    bv: BinaryView = func.view

@@ -220,7 +212,6 @@ def extract_insn_string_features(
    example:
        push offset aAcr ; "ACR > "
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner
    bv: BinaryView = func.view

@@ -278,7 +269,6 @@ def extract_insn_offset_features(
    example:
        .text:0040112F cmp [esi+4], ebx
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner

    results: List[Tuple[Any[Offset, OperandOffset], Address]] = []
@@ -327,13 +317,13 @@ def extract_insn_offset_features(
    for llil in func.get_llils_at(ih.address):
        visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results


def is_nzxor_stack_cookie(f: Function, bb: BinjaBasicBlock, llil: LowLevelILInstruction) -> bool:
    """check if nzxor exists within stack cookie delta"""
-    # TODO: we can do a much accurate analysi using LLIL SSA
+    # TODO(xusheng): use LLIL SSA to do more accurate analysis
+    # https://github.com/mandiant/capa/issues/1609

    reg_names = []
    if llil.left.operation == LowLevelILOperation.LLIL_REG:
@@ -364,7 +354,6 @@ def extract_insn_nzxor_characteristic_features(
    parse instruction non-zeroing XOR instruction
    ignore expected non-zeroing XORs, e.g. security cookies
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner

    results = []
@@ -384,8 +373,7 @@ def extract_insn_nzxor_characteristic_features(
    for llil in func.get_llils_at(ih.address):
        visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results


def extract_insn_mnemonic_features(
@@ -414,7 +402,6 @@ def extract_insn_peb_access_characteristic_features(

    fs:[0x30] on x86, gs:[0x60] on x64
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner

    results = []
@@ -439,7 +426,7 @@ def extract_insn_peb_access_characteristic_features(
                return True

            value = right.value.value
-            if not (reg, value) in (("fsbase", 0x30), ("gsbase", 0x60)):
+            if (reg, value) not in (("fsbase", 0x30), ("gsbase", 0x60)):
                return True

            results.append((Characteristic("peb access"), ih.address))
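
Two small notes on this hunk. First, `not (reg, value) in (...)` and `(reg, value) not in (...)` compile to the same membership test; the latter is the PEP 8 recommended spelling. Second, the check itself keeps only accesses through `fs:[0x30]` (x86) or `gs:[0x60]` (x64), the TEB fields that hold the PEB pointer. A standalone equivalence check:

```python
# illustration of the membership test above; reg/value stand in for the lifted operands
reg, value = "gsbase", 0x60

old_style = not (reg, value) in (("fsbase", 0x30), ("gsbase", 0x60))
new_style = (reg, value) not in (("fsbase", 0x30), ("gsbase", 0x60))
assert old_style == new_style == False
```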
@@ -448,15 +435,13 @@ def extract_insn_peb_access_characteristic_features(
    for llil in func.get_llils_at(ih.address):
        visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results


def extract_insn_segment_access_features(
    fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
    """parse instruction fs or gs access"""
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner

    results = []
@@ -477,15 +462,13 @@ def extract_insn_segment_access_features(
    for llil in func.get_llils_at(ih.address):
        visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results


def extract_insn_cross_section_cflow(
    fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
    """inspect the instruction for a CALL or JMP that crosses section boundaries"""
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner
    bv: BinaryView = func.view

@@ -509,7 +492,6 @@ def extract_function_calls_from(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandl

    most relevant at the function scope, however, its most efficient to extract at the instruction scope
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner
    bv: BinaryView = func.view

@@ -555,7 +537,6 @@ def extract_function_indirect_call_characteristic_features(
    most relevant at the function or basic block scope;
    however, its most efficient to extract at the instruction scope
    """
    insn: DisassemblyInstruction = ih.inner
    func: Function = fh.inner

    llil = func.get_llil_at(ih.address)
@@ -599,32 +580,3 @@ INSTRUCTION_HANDLERS = (
    extract_function_calls_from,
    extract_function_indirect_call_characteristic_features,
)
-
-
-def main():
-    """ """
-    if len(sys.argv) < 2:
-        return
-
-    from binaryninja import BinaryViewType
-
-    from capa.features.extractors.binja.extractor import BinjaFeatureExtractor
-
-    bv: BinaryView = BinaryViewType.get_view_of_file(sys.argv[1])
-    if bv is None:
-        return
-
-    features = []
-    extractor = BinjaFeatureExtractor(bv)
-    for fh in extractor.get_functions():
-        for bbh in extractor.get_basic_blocks(fh):
-            for insn in extractor.get_instructions(fh, bbh):
-                features.extend(list(extract_features(fh, bbh, insn)))
-
-    import pprint
-
-    pprint.pprint(features)
-
-
-if __name__ == "__main__":
-    main()

@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
import io
import logging
import binascii

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,6 +9,7 @@
from __future__ import annotations

from typing import Dict, List, Tuple, Union, Iterator, Optional
+from pathlib import Path

import dnfile
from dncil.cil.opcode import OpCodes
@@ -52,25 +53,25 @@ class DnFileFeatureExtractorCache:
            self.types[type_.token] = type_

    def get_import(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.imports.get(token, None)
+        return self.imports.get(token)

    def get_native_import(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.native_imports.get(token, None)
+        return self.native_imports.get(token)

    def get_method(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.methods.get(token, None)
+        return self.methods.get(token)

    def get_field(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.fields.get(token, None)
+        return self.fields.get(token)

    def get_type(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.types.get(token, None)
+        return self.types.get(token)


class DnfileFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
        super().__init__()
-        self.pe: dnfile.dnPE = dnfile.dnPE(path)
+        self.pe: dnfile.dnPE = dnfile.dnPE(str(path))

        # pre-compute .NET token lookup tables; each .NET method has access to this cache for feature extraction
        # most relevant at instruction scope
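
`dict.get(key)` already returns `None` when the key is absent, so dropping the explicit `None` default in these getters is a pure simplification with identical behavior. Standalone:

```python
methods = {0x06000001: "MethodDef: Program.Main"}

# the two-argument and one-argument forms behave identically for a None default
assert methods.get(0x06000002, None) is None
assert methods.get(0x06000002) is None
assert methods.get(0x06000001) == "MethodDef: Program.Main"
```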
@@ -119,7 +120,7 @@ class DnfileFeatureExtractor(FeatureExtractor):
                address: DNTokenAddress = DNTokenAddress(insn.operand.value)

                # record call to destination method; note: we only consider MethodDef methods for destinations
-                dest: Optional[FunctionHandle] = methods.get(address, None)
+                dest: Optional[FunctionHandle] = methods.get(address)
                if dest is not None:
                    dest.ctx["calls_to"].add(fh.address)

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -52,7 +52,7 @@ def resolve_dotnet_token(pe: dnfile.dnPE, token: Token) -> Union[dnfile.base.MDT
            return InvalidToken(token.value)
        return user_string

-    table: Optional[dnfile.base.ClrMetaDataTable] = pe.net.mdtables.tables.get(token.table, None)
+    table: Optional[dnfile.base.ClrMetaDataTable] = pe.net.mdtables.tables.get(token.table)
    if table is None:
        # table index is not valid
        return InvalidToken(token.value)
@@ -204,7 +204,7 @@ def get_dotnet_managed_methods(pe: dnfile.dnPE) -> Iterator[DnType]:
            continue

        token: int = calculate_dotnet_token_value(method.table.number, method.row_index)
-        access: Optional[str] = accessor_map.get(token, None)
+        access: Optional[str] = accessor_map.get(token)

        method_name: str = method.row.Name
        if method_name.startswith(("get_", "set_")):

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,7 +9,7 @@
from __future__ import annotations

import logging
-from typing import TYPE_CHECKING, Any, Dict, Tuple, Union, Iterator, Optional
+from typing import TYPE_CHECKING, Tuple, Union, Iterator, Optional

if TYPE_CHECKING:
    from capa.features.extractors.dnfile.extractor import DnFileFeatureExtractorCache

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,11 +6,10 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.

from enum import Enum
-from typing import Union, Optional
+from typing import Optional


-class DnType(object):
+class DnType:
    def __init__(self, token: int, class_: str, namespace: str = "", member: str = "", access: Optional[str] = None):
        self.token: int = token
        self.access: Optional[str] = access

@@ -1,5 +1,13 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Tuple, Iterator
+from pathlib import Path

import dnfile
import pefile
@@ -74,10 +82,10 @@ GLOBAL_HANDLERS = (


class DnfileFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
        super().__init__()
-        self.path: str = path
-        self.pe: dnfile.dnPE = dnfile.dnPE(path)
+        self.path: Path = path
+        self.pe: dnfile.dnPE = dnfile.dnPE(str(path))

    def get_base_address(self) -> AbsoluteVirtualAddress:
        return AbsoluteVirtualAddress(0x0)
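
The `path: str` to `path: Path` migrations across these extractors follow one pattern: keep `pathlib.Path` at the API boundary and convert with `str(path)` only for libraries, like `dnfile` here, that expect a plain string. A standalone sketch of the pattern, with a hypothetical file name:

```python
from pathlib import Path

def load_sample(path: Path) -> bytes:
    # pathlib replaces the open()/read() boilerplate...
    buf = path.read_bytes()
    # ...and str(path) bridges to APIs that only accept strings
    legacy_name = str(path)
    print(f"loaded {len(buf)} bytes from {legacy_name}")
    return buf

# hypothetical usage:
# load_sample(Path("sample.exe_"))
```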
@@ -1,5 +1,13 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
import logging
-from typing import Tuple, Iterator, cast
+from typing import Tuple, Iterator
+from pathlib import Path

import dnfile
import pefile
@@ -158,10 +166,10 @@ GLOBAL_HANDLERS = (


class DotnetFileFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
        super().__init__()
-        self.path: str = path
-        self.pe: dnfile.dnPE = dnfile.dnPE(path)
+        self.path: Path = path
+        self.pe: dnfile.dnPE = dnfile.dnPE(str(path))

    def get_base_address(self):
        return NO_ADDRESS

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -24,7 +24,7 @@ def align(v, alignment):
    return v + (alignment - remainder)


-def read_cstr(buf, offset):
+def read_cstr(buf, offset) -> str:
    s = buf[offset:]
    s, _, _ = s.partition(b"\x00")
    return s.decode("utf-8")
@@ -91,6 +91,20 @@ class Shdr:
    entsize: int
    buf: bytes

+    @classmethod
+    def from_viv(cls, section, buf: bytes) -> "Shdr":
+        return cls(
+            section.sh_name,
+            section.sh_type,
+            section.sh_flags,
+            section.sh_addr,
+            section.sh_offset,
+            section.sh_size,
+            section.sh_link,
+            section.sh_entsize,
+            buf,
+        )


class ELF:
    def __init__(self, f: BinaryIO):
@@ -397,7 +411,7 @@ class ELF:
        # there should be vn_cnt of these.
        # each entry describes an ABI name required by the shared object.
        vna_offset = vn_offset + vn_aux
-        for i in range(vn_cnt):
+        for _ in range(vn_cnt):
            # ElfXX_Vernaux layout is the same on 32 and 64 bit
            _, _, _, vna_name, vna_next = struct.unpack_from(self.endian + "IHHII", shdr.buf, vna_offset)

@@ -458,10 +472,12 @@ class ELF:
        for d_tag, d_val in self.dynamic_entries:
            if d_tag == DT_STRTAB:
                strtab_addr = d_val
+                break

        for d_tag, d_val in self.dynamic_entries:
            if d_tag == DT_STRSZ:
                strtab_size = d_val
+                break

        if strtab_addr is None:
            return None
@@ -471,8 +487,10 @@ class ELF:

        strtab_offset = None
        for shdr in self.section_headers:
-            if shdr.addr <= strtab_addr < shdr.addr + shdr.size:
+            # the section header address should be defined
+            if shdr.addr and shdr.addr <= strtab_addr < shdr.addr + shdr.size:
                strtab_offset = shdr.offset + (strtab_addr - shdr.addr)
                break

        if strtab_offset is None:
            return None
@@ -501,7 +519,10 @@ class ELF:
            if d_tag != DT_NEEDED:
                continue

-            yield read_cstr(strtab, d_val)
+            try:
+                yield read_cstr(strtab, d_val)
+            except UnicodeDecodeError as e:
+                logger.warning("failed to read DT_NEEDED entry: %s", str(e))

    @property
    def symtab(self) -> Optional[Tuple[Shdr, Shdr]]:
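
`DT_NEEDED` values are offsets into the dynamic string table, and the new `try`/`except` tolerates corrupt entries whose bytes do not decode as UTF-8. A standalone illustration of `read_cstr` over a fabricated string table:

```python
def read_cstr(buf, offset) -> str:
    # same helper as above: take everything up to the first NUL
    s = buf[offset:]
    s, _, _ = s.partition(b"\x00")
    return s.decode("utf-8")

# fabricated .dynstr contents: NUL-separated library names
strtab = b"\x00libc.so.6\x00libm.so.6\x00"
assert read_cstr(strtab, 1) == "libc.so.6"
assert read_cstr(strtab, 11) == "libm.so.6"

# a malformed entry raises UnicodeDecodeError, which the caller now logs and skips
try:
    read_cstr(b"\xff\xfe\x00", 0)
except UnicodeDecodeError:
    print("skipped corrupt DT_NEEDED entry")
```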
@@ -651,6 +672,9 @@ class SymTab:
        return the symbol's information in
        the order specified by sys/elf32.h
        """
+        if self.symtab.entsize == 0:
+            return
+
        for i in range(int(len(self.symtab.buf) / self.symtab.entsize)):
            if bitness == 32:
                name_offset, value, size, info, other, shndx = struct.unpack_from(
@@ -658,7 +682,7 @@ class SymTab:
                )
            elif bitness == 64:
                name_offset, info, other, shndx, value, size = struct.unpack_from(
-                    endian + "IBBBQQ", symtab_buf, i * self.symtab.entsize
+                    endian + "IBBHQQ", symtab_buf, i * self.symtab.entsize
                )

            self.symbols.append(Symbol(name_offset, value, size, info, other, shndx))
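
The one-character format fix here matters: `Elf64_Sym` is laid out as `st_name:u32, st_info:u8, st_other:u8, st_shndx:u16, st_value:u64, st_size:u64`, so `st_shndx` needs `H` (u16), not `B` (u8). With `B` the record is read as 23 bytes instead of 24 and every field after `st_other` is misinterpreted. A quick check:

```python
import struct

# Elf64_Sym: st_name(u32) st_info(u8) st_other(u8) st_shndx(u16) st_value(u64) st_size(u64)
assert struct.calcsize("<IBBHQQ") == 24  # matches sizeof(Elf64_Sym)
assert struct.calcsize("<IBBBQQ") == 23  # the old format: one byte short

sym = struct.pack("<IBBHQQ", 1, 0x12, 0, 14, 0x401000, 0x30)
name_offset, info, other, shndx, value, size = struct.unpack_from("<IBBHQQ", sym, 0)
assert (value, size) == (0x401000, 0x30)
```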
@@ -682,8 +706,30 @@ class SymTab:
        return a tuple: (name, value, size, info, other, shndx)
        for each symbol contained in the symbol table
        """
-        for symbol in self.symbols:
-            yield symbol
+        yield from self.symbols

+    @classmethod
+    def from_Elf(cls, ElfBinary) -> Optional["SymTab"]:
+        endian = "<" if ElfBinary.getEndian() == 0 else ">"
+        bitness = ElfBinary.bits
+
+        SHT_SYMTAB = 0x2
+        for section in ElfBinary.sections:
+            if section.sh_info & SHT_SYMTAB:
+                strtab_section = ElfBinary.sections[section.sh_link]
+                sh_symtab = Shdr.from_viv(section, ElfBinary.readAtOffset(section.sh_offset, section.sh_size))
+                sh_strtab = Shdr.from_viv(
+                    strtab_section, ElfBinary.readAtOffset(strtab_section.sh_offset, strtab_section.sh_size)
+                )
+
+        try:
+            return cls(endian, bitness, sh_symtab, sh_strtab)
+        except NameError:
+            return None
+        except Exception:
+            # all exceptions that could be encountered by
+            # cls._parse() imply a faulty symbol's table.
+            raise CorruptElfFile("malformed symbol's table")


def guess_os_from_osabi(elf: ELF) -> Optional[OS]:
@@ -777,7 +823,7 @@ def guess_os_from_abi_versions_needed(elf: ELF) -> Optional[OS]:
    # this will let us guess about linux/hurd in some cases.

    versions_needed = elf.versions_needed
-    if any(map(lambda abi: abi.startswith("GLIBC"), itertools.chain(*versions_needed.values()))):
+    if any(abi.startswith("GLIBC") for abi in itertools.chain(*versions_needed.values())):
        # there are any GLIBC versions needed

        if elf.e_machine != "i386":
@@ -834,7 +880,7 @@ def guess_os_from_symtab(elf: ELF) -> Optional[OS]:
            sym_name = symtab.get_name(symbol)

            for os, hints in keywords.items():
-                if any(map(lambda x: x in sym_name, hints)):
+                if any(hint in sym_name for hint in hints):
                    return os

    return None
@@ -844,28 +890,60 @@ def detect_elf_os(f) -> str:
    """
    f: type Union[BinaryIO, IDAIO]
    """
-    elf = ELF(f)
+    try:
+        elf = ELF(f)
+    except Exception as e:
+        logger.warning("Error parsing ELF file: %s", e)
+        return "unknown"

-    osabi_guess = guess_os_from_osabi(elf)
-    logger.debug("guess: osabi: %s", osabi_guess)
+    try:
+        osabi_guess = guess_os_from_osabi(elf)
+        logger.debug("guess: osabi: %s", osabi_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from OSABI: %s", e)
+        osabi_guess = None

-    ph_notes_guess = guess_os_from_ph_notes(elf)
-    logger.debug("guess: ph notes: %s", ph_notes_guess)
+    try:
+        ph_notes_guess = guess_os_from_ph_notes(elf)
+        logger.debug("guess: ph notes: %s", ph_notes_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from program header notes: %s", e)
+        ph_notes_guess = None

-    sh_notes_guess = guess_os_from_sh_notes(elf)
-    logger.debug("guess: sh notes: %s", sh_notes_guess)
+    try:
+        sh_notes_guess = guess_os_from_sh_notes(elf)
+        logger.debug("guess: sh notes: %s", sh_notes_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from section header notes: %s", e)
+        sh_notes_guess = None

-    linker_guess = guess_os_from_linker(elf)
-    logger.debug("guess: linker: %s", linker_guess)
+    try:
+        linker_guess = guess_os_from_linker(elf)
+        logger.debug("guess: linker: %s", linker_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from linker: %s", e)
+        linker_guess = None

-    abi_versions_needed_guess = guess_os_from_abi_versions_needed(elf)
-    logger.debug("guess: ABI versions needed: %s", abi_versions_needed_guess)
+    try:
+        abi_versions_needed_guess = guess_os_from_abi_versions_needed(elf)
+        logger.debug("guess: ABI versions needed: %s", abi_versions_needed_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from ABI versions needed: %s", e)
+        abi_versions_needed_guess = None

-    needed_dependencies_guess = guess_os_from_needed_dependencies(elf)
-    logger.debug("guess: needed dependencies: %s", needed_dependencies_guess)
+    try:
+        needed_dependencies_guess = guess_os_from_needed_dependencies(elf)
+        logger.debug("guess: needed dependencies: %s", needed_dependencies_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from needed dependencies: %s", e)
+        needed_dependencies_guess = None

-    symtab_guess = guess_os_from_symtab(elf)
-    logger.debug("guess: pertinent symbol name: %s", symtab_guess)
+    try:
+        symtab_guess = guess_os_from_symtab(elf)
+        logger.debug("guess: pertinent symbol name: %s", symtab_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from symbol table: %s", e)
+        symtab_guess = None

    ret = None
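
After this change, `detect_elf_os` is a fault-isolated cascade: each heuristic runs inside its own `try`, a failure downgrades that heuristic to `None` rather than aborting detection, and the guesses are then combined by priority. A simplified sketch of the pattern (hypothetical names; unlike the real function, which computes every guess before choosing, this variant short-circuits at the first hit):

```python
import logging
from typing import Optional

logger = logging.getLogger(__name__)

def first_guess(guessers) -> str:
    # guessers: ordered (label, callable) pairs; earlier entries are more trustworthy
    for label, guess in guessers:
        try:
            result: Optional[str] = guess()
        except Exception as e:
            # a broken heuristic must not abort the whole detection
            logger.warning("error guessing OS from %s: %s", label, e)
            continue
        if result is not None:
            return result
    return "unknown"

# hypothetical usage:
# first_guess([("osabi", lambda: None), ("ph notes", lambda: "linux")])  -> "linux"
```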
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,6 +8,7 @@
import io
import logging
from typing import Tuple, Iterator
+from pathlib import Path

from elftools.elf.elffile import ELFFile, SymbolTableSection

@@ -36,8 +37,8 @@ def extract_file_import_names(elf, **kwargs):

    for _, symbol in enumerate(section.iter_symbols()):
        if symbol.name and symbol.entry.st_info.type == "STT_FUNC":
-            # TODO symbol address
-            # TODO symbol version info?
+            # TODO(williballenthin): extract symbol address
+            # https://github.com/mandiant/capa/issues/1608
            yield Import(symbol.name), FileOffsetAddress(0x0)


@@ -68,7 +69,6 @@ def extract_file_format(**kwargs):


def extract_file_arch(elf, **kwargs):
-    # TODO merge with capa.features.extractors.elf.detect_elf_arch()
    arch = elf.get_machine_arch()
    if arch == "x86":
        yield Arch("i386"), NO_ADDRESS
@@ -85,7 +85,8 @@ def extract_file_features(elf: ELFFile, buf: bytes) -> Iterator[Tuple[Feature, i


FILE_HANDLERS = (
-    # TODO extract_file_export_names,
+    # TODO(williballenthin): implement extract_file_export_names
+    # https://github.com/mandiant/capa/issues/1607
    extract_file_import_names,
    extract_file_section_names,
    extract_file_strings,
@@ -107,11 +108,10 @@ GLOBAL_HANDLERS = (


class ElfFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
        super().__init__()
-        self.path = path
-        with open(self.path, "rb") as f:
-            self.elf = ELFFile(io.BytesIO(f.read()))
+        self.path: Path = path
+        self.elf = ELFFile(io.BytesIO(path.read_bytes()))

    def get_base_address(self):
        # virtual address of the first segment with type LOAD
@@ -120,15 +120,13 @@ class ElfFeatureExtractor(FeatureExtractor):
            return AbsoluteVirtualAddress(segment.header.p_vaddr)

    def extract_global_features(self):
-        with open(self.path, "rb") as f:
-            buf = f.read()
+        buf = self.path.read_bytes()

        for feature, addr in extract_global_features(self.elf, buf):
            yield feature, addr

    def extract_file_features(self):
-        with open(self.path, "rb") as f:
-            buf = f.read()
+        buf = self.path.read_bytes()

        for feature, addr in extract_file_features(self.elf, buf):
            yield feature, addr

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -70,6 +70,23 @@ def generate_symbols(dll: str, symbol: str) -> Iterator[str]:
        yield symbol[:-1]


+def reformat_forwarded_export_name(forwarded_name: str) -> str:
+    """
+    a forwarded export has a DLL name/path and symbol name.
+    we want the former to be lowercase, and the latter to be verbatim.
+    """
+
+    # use rpartition so we can split on separator between dll and name.
+    # the dll name can be a full path, like in the case of
+    # ef64d6d7c34250af8e21a10feb931c9b
+    # which i assume means the path can have embedded periods.
+    # so we don't want the first period, we want the last.
+    forwarded_dll, _, forwarded_symbol = forwarded_name.rpartition(".")
+    forwarded_dll = forwarded_dll.lower()
+
+    return f"{forwarded_dll}.{forwarded_symbol}"


def all_zeros(bytez: bytes) -> bool:
    return all(b == 0 for b in builtins.bytes(bytez))
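
Concretely, a PE forwarded export stores a `DLL.Symbol` target string; `rpartition(".")` splits on the *last* period so a dotted DLL name stays intact, and only the DLL half is lowercased. Standalone check against the helper above:

```python
def reformat_forwarded_export_name(forwarded_name: str) -> str:
    # same logic as the helper above
    forwarded_dll, _, forwarded_symbol = forwarded_name.rpartition(".")
    return f"{forwarded_dll.lower()}.{forwarded_symbol}"

# a typical forwarder target, as found in kernel32's export directory
assert reformat_forwarded_export_name("NTDLL.RtlEnterCriticalSection") == "ntdll.RtlEnterCriticalSection"

# a dotted DLL name splits on the last period only
assert reformat_forwarded_export_name("api-ms-win-core-memory-l1-1-0.VirtualAlloc") == (
    "api-ms-win-core-memory-l1-1-0.VirtualAlloc"
)
```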
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -104,19 +104,3 @@ BASIC_BLOCK_HANDLERS = (
    extract_bb_tight_loop,
    extract_bb_stackstring,
)
-
-
-def main():
-    features = []
-    for fhandle in helpers.get_functions(skip_thunks=True, skip_libs=True):
-        f: idaapi.func_t = fhandle.inner
-        for bb in idaapi.FlowChart(f, flags=idaapi.FC_PREDS):
-            features.extend(list(extract_features(fhandle, bb)))
-
-    import pprint
-
-    pprint.pprint(features)
-
-
-if __name__ == "__main__":
-    main()

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -12,6 +12,7 @@ from typing import Tuple, Iterator
import idc
import idaapi
import idautils
+import ida_entry

import capa.features.extractors.common
import capa.features.extractors.helpers
@@ -83,8 +84,14 @@ def extract_file_embedded_pe() -> Iterator[Tuple[Feature, Address]]:

def extract_file_export_names() -> Iterator[Tuple[Feature, Address]]:
    """extract function exports"""
-    for _, _, ea, name in idautils.Entries():
-        yield Export(name), AbsoluteVirtualAddress(ea)
+    for _, ordinal, ea, name in idautils.Entries():
+        forwarded_name = ida_entry.get_entry_forwarder(ordinal)
+        if forwarded_name is None:
+            yield Export(name), AbsoluteVirtualAddress(ea)
+        else:
+            forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
+            yield Export(forwarded_name), AbsoluteVirtualAddress(ea)
+            yield Characteristic("forwarded export"), AbsoluteVirtualAddress(ea)


def extract_file_import_names() -> Iterator[Tuple[Feature, Address]]:
@@ -199,14 +206,3 @@ FILE_HANDLERS = (
    extract_file_function_names,
    extract_file_format,
)
-
-
-def main():
-    """ """
-    import pprint
-
-    pprint.pprint(list(extract_features()))
-
-
-if __name__ == "__main__":
-    main()

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -50,18 +50,3 @@ def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:


FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call)
-
-
-def main():
-    """ """
-    features = []
-    for fhandle in capa.features.extractors.ida.helpers.get_functions(skip_thunks=True, skip_libs=True):
-        features.extend(list(extract_features(fhandle)))
-
-    import pprint
-
-    pprint.pprint(features)
-
-
-if __name__ == "__main__":
-    main()

@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
import logging
import contextlib
from typing import Tuple, Iterator

@@ -1,10 +1,11 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
+import functools
from typing import Any, Dict, Tuple, Iterator, Optional

import idc
@@ -27,7 +28,8 @@ def find_byte_sequence(start: int, end: int, seq: bytes) -> Iterator[int]:
    """
    seqstr = " ".join([f"{b:02x}" for b in seq])
    while True:
-        # TODO find_binary: Deprecated. Please use ida_bytes.bin_search() instead.
+        # TODO(mike-hunhoff): find_binary is deprecated. Please use ida_bytes.bin_search() instead.
+        # https://github.com/mandiant/capa/issues/1606
        ea = idaapi.find_binary(start, end, seqstr, 0, idaapi.SEARCH_DOWN)
        if ea == idaapi.BADADDR:
            break
@@ -80,9 +82,22 @@ def get_segment_buffer(seg: idaapi.segment_t) -> bytes:
    return buff if buff else b""


+def inspect_import(imports, library, ea, function, ordinal):
+    if function and function.startswith("__imp_"):
+        # handle mangled PE imports
+        function = function[len("__imp_") :]
+
+    if function and "@@" in function:
+        # handle mangled ELF imports, like "fopen@@glibc_2.2.5"
+        function, _, _ = function.partition("@@")
+
+    imports[ea] = (library.lower(), function, ordinal)
+    return True


def get_file_imports() -> Dict[int, Tuple[str, str, int]]:
    """get file imports"""
-    imports = {}
+    imports: Dict[int, Tuple[str, str, int]] = {}

    for idx in range(idaapi.get_import_module_qty()):
        library = idaapi.get_import_module_name(idx)
@@ -92,23 +107,13 @@ def get_file_imports() -> Dict[int, Tuple[str, str, int]]:

        # IDA uses section names for the library of ELF imports, like ".dynsym".
        # These are not useful to us, we may need to expand this list over time
-        # TODO: exhaust this list, see #1419
+        # TODO(williballenthin): find all section names used by IDA
+        # https://github.com/mandiant/capa/issues/1419
        if library == ".dynsym":
            library = ""

-        def inspect_import(ea, function, ordinal):
-            if function and function.startswith("__imp_"):
-                # handle mangled PE imports
-                function = function[len("__imp_") :]
-
-            if function and "@@" in function:
-                # handle mangled ELF imports, like "fopen@@glibc_2.2.5"
-                function, _, _ = function.partition("@@")
-
-            imports[ea] = (library.lower(), function, ordinal)
-            return True
-
-        idaapi.enum_import_names(idx, inspect_import)
+        cb = functools.partial(inspect_import, imports, library)
+        idaapi.enum_import_names(idx, cb)

    return imports
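
The refactor moves `inspect_import` to module level and uses `functools.partial` to pre-bind the `imports` dict and the current `library`, since `idaapi.enum_import_names` passes only `(ea, name, ordinal)` to its callback. The binding pattern in isolation:

```python
import functools

def inspect_import(imports, library, ea, function, ordinal):
    # module-level callback: the first two parameters are pre-bound below
    imports[ea] = (library.lower(), function, ordinal)
    return True  # continue enumeration

imports = {}
cb = functools.partial(inspect_import, imports, "KERNEL32")
# the enumerator (idaapi.enum_import_names in IDA) now only supplies (ea, name, ordinal)
cb(0x402000, "CreateFileA", 0)
assert imports[0x402000] == ("kernel32", "CreateFileA", 0)
```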
@@ -117,7 +122,7 @@ def get_file_externs() -> Dict[int, Tuple[str, str, int]]:
    externs = {}

    for seg in get_segments(skip_header_segments=True):
-        if not (seg.type == ida_segment.SEG_XTRN):
+        if seg.type != ida_segment.SEG_XTRN:
            continue

        for ea in idautils.Functions(seg.start_ea, seg.end_ea):
@@ -270,20 +275,18 @@ def is_op_offset(insn: idaapi.insn_t, op: idaapi.op_t) -> bool:

def is_sp_modified(insn: idaapi.insn_t) -> bool:
    """determine if instruction modifies SP, ESP, RSP"""
-    for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,)):
-        if op.reg == idautils.procregs.sp.reg and is_op_write(insn, op):
-            # register is stack and written
-            return True
-    return False
+    return any(
+        op.reg == idautils.procregs.sp.reg and is_op_write(insn, op)
+        for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,))
+    )


def is_bp_modified(insn: idaapi.insn_t) -> bool:
    """check if instruction modifies BP, EBP, RBP"""
-    for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,)):
-        if op.reg == idautils.procregs.bp.reg and is_op_write(insn, op):
-            # register is base and written
-            return True
-    return False
+    return any(
+        op.reg == idautils.procregs.bp.reg and is_op_write(insn, op)
+        for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,))
+    )


def is_frame_register(reg: int) -> bool:
@@ -329,10 +332,7 @@ def mask_op_val(op: idaapi.op_t) -> int:

def is_function_recursive(f: idaapi.func_t) -> bool:
    """check if function is recursive"""
-    for ref in idautils.CodeRefsTo(f.start_ea, True):
-        if f.contains(ref):
-            return True
-    return False
+    return any(f.contains(ref) for ref in idautils.CodeRefsTo(f.start_ea, True))


def is_basic_block_tight_loop(bb: idaapi.BasicBlock) -> bool:
@@ -381,8 +381,7 @@ def find_data_reference_from_insn(insn: idaapi.insn_t, max_depth: int = 10) -> i
def get_function_blocks(f: idaapi.func_t) -> Iterator[idaapi.BasicBlock]:
    """yield basic blocks contained in specified function"""
    # leverage idaapi.FC_NOEXT flag to ignore useless external blocks referenced by the function
-    for block in idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT)):
-        yield block
+    yield from idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT))


def is_basic_block_return(bb: idaapi.BasicBlock) -> bool:
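
These loop-with-early-return predicates collapse naturally into `any()` over a generator expression, which preserves the short-circuit behavior: iteration stops at the first truthy element. A standalone illustration of the `is_function_recursive` shape (hypothetical addresses):

```python
def contains(ea: int) -> bool:
    # stand-in for idaapi.func_t.contains: a function spanning [0x401000, 0x401080)
    return 0x401000 <= ea < 0x401080

def is_recursive(refs_to) -> bool:
    # equivalent to the removed loop: True at the first matching ref, False otherwise
    return any(contains(ref) for ref in refs_to)

assert is_recursive([0x403000, 0x401010]) is True
assert is_recursive([0x403000]) is False
```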
@@ -381,8 +381,7 @@ def find_data_reference_from_insn(insn: idaapi.insn_t, max_depth: int = 10) -> i
|
||||
def get_function_blocks(f: idaapi.func_t) -> Iterator[idaapi.BasicBlock]:
|
||||
"""yield basic blocks contained in specified function"""
|
||||
# leverage idaapi.FC_NOEXT flag to ignore useless external blocks referenced by the function
|
||||
for block in idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT)):
|
||||
yield block
|
||||
yield from idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT))
|
||||
|
||||
|
||||
def is_basic_block_return(bb: idaapi.BasicBlock) -> bool:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -73,7 +73,7 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle)
|
||||
"""
|
||||
insn: idaapi.insn_t = ih.inner
|
||||
|
||||
if not insn.get_canon_mnem() in ("call", "jmp"):
|
||||
if insn.get_canon_mnem() not in ("call", "jmp"):
|
||||
return
|
||||
|
||||
# check calls to imported functions
|
||||
@@ -216,7 +216,7 @@ def extract_insn_offset_features(
|
||||
|
||||
p_info = capa.features.extractors.ida.helpers.get_op_phrase_info(op)
|
||||
|
||||
op_off = p_info.get("offset", None)
|
||||
op_off = p_info.get("offset")
|
||||
if op_off is None:
|
||||
continue
|
||||
|
||||
@@ -398,14 +398,16 @@ def extract_insn_peb_access_characteristic_features(
|
||||
if insn.itype not in (idaapi.NN_push, idaapi.NN_mov):
|
||||
return
|
||||
|
||||
if all(map(lambda op: op.type != idaapi.o_mem, insn.ops)):
|
||||
if all(op.type != idaapi.o_mem for op in insn.ops):
|
||||
# try to optimize for only memory references
|
||||
return
|
||||
|
||||
disasm = idc.GetDisasm(insn.ea)
|
||||
|
||||
if " fs:30h" in disasm or " gs:60h" in disasm:
|
||||
# TODO: replace above with proper IDA
|
||||
# TODO(mike-hunhoff): use proper IDA API for fetching segment access
|
||||
# scanning the disassembly text is a hack.
|
||||
# https://github.com/mandiant/capa/issues/1605
|
||||
yield Characteristic("peb access"), ih.address
|
||||
|
||||
|
||||
@@ -419,18 +421,22 @@ def extract_insn_segment_access_features(
|
||||
"""
|
||||
insn: idaapi.insn_t = ih.inner
|
||||
|
||||
if all(map(lambda op: op.type != idaapi.o_mem, insn.ops)):
|
||||
if all(op.type != idaapi.o_mem for op in insn.ops):
|
||||
# try to optimize for only memory references
|
||||
return
|
||||
|
||||
disasm = idc.GetDisasm(insn.ea)
|
||||
|
||||
if " fs:" in disasm:
|
||||
# TODO: replace above with proper IDA
|
||||
# TODO(mike-hunhoff): use proper IDA API for fetching segment access
|
||||
# scanning the disassembly text is a hack.
|
||||
# https://github.com/mandiant/capa/issues/1605
|
||||
yield Characteristic("fs access"), ih.address
|
||||
|
||||
if " gs:" in disasm:
|
||||
# TODO: replace above with proper IDA
|
||||
# TODO(mike-hunhoff): use proper IDA API for fetching segment access
|
||||
# scanning the disassembly text is a hack.
|
||||
# https://github.com/mandiant/capa/issues/1605
|
||||
yield Characteristic("gs access"), ih.address
|
||||
|
||||
|
||||
@@ -441,7 +447,7 @@ def extract_insn_cross_section_cflow(
|
||||
insn: idaapi.insn_t = ih.inner
|
||||
|
||||
for ref in idautils.CodeRefsFrom(insn.ea, False):
|
||||
if ref in get_imports(fh.ctx).keys():
|
||||
if ref in get_imports(fh.ctx):
|
||||
# ignore API calls
|
||||
continue
|
||||
if not idaapi.getseg(ref):
|
||||
@@ -501,20 +507,3 @@ INSTRUCTION_HANDLERS = (
|
||||
extract_function_calls_from,
|
||||
extract_function_indirect_call_characteristic_features,
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
""" """
|
||||
features = []
|
||||
for f in capa.features.extractors.ida.helpers.get_functions(skip_thunks=True, skip_libs=True):
|
||||
for bb in idaapi.FlowChart(f, flags=idaapi.FC_PREDS):
|
||||
for insn in capa.features.extractors.ida.helpers.get_instructions_in_range(bb.start_ea, bb.end_ea):
|
||||
features.extend(list(extract_features(f, bb, insn)))
|
||||
|
||||
import pprint
|
||||
|
||||
pprint.pprint(features)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
|
||||
@@ -1,3 +1,10 @@
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
# Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and limitations under the License.
|
||||
from typing import Dict, List, Tuple
|
||||
from dataclasses import dataclass
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -7,6 +7,7 @@
|
||||
# See the License for the specific language governing permissions and limitations under the License.
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
import pefile
|
||||
|
||||
@@ -39,8 +40,20 @@ def extract_file_export_names(pe, **kwargs):
|
||||
name = export.name.partition(b"\x00")[0].decode("ascii")
|
||||
except UnicodeDecodeError:
|
||||
continue
|
||||
va = base_address + export.address
|
||||
yield Export(name), AbsoluteVirtualAddress(va)
|
||||
|
||||
if export.forwarder is None:
|
||||
va = base_address + export.address
|
||||
yield Export(name), AbsoluteVirtualAddress(va)
|
||||
|
||||
else:
|
||||
try:
|
||||
forwarded_name = export.forwarder.partition(b"\x00")[0].decode("ascii")
|
||||
except UnicodeDecodeError:
|
||||
continue
|
||||
forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
|
||||
va = base_address + export.address
|
||||
yield Export(forwarded_name), AbsoluteVirtualAddress(va)
|
||||
yield Characteristic("forwarded export"), AbsoluteVirtualAddress(va)
|
||||
|
||||
|
||||
def extract_file_import_names(pe, **kwargs):
|
||||
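
For context on the pefile side: each entry in `pe.DIRECTORY_ENTRY_EXPORT.symbols` exposes a `forwarder` attribute that is `None` for ordinary exports and holds the `DLL.Symbol` target bytes for forwarded ones. A hedged sketch of walking exports this way, assuming a PE at a hypothetical path (not capa's code):

```python
import pefile

pe = pefile.PE("sample.dll_")  # hypothetical input
for export in pe.DIRECTORY_ENTRY_EXPORT.symbols:
    if export.name is None:
        continue  # exported by ordinal only
    name = export.name.decode("ascii", errors="replace")
    if export.forwarder is None:
        print(f"export: {name}")
    else:
        # e.g. b"NTDLL.RtlEnterCriticalSection"
        target = export.forwarder.decode("ascii", errors="replace")
        print(f"forwarded export: {name} -> {target}")
```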
@@ -173,23 +186,21 @@ GLOBAL_HANDLERS = (


class PefileFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
        super().__init__()
-        self.path = path
-        self.pe = pefile.PE(path)
+        self.path: Path = path
+        self.pe = pefile.PE(str(path))

    def get_base_address(self):
        return AbsoluteVirtualAddress(self.pe.OPTIONAL_HEADER.ImageBase)

    def extract_global_features(self):
-        with open(self.path, "rb") as f:
-            buf = f.read()
+        buf = Path(self.path).read_bytes()

        yield from extract_global_features(self.pe, buf)

    def extract_file_features(self):
-        with open(self.path, "rb") as f:
-            buf = f.read()
+        buf = Path(self.path).read_bytes()

        yield from extract_file_features(self.pe, buf)

@@ -1,6 +1,6 @@
# strings code from FLOSS, https://github.com/mandiant/flare-floss
#
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,6 +9,7 @@
# See the License for the specific language governing permissions and limitations under the License.

import re
+import contextlib
from collections import namedtuple

ASCII_BYTE = r" !\"#\$%&\'\(\)\*\+,-\./0123456789:;<=>\?@ABCDEFGHIJKLMNOPQRSTUVWXYZ\[\]\^_`abcdefghijklmnopqrstuvwxyz\{\|\}\\\~\t".encode(
@@ -81,24 +82,5 @@ def extract_unicode_strings(buf, n=4):
    reg = b"((?:[%s]\x00){%d,})" % (ASCII_BYTE, n)
    r = re.compile(reg)
    for match in r.finditer(buf):
-        try:
+        with contextlib.suppress(UnicodeDecodeError):
            yield String(match.group().decode("utf-16"), match.start())
-        except UnicodeDecodeError:
-            pass
-
-
-def main():
-    import sys
-
-    with open(sys.argv[1], "rb") as f:
-        b = f.read()
-
-    for s in extract_ascii_strings(b):
-        print("0x{:x}: {:s}".format(s.offset, s.s))
-
-    for s in extract_unicode_strings(b):
-        print("0x{:x}: {:s}".format(s.offset, s.s))
-
-
-if __name__ == "__main__":
-    main()
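
`contextlib.suppress(UnicodeDecodeError)` is the standard-library shorthand for a `try`/`except`/`pass` around a single statement: when the decode raises, the `with` block simply ends and the candidate string is skipped. Standalone:

```python
import contextlib

candidates = [b"h\x00i\x00", b"\xd8\x00\x00\xdc"]  # valid and invalid UTF-16LE data
decoded = []
for buf in candidates:
    # equivalent to: try: decoded.append(...) except UnicodeDecodeError: pass
    with contextlib.suppress(UnicodeDecodeError):
        decoded.append(buf.decode("utf-16"))

assert decoded == ["hi"]  # the lone low surrogate in the second buffer was skipped
```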
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -92,7 +92,6 @@ def is_mov_imm_to_stack(instr: envi.archs.i386.disasm.i386Opcode) -> bool:
    if not src.isImmed():
        return False

-    # TODO what about 64-bit operands?
    if not isinstance(dst, envi.archs.i386.disasm.i386SibOper) and not isinstance(
        dst, envi.archs.i386.disasm.i386RegMemOper
    ):

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,7 +6,8 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
-from typing import List, Tuple, Iterator
+from typing import Any, Dict, List, Tuple, Iterator
+from pathlib import Path

import viv_utils
import viv_utils.flirt
@@ -25,12 +26,11 @@ logger = logging.getLogger(__name__)


class VivisectFeatureExtractor(FeatureExtractor):
-    def __init__(self, vw, path, os):
+    def __init__(self, vw, path: Path, os):
        super().__init__()
        self.vw = vw
        self.path = path
-        with open(self.path, "rb") as f:
-            self.buf = f.read()
+        self.buf = path.read_bytes()

        # pre-compute these because we'll yield them at *every* scope.
        self.global_features: List[Tuple[Feature, Address]] = []
@@ -49,8 +49,11 @@ class VivisectFeatureExtractor(FeatureExtractor):
        yield from capa.features.extractors.viv.file.extract_features(self.vw, self.buf)

    def get_functions(self) -> Iterator[FunctionHandle]:
+        cache: Dict[str, Any] = {}
        for va in sorted(self.vw.getFunctions()):
-            yield FunctionHandle(address=AbsoluteVirtualAddress(va), inner=viv_utils.Function(self.vw, va))
+            yield FunctionHandle(
+                address=AbsoluteVirtualAddress(va), inner=viv_utils.Function(self.vw, va), ctx={"cache": cache}
+            )

    def extract_function_features(self, fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
        yield from capa.features.extractors.viv.function.extract_features(fh)
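
The new `ctx={"cache": cache}` threads one shared dict through every `FunctionHandle` from the same workspace, so per-file computations (like the ELF symbol table used in the next file) run once and are reused across functions. The idea in miniature, with hypothetical names:

```python
from dataclasses import dataclass, field

@dataclass
class Handle:
    address: int
    ctx: dict = field(default_factory=dict)

def expensive_symtab(ctx: dict) -> dict:
    # compute once per workspace, memoized in the shared cache
    if "symtab" not in ctx["cache"]:
        print("parsing symbol table...")  # runs only once
        ctx["cache"]["symtab"] = {"main": 0x401000}  # stand-in for real parsing
    return ctx["cache"]["symtab"]

cache: dict = {}
handles = [Handle(va, ctx={"cache": cache}) for va in (0x401000, 0x402000)]
for h in handles:
    expensive_symtab(h.ctx)  # "parsing symbol table..." prints a single time
```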
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,6 +8,7 @@
 from typing import Tuple, Iterator

 import PE.carve as pe_carve  # vivisect PE
+import vivisect
 import viv_utils
 import viv_utils.flirt

@@ -16,7 +17,7 @@ import capa.features.extractors.common
 import capa.features.extractors.helpers
 import capa.features.extractors.strings
 from capa.features.file import Export, Import, Section, FunctionName
-from capa.features.common import String, Feature, Characteristic
+from capa.features.common import Feature, Characteristic
 from capa.features.address import Address, FileOffsetAddress, AbsoluteVirtualAddress


@@ -25,10 +26,35 @@ def extract_file_embedded_pe(buf, **kwargs) -> Iterator[Tuple[Feature, Address]]
         yield Characteristic("embedded pe"), FileOffsetAddress(offset)


-def extract_file_export_names(vw, **kwargs) -> Iterator[Tuple[Feature, Address]]:
+def get_first_vw_filename(vw: vivisect.VivWorkspace):
+    # vivisect associates metadata with each file that it loads into the workspace.
+    # capa only loads a single file into each workspace,
+    # so to access the metadata for the file in question, we can just take the first one.
+    # otherwise, we'd have to pass around the module name of the file we're analyzing,
+    # which is a pain.
+    #
+    # so this is a simplifying assumption.
+    return next(iter(vw.filemeta.keys()))
+
+
+def extract_file_export_names(vw: vivisect.VivWorkspace, **kwargs) -> Iterator[Tuple[Feature, Address]]:
     for va, _, name, _ in vw.getExports():
         yield Export(name), AbsoluteVirtualAddress(va)

+    if vw.getMeta("Format") == "pe":
+        pe = vw.parsedbin
+        baseaddr = pe.IMAGE_NT_HEADERS.OptionalHeader.ImageBase
+        for rva, _, forwarded_name in vw.getFileMeta(get_first_vw_filename(vw), "forwarders"):
+            try:
+                forwarded_name = forwarded_name.partition(b"\x00")[0].decode("ascii")
+            except UnicodeDecodeError:
+                continue
+
+            forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
+            va = baseaddr + rva
+            yield Export(forwarded_name), AbsoluteVirtualAddress(va)
+            yield Characteristic("forwarded export"), AbsoluteVirtualAddress(va)
+

 def extract_file_import_names(vw, **kwargs) -> Iterator[Tuple[Feature, Address]]:
     """
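For context on the forwarded-export hunk above: PE forwarded exports (e.g. `KERNEL32.HeapAlloc` forwarding to `NTDLL.RtlAllocateHeap`) are stored by vivisect as raw NUL-terminated bytes, so the code decodes them before normalizing with `reformat_forwarded_export_name`. A hedged sketch of that decode step; the lowercasing of the module name is an assumption for illustration, the exact normalization lives in capa's helper:

```python
def decode_forwarded_name(raw: bytes) -> str:
    # forwarder strings are NUL-terminated ASCII, e.g. b"NTDLL.RtlAllocateHeap\x00..."
    name = raw.partition(b"\x00")[0].decode("ascii")
    # illustrative normalization: split "MODULE.symbol" and lowercase the module
    module, _, symbol = name.partition(".")
    return f"{module.lower()}.{symbol}"


print(decode_forwarded_name(b"NTDLL.RtlAllocateHeap\x00junk"))  # ntdll.RtlAllocateHeap
```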
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -11,9 +11,11 @@ import envi
 import viv_utils
 import vivisect.const

+from capa.features.file import FunctionName
 from capa.features.common import Feature, Characteristic
 from capa.features.address import Address, AbsoluteVirtualAddress
 from capa.features.extractors import loops
+from capa.features.extractors.elf import SymTab
 from capa.features.extractors.base_extractor import FunctionHandle


@@ -30,6 +32,28 @@ def interface_extract_function_XXX(fh: FunctionHandle) -> Iterator[Tuple[Feature
     raise NotImplementedError


+def extract_function_symtab_names(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
+    if fh.inner.vw.metadata["Format"] == "elf":
+        # the file's symbol table is cached on the function handle's context
+        # to eliminate the computational overhead of re-fetching it for every function.
+        if "symtab" not in fh.ctx["cache"]:
+            try:
+                fh.ctx["cache"]["symtab"] = SymTab.from_Elf(fh.inner.vw.parsedbin)
+            except Exception:
+                fh.ctx["cache"]["symtab"] = None
+
+        symtab = fh.ctx["cache"]["symtab"]
+        if symtab:
+            for symbol in symtab.get_symbols():
+                sym_name = symtab.get_name(symbol)
+                sym_value = symbol.value
+                sym_info = symbol.info
+
+                STT_FUNC = 0x2
+                if sym_value == fh.address and sym_info & STT_FUNC != 0:
+                    yield FunctionName(sym_name), fh.address
+
+
 def extract_function_calls_to(fhandle: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
     f: viv_utils.Function = fhandle.inner
     for src, _, _, _ in f.vw.getXrefsTo(f.va, rtype=vivisect.const.REF_CODE):
@@ -79,4 +103,8 @@ def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
         yield feature, addr


-FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop)
+FUNCTION_HANDLERS = (
+    extract_function_symtab_names,
+    extract_function_calls_to,
+    extract_function_loop,
+)
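Background for the `sym_info & STT_FUNC` test above: per the ELF specification, `st_info` packs the symbol binding in the high nibble and the symbol type in the low nibble, with `STT_FUNC == 2` marking function symbols. A small self-contained sketch of that standard decoding:

```python
STT_FUNC = 0x2


def st_type(st_info: int) -> int:
    # ELF st_info: low nibble is the symbol type
    return st_info & 0xF


def st_bind(st_info: int) -> int:
    # ELF st_info: high nibble is the symbol binding
    return st_info >> 4


# an STB_GLOBAL (1) function symbol packs as (1 << 4) | 2 == 0x12
assert st_type(0x12) == STT_FUNC
assert st_bind(0x12) == 1
```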
@@ -1,9 +1,13 @@
 # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
 # Unless required by applicable law or agreed to in writing, software distributed under the License
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
 import logging
 from typing import Tuple, Iterator

-import envi.archs.i386
-import envi.archs.amd64
-
 from capa.features.common import ARCH_I386, ARCH_AMD64, Arch, Feature
 from capa.features.address import NO_ADDRESS, Address

@@ -11,10 +15,11 @@ logger = logging.getLogger(__name__)


 def extract_arch(vw) -> Iterator[Tuple[Feature, Address]]:
-    if isinstance(vw.arch, envi.archs.amd64.Amd64Module):
+    arch = vw.getMeta("Architecture")
+    if arch == "amd64":
         yield Arch(ARCH_AMD64), NO_ADDRESS

-    elif isinstance(vw.arch, envi.archs.i386.i386Module):
+    elif arch == "i386":
         yield Arch(ARCH_I386), NO_ADDRESS

     else:
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -7,7 +7,7 @@
 # See the License for the specific language governing permissions and limitations under the License.

 import collections
-from typing import Set, List, Deque, Tuple, Union, Optional
+from typing import Set, List, Deque, Tuple, Optional

 import envi
 import vivisect.const
@@ -71,7 +71,7 @@ class NotFoundError(Exception):
     pass


-def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Union[int, None]]:
+def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Optional[int]]:
     """
     scan backwards from the given address looking for assignments to the given register.
     if a constant, return that value.
@@ -87,8 +87,8 @@ def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Union[int
     raises:
       NotFoundError: when the definition cannot be found.
     """
-    q = collections.deque()  # type: Deque[int]
-    seen = set([])  # type: Set[int]
+    q: Deque[int] = collections.deque()
+    seen: Set[int] = set()

     q.extend(get_previous_instructions(vw, va))
     while q:
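The docstring above describes a backwards data-flow scan: seed a worklist with the instructions preceding `va`, walk predecessors breadth-first with a `seen` set to cut cycles, and stop at the first instruction that assigns the register. A minimal sketch of that worklist shape over a hypothetical predecessor map (not vivisect's API; `preds` and `defs` are illustrative stand-ins):

```python
import collections
from typing import Deque, Dict, List, Optional, Set, Tuple


def find_definition(
    preds: Dict[int, List[int]],       # hypothetical: va -> predecessor vas
    defs: Dict[int, Tuple[int, int]],  # hypothetical: va -> (reg, constant)
    va: int,
    reg: int,
) -> Tuple[int, Optional[int]]:
    q: Deque[int] = collections.deque()
    seen: Set[int] = set()

    q.extend(preds.get(va, []))
    while q:
        cur = q.popleft()
        if cur in seen:
            continue
        seen.add(cur)

        if cur in defs and defs[cur][0] == reg:
            return cur, defs[cur][1]  # found the defining instruction

        q.extend(preds.get(cur, []))
    raise LookupError("definition not found")  # capa raises NotFoundError here


preds = {0x103: [0x102], 0x102: [0x101], 0x101: []}
defs = {0x101: (1, 0x40)}  # at 0x101, register 1 := 0x40
assert find_definition(preds, defs, 0x103, 1) == (0x101, 0x40)
```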
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -22,6 +22,7 @@ import capa.features.extractors.viv.helpers
 from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
 from capa.features.common import MAX_BYTES_FEATURE_SIZE, THUNK_CHAIN_DEPTH_DELTA, Bytes, String, Feature, Characteristic
 from capa.features.address import Address, AbsoluteVirtualAddress
+from capa.features.extractors.elf import SymTab
 from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
 from capa.features.extractors.viv.indirect_calls import NotFoundError, resolve_indirect_call

@@ -109,6 +110,26 @@ def extract_insn_api_features(fh: FunctionHandle, bb, ih: InsnHandle) -> Iterato
     if not target:
         return

+    if f.vw.metadata["Format"] == "elf":
+        if "symtab" not in fh.ctx["cache"]:
+            # the symbol table is cached on the function handle to avoid
+            # re-parsing it every time this code runs, preventing that computational overhead.
+            try:
+                fh.ctx["cache"]["symtab"] = SymTab.from_Elf(f.vw.parsedbin)
+            except Exception:
+                fh.ctx["cache"]["symtab"] = None
+
+        symtab = fh.ctx["cache"]["symtab"]
+        if symtab:
+            for symbol in symtab.get_symbols():
+                sym_name = symtab.get_name(symbol)
+                sym_value = symbol.value
+                sym_info = symbol.info
+
+                STT_FUNC = 0x2
+                if sym_value == target and sym_info & STT_FUNC != 0:
+                    yield API(sym_name), ih.address
+
     if viv_utils.flirt.is_library_function(f.vw, target):
         name = viv_utils.get_function_name(f.vw, target)
         yield API(name), ih.address
@@ -267,16 +288,16 @@ def extract_insn_bytes_features(fh: FunctionHandle, bb, ih: InsnHandle) -> Itera
     else:
         continue

-    for v in derefs(f.vw, v):
+    for vv in derefs(f.vw, v):
         try:
-            buf = read_bytes(f.vw, v)
+            buf = read_bytes(f.vw, vv)
         except envi.exc.SegmentationViolation:
             continue

         if capa.features.extractors.helpers.all_zeros(buf):
             continue

-        if f.vw.isProbablyString(v) or f.vw.isProbablyUnicode(v):
+        if f.vw.isProbablyString(vv) or f.vw.isProbablyUnicode(vv):
             # don't extract byte features for obvious strings
             continue

@@ -330,7 +351,6 @@ def is_security_cookie(f, bb, insn) -> bool:
     if oper.isReg() and oper.reg not in [
         envi.archs.i386.regs.REG_ESP,
         envi.archs.i386.regs.REG_EBP,
-        # TODO: do x64 support for real.
         envi.archs.amd64.regs.REG_RBP,
         envi.archs.amd64.regs.REG_RSP,
     ]:
@@ -390,9 +410,7 @@ def extract_insn_obfs_call_plus_5_characteristic_features(f, bb, ih: InsnHandle)
     if insn.va + 5 == insn.opers[0].getOperValue(insn):
         yield Characteristic("call $+5"), ih.address

-    if isinstance(insn.opers[0], envi.archs.i386.disasm.i386ImmMemOper) or isinstance(
-        insn.opers[0], envi.archs.amd64.disasm.Amd64RipRelOper
-    ):
+    if isinstance(insn.opers[0], (envi.archs.i386.disasm.i386ImmMemOper, envi.archs.amd64.disasm.Amd64RipRelOper)):
         if insn.va + 5 == insn.opers[0].getOperAddr(insn):
             yield Characteristic("call $+5"), ih.address

@@ -401,7 +419,6 @@ def extract_insn_peb_access_characteristic_features(f, bb, ih: InsnHandle) -> It
     """
     parse peb access from the given function. fs:[0x30] on x86, gs:[0x60] on x64
     """
-    # TODO handle where fs/gs are loaded into a register or onto the stack and used later
     insn: envi.Opcode = ih.inner

     if insn.mnem not in ["push", "mov"]:
@@ -625,7 +642,6 @@ def extract_op_offset_features(
     if oper.reg == envi.archs.i386.regs.REG_EBP:
         return

-    # TODO: do x64 support for real.
     if oper.reg == envi.archs.amd64.regs.REG_RBP:
         return

@@ -679,9 +695,9 @@ def extract_op_string_features(
     else:
         return

-    for v in derefs(f.vw, v):
+    for vv in derefs(f.vw, v):
         try:
-            s = read_string(f.vw, v).rstrip("\x00")
+            s = read_string(f.vw, vv).rstrip("\x00")
         except ValueError:
             continue
         else:
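Why the `v` to `vv` renames above matter: writing `for v in derefs(f.vw, v)` rebinds the outer name, so after the loop `v` no longer holds the original operand value, which is confusing and trips shadowing lints. A tiny demonstration of the hazard with a toy `derefs`:

```python
def derefs(v):
    # toy stand-in: yield a short pointer chain starting at v
    yield v
    yield v + 1


v = 10
for v in derefs(v):   # rebinding the outer name...
    pass
print(v)              # ...leaves v == 11 after the loop, not 10

v = 10
for vv in derefs(v):  # distinct name: the outer v survives the loop
    pass
print(v)              # 10
```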
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,7 +1,7 @@
 """
 capa freeze file format: `| capa0000 | + zlib(utf-8(json(...)))`

-Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
 You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -12,7 +12,7 @@ See the License for the specific language governing permissions and limitations
 import zlib
 import logging
 from enum import Enum
-from typing import Any, List, Tuple, Union
+from typing import List, Tuple, Union

 from pydantic import Field, BaseModel

@@ -382,6 +382,7 @@ def load(buf: bytes) -> capa.features.extractors.base_extractor.FeatureExtractor
 def main(argv=None):
     import sys
     import argparse
+    from pathlib import Path

     import capa.main

@@ -398,8 +399,7 @@ def main(argv=None):

     extractor = capa.main.get_extractor(args.sample, args.format, args.os, args.backend, sigpaths, False)

-    with open(args.output, "wb") as f:
-        f.write(dump(extractor))
+    Path(args.output).write_bytes(dump(extractor))

     return 0
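Per the module docstring above, a freeze file is the magic `capa0000` followed by zlib-compressed UTF-8 JSON. A hedged round-trip sketch of just that framing; the real payload is a versioned pydantic document, not the toy dict used here:

```python
import json
import zlib

MAGIC = b"capa0000"


def dumps_freeze(doc: dict) -> bytes:
    # frame: magic marker, then zlib(utf-8(json(...)))
    return MAGIC + zlib.compress(json.dumps(doc).encode("utf-8"))


def loads_freeze(buf: bytes) -> dict:
    assert buf.startswith(MAGIC), "not a capa freeze file"
    return json.loads(zlib.decompress(buf[len(MAGIC):]).decode("utf-8"))


blob = dumps_freeze({"version": 2, "features": []})
assert loads_freeze(blob) == {"version": 2, "features": []}
```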
@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 import binascii
 from typing import Union, Optional

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -1,13 +1,18 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
 # Unless required by applicable law or agreed to in writing, software distributed under the License
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
 import os
+import inspect
 import logging
+import contextlib
+import importlib.util
 from typing import NoReturn
+from pathlib import Path

+import tqdm
+
 from capa.exceptions import UnsupportedFormatError
 from capa.features.common import FORMAT_PE, FORMAT_SC32, FORMAT_SC64, FORMAT_DOTNET, FORMAT_UNKNOWN, Format
@@ -27,36 +32,32 @@ def hex(n: int) -> str:
     return f"0x{(n):X}"


-def get_file_taste(sample_path: str) -> bytes:
-    if not os.path.exists(sample_path):
+def get_file_taste(sample_path: Path) -> bytes:
+    if not sample_path.exists():
         raise IOError(f"sample path {sample_path} does not exist or cannot be accessed")
-    with open(sample_path, "rb") as f:
-        taste = f.read(8)
+    taste = sample_path.open("rb").read(8)
     return taste


 def is_runtime_ida():
-    try:
-        import idc
-    except ImportError:
-        return False
-    else:
-        return True
+    return importlib.util.find_spec("idc") is not None


 def assert_never(value) -> NoReturn:
-    assert False, f"Unhandled value: {value} ({type(value).__name__})"
+    # careful: python -O will remove this assertion.
+    # but this is only used for type checking, so it's ok.
+    assert False, f"Unhandled value: {value} ({type(value).__name__})"  # noqa: B011


-def get_format_from_extension(sample: str) -> str:
-    if sample.endswith(EXTENSIONS_SHELLCODE_32):
+def get_format_from_extension(sample: Path) -> str:
+    if sample.name.endswith(EXTENSIONS_SHELLCODE_32):
         return FORMAT_SC32
-    elif sample.endswith(EXTENSIONS_SHELLCODE_64):
+    elif sample.name.endswith(EXTENSIONS_SHELLCODE_64):
         return FORMAT_SC64
     return FORMAT_UNKNOWN


-def get_auto_format(path: str) -> str:
+def get_auto_format(path: Path) -> str:
     format_ = get_format(path)
     if format_ == FORMAT_UNKNOWN:
         format_ = get_format_from_extension(path)
@@ -65,13 +66,12 @@ def get_auto_format(path: str) -> str:
     return format_


-def get_format(sample: str) -> str:
+def get_format(sample: Path) -> str:
     # imported locally to avoid import cycle
     from capa.features.extractors.common import extract_format
     from capa.features.extractors.dnfile_ import DnfileFeatureExtractor

-    with open(sample, "rb") as f:
-        buf = f.read()
+    buf = sample.read_bytes()

     for feature, _ in extract_format(buf):
         if feature == Format(FORMAT_PE):
@@ -85,6 +85,39 @@ def get_format(sample: str) -> str:
     return FORMAT_UNKNOWN


+@contextlib.contextmanager
+def redirecting_print_to_tqdm(disable_progress):
+    """
+    tqdm (progress bar) expects to have fairly tight control over console output.
+    so calls to `print()` will break the progress bar and make things look bad.
+    so, this context manager temporarily replaces the `print` implementation
+    with one that is compatible with tqdm.
+    via: https://stackoverflow.com/a/42424890/87207
+    """
+    old_print = print  # noqa: T202 [reserved word print used]
+
+    def new_print(*args, **kwargs):
+        # if tqdm.tqdm.write raises an error, fall back to the builtin print
+        if disable_progress:
+            old_print(*args, **kwargs)
+        else:
+            try:
+                tqdm.tqdm.write(*args, **kwargs)
+            except Exception:
+                old_print(*args, **kwargs)
+
+    try:
+        # globally replace print with new_print.
+        # verified this works manually on Python 3.11:
+        #     >>> import inspect
+        #     >>> inspect.builtins
+        #     <module 'builtins' (built-in)>
+        inspect.builtins.print = new_print  # type: ignore
+        yield
+    finally:
+        inspect.builtins.print = old_print  # type: ignore
+
+
 def log_unsupported_format_error():
     logger.error("-" * 80)
     logger.error(" Input file does not appear to be a PE or ELF file.")
@@ -118,7 +151,7 @@ def log_unsupported_runtime_error():
     logger.error("-" * 80)
     logger.error(" Unsupported runtime or Python interpreter.")
     logger.error(" ")
-    logger.error(" capa supports running under Python 3.7 and higher.")
+    logger.error(" capa supports running under Python 3.8 and higher.")
     logger.error(" ")
     logger.error(
        " If you're seeing this message on the command line, please ensure you're running a supported Python version."
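A usage sketch for the context manager added above. Inside the `with` block every `print()` is routed through `tqdm.tqdm.write`, so the progress bar is not corrupted (assuming the hunk lands in `capa.helpers`, as its neighbors here suggest):

```python
import tqdm

from capa.helpers import redirecting_print_to_tqdm  # assumed module location

with redirecting_print_to_tqdm(disable_progress=False):
    for i in tqdm.tqdm(range(3)):
        print(f"working on item {i}")  # routed via tqdm.tqdm.write
```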
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,7 +9,8 @@ import json
 import logging
 import datetime
 import contextlib
-from typing import Optional
+from typing import List, Optional
+from pathlib import Path

 import idc
 import idaapi
@@ -22,7 +23,8 @@ import capa
 import capa.version
 import capa.render.utils as rutils
 import capa.features.common
-import capa.render.result_document
+import capa.features.freeze
+import capa.render.result_document as rdoc
 from capa.features.address import AbsoluteVirtualAddress

 logger = logging.getLogger("capa")
@@ -45,7 +47,8 @@ NETNODE_RULES_CACHE_ID = "rules-cache-id"


 def inform_user_ida_ui(message):
-    idaapi.info(f"{message}. Please refer to IDA Output window for more information.")
+    # this isn't a logger, this is IDA's logging facility
+    idaapi.info(f"{message}. Please refer to IDA Output window for more information.")  # noqa: G004


 def is_supported_ida_version():
@@ -53,7 +56,7 @@ def is_supported_ida_version():
     if version < 7.4 or version >= 9:
         warning_msg = "This plugin does not support your IDA Pro version"
         logger.warning(warning_msg)
-        logger.warning("Your IDA Pro version is: %s. Supported versions are: IDA >= 7.4 and IDA < 9.0." % version)
+        logger.warning("Your IDA Pro version is: %s. Supported versions are: IDA >= 7.4 and IDA < 9.0.", version)
         return False
     return True
@@ -118,7 +121,7 @@ def get_file_sha256():
     return sha256


-def collect_metadata(rules):
+def collect_metadata(rules: List[Path]):
     """ """
     md5 = get_file_md5()
     sha256 = get_file_sha256()
@@ -140,37 +143,35 @@ def collect_metadata(rules):
     else:
         os = "unknown os"

-    return {
-        "timestamp": datetime.datetime.now().isoformat(),
-        "argv": [],
-        "sample": {
-            "md5": md5,
-            "sha1": "",  # not easily accessible
-            "sha256": sha256,
-            "path": idaapi.get_input_file_path(),
-        },
-        "analysis": {
-            "format": idaapi.get_file_type_name(),
-            "arch": arch,
-            "os": os,
-            "extractor": "ida",
-            "rules": rules,
-            "base_address": idaapi.get_imagebase(),
-            "layout": {
+    return rdoc.Metadata(
+        timestamp=datetime.datetime.now(),
+        version=capa.version.__version__,
+        argv=(),
+        sample=rdoc.Sample(
+            md5=md5,
+            sha1="",  # not easily accessible
+            sha256=sha256,
+            path=idaapi.get_input_file_path(),
+        ),
+        analysis=rdoc.Analysis(
+            format=idaapi.get_file_type_name(),
+            arch=arch,
+            os=os,
+            extractor="ida",
+            rules=tuple(r.resolve().absolute().as_posix() for r in rules),
+            base_address=capa.features.freeze.Address.from_capa(idaapi.get_imagebase()),
+            layout=rdoc.Layout(
+                functions=(),
                 # this is updated after capabilities have been collected.
                 # will look like:
                 #
                 # "functions": { 0x401000: { "matched_basic_blocks": [ 0x401000, 0x401005, ... ] }, ... }
-            },
+            ),
             # ignore these for now - not used by IDA plugin.
-            "feature_counts": {
-                "file": {},
-                "functions": {},
-            },
-            "library_functions": {},
-        },
-        "version": capa.version.__version__,
-    }
+            feature_counts=rdoc.FeatureCounts(file=0, functions=()),
+            library_functions=(),
+        ),
+    )


 class IDAIO:
@@ -213,16 +214,16 @@ def idb_contains_cached_results() -> bool:
         n = netnode.Netnode(CAPA_NETNODE)
         return bool(n.get(NETNODE_RESULTS))
     except netnode.NetnodeCorruptError as e:
-        logger.error("%s", e, exc_info=True)
+        logger.exception(str(e))
         return False


-def load_and_verify_cached_results() -> Optional[capa.render.result_document.ResultDocument]:
+def load_and_verify_cached_results() -> Optional[rdoc.ResultDocument]:
     """verifies that cached results have valid (mapped) addresses for the current database"""
     logger.debug("loading cached capa results from netnode '%s'", CAPA_NETNODE)

     n = netnode.Netnode(CAPA_NETNODE)
-    doc = capa.render.result_document.ResultDocument.parse_obj(json.loads(n[NETNODE_RESULTS]))
+    doc = rdoc.ResultDocument.parse_obj(json.loads(n[NETNODE_RESULTS]))

     for rule in rutils.capability_rules(doc):
         for location_, _ in rule.matches:
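The caching scheme above persists results as JSON inside an IDB netnode and rehydrates them with pydantic's `parse_obj` (the pydantic v1 API of this era). A minimal sketch of that save/load shape, with the netnode simplified to a plain dict and a toy model standing in for `rdoc.ResultDocument`:

```python
import json

from pydantic import BaseModel


class ToyDoc(BaseModel):  # stand-in for rdoc.ResultDocument
    version: str
    matches: int


netnode = {}  # stand-in for netnode.Netnode(CAPA_NETNODE)

netnode["results"] = ToyDoc(version="6.0.0", matches=3).json()  # serialize to JSON string
doc = ToyDoc.parse_obj(json.loads(netnode["results"]))          # rehydrate on load
assert doc.matches == 3
```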
@@ -95,7 +95,7 @@ can update using the `Settings` button.

 ### Requirements

-capa explorer supports Python versions >= 3.7.x and IDA Pro versions >= 7.4. The following IDA Pro versions have been tested:
+capa explorer supports Python versions >= 3.8.x and IDA Pro versions >= 7.4. The following IDA Pro versions have been tested:

 * IDA 7.4
 * IDA 7.5
@@ -105,7 +105,7 @@ capa explorer supports Python versions >= 3.7.x and IDA Pro versions >= 7.4. The
 * IDA 8.1
 * IDA 8.2

-capa explorer is however limited to the Python versions supported by your IDA installation (which may not include all Python versions >= 3.7.x).
+capa explorer is however limited to the Python versions supported by your IDA installation (which may not include all Python versions >= 3.8.x).

 If you encounter issues with your specific setup, please open a new [Issue](https://github.com/mandiant/capa/issues).
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -10,7 +10,7 @@ from __future__ import annotations

 import itertools
 import collections
-from typing import Set, Dict, List, Tuple, Union, Optional
+from typing import Set, Dict, Tuple, Union, Optional

 import capa.engine
 from capa.rules import Scope, RuleSet
@@ -37,18 +37,21 @@ class CapaRuleGenFeatureCacheNode:
         self.children: Set[CapaRuleGenFeatureCacheNode] = set()

     def __hash__(self):
-        # TODO: unique enough?
+        # TODO(mike-hunhoff): confirm this is unique enough
+        # https://github.com/mandiant/capa/issues/1604
         return hash((self.address,))

     def __eq__(self, other):
         if not isinstance(other, type(self)):
             return NotImplemented
-        # TODO: unique enough?
+        # TODO(mike-hunhoff): confirm this is unique enough
+        # https://github.com/mandiant/capa/issues/1604
         return self.address == other.address


 class CapaRuleGenFeatureCache:
-    def __init__(self, fh_list: List[FunctionHandle], extractor: CapaExplorerFeatureExtractor):
+    def __init__(self, extractor: CapaExplorerFeatureExtractor):
+        self.extractor = extractor
         self.global_features: FeatureSet = collections.defaultdict(set)

         self.file_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(None, None)
@@ -56,12 +59,11 @@ class CapaRuleGenFeatureCache:
         self.bb_nodes: Dict[Address, CapaRuleGenFeatureCacheNode] = {}
         self.insn_nodes: Dict[Address, CapaRuleGenFeatureCacheNode] = {}

-        self._find_global_features(extractor)
-        self._find_file_features(extractor)
-        self._find_function_and_below_features(fh_list, extractor)
+        self._find_global_features()
+        self._find_file_features()

-    def _find_global_features(self, extractor: CapaExplorerFeatureExtractor):
-        for feature, addr in extractor.extract_global_features():
+    def _find_global_features(self):
+        for feature, addr in self.extractor.extract_global_features():
             # not all global features may have virtual addresses.
             # if not, then at least ensure the feature shows up in the index.
             # the set of addresses will still be empty.
@@ -71,46 +73,45 @@ class CapaRuleGenFeatureCache:
             if feature not in self.global_features:
                 self.global_features[feature] = set()

-    def _find_file_features(self, extractor: CapaExplorerFeatureExtractor):
+    def _find_file_features(self):
         # not all file features may have virtual addresses.
         # if not, then at least ensure the feature shows up in the index.
         # the set of addresses will still be empty.
-        for feature, addr in extractor.extract_file_features():
+        for feature, addr in self.extractor.extract_file_features():
             if addr is not None:
                 self.file_node.features[feature].add(addr)
             else:
                 if feature not in self.file_node.features:
                     self.file_node.features[feature] = set()

-    def _find_function_and_below_features(self, fh_list: List[FunctionHandle], extractor: CapaExplorerFeatureExtractor):
-        for fh in fh_list:
-            f_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(fh, self.file_node)
+    def _find_function_and_below_features(self, fh: FunctionHandle):
+        f_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(fh, self.file_node)

-            # extract basic block and below features
-            for bbh in extractor.get_basic_blocks(fh):
-                bb_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(bbh, f_node)
+        # extract basic block and below features
+        for bbh in self.extractor.get_basic_blocks(fh):
+            bb_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(bbh, f_node)

-                # extract instruction features
-                for ih in extractor.get_instructions(fh, bbh):
-                    inode: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(ih, bb_node)
+            # extract instruction features
+            for ih in self.extractor.get_instructions(fh, bbh):
+                inode: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(ih, bb_node)

-                    for feature, addr in extractor.extract_insn_features(fh, bbh, ih):
-                        inode.features[feature].add(addr)
+                for feature, addr in self.extractor.extract_insn_features(fh, bbh, ih):
+                    inode.features[feature].add(addr)

-                    self.insn_nodes[inode.address] = inode
+                self.insn_nodes[inode.address] = inode

-                # extract basic block features
-                for feature, addr in extractor.extract_basic_block_features(fh, bbh):
-                    bb_node.features[feature].add(addr)
+            # extract basic block features
+            for feature, addr in self.extractor.extract_basic_block_features(fh, bbh):
+                bb_node.features[feature].add(addr)

-                # store basic block features in cache and function parent
-                self.bb_nodes[bb_node.address] = bb_node
+            # store basic block features in cache and function parent
+            self.bb_nodes[bb_node.address] = bb_node

-            # extract function features
-            for feature, addr in extractor.extract_function_features(fh):
-                f_node.features[feature].add(addr)
+        # extract function features
+        for feature, addr in self.extractor.extract_function_features(fh):
+            f_node.features[feature].add(addr)

-            self.func_nodes[f_node.address] = f_node
+        self.func_nodes[f_node.address] = f_node

     def _find_instruction_capabilities(
         self, ruleset: RuleSet, insn: CapaRuleGenFeatureCacheNode
@@ -155,7 +156,7 @@ class CapaRuleGenFeatureCache:
     def find_code_capabilities(
         self, ruleset: RuleSet, fh: FunctionHandle
     ) -> Tuple[FeatureSet, MatchResults, MatchResults, MatchResults]:
-        f_node: Optional[CapaRuleGenFeatureCacheNode] = self.func_nodes.get(fh.address, None)
+        f_node: Optional[CapaRuleGenFeatureCacheNode] = self._get_cached_func_node(fh)
         if f_node is None:
             return {}, {}, {}, {}

@@ -195,8 +196,16 @@ class CapaRuleGenFeatureCache:
         _, matches = ruleset.match(Scope.FILE, features, NO_ADDRESS)
         return features, matches

+    def _get_cached_func_node(self, fh: FunctionHandle) -> Optional[CapaRuleGenFeatureCacheNode]:
+        f_node: Optional[CapaRuleGenFeatureCacheNode] = self.func_nodes.get(fh.address)
+        if f_node is None:
+            # function is not in our cache, do extraction now
+            self._find_function_and_below_features(fh)
+            f_node = self.func_nodes.get(fh.address)
+        return f_node
+
     def get_all_function_features(self, fh: FunctionHandle) -> FeatureSet:
-        f_node: Optional[CapaRuleGenFeatureCacheNode] = self.func_nodes.get(fh.address, None)
+        f_node: Optional[CapaRuleGenFeatureCacheNode] = self._get_cached_func_node(fh)
         if f_node is None:
             return {}
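The refactor above makes the cache lazy: instead of extracting every function up front in `__init__`, `_get_cached_func_node` populates the cache on first request. A minimal sketch of that get-or-compute pattern:

```python
from typing import Dict, Optional


class LazyCache:
    def __init__(self):
        self.nodes: Dict[int, str] = {}

    def _extract(self, addr: int) -> None:
        # stand-in for _find_function_and_below_features(fh)
        self.nodes[addr] = f"features@{addr:#x}"

    def get(self, addr: int) -> Optional[str]:
        if addr not in self.nodes:
            self._extract(addr)      # first access pays the extraction cost
        return self.nodes.get(addr)  # later accesses are cache hits


c = LazyCache()
assert c.get(0x401000) == "features@0x401000"
```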
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -1,16 +1,17 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
 # Unless required by applicable law or agreed to in writing, software distributed under the License
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
 import os
 import copy
 import logging
 import itertools
 import collections
+from enum import IntFlag
 from typing import Any, List, Optional
+from pathlib import Path

 import idaapi
 import ida_kernwin
@@ -57,9 +58,6 @@ CAPA_OFFICIAL_RULESET_URL = f"https://github.com/mandiant/capa-rules/releases/ta
 CAPA_RULESET_DOC_URL = "https://github.com/mandiant/capa/blob/master/doc/rules.md"


-from enum import IntFlag
-
-
 class Options(IntFlag):
     NO_ANALYSIS = 0  # No auto analysis
     ANALYZE_AUTO = 1  # Runs the analysis when starting the explorer, see details below
@@ -73,10 +71,9 @@ AnalyzeOptionsText = {
 }


-def write_file(path, data):
+def write_file(path: Path, data):
     """ """
-    with open(path, "wb") as save_file:
-        save_file.write(data)
+    path.write_bytes(data)


 def trim_function_name(f, max_length=25):
@@ -192,8 +189,10 @@ class CapaExplorerForm(idaapi.PluginForm):
         # caches used to speed up capa explorer analysis - these must be init to None
         self.resdoc_cache: Optional[capa.render.result_document.ResultDocument] = None
         self.program_analysis_ruleset_cache: Optional[capa.rules.RuleSet] = None
-        self.rulegen_ruleset_cache: Optional[capa.rules.RuleSet] = None
+        self.feature_extractor: Optional[CapaExplorerFeatureExtractor] = None
+        self.rulegen_feature_extractor: Optional[CapaExplorerFeatureExtractor] = None
+        self.rulegen_feature_cache: Optional[CapaRuleGenFeatureCache] = None
+        self.rulegen_ruleset_cache: Optional[capa.rules.RuleSet] = None
         self.rulegen_current_function: Optional[FunctionHandle] = None

         # models
@@ -536,7 +535,7 @@ class CapaExplorerForm(idaapi.PluginForm):
        @param new_ea: destination ea
        @param old_ea: source ea
        """
-        if not self.view_tabs.currentIndex() in (0, 1):
+        if self.view_tabs.currentIndex() not in (0, 1):
            return

        if idaapi.get_widget_type(widget) != idaapi.BWN_DISASM:
@@ -574,10 +573,10 @@ class CapaExplorerForm(idaapi.PluginForm):

    def ensure_capa_settings_rule_path(self):
        try:
-            path: str = settings.user.get(CAPA_SETTINGS_RULE_PATH, "")
+            path: Path = Path(settings.user.get(CAPA_SETTINGS_RULE_PATH, ""))

            # resolve rules directory - check self and settings first, then ask user
-            if not os.path.exists(path):
+            if not path.exists():
                # configure rules selection messagebox
                rules_message = QtWidgets.QMessageBox()
                rules_message.setIcon(QtWidgets.QMessageBox.Information)
@@ -585,7 +584,7 @@ class CapaExplorerForm(idaapi.PluginForm):
                rules_message.setText("You must specify a directory containing capa rules before running analysis.")
                rules_message.setInformativeText(
                    "Click 'Ok' to specify a local directory of rules or you can download and extract the official "
-                    f"rules from the URL listed in the details."
+                    + "rules from the URL listed in the details."
                )
                rules_message.setDetailedText(f"{CAPA_OFFICIAL_RULESET_URL}")
                rules_message.setStandardButtons(QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel)
@@ -595,24 +594,25 @@ class CapaExplorerForm(idaapi.PluginForm):
                if pressed == QtWidgets.QMessageBox.Cancel:
                    raise UserCancelledError()

-                path = self.ask_user_directory()
+                path = Path(self.ask_user_directory())
                if not path:
                    raise UserCancelledError()

-            if not os.path.exists(path):
-                logger.error("rule path %s does not exist or cannot be accessed" % path)
+            if not path.exists():
+                logger.error("rule path %s does not exist or cannot be accessed", path)
                return False

-            settings.user[CAPA_SETTINGS_RULE_PATH] = path
-        except UserCancelledError as e:
+            settings.user[CAPA_SETTINGS_RULE_PATH] = str(path)
+        except UserCancelledError:
            capa.ida.helpers.inform_user_ida_ui("Analysis requires capa rules")
            logger.warning(
-                f"You must specify a directory containing capa rules before running analysis. Download and extract the official rules from {CAPA_OFFICIAL_RULESET_URL} (recommended)."
+                "You must specify a directory containing capa rules before running analysis.%s",
+                f"Download and extract the official rules from {CAPA_OFFICIAL_RULESET_URL} (recommended).",
            )
            return False
        except Exception as e:
            capa.ida.helpers.inform_user_ida_ui("Failed to load capa rules")
-            logger.error("Failed to load capa rules (error: %s).", e, exc_info=True)
+            logger.exception("Failed to load capa rules (error: %s).", e)
            return False

        if ida_kernwin.user_cancelled():
@@ -626,7 +626,7 @@ class CapaExplorerForm(idaapi.PluginForm):
        if not self.ensure_capa_settings_rule_path():
            return False

-        rule_path: str = settings.user.get(CAPA_SETTINGS_RULE_PATH, "")
+        rule_path: Path = Path(settings.user.get(CAPA_SETTINGS_RULE_PATH, ""))
        try:

            def on_load_rule(_, i, total):
@@ -645,9 +645,9 @@ class CapaExplorerForm(idaapi.PluginForm):

            logger.error("Failed to load capa rules from %s (error: %s).", settings.user[CAPA_SETTINGS_RULE_PATH], e)
            logger.error(
-                "Make sure your file directory contains properly "
-                "formatted capa rules. You can download and extract the official rules from %s. "
-                "Or, for more details, see the rules documentation here: %s",
+                "Make sure your file directory contains properly "  # noqa: G003 [logging statement uses +]
+                + "formatted capa rules. You can download and extract the official rules from %s. "
+                + "Or, for more details, see the rules documentation here: %s",
                CAPA_OFFICIAL_RULESET_URL,
                CAPA_RULESET_DOC_URL,
            )
@@ -705,14 +705,15 @@ class CapaExplorerForm(idaapi.PluginForm):

                capa.ida.helpers.inform_user_ida_ui("Cached results were generated using different capa rules")
                logger.warning(
-                    "capa is showing you cached results from a previous analysis run. Your rules have changed since and you should reanalyze the program to see new results."
+                    "capa is showing you cached results from a previous analysis run.%s ",
+                    "Your rules have changed since and you should reanalyze the program to see new results.",
                )
                view_status_rules = "no rules matched for cache"

            cached_results_time = self.resdoc_cache.meta.timestamp.strftime("%Y-%m-%d %H:%M:%S")
            new_view_status = f"capa rules: {view_status_rules}, cached results (created {cached_results_time})"
        except Exception as e:
-            logger.error("Failed to load cached capa results (error: %s).", e, exc_info=True)
+            logger.exception("Failed to load cached capa results (error: %s).", e)
            return False
        else:
            # load results from fresh analysis
@@ -725,13 +726,11 @@ class CapaExplorerForm(idaapi.PluginForm):
                update_wait_box(f"{text} ({self.process_count} of {self.process_total})")
                self.process_count += 1

            update_wait_box("initializing feature extractor")

            try:
-                extractor = CapaExplorerFeatureExtractor()
-                extractor.indicator.progress.connect(slot_progress_feature_extraction)
+                self.feature_extractor = CapaExplorerFeatureExtractor()
+                self.feature_extractor.indicator.progress.connect(slot_progress_feature_extraction)
            except Exception as e:
-                logger.error("Failed to initialize feature extractor (error: %s).", e, exc_info=True)
+                logger.exception("Failed to initialize feature extractor (error: %s)", e)
                return False

            if ida_kernwin.user_cancelled():
@@ -741,9 +740,9 @@ class CapaExplorerForm(idaapi.PluginForm):
            update_wait_box("calculating analysis")

            try:
-                self.process_total += len(tuple(extractor.get_functions()))
+                self.process_total += len(tuple(self.feature_extractor.get_functions()))
            except Exception as e:
-                logger.error("Failed to calculate analysis (error: %s).", e, exc_info=True)
+                logger.exception("Failed to calculate analysis (error: %s).", e)
                return False

            if ida_kernwin.user_cancelled():
@@ -767,15 +766,19 @@ class CapaExplorerForm(idaapi.PluginForm):
            update_wait_box("extracting features")

            try:
-                meta = capa.ida.helpers.collect_metadata([settings.user[CAPA_SETTINGS_RULE_PATH]])
-                capabilities, counts = capa.main.find_capabilities(ruleset, extractor, disable_progress=True)
-                meta["analysis"].update(counts)
-                meta["analysis"]["layout"] = capa.main.compute_layout(ruleset, extractor, capabilities)
+                meta = capa.ida.helpers.collect_metadata([Path(settings.user[CAPA_SETTINGS_RULE_PATH])])
+                capabilities, counts = capa.main.find_capabilities(
+                    ruleset, self.feature_extractor, disable_progress=True
+                )
+
+                meta.analysis.feature_counts = counts["feature_counts"]
+                meta.analysis.library_functions = counts["library_functions"]
+                meta.analysis.layout = capa.main.compute_layout(ruleset, self.feature_extractor, capabilities)
            except UserCancelledError:
                logger.info("User cancelled analysis.")
                return False
            except Exception as e:
-                logger.error("Failed to extract capabilities from database (error: %s)", e, exc_info=True)
+                logger.exception("Failed to extract capabilities from database (error: %s)", e)
                return False

            if ida_kernwin.user_cancelled():
@@ -787,7 +790,8 @@ class CapaExplorerForm(idaapi.PluginForm):
            try:
                # support binary files specifically for x86/AMD64 shellcode
                # warn user binary file is loaded but still allow capa to process it
-                # TODO: check specific architecture of binary files based on how user configured IDA processors
+                # TODO(mike-hunhoff): check specific architecture of binary files based on how user configured IDA processors
+                # https://github.com/mandiant/capa/issues/1603
                if idaapi.get_file_type_name() == "Binary file":
                    logger.warning("-" * 80)
                    logger.warning(" Input file appears to be a binary file.")
@@ -808,7 +812,7 @@ class CapaExplorerForm(idaapi.PluginForm):
                if capa.main.has_file_limitation(ruleset, capabilities, is_standalone=False):
                    capa.ida.helpers.inform_user_ida_ui("capa encountered file limitation warnings during analysis")
            except Exception as e:
-                logger.error("Failed to check for file limitations (error: %s)", e, exc_info=True)
+                logger.exception("Failed to check for file limitations (error: %s)", e)
                return False

            if ida_kernwin.user_cancelled():
@@ -822,7 +826,7 @@ class CapaExplorerForm(idaapi.PluginForm):
                    meta, ruleset, capabilities
                )
            except Exception as e:
-                logger.error("Failed to collect results (error: %s)", e, exc_info=True)
+                logger.exception("Failed to collect results (error: %s)", e)
                return False

            if ida_kernwin.user_cancelled():
@@ -838,7 +842,7 @@ class CapaExplorerForm(idaapi.PluginForm):
                capa.ida.helpers.save_rules_cache_id(ruleset_id)
                logger.info("Saved cached results to database")
            except Exception as e:
-                logger.error("Failed to save results to database (error: %s)", e, exc_info=True)
+                logger.exception("Failed to save results to database (error: %s)", e)
                return False
            user_settings = settings.user[CAPA_SETTINGS_RULE_PATH]
            count_source_rules = self.program_analysis_ruleset_cache.source_rule_count
@@ -859,7 +863,7 @@ class CapaExplorerForm(idaapi.PluginForm):

            self.model_data.render_capa_doc(self.resdoc_cache, self.view_show_results_by_function.isChecked())
        except Exception as e:
-            logger.error("Failed to render results (error: %s)", e, exc_info=True)
+            logger.exception("Failed to render results (error: %s)", e)
            return False

        self.set_view_status_label(new_view_status)
@@ -911,7 +915,7 @@ class CapaExplorerForm(idaapi.PluginForm):
            has_cache: bool = capa.ida.helpers.idb_contains_cached_results()
        except Exception as e:
            capa.ida.helpers.inform_user_ida_ui("Failed to check for cached results, reanalyzing program")
-            logger.error("Failed to check for cached results (error: %s)", e, exc_info=True)
+            logger.exception("Failed to check for cached results (error: %s)", e)
            return False

        if ida_kernwin.user_cancelled():
@@ -931,7 +935,7 @@ class CapaExplorerForm(idaapi.PluginForm):
            ] = capa.ida.helpers.load_and_verify_cached_results()
        except Exception as e:
            capa.ida.helpers.inform_user_ida_ui("Failed to verify cached results, reanalyzing program")
-            logger.error("Failed to verify cached results (error: %s)", e, exc_info=True)
+            logger.exception("Failed to verify cached results (error: %s)", e)
            return False

        if results is None:
@@ -944,9 +948,9 @@ class CapaExplorerForm(idaapi.PluginForm):
            "Reanalyze program",
            "",
            ida_kernwin.ASKBTN_YES,
-            f"This database contains capa results generated on "
-            f"{results.meta.timestamp.strftime('%Y-%m-%d at %H:%M:%S')}.\n"
-            f"Load existing data or analyze program again?",
+            "This database contains capa results generated on "
+            + results.meta.timestamp.strftime("%Y-%m-%d at %H:%M:%S")
+            + ".\nLoad existing data or analyze program again?",
        )

        if btn_id == ida_kernwin.ASKBTN_CANCEL:
@@ -973,26 +977,21 @@ class CapaExplorerForm(idaapi.PluginForm):
        # so we'll work with a local copy of the ruleset.
        ruleset = copy.deepcopy(self.rulegen_ruleset_cache)

-        # clear feature cache
-        if self.rulegen_feature_cache is not None:
-            self.rulegen_feature_cache = None
-
-        # clear cached function
-        if self.rulegen_current_function is not None:
-            self.rulegen_current_function = None
-
        if ida_kernwin.user_cancelled():
            logger.info("User cancelled analysis.")
            return False

-        update_wait_box("Initializing feature extractor")
-
-        try:
-            # must use extractor to get function, as capa analysis requires casted object
-            extractor = CapaExplorerFeatureExtractor()
-        except Exception as e:
-            logger.error("Failed to initialize feature extractor (error: %s)", e, exc_info=True)
-            return False
+        # these are init once objects, create on tab change
+        if self.rulegen_feature_cache is None or self.rulegen_feature_extractor is None:
+            try:
+                update_wait_box("performing one-time file analysis")
+                self.rulegen_feature_extractor = CapaExplorerFeatureExtractor()
+                self.rulegen_feature_cache = CapaRuleGenFeatureCache(self.rulegen_feature_extractor)
+            except Exception as e:
+                logger.exception("Failed to initialize feature extractor (error: %s)", e)
+                return False
+        else:
+            logger.info("Reusing prior rulegen cache")

        if ida_kernwin.user_cancelled():
            logger.info("User cancelled analysis.")
@@ -1004,24 +1003,9 @@ class CapaExplorerForm(idaapi.PluginForm):
        try:
            f = idaapi.get_func(idaapi.get_screen_ea())
            if f is not None:
-                self.rulegen_current_function = extractor.get_function(f.start_ea)
+                self.rulegen_current_function = self.rulegen_feature_extractor.get_function(f.start_ea)
        except Exception as e:
-            logger.error("Failed to resolve function at address 0x%X (error: %s)", f.start_ea, e, exc_info=True)
-            return False
-
-        if ida_kernwin.user_cancelled():
-            logger.info("User cancelled analysis.")
-            return False
-
-        # extract features
-        try:
-            fh_list: List[FunctionHandle] = []
-            if self.rulegen_current_function is not None:
-                fh_list.append(self.rulegen_current_function)
-
-            self.rulegen_feature_cache = CapaRuleGenFeatureCache(fh_list, extractor)
-        except Exception as e:
-            logger.error("Failed to extract features (error: %s)", e, exc_info=True)
+            logger.exception("Failed to resolve function at address 0x%X (error: %s)", f.start_ea, e)
            return False

        if ida_kernwin.user_cancelled():
@@ -1047,7 +1031,7 @@ class CapaExplorerForm(idaapi.PluginForm):
                    for addr, _ in result:
                        all_function_features[capa.features.common.MatchedRule(name)].add(addr)
            except Exception as e:
-                logger.error("Failed to generate rule matches (error: %s)", e, exc_info=True)
+                logger.exception("Failed to generate rule matches (error: %s)", e)
                return False

            if ida_kernwin.user_cancelled():
@@ -1068,7 +1052,7 @@ class CapaExplorerForm(idaapi.PluginForm):
                for addr, _ in result:
                    all_file_features[capa.features.common.MatchedRule(name)].add(addr)
            except Exception as e:
-                logger.error("Failed to generate file rule matches (error: %s)", e, exc_info=True)
+                logger.exception("Failed to generate file rule matches (error: %s)", e)
                return False

            if ida_kernwin.user_cancelled():
@@ -1091,7 +1075,7 @@ class CapaExplorerForm(idaapi.PluginForm):
                f"capa rules: {settings.user[CAPA_SETTINGS_RULE_PATH]} ({settings.user[CAPA_SETTINGS_RULE_PATH]} rules)"
            )
        except Exception as e:
-            logger.error("Failed to render views (error: %s)", e, exc_info=True)
+            logger.exception("Failed to render views (error: %s)", e)
            return False

        return True
@@ -1176,7 +1160,7 @@ class CapaExplorerForm(idaapi.PluginForm):
            assert self.rulegen_ruleset_cache is not None
            assert self.rulegen_feature_cache is not None
        except Exception as e:
-            logger.error("Failed to access cache (error: %s)", e, exc_info=True)
+            logger.exception("Failed to access cache (error: %s)", e)
            self.set_rulegen_status("Error: see console output for more details")
            return
@@ -1220,11 +1204,11 @@ class CapaExplorerForm(idaapi.PluginForm):
                self.set_rulegen_status(f"Failed to create function rule matches from rule set ({e})")
                return

-            if rule.scope == capa.rules.Scope.FUNCTION and rule.name in func_matches.keys():
+            if rule.scope == capa.rules.Scope.FUNCTION and rule.name in func_matches:
                is_match = True
-            elif rule.scope == capa.rules.Scope.BASIC_BLOCK and rule.name in bb_matches.keys():
+            elif rule.scope == capa.rules.Scope.BASIC_BLOCK and rule.name in bb_matches:
                is_match = True
-            elif rule.scope == capa.rules.Scope.INSTRUCTION and rule.name in insn_matches.keys():
+            elif rule.scope == capa.rules.Scope.INSTRUCTION and rule.name in insn_matches:
                is_match = True
            elif rule.scope == capa.rules.Scope.FILE:
                try:
@@ -1232,7 +1216,7 @@ class CapaExplorerForm(idaapi.PluginForm):
                except Exception as e:
                    self.set_rulegen_status(f"Failed to create file rule matches from rule set ({e})")
                    return
-                if rule.name in file_matches.keys():
+                if rule.name in file_matches:
                    is_match = True
            else:
                is_match = False
@@ -1259,7 +1243,6 @@ class CapaExplorerForm(idaapi.PluginForm):
        elif index == 1:
            self.set_view_status_label(self.view_status_label_rulegen_cache)
-            self.view_status_label_analysis_cache = status_prev

        self.view_reset_button.setText("Clear")

    def slot_rulegen_editor_update(self):
@@ -1323,8 +1306,8 @@ class CapaExplorerForm(idaapi.PluginForm):

        s = self.resdoc_cache.json().encode("utf-8")

-        path = self.ask_user_capa_json_file()
-        if not path:
+        path = Path(self.ask_user_capa_json_file())
+        if not path.exists():
            return

        write_file(path, s)
@@ -1336,8 +1319,8 @@ class CapaExplorerForm(idaapi.PluginForm):
            idaapi.info("No rule to save.")
            return

-        path = self.ask_user_capa_rule_file()
-        if not path:
+        path = Path(self.ask_user_capa_rule_file())
+        if not path.exists():
            return

        write_file(path, s)
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -30,7 +30,7 @@ class CapaExplorerIdaHooks(idaapi.UI_Hooks):

        @retval must be 0
        """
-        self.process_action_handle = self.process_action_hooks.get(name, None)
+        self.process_action_handle = self.process_action_hooks.get(name)

        if self.process_action_handle:
            self.process_action_handle(self.process_action_meta)
@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 import base64

 # this is just `capa/.github/icon.png`.
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -130,8 +130,7 @@ class CapaExplorerDataItem:
|
||||
|
||||
def children(self) -> Iterator["CapaExplorerDataItem"]:
|
||||
"""yield children"""
|
||||
for child in self._children:
|
||||
yield child
|
||||
yield from self._children
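The `yield from` form above delegates to the child iterable directly. A quick equivalence sketch:

from typing import Iterator

class Node:
    def __init__(self) -> None:
        self._children = ["a", "b"]

    # the two spellings are equivalent for plain iteration;
    # `yield from` also forwards send()/throw() if that ever matters.
    def children_loop(self) -> Iterator[str]:
        for child in self._children:
            yield child

    def children_delegate(self) -> Iterator[str]:
        yield from self._children

assert list(Node().children_loop()) == list(Node().children_delegate())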

def removeChildren(self):
"""remove children"""

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -372,7 +372,8 @@ class CapaExplorerDataModel(QtCore.QAbstractItemModel):
display += f" ({statement.description})"
return CapaExplorerDefaultItem(parent, display)
elif isinstance(statement, rd.CompoundStatement) and statement.type == rd.CompoundStatementType.NOT:
# TODO: do we display 'not'
# TODO(mike-hunhoff): verify that we can display NOT statements
# https://github.com/mandiant/capa/issues/1602
pass
elif isinstance(statement, rd.SomeStatement):
display = f"{statement.count} or more"
@@ -421,12 +422,13 @@ class CapaExplorerDataModel(QtCore.QAbstractItemModel):
@param doc: result doc
"""
if not match.success:
# TODO: display failed branches at some point? Help with debugging rules?
# TODO(mike-hunhoff): display failed branches at some point? Help with debugging rules?
# https://github.com/mandiant/capa/issues/1601
return

# optional statement with no successful children is empty
if isinstance(match.node, rd.StatementNode) and match.node.statement.type == rd.CompoundStatementType.OPTIONAL:
if not any(map(lambda m: m.success, match.children)):
if not any(m.success for m in match.children):
return

if isinstance(match.node, rd.StatementNode):
@@ -626,7 +628,7 @@ class CapaExplorerDataModel(QtCore.QAbstractItemModel):
matched_rule_source = ""

# check if match is a matched rule
matched_rule = doc.rules.get(feature.match, None)
matched_rule = doc.rules.get(feature.match)
if matched_rule is not None:
matched_rule_source = matched_rule.source


@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,6 +6,7 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import re
from typing import Dict, Optional
from collections import Counter

import idc
@@ -63,7 +64,7 @@ def parse_yaml_line(feature):
feature, _, comment = feature.partition("#")
feature, _, description = feature.partition("=")

return map(lambda o: o.strip(), (feature, description, comment))
return (o.strip() for o in (feature, description, comment))
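Both the old map() and the new generator expression return a lazy iterable that callers unpack into exactly three names. A self-contained sketch of the parsing idiom:

def parse_yaml_line(feature: str):
    # split off a trailing comment, then an inline description
    feature, _, comment = feature.partition("#")
    feature, _, description = feature.partition("=")
    return (o.strip() for o in (feature, description, comment))

# unpacking consumes the generator; it must yield exactly three items
feature, description, comment = parse_yaml_line("- number: 2 = size # local")
assert (feature, description, comment) == ("- number: 2", "size", "local")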


def parse_node_for_feature(feature, description, comment, depth):
@@ -498,12 +499,13 @@ class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):
rule_text += "\n features:\n"

for o in iterate_tree(self):
feature, description, comment = map(lambda o: o.strip(), tuple(o.text(i) for i in range(3)))
feature, description, comment = (o.strip() for o in tuple(o.text(i) for i in range(3)))
rule_text += parse_node_for_feature(feature, description, comment, calc_item_depth(o))

# FIXME we avoid circular update by disabling signals when updating
# TODO(mike-hunhoff): we avoid circular update by disabling signals when updating
# the preview. Preferably we would refactor the code to avoid this
# in the first place
# in the first place.
# https://github.com/mandiant/capa/issues/1600
self.preview.blockSignals(True)
self.preview.setPlainText(rule_text)
self.preview.blockSignals(False)
@@ -646,7 +648,7 @@ class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):
counted = list(zip(Counter(features).keys(), Counter(features).values()))

# single features
for k, v in filter(lambda t: t[1] == 1, counted):
for k, _ in filter(lambda t: t[1] == 1, counted):
if isinstance(k, (capa.features.common.String,)):
value = f'"{capa.features.common.escape_string(k.get_value_str())}"'
else:
@@ -682,10 +684,12 @@ class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):

# we don't add a new node for description; either set description column of parent's last child
# or the parent itself
if parent.childCount():
parent.child(parent.childCount() - 1).setText(1, feature.lstrip("description:").lstrip())
else:
parent.setText(1, feature.lstrip("description:").lstrip())
if feature.startswith("description:"):
description = feature[len("description:") :].lstrip()
if parent.childCount():
parent.child(parent.childCount() - 1).setText(1, description)
else:
parent.setText(1, description)
return None
elif feature.startswith("- description:"):
if not parent:
@@ -693,7 +697,8 @@ class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):
return None

# we don't add a new node for description; set the description column of the parent instead
parent.setText(1, feature.lstrip("- description:").lstrip())
description = feature[len("- description:") :].lstrip()
parent.setText(1, description)
return None
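The switch from lstrip("- description:") to slicing by the prefix length above fixes a classic pitfall: str.lstrip takes a set of characters to strip, not a prefix, so it can eat into the payload. A quick demonstration:

# lstrip removes any leading run of the *characters* in its argument:
assert "- description: debug note".lstrip("- description:") == "bug note"

# slicing by the literal prefix length removes exactly the prefix
# (Python 3.9+ also offers str.removeprefix for this):
prefix = "- description:"
s = "- description: debug note"
assert s[len(prefix):].lstrip() == "debug note"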

node = QtWidgets.QTreeWidgetItem(parent)
@@ -1010,7 +1015,7 @@ class CapaExplorerRulegenFeatures(QtWidgets.QTreeWidget):

return o

def load_features(self, file_features, func_features={}):
def load_features(self, file_features, func_features: Optional[Dict] = None):
""" """
self.parse_features_for_tree(self.new_parent_node(self, ("File Scope",)), file_features)
if func_features:
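The signature change above avoids Python's shared-mutable-default trap: a {} default is created once at function definition time and reused across calls. A minimal reproduction:

from typing import Dict, Optional

def bad(features, cache={}):        # one dict shared by every call
    cache.setdefault("calls", 0)
    cache["calls"] += 1
    return cache["calls"]

assert bad([]) == 1
assert bad([]) == 2                 # state leaked between calls

def good(features, cache: Optional[Dict] = None):
    cache = cache if cache is not None else {}
    cache.setdefault("calls", 0)
    cache["calls"] += 1
    return cache["calls"]

assert good([]) == 1
assert good([]) == 1                # fresh dict each call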
@@ -1219,8 +1224,7 @@ class CapaExplorerQtreeView(QtWidgets.QTreeView):
yield self.new_action(*action)

# add default actions
for action in self.load_default_context_menu_actions(data):
yield action
yield from self.load_default_context_menu_actions(data)

def load_default_context_menu(self, pos, item, model_index):
"""create default custom context menu

capa/main.py (440 changed lines)
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
"""
Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,38 +8,43 @@ Unless required by applicable law or agreed to in writing, software distributed
is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
"""
import io
import os
import sys
import time
import hashlib
import logging
import os.path
import argparse
import datetime
import textwrap
import itertools
import contextlib
import collections
from typing import Any, Dict, List, Tuple, Callable
from typing import Any, Dict, List, Tuple, Callable, Optional
from pathlib import Path

import halo
import tqdm
import colorama
import tqdm.contrib.logging
from pefile import PEFormatError
from elftools.common.exceptions import ELFError

import capa.perf
import capa.rules
import capa.engine
import capa.helpers
import capa.version
import capa.render.json
import capa.rules.cache
import capa.render.default
import capa.render.verbose
import capa.features.common
import capa.features.freeze
import capa.features.freeze as frz
import capa.render.vverbose
import capa.features.extractors
import capa.render.result_document
import capa.render.result_document as rdoc
import capa.features.extractors.common
import capa.features.extractors.pefile
import capa.features.extractors.dnfile_
@@ -53,6 +58,7 @@ from capa.helpers import (
get_file_taste,
get_auto_format,
log_unsupported_os_error,
redirecting_print_to_tqdm,
log_unsupported_arch_error,
log_unsupported_format_error,
)
@@ -79,6 +85,7 @@ SIGNATURES_PATH_DEFAULT_STRING = "(embedded signatures)"
BACKEND_VIV = "vivisect"
BACKEND_DOTNET = "dotnet"
BACKEND_BINJA = "binja"
BACKEND_PEFILE = "pefile"

E_MISSING_RULES = 10
E_MISSING_FILE = 11
@@ -243,45 +250,61 @@ def find_capabilities(ruleset: RuleSet, extractor: FeatureExtractor, disable_pro
all_bb_matches = collections.defaultdict(list) # type: MatchResults
all_insn_matches = collections.defaultdict(list) # type: MatchResults

meta = {
"feature_counts": {
"file": 0,
"functions": {},
},
"library_functions": {},
} # type: Dict[str, Any]
feature_counts = rdoc.FeatureCounts(file=0, functions=())
library_functions: Tuple[rdoc.LibraryFunction, ...] = ()

pbar = tqdm.tqdm
if disable_progress:
# do not use tqdm to avoid unnecessary side effects when caller intends
# to disable progress completely
pbar = lambda s, *args, **kwargs: s
with redirecting_print_to_tqdm(disable_progress):
with tqdm.contrib.logging.logging_redirect_tqdm():
pbar = tqdm.tqdm
if disable_progress:
# do not use tqdm to avoid unnecessary side effects when caller intends
# to disable progress completely
def pbar(s, *args, **kwargs):
return s
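Replacing the assigned lambda with a def (the pattern flagged by lint rule E731) keeps the no-op fallback debuggable by giving it a real name. A sketch of the same pattern with a hypothetical factory (make_progress is not part of capa):

def make_progress(disable: bool):
    # returns a wrapper compatible with `for item in pbar(items, ...)`
    if disable:
        def pbar(s, *args, **kwargs):   # named, so tracebacks read well
            return s
        return pbar
    import tqdm
    return tqdm.tqdm

for _ in make_progress(disable=True)(range(3), desc="matching"):
    pass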

functions = list(extractor.get_functions())
n_funcs = len(functions)
functions = list(extractor.get_functions())
n_funcs = len(functions)

pb = pbar(functions, desc="matching", unit=" functions", postfix="skipped 0 library functions")
for f in pb:
if extractor.is_library_function(f.address):
function_name = extractor.get_function_name(f.address)
logger.debug("skipping library function 0x%x (%s)", f.address, function_name)
meta["library_functions"][f.address] = function_name
n_libs = len(meta["library_functions"])
percentage = round(100 * (n_libs / n_funcs))
if isinstance(pb, tqdm.tqdm):
pb.set_postfix_str(f"skipped {n_libs} library functions ({percentage}%)")
continue
pb = pbar(functions, desc="matching", unit=" functions", postfix="skipped 0 library functions", leave=False)
for f in pb:
t0 = time.time()
if extractor.is_library_function(f.address):
function_name = extractor.get_function_name(f.address)
logger.debug("skipping library function 0x%x (%s)", f.address, function_name)
library_functions += (
rdoc.LibraryFunction(address=frz.Address.from_capa(f.address), name=function_name),
)
n_libs = len(library_functions)
percentage = round(100 * (n_libs / n_funcs))
if isinstance(pb, tqdm.tqdm):
pb.set_postfix_str(f"skipped {n_libs} library functions ({percentage}%)")
continue

function_matches, bb_matches, insn_matches, feature_count = find_code_capabilities(ruleset, extractor, f)
meta["feature_counts"]["functions"][f.address] = feature_count
logger.debug("analyzed function 0x%x and extracted %d features", f.address, feature_count)
function_matches, bb_matches, insn_matches, feature_count = find_code_capabilities(
ruleset, extractor, f
)
feature_counts.functions += (
rdoc.FunctionFeatureCount(address=frz.Address.from_capa(f.address), count=feature_count),
)
t1 = time.time()

for rule_name, res in function_matches.items():
all_function_matches[rule_name].extend(res)
for rule_name, res in bb_matches.items():
all_bb_matches[rule_name].extend(res)
for rule_name, res in insn_matches.items():
all_insn_matches[rule_name].extend(res)
match_count = sum(len(res) for res in function_matches.values())
match_count += sum(len(res) for res in bb_matches.values())
match_count += sum(len(res) for res in insn_matches.values())
logger.debug(
"analyzed function 0x%x and extracted %d features, %d matches in %0.02fs",
f.address,
feature_count,
match_count,
t1 - t0,
)

for rule_name, res in function_matches.items():
all_function_matches[rule_name].extend(res)
for rule_name, res in bb_matches.items():
all_bb_matches[rule_name].extend(res)
for rule_name, res in insn_matches.items():
all_insn_matches[rule_name].extend(res)

# collection of features that captures the rule matches within function, BB, and instruction scopes.
# mapping from feature (matched rule) to set of addresses at which it matched.
@@ -289,16 +312,15 @@ def find_capabilities(ruleset: RuleSet, extractor: FeatureExtractor, disable_pro
for rule_name, results in itertools.chain(
all_function_matches.items(), all_bb_matches.items(), all_insn_matches.items()
):
locations = set(map(lambda p: p[0], results))
locations = {p[0] for p in results}
rule = ruleset[rule_name]
capa.engine.index_rule_matches(function_and_lower_features, rule, locations)

all_file_matches, feature_count = find_file_capabilities(ruleset, extractor, function_and_lower_features)
meta["feature_counts"]["file"] = feature_count
feature_counts.file = feature_count

matches = {
rule_name: results
for rule_name, results in itertools.chain(
matches = dict(
itertools.chain(
# each rule exists in exactly one scope,
# so there won't be any overlap among these following MatchResults,
# and we can merge the dictionaries naively.
@@ -307,17 +329,20 @@ def find_capabilities(ruleset: RuleSet, extractor: FeatureExtractor, disable_pro
all_function_matches.items(),
all_file_matches.items(),
)
)

meta = {
"feature_counts": feature_counts,
"library_functions": library_functions,
}

return matches, meta
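The refactor above swaps an untyped nested dict for the result-document models (rdoc.FeatureCounts, rdoc.LibraryFunction), accumulating into typed tuples instead of mutating dict entries. A minimal sketch of that pattern using stand-in dataclasses (the real models are pydantic classes in capa.render.result_document):

from dataclasses import dataclass
from typing import Tuple

@dataclass
class FunctionFeatureCount:      # stand-in for rdoc.FunctionFeatureCount
    address: int
    count: int

@dataclass
class FeatureCounts:             # stand-in for rdoc.FeatureCounts
    file: int
    functions: Tuple[FunctionFeatureCount, ...]

feature_counts = FeatureCounts(file=0, functions=())
for address, count in [(0x401000, 12), (0x401200, 7)]:
    # append by rebinding the tuple; the entries themselves stay immutable
    feature_counts.functions += (FunctionFeatureCount(address, count),)

assert len(feature_counts.functions) == 2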


# TODO move all to helpers?
def has_rule_with_namespace(rules, capabilities, rule_cat):
for rule_name in capabilities.keys():
if rules.rules[rule_name].meta.get("namespace", "").startswith(rule_cat):
return True
return False
def has_rule_with_namespace(rules: RuleSet, capabilities: MatchResults, namespace: str) -> bool:
return any(
rules.rules[rule_name].meta.get("namespace", "").startswith(namespace) for rule_name in capabilities.keys()
)
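any() over a generator keeps the early-exit behavior of the original loop: it stops consuming as soon as the first truthy value appears. A quick check:

seen = []

def probe(n: int) -> bool:
    seen.append(n)
    return n == 2

# any() short-circuits: 3 and 4 are never inspected
assert any(probe(n) for n in [1, 2, 3, 4])
assert seen == [1, 2]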


def is_internal_rule(rule: Rule) -> bool:
@@ -350,26 +375,23 @@ def has_file_limitation(rules: RuleSet, capabilities: MatchResults, is_standalon
return False


def is_supported_format(sample: str) -> bool:
def is_supported_format(sample: Path) -> bool:
"""
Return if this is a supported file based on magic header values
"""
with open(sample, "rb") as f:
taste = f.read(0x100)
taste = sample.open("rb").read(0x100)

return len(list(capa.features.extractors.common.extract_format(taste))) == 1


def is_supported_arch(sample: str) -> bool:
with open(sample, "rb") as f:
buf = f.read()
def is_supported_arch(sample: Path) -> bool:
buf = sample.read_bytes()

return len(list(capa.features.extractors.common.extract_arch(buf))) == 1


def get_arch(sample: str) -> str:
with open(sample, "rb") as f:
buf = f.read()
def get_arch(sample: Path) -> str:
buf = sample.read_bytes()

for feature, _ in capa.features.extractors.common.extract_arch(buf):
assert isinstance(feature.value, str)
@@ -378,16 +400,14 @@ def get_arch(sample: str) -> str:
return "unknown"


def is_supported_os(sample: str) -> bool:
with open(sample, "rb") as f:
buf = f.read()
def is_supported_os(sample: Path) -> bool:
buf = sample.read_bytes()

return len(list(capa.features.extractors.common.extract_os(buf))) == 1
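One nuance in the pathlib migration above: Path.read_bytes() opens and closes the file internally, while sample.open("rb").read(0x100) leaves closing to the garbage collector; a context manager preserves the deterministic close of the old code. A sketch (sample.bin is a hypothetical input created just for the demo):

from pathlib import Path

sample = Path("sample.bin")
sample.write_bytes(b"MZ" + b"\x00" * 0x200)

# whole file, opened and closed internally:
buf = sample.read_bytes()

# first 0x100 bytes with a deterministic close:
with sample.open("rb") as f:
    taste = f.read(0x100)

assert buf[:2] == taste[:2] == b"MZ"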


def get_os(sample: str) -> str:
with open(sample, "rb") as f:
buf = f.read()
def get_os(sample: Path) -> str:
buf = sample.read_bytes()

for feature, _ in capa.features.extractors.common.extract_os(buf):
assert isinstance(feature.value, str)
@@ -415,7 +435,7 @@ def is_running_standalone() -> bool:
return hasattr(sys, "frozen") and hasattr(sys, "_MEIPASS")


def get_default_root() -> str:
def get_default_root() -> Path:
"""
get the file system path to the default resources directory.
under PyInstaller, this comes from _MEIPASS.
@@ -425,30 +445,28 @@ def get_default_root() -> str:
# pylance/mypy don't like `sys._MEIPASS` because this isn't standard.
# its injected by pyinstaller.
# so we'll fetch this attribute dynamically.
return getattr(sys, "_MEIPASS")
assert hasattr(sys, "_MEIPASS")
return Path(sys._MEIPASS)
else:
return os.path.join(os.path.dirname(__file__), "..")
return Path(__file__).resolve().parent.parent


def get_default_signatures() -> List[str]:
def get_default_signatures() -> List[Path]:
"""
compute a list of file system paths to the default FLIRT signatures.
"""
sigs_path = os.path.join(get_default_root(), "sigs")
sigs_path = get_default_root() / "sigs"
logger.debug("signatures path: %s", sigs_path)

ret = []
for root, _, files in os.walk(sigs_path):
for file in files:
if not (file.endswith(".pat") or file.endswith(".pat.gz") or file.endswith(".sig")):
continue

ret.append(os.path.join(root, file))
for file in sigs_path.rglob("*"):
if file.is_file() and file.suffix.lower() in (".pat", ".pat.gz", ".sig"):
ret.append(file)

return ret
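A caveat worth flagging in the rglob rewrite above: Path.suffix is only the last extension, so the ".pat.gz" entry in the tuple can never match it, whereas the old str.endswith version did. Matching on the filename preserves the old behavior. Demonstration:

from pathlib import Path

p = Path("sigs/0_flirt.pat.gz")
assert p.suffix == ".gz"                  # not ".pat.gz"
assert p.suffixes == [".pat", ".gz"]

# endswith on the name matches compound extensions like the old code did:
assert p.name.endswith((".pat", ".pat.gz", ".sig"))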


def get_workspace(path, format_, sigpaths):
def get_workspace(path: Path, format_: str, sigpaths: List[Path]):
"""
load the program at the given path into a vivisect workspace using the given format.
also apply the given FLIRT signatures.
@@ -469,24 +487,23 @@ def get_workspace(path, format_, sigpaths):
import viv_utils.flirt

logger.debug("generating vivisect workspace for: %s", path)
# TODO should not be auto at this point, anymore
if format_ == FORMAT_AUTO:
if not is_supported_format(path):
raise UnsupportedFormatError()

# don't analyze, so that we can add our Flirt function analyzer first.
vw = viv_utils.getWorkspace(path, analyze=False, should_save=False)
vw = viv_utils.getWorkspace(str(path), analyze=False, should_save=False)
elif format_ in {FORMAT_PE, FORMAT_ELF}:
vw = viv_utils.getWorkspace(path, analyze=False, should_save=False)
vw = viv_utils.getWorkspace(str(path), analyze=False, should_save=False)
elif format_ == FORMAT_SC32:
# these are not analyzed nor saved.
vw = viv_utils.getShellcodeWorkspaceFromFile(path, arch="i386", analyze=False)
vw = viv_utils.getShellcodeWorkspaceFromFile(str(path), arch="i386", analyze=False)
elif format_ == FORMAT_SC64:
vw = viv_utils.getShellcodeWorkspaceFromFile(path, arch="amd64", analyze=False)
vw = viv_utils.getShellcodeWorkspaceFromFile(str(path), arch="amd64", analyze=False)
else:
raise ValueError("unexpected format: " + format_)

viv_utils.flirt.register_flirt_signature_analyzers(vw, sigpaths)
viv_utils.flirt.register_flirt_signature_analyzers(vw, [str(s) for s in sigpaths])

vw.analyze()

@@ -494,13 +511,12 @@ def get_workspace(path, format_, sigpaths):
return vw


# TODO get_extractors -> List[FeatureExtractor]?
def get_extractor(
path: str,
path: Path,
format_: str,
os_: str,
backend: str,
sigpaths: List[str],
sigpaths: List[Path],
should_save_workspace=False,
disable_progress=False,
) -> FeatureExtractor:
@@ -532,28 +548,32 @@ def get_extractor(
# We need to first find the binja API installation path and add it into sys.path
if is_running_standalone():
bn_api = find_binja_path()
if os.path.exists(bn_api):
sys.path.append(bn_api)
if bn_api.exists():
sys.path.append(str(bn_api))

try:
from binaryninja import BinaryView, BinaryViewType
except ImportError:
raise RuntimeError(
"Cannot import binaryninja module. Please install the Binary Ninja Python API first: "
"https://docs.binary.ninja/dev/batch.html#install-the-api)."
+ "https://docs.binary.ninja/dev/batch.html#install-the-api)."
)

import capa.features.extractors.binja.extractor

with halo.Halo(text="analyzing program", spinner="simpleDots", stream=sys.stderr, enabled=not disable_progress):
bv: BinaryView = BinaryViewType.get_view_of_file(path)
bv: BinaryView = BinaryViewType.get_view_of_file(str(path))
if bv is None:
raise RuntimeError(f"Binary Ninja cannot open file {path}")

return capa.features.extractors.binja.extractor.BinjaFeatureExtractor(bv)

# default to use vivisect backend
else:
elif backend == BACKEND_PEFILE:
import capa.features.extractors.pefile

return capa.features.extractors.pefile.PefileFeatureExtractor(path)

elif backend == BACKEND_VIV:
import capa.features.extractors.viv.extractor

with halo.Halo(text="analyzing program", spinner="simpleDots", stream=sys.stderr, enabled=not disable_progress):
@@ -571,9 +591,12 @@ def get_extractor(

return capa.features.extractors.viv.extractor.VivisectFeatureExtractor(vw, path, os_)

else:
raise ValueError("unexpected backend: " + backend)

def get_file_extractors(sample: str, format_: str) -> List[FeatureExtractor]:
file_extractors: List[FeatureExtractor] = list()

def get_file_extractors(sample: Path, format_: str) -> List[FeatureExtractor]:
file_extractors: List[FeatureExtractor] = []

if format_ == FORMAT_PE:
file_extractors.append(capa.features.extractors.pefile.PefileFeatureExtractor(sample))
@@ -588,7 +611,7 @@ def get_file_extractors(sample: str, format_: str) -> List[FeatureExtractor]:
return file_extractors


def is_nursery_rule_path(path: str) -> bool:
def is_nursery_rule_path(path: Path) -> bool:
"""
The nursery is a spot for rules that have not yet been fully polished.
For example, they may not have references to a public example of a technique.
@@ -598,21 +621,21 @@ def is_nursery_rule_path(path: str) -> bool:
When nursery rules are loaded, their metadata section should be updated with:
`nursery=True`.
"""
return "nursery" in path
return "nursery" in path.parts


def collect_rule_file_paths(rule_paths: List[str]) -> List[str]:
def collect_rule_file_paths(rule_paths: List[Path]) -> List[Path]:
"""
collect all rule file paths, including those in subdirectories.
"""
rule_file_paths = []
for rule_path in rule_paths:
if not os.path.exists(rule_path):
if not rule_path.exists():
raise IOError(f"rule path {rule_path} does not exist or cannot be accessed")

if os.path.isfile(rule_path):
if rule_path.is_file():
rule_file_paths.append(rule_path)
elif os.path.isdir(rule_path):
elif rule_path.is_dir():
logger.debug("reading rules from directory %s", rule_path)
for root, _, files in os.walk(rule_path):
if ".git" in root:
@@ -629,14 +652,12 @@ def collect_rule_file_paths(rule_paths: List[str]) -> List[str]:
# other things maybe are rules, but are mis-named.
logger.warning("skipping non-.yml file: %s", file)
continue
rule_path = os.path.join(root, file)
rule_file_paths.append(rule_path)

rule_file_paths.append(Path(root) / file)
return rule_file_paths


# TypeAlias. note: using `foo: TypeAlias = bar` is Python 3.10+
RulePath = str
RulePath = Path


def on_load_rule_default(_path: RulePath, i: int, _total: int) -> None:
@@ -656,17 +677,13 @@ def get_rules(
"""
if cache_dir is None:
cache_dir = capa.rules.cache.get_default_cache_directory()

# rule_paths may contain directory paths,
# so search for file paths recursively.
rule_file_paths = collect_rule_file_paths(rule_paths)

# this list is parallel to `rule_file_paths`:
# rule_file_paths[i] corresponds to rule_contents[i].
rule_contents = []
for file_path in rule_file_paths:
with open(file_path, "rb") as f:
rule_contents.append(f.read())
rule_contents = [file_path.read_bytes() for file_path in rule_file_paths]

ruleset = capa.rules.cache.load_cached_ruleset(cache_dir, rule_contents)
if ruleset is not None:
@@ -683,9 +700,8 @@ def get_rules(
except capa.rules.InvalidRule:
raise
else:
rule.meta["capa/path"] = path
if is_nursery_rule_path(path):
rule.meta["capa/nursery"] = True
rule.meta["capa/path"] = path.as_posix()
rule.meta["capa/nursery"] = is_nursery_rule_path(path)

rules.append(rule)
logger.debug("loaded rule: '%s' with scope: %s", rule.name, rule.scope)
@@ -697,27 +713,25 @@ def get_rules(
return ruleset


def get_signatures(sigs_path):
if not os.path.exists(sigs_path):
def get_signatures(sigs_path: Path) -> List[Path]:
if not sigs_path.exists():
raise IOError(f"signatures path {sigs_path} does not exist or cannot be accessed")

paths = []
if os.path.isfile(sigs_path):
paths: List[Path] = []
if sigs_path.is_file():
paths.append(sigs_path)
elif os.path.isdir(sigs_path):
logger.debug("reading signatures from directory %s", os.path.abspath(os.path.normpath(sigs_path)))
for root, _, files in os.walk(sigs_path):
for file in files:
if file.endswith((".pat", ".pat.gz", ".sig")):
sig_path = os.path.join(root, file)
paths.append(sig_path)
elif sigs_path.is_dir():
logger.debug("reading signatures from directory %s", sigs_path.resolve())
for file in sigs_path.rglob("*"):
if file.is_file() and file.suffix.lower() in (".pat", ".pat.gz", ".sig"):
paths.append(file)

# nicely normalize and format path so that debugging messages are clearer
paths = [os.path.abspath(os.path.normpath(path)) for path in paths]
# Convert paths to their absolute and normalized forms
paths = [path.resolve().absolute() for path in paths]

# load signatures in deterministic order: the alphabetic sorting of filename.
# this means that `0_sigs.pat` loads before `1_sigs.pat`.
paths = sorted(paths, key=os.path.basename)
paths = sorted(paths, key=lambda path: path.name)

for path in paths:
logger.debug("found signature file: %s", path)
@@ -727,58 +741,58 @@ def get_signatures(sigs_path):

def collect_metadata(
argv: List[str],
sample_path: str,
sample_path: Path,
format_: str,
os_: str,
rules_path: List[str],
rules_path: List[Path],
extractor: capa.features.extractors.base_extractor.FeatureExtractor,
):
) -> rdoc.Metadata:
md5 = hashlib.md5()
sha1 = hashlib.sha1()
sha256 = hashlib.sha256()

with open(sample_path, "rb") as f:
buf = f.read()
buf = sample_path.read_bytes()

md5.update(buf)
sha1.update(buf)
sha256.update(buf)

if rules_path != [RULES_PATH_DEFAULT_STRING]:
rules_path = [os.path.abspath(os.path.normpath(r)) for r in rules_path]

rules = tuple(r.resolve().absolute().as_posix() for r in rules_path)
format_ = get_format(sample_path) if format_ == FORMAT_AUTO else format_
arch = get_arch(sample_path)
os_ = get_os(sample_path) if os_ == OS_AUTO else os_

return {
"timestamp": datetime.datetime.now().isoformat(),
"version": capa.version.__version__,
"argv": argv,
"sample": {
"md5": md5.hexdigest(),
"sha1": sha1.hexdigest(),
"sha256": sha256.hexdigest(),
"path": os.path.normpath(sample_path),
},
"analysis": {
"format": format_,
"arch": arch,
"os": os_,
"extractor": extractor.__class__.__name__,
"rules": rules_path,
"base_address": extractor.get_base_address(),
"layout": {
return rdoc.Metadata(
timestamp=datetime.datetime.now(),
version=capa.version.__version__,
argv=tuple(argv) if argv else None,
sample=rdoc.Sample(
md5=md5.hexdigest(),
sha1=sha1.hexdigest(),
sha256=sha256.hexdigest(),
path=sample_path.resolve().absolute().as_posix(),
),
analysis=rdoc.Analysis(
format=format_,
arch=arch,
os=os_,
extractor=extractor.__class__.__name__,
rules=rules,
base_address=frz.Address.from_capa(extractor.get_base_address()),
layout=rdoc.Layout(
functions=(),
# this is updated after capabilities have been collected.
# will look like:
#
# "functions": { 0x401000: { "matched_basic_blocks": [ 0x401000, 0x401005, ... ] }, ... }
},
},
}
),
feature_counts=rdoc.FeatureCounts(file=0, functions=()),
library_functions=(),
),
)
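The hashing in collect_metadata reads the file once and feeds the same buffer to all three digests. A self-contained sketch of that idiom (sample.bin is a hypothetical input created for the demo):

import hashlib
from pathlib import Path

sample = Path("sample.bin")
sample.write_bytes(b"\x90" * 64)

buf = sample.read_bytes()                   # single read, three digests
md5, sha1, sha256 = hashlib.md5(), hashlib.sha1(), hashlib.sha256()
for h in (md5, sha1, sha256):
    h.update(buf)

print(md5.hexdigest(), sha1.hexdigest(), sha256.hexdigest())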


def compute_layout(rules, extractor, capabilities):
def compute_layout(rules, extractor, capabilities) -> rdoc.Layout:
"""
compute a metadata structure that links basic blocks
to the functions in which they're found.
@@ -803,16 +817,19 @@ def compute_layout(rules, extractor, capabilities):
assert addr in functions_by_bb
matched_bbs.add(addr)

layout = {
"functions": {
f: {
"matched_basic_blocks": [bb for bb in bbs if bb in matched_bbs]
# this object is open to extension in the future,
layout = rdoc.Layout(
functions=tuple(
rdoc.FunctionLayout(
address=frz.Address.from_capa(f),
matched_basic_blocks=tuple(
rdoc.BasicBlockLayout(address=frz.Address.from_capa(bb)) for bb in bbs if bb in matched_bbs
) # this object is open to extension in the future,
# such as with the function name, etc.
}
)
for f, bbs in bbs_by_function.items()
}
}
if len([bb for bb in bbs if bb in matched_bbs]) > 0
)
)

return layout

@@ -902,7 +919,7 @@ def install_common_args(parser, wanted=None):
"--backend",
type=str,
help="select the backend to use",
choices=(BACKEND_VIV, BACKEND_BINJA),
choices=(BACKEND_VIV, BACKEND_BINJA, BACKEND_PEFILE),
default=BACKEND_VIV,
)

@@ -971,12 +988,20 @@ def handle_common_args(args):
# disable vivisect-related logging, it's verbose and not relevant for capa users
set_vivisect_log_level(logging.CRITICAL)

# Since Python 3.8 cp65001 is an alias to utf_8, but not for Python < 3.8
# TODO: remove this code when only supporting Python 3.8+
# https://stackoverflow.com/a/3259271/87207
import codecs

codecs.register(lambda name: codecs.lookup("utf-8") if name == "cp65001" else None)
if isinstance(sys.stdout, io.TextIOWrapper) or hasattr(sys.stdout, "reconfigure"):
# from sys.stdout type hint:
#
# TextIO is used instead of more specific types for the standard streams,
# since they are often monkeypatched at runtime. At startup, the objects
# are initialized to instances of TextIOWrapper.
#
# To use methods from TextIOWrapper, use an isinstance check to ensure that
# the streams have not been overridden:
#
# if isinstance(sys.stdout, io.TextIOWrapper):
# sys.stdout.reconfigure(...)
sys.stdout.reconfigure(encoding="utf-8")
colorama.just_fix_windows_console()
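TextIOWrapper.reconfigure (Python 3.7+) changes a stream's encoding in place, which is what replaces the old cp65001 codec-alias workaround above. Guarded roughly like:

import io
import sys

# reconfigure() lives on io.TextIOWrapper; the isinstance check guards
# against monkeypatched streams (pytest, IDEs) that may not support it.
if isinstance(sys.stdout, io.TextIOWrapper):
    sys.stdout.reconfigure(encoding="utf-8")

print("stdout encoding:", sys.stdout.encoding)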

if args.color == "always":
colorama.init(strip=False)
@@ -991,8 +1016,11 @@ def handle_common_args(args):
else:
raise RuntimeError("unexpected --color value: " + args.color)

if hasattr(args, "sample"):
args.sample = Path(args.sample)

if hasattr(args, "rules"):
rules_paths: List[str] = []
rules_paths: List[Path] = []

if args.rules == [RULES_PATH_DEFAULT_STRING]:
logger.debug("-" * 80)
@@ -1002,9 +1030,9 @@ def handle_common_args(args):
logger.debug(" https://github.com/mandiant/capa-rules")
logger.debug("-" * 80)

default_rule_path = os.path.join(get_default_root(), "rules")
default_rule_path = get_default_root() / "rules"

if not os.path.exists(default_rule_path):
if not default_rule_path.exists():
# when a user installs capa via pip,
# this pulls down just the source code - not the default rules.
# i'm not sure the default rules should even be written to the library directory,
@@ -1016,10 +1044,9 @@ def handle_common_args(args):
rules_paths.append(default_rule_path)
args.is_default_rules = True
else:
rules_paths = args.rules

if RULES_PATH_DEFAULT_STRING in rules_paths:
rules_paths.remove(RULES_PATH_DEFAULT_STRING)
for rule in args.rules:
if RULES_PATH_DEFAULT_STRING != rule:
rules_paths.append(Path(rule))

for rule_path in rules_paths:
logger.debug("using rules path: %s", rule_path)
@@ -1037,24 +1064,25 @@ def handle_common_args(args):
)
logger.debug("-" * 80)

sigs_path = os.path.join(get_default_root(), "sigs")
if not os.path.exists(sigs_path):
sigs_path = get_default_root() / "sigs"

if not sigs_path.exists():
logger.error(
"Using default signature path, but it doesn't exist. "
"Please install the signatures first: "
"https://github.com/mandiant/capa/blob/master/doc/installation.md#method-2-using-capa-as-a-python-library."
"Using default signature path, but it doesn't exist. " # noqa: G003 [logging statement uses +]
+ "Please install the signatures first: "
+ "https://github.com/mandiant/capa/blob/master/doc/installation.md#method-2-using-capa-as-a-python-library."
)
raise IOError(f"signatures path {sigs_path} does not exist or cannot be accessed")
else:
sigs_path = args.signatures
sigs_path = Path(args.signatures)
logger.debug("using signatures path: %s", sigs_path)

args.signatures = sigs_path


def main(argv=None):
if sys.version_info < (3, 7):
raise UnsupportedRuntimeError("This version of capa can only be used with Python 3.7+")
def main(argv: Optional[List[str]] = None):
if sys.version_info < (3, 8):
raise UnsupportedRuntimeError("This version of capa can only be used with Python 3.8+")

if argv is None:
argv = sys.argv[1:]
@@ -1119,7 +1147,7 @@ def main(argv=None):

try:
if is_running_standalone() and args.is_default_rules:
cache_dir = os.path.join(get_default_root(), "cache")
cache_dir = get_default_root() / "cache"
else:
cache_dir = capa.rules.cache.get_default_cache_directory()

@@ -1136,13 +1164,13 @@ def main(argv=None):
rules = rules.filter_rules_by_meta(args.tag)
logger.debug("selected %d rules", len(rules))
for i, r in enumerate(rules.rules, 1):
# TODO don't display subscope rules?
logger.debug(" %d. %s", i, r)

except (IOError, capa.rules.InvalidRule, capa.rules.InvalidRuleSet) as e:
logger.error("%s", str(e))
logger.error(
"Make sure your file directory contains properly formatted capa rules. You can download the standard "
"collection of capa rules from https://github.com/mandiant/capa-rules/releases."
"Make sure your file directory contains properly formatted capa rules. You can download the standard " # noqa: G003 [logging statement uses +]
+ "collection of capa rules from https://github.com/mandiant/capa-rules/releases."
)
logger.error(
"Please ensure you're using the rules that correspond to your major version of capa (%s)",
@@ -1189,8 +1217,7 @@ def main(argv=None):
logger.debug("file limitation short circuit, won't analyze fully.")
return E_FILE_LIMITATION

# TODO: #1411 use a real type, not a dict here.
meta: Dict[str, Any]
meta: rdoc.Metadata
capabilities: MatchResults
counts: Dict[str, Any]

@@ -1205,8 +1232,7 @@ def main(argv=None):

if format_ == FORMAT_FREEZE:
# freeze format deserializes directly into an extractor
with open(args.sample, "rb") as f:
extractor = capa.features.freeze.load(f.read())
extractor = frz.load(Path(args.sample).read_bytes())
else:
# all other formats we must create an extractor,
# such as viv, binary ninja, etc. workspaces
@@ -1232,7 +1258,7 @@ def main(argv=None):
args.backend,
sig_paths,
should_save_workspace,
disable_progress=args.quiet,
disable_progress=args.quiet or args.debug,
)
except UnsupportedFormatError:
log_unsupported_format_error()
@@ -1247,15 +1273,16 @@ def main(argv=None):
meta = collect_metadata(argv, args.sample, args.format, args.os, args.rules, extractor)

capabilities, counts = find_capabilities(rules, extractor, disable_progress=args.quiet)
meta["analysis"].update(counts)
meta["analysis"]["layout"] = compute_layout(rules, extractor, capabilities)

meta.analysis.feature_counts = counts["feature_counts"]
meta.analysis.library_functions = counts["library_functions"]
meta.analysis.layout = compute_layout(rules, extractor, capabilities)

if has_file_limitation(rules, capabilities):
# bail if capa encountered file limitation e.g. a packed binary
# do show the output in verbose mode, though.
if not (args.verbose or args.vverbose or args.json):
return E_FILE_LIMITATION

if args.json:
print(capa.render.json.render(meta, rules, capabilities))
elif args.vverbose:
@@ -1293,14 +1320,16 @@ def ida_main():
logger.debug(" https://github.com/mandiant/capa-rules")
logger.debug("-" * 80)

rules_path = os.path.join(get_default_root(), "rules")
rules_path = get_default_root() / "rules"
logger.debug("rule path: %s", rules_path)
rules = get_rules([rules_path])

meta = capa.ida.helpers.collect_metadata([rules_path])

capabilities, counts = find_capabilities(rules, capa.features.extractors.ida.extractor.IdaFeatureExtractor())
meta["analysis"].update(counts)

meta.analysis.feature_counts = counts["feature_counts"]
meta.analysis.library_functions = counts["library_functions"]

if has_file_limitation(rules, capabilities, is_standalone=False):
capa.ida.helpers.inform_user_ida_ui("capa encountered warnings during analysis")
@@ -1309,17 +1338,8 @@ def ida_main():
print(capa.render.default.render(meta, rules, capabilities))


def is_runtime_ida():
try:
import idc
except ImportError:
return False
else:
return True
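The removed helper above (now provided by capa.helpers.is_runtime_ida) is the standard import-probe idiom: the idc module is only importable inside IDA Pro. A sketch of the same check written without triggering the import itself:

import importlib.util

def is_runtime_ida() -> bool:
    # find_spec returns None when the module cannot be located,
    # so this probes availability without importing idc
    return importlib.util.find_spec("idc") is not None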


if __name__ == "__main__":
if is_runtime_ida():
if capa.helpers.is_runtime_ida():
ida_main()
else:
sys.exit(main())

@@ -1,3 +1,10 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging

import capa.engine as ceng
@@ -22,7 +29,7 @@ def get_node_cost(node):
# substring and regex features require a full scan of each string
# which we anticipate is more expensive than a hash lookup feature (e.g. mnemonic or count).
#
# TODO: compute the average cost of these feature relative to hash feature
# fun research: compute the average cost of these feature relative to hash feature
# and adjust the factor accordingly.
return 2


@@ -1,3 +1,10 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import typing
import collections


@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -11,7 +11,6 @@ import collections
import tabulate

import capa.render.utils as rutils
import capa.features.freeze as frz
import capa.render.result_document as rd
import capa.features.freeze.features as frzf
from capa.rules import RuleSet
@@ -40,7 +39,7 @@ def render_meta(doc: rd.ResultDocument, ostream: StringIO):
("path", doc.meta.sample.path),
]

ostream.write(tabulate.tabulate(rows, tablefmt="psql"))
ostream.write(tabulate.tabulate(rows, tablefmt="mixed_outline"))
ostream.write("\n")


@@ -49,7 +48,7 @@ def find_subrule_matches(doc: rd.ResultDocument):
collect the rule names that have been matched as a subrule match.
this way we can avoid displaying entries for things that are too specific.
"""
matches = set([])
matches = set()

def rec(match: rd.Match):
if not match.success:
@@ -65,7 +64,7 @@ def find_subrule_matches(doc: rd.ResultDocument):
matches.add(match.node.feature.match)

for rule in rutils.capability_rules(doc):
for address, match in rule.matches:
for _, match in rule.matches:
rec(match)

return matches
@@ -102,7 +101,7 @@ def render_capabilities(doc: rd.ResultDocument, ostream: StringIO):

if rows:
ostream.write(
tabulate.tabulate(rows, headers=[width("CAPABILITY", 50), width("NAMESPACE", 50)], tablefmt="psql")
tabulate.tabulate(rows, headers=[width("Capability", 50), width("Namespace", 50)], tablefmt="mixed_outline")
)
ostream.write("\n")
else:
@@ -148,7 +147,7 @@ def render_attack(doc: rd.ResultDocument, ostream: StringIO):
if rows:
ostream.write(
tabulate.tabulate(
rows, headers=[width("ATT&CK Tactic", 20), width("ATT&CK Technique", 80)], tablefmt="psql"
rows, headers=[width("ATT&CK Tactic", 20), width("ATT&CK Technique", 80)], tablefmt="mixed_grid"
)
)
ostream.write("\n")
@@ -190,7 +189,9 @@ def render_mbc(doc: rd.ResultDocument, ostream: StringIO):

if rows:
ostream.write(
tabulate.tabulate(rows, headers=[width("MBC Objective", 25), width("MBC Behavior", 75)], tablefmt="psql")
tabulate.tabulate(
rows, headers=[width("MBC Objective", 25), width("MBC Behavior", 75)], tablefmt="mixed_grid"
)
)
ostream.write("\n")


@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -24,14 +24,10 @@ $ protoc.exe --python_out=. --mypy_out=. <path_to_proto> (e.g. capa/render/proto

Alternatively, --pyi_out=. can be used to generate a Python Interface file that supports development
"""
import sys
import json
import argparse
import datetime
from typing import Any, Dict, Union

import google.protobuf.json_format
from google.protobuf.json_format import MessageToJson

import capa.rules
import capa.features.freeze as frz
@@ -43,7 +39,7 @@ from capa.features.freeze import AddressType


def dict_tuple_to_list_values(d: Dict) -> Dict:
o = dict()
o = {}
for k, v in d.items():
if isinstance(v, tuple):
o[k] = list(v)
@@ -136,7 +132,7 @@ def metadata_to_pb2(meta: rd.Metadata) -> capa_pb2.Metadata:
arch=meta.analysis.arch,
os=meta.analysis.os,
extractor=meta.analysis.extractor,
rules=meta.analysis.rules,
rules=list(meta.analysis.rules),
base_address=addr_to_pb2(meta.analysis.base_address),
layout=capa_pb2.Layout(
functions=[

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -7,7 +7,7 @@
# See the License for the specific language governing permissions and limitations under the License.
import datetime
import collections
from typing import Any, Dict, List, Tuple, Union, Optional
from typing import Dict, List, Tuple, Union, Optional

from pydantic import Field, BaseModel

@@ -28,42 +28,47 @@ class FrozenModel(BaseModel):
extra = "forbid"


class Sample(FrozenModel):
class Model(BaseModel):
class Config:
extra = "forbid"


class Sample(Model):
md5: str
sha1: str
sha256: str
path: str


class BasicBlockLayout(FrozenModel):
class BasicBlockLayout(Model):
address: frz.Address


class FunctionLayout(FrozenModel):
class FunctionLayout(Model):
address: frz.Address
matched_basic_blocks: Tuple[BasicBlockLayout, ...]


class Layout(FrozenModel):
class Layout(Model):
functions: Tuple[FunctionLayout, ...]


class LibraryFunction(FrozenModel):
class LibraryFunction(Model):
address: frz.Address
name: str


class FunctionFeatureCount(FrozenModel):
class FunctionFeatureCount(Model):
address: frz.Address
count: int


class FeatureCounts(FrozenModel):
class FeatureCounts(Model):
file: int
functions: Tuple[FunctionFeatureCount, ...]
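The new Model base above keeps the strict-fields behavior (Config.extra = "forbid") while dropping immutability. Under pydantic v1, as used here, forbidding extras makes unknown fields a validation error; a minimal sketch with a hypothetical model:

from pydantic import BaseModel, ValidationError

class Sample(BaseModel):        # stand-in mirroring the Model base above
    class Config:
        extra = "forbid"

    md5: str

Sample(md5="d41d8cd98f00b204e9800998ecf8427e")      # ok
try:
    Sample(md5="d41d8cd98f00b204e9800998ecf8427e", sha1="unexpected")
except ValidationError as e:
    # extra = "forbid" rejects fields that are not declared on the model
    print("rejected:", e.errors()[0]["type"])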


class Analysis(FrozenModel):
class Analysis(Model):
format: str
arch: str
os: str
@@ -75,92 +80,13 @@ class Analysis(FrozenModel):
library_functions: Tuple[LibraryFunction, ...]


class Metadata(FrozenModel):
class Metadata(Model):
timestamp: datetime.datetime
version: str
argv: Optional[Tuple[str, ...]]
sample: Sample
analysis: Analysis

@classmethod
def from_capa(cls, meta: Any) -> "Metadata":
return cls(
timestamp=meta["timestamp"],
version=meta["version"],
argv=meta["argv"] if "argv" in meta else None,
sample=Sample(
md5=meta["sample"]["md5"],
sha1=meta["sample"]["sha1"],
sha256=meta["sample"]["sha256"],
path=meta["sample"]["path"],
),
analysis=Analysis(
format=meta["analysis"]["format"],
arch=meta["analysis"]["arch"],
os=meta["analysis"]["os"],
extractor=meta["analysis"]["extractor"],
rules=meta["analysis"]["rules"],
base_address=frz.Address.from_capa(meta["analysis"]["base_address"]),
layout=Layout(
functions=tuple(
FunctionLayout(
address=frz.Address.from_capa(address),
matched_basic_blocks=tuple(
BasicBlockLayout(address=frz.Address.from_capa(bb)) for bb in f["matched_basic_blocks"]
),
)
for address, f in meta["analysis"]["layout"]["functions"].items()
)
),
feature_counts=FeatureCounts(
file=meta["analysis"]["feature_counts"]["file"],
functions=tuple(
FunctionFeatureCount(address=frz.Address.from_capa(address), count=count)
for address, count in meta["analysis"]["feature_counts"]["functions"].items()
),
),
library_functions=tuple(
LibraryFunction(address=frz.Address.from_capa(address), name=name)
for address, name in meta["analysis"]["library_functions"].items()
),
),
)

def to_capa(self) -> Dict[str, Any]:
capa_meta = {
"timestamp": self.timestamp.isoformat(),
"version": self.version,
"sample": {
"md5": self.sample.md5,
"sha1": self.sample.sha1,
"sha256": self.sample.sha256,
"path": self.sample.path,
},
"analysis": {
"format": self.analysis.format,
"arch": self.analysis.arch,
"os": self.analysis.os,
"extractor": self.analysis.extractor,
"rules": self.analysis.rules,
"base_address": self.analysis.base_address.to_capa(),
"layout": {
"functions": {
f.address.to_capa(): {
"matched_basic_blocks": [bb.address.to_capa() for bb in f.matched_basic_blocks]
}
for f in self.analysis.layout.functions
}
},
"feature_counts": {
"file": self.analysis.feature_counts.file,
"functions": {fc.address.to_capa(): fc.count for fc in self.analysis.feature_counts.functions},
},
"library_functions": {lf.address.to_capa(): lf.name for lf in self.analysis.library_functions},
},
}

return capa_meta


class CompoundStatementType:
AND = "and"
@@ -376,7 +302,7 @@ class Match(FrozenModel):
# pull matches from the referenced rule into our tree here.
rule_name = name
rule = rules[rule_name]
rule_matches = {address: result for (address, result) in capabilities[rule_name]}
rule_matches = dict(capabilities[rule_name])
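dict() over an iterable of (key, value) pairs is the direct spelling of the comprehension it replaces:

pairs = [(0x401000, "result-a"), (0x401200, "result-b")]
assert dict(pairs) == {addr: res for (addr, res) in pairs}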

if rule.is_subscope_rule():
# for a subscope rule, fixup the node to be a scope node, rather than a match feature node.
@@ -421,7 +347,7 @@ class Match(FrozenModel):
# we could introduce an intermediate node here.
# this would be a breaking change and require updates to the renderers.
# in the meantime, the above might be sufficient.
rule_matches = {address: result for (address, result) in capabilities[rule.name]}
rule_matches = dict(capabilities[rule.name])
for location in result.locations:
# doc[locations] contains all matches for the given namespace.
# for example, the feature might be `match: anti-analysis/packer`
@@ -642,7 +568,7 @@ class ResultDocument(FrozenModel):
rules: Dict[str, RuleMatches]

@classmethod
def from_capa(cls, meta, rules: RuleSet, capabilities: MatchResults) -> "ResultDocument":
def from_capa(cls, meta: Metadata, rules: RuleSet, capabilities: MatchResults) -> "ResultDocument":
rule_matches: Dict[str, RuleMatches] = {}
for rule_name, matches in capabilities.items():
rule = rules[rule_name]
@@ -659,10 +585,9 @@ class ResultDocument(FrozenModel):
),
)

return ResultDocument(meta=Metadata.from_capa(meta), rules=rule_matches)
return ResultDocument(meta=meta, rules=rule_matches)

def to_capa(self) -> Tuple[Dict, Dict]:
meta = self.meta.to_capa()
def to_capa(self) -> Tuple[Metadata, Dict]:
capabilities: Dict[
str, List[Tuple[capa.features.address.Address, capa.features.common.Result]]
] = collections.defaultdict(list)
@@ -678,4 +603,4 @@ class ResultDocument(FrozenModel):

capabilities[rule_name].append((addr.to_capa(), result))

return meta, capabilities
return self.meta, capabilities

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -37,7 +37,7 @@ def format_parts_id(data: Union[rd.AttackSpec, rd.MBCSpec]):

def capability_rules(doc: rd.ResultDocument) -> Iterator[rd.RuleMatches]:
"""enumerate the rules in (namespace, name) order that are 'capability' rules (not lib/subscope/disposition/etc)."""
for _, _, rule in sorted(map(lambda rule: (rule.meta.namespace or "", rule.meta.name, rule), doc.rules.values())):
for _, _, rule in sorted((rule.meta.namespace or "", rule.meta.name, rule) for rule in doc.rules.values()):
if rule.meta.lib:
continue
if rule.meta.is_subscope_rule:
|
||||
|
||||
@@ -14,7 +14,7 @@ example::
|
||||
0x10003415
|
||||
0x10003797
|
||||
|
||||
Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -96,8 +96,7 @@ def render_meta(ostream, doc: rd.ResultDocument):
|
||||
("library function count", len(doc.meta.analysis.library_functions)),
|
||||
(
|
||||
"total feature count",
|
||||
doc.meta.analysis.feature_counts.file
|
||||
+ sum(map(lambda f: f.count, doc.meta.analysis.feature_counts.functions)),
|
||||
doc.meta.analysis.feature_counts.file + sum(f.count for f in doc.meta.analysis.feature_counts.functions),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -141,7 +140,7 @@ def render_rules(ostream, doc: rd.ResultDocument):
|
||||
rows.append((key, v))
|
||||
|
||||
if rule.meta.scope != capa.rules.FILE_SCOPE:
|
||||
locations = list(map(lambda m: m[0], doc.rules[rule.meta.name].matches))
|
||||
locations = [m[0] for m in doc.rules[rule.meta.name].matches]
|
||||
rows.append(("matches", "\n".join(map(format_address, locations))))
|
||||
|
||||
ostream.writeln(tabulate.tabulate(rows, tablefmt="plain"))
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -6,7 +6,7 @@
|
||||
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and limitations under the License.
|
||||
|
||||
from typing import Dict, Iterable
|
||||
from typing import Dict, Iterable, Optional
|
||||
|
||||
import tabulate
|
||||
|
||||
@@ -29,7 +29,7 @@ def render_locations(ostream, locations: Iterable[frz.Address]):
|
||||
# its possible to have an empty locations array here,
|
||||
# such as when we're in MODE_FAILURE and showing the logic
|
||||
# under a `not` statement (which will have no matched locations).
|
||||
locations = list(sorted(locations))
|
||||
locations = sorted(locations)
|
||||
|
||||
if len(locations) == 0:
|
||||
return
|
||||
@@ -129,6 +129,7 @@ def render_feature(ostream, match: rd.Match, feature: frzf.Feature, indent=0):
ostream.write(" " * indent)

key = feature.type
value: Optional[str]
if isinstance(feature, frzf.BasicBlockFeature):
# i don't think it makes sense to have standalone basic block features.
# we don't parse them from rules, only things like: `count(basic block) > 1`

@@ -140,7 +141,7 @@ def render_feature(ostream, match: rd.Match, feature: frzf.Feature, indent=0):
value = feature.class_
else:
# convert attributes to dictionary using aliased names, if applicable
value = feature.dict(by_alias=True).get(key, None)
value = feature.dict(by_alias=True).get(key)

if value is None:
raise ValueError(f"{key} contains None")

@@ -222,7 +223,7 @@ def render_match(ostream, match: rd.Match, indent=0, mode=MODE_SUCCESS):

# optional statement with no successful children is empty
if isinstance(match.node, rd.StatementNode) and match.node.statement.type == rd.CompoundStatementType.OPTIONAL:
if not any(map(lambda m: m.success, match.children)):
if not any(m.success for m in match.children):
return

# not statement, so invert the child mode to show failed evaluations

@@ -236,7 +237,7 @@ def render_match(ostream, match: rd.Match, indent=0, mode=MODE_SUCCESS):

# optional statement with successful children is not relevant
if isinstance(match.node, rd.StatementNode) and match.node.statement.type == rd.CompoundStatementType.OPTIONAL:
if any(map(lambda m: m.success, match.children)):
if any(m.success for m in match.children):
return

# not statement, so invert the child mode to show successful evaluations

@@ -277,7 +278,7 @@ def render_rules(ostream, doc: rd.ResultDocument):

had_match = False

for _, _, rule in sorted(map(lambda rule: (rule.meta.namespace or "", rule.meta.name, rule), doc.rules.values())):
for _, _, rule in sorted((rule.meta.namespace or "", rule.meta.name, rule) for rule in doc.rules.values()):
# default scope hides things like lib rules, malware-category rules, etc.
# but in vverbose mode, we really want to show everything.
#
@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -14,6 +14,7 @@ import logging
import binascii
import collections
from enum import Enum
from pathlib import Path

from capa.helpers import assert_never

@@ -510,7 +511,9 @@ def build_statements(d, scope: str):
# arg is string (which doesn't support inline descriptions), like:
#
# count(string(error))
# TODO: what about embedded newlines?
#
# known problem that embedded newlines may not work here?
# this may become a problem (or not), so address it when encountered.
feature = Feature(arg)
else:
feature = Feature()

@@ -634,7 +637,7 @@ class Rule:
Returns:
List[str]: names of rules upon which this rule depends.
"""
deps: Set[str] = set([])
deps: Set[str] = set()

def rec(statement):
if isinstance(statement, capa.features.common.MatchedRule):

@@ -648,7 +651,7 @@ class Rule:
# but, namespaces tend to use `-` while rule names use ` `. so, unlikely, but possible.
if statement.value in namespaces:
# matches a namespace, so take precedence and don't even check rule names.
deps.update(map(lambda r: r.name, namespaces[statement.value]))
deps.update(r.name for r in namespaces[statement.value])
else:
# not a namespace, assume its a rule name.
assert isinstance(statement.value, str)

@@ -706,8 +709,7 @@ class Rule:
# note: we cannot recurse into the subscope sub-tree,
# because its been replaced by a `match` statement.
for child in statement.get_children():
for new_rule in self._extract_subscope_rules_rec(child):
yield new_rule
yield from self._extract_subscope_rules_rec(child)

def is_subscope_rule(self):
return bool(self.meta.get("capa/subscope-rule", False))

@@ -733,8 +735,7 @@ class Rule:
# replace old node with reference to new rule
# yield new rule

for new_rule in self._extract_subscope_rules_rec(self.statement):
yield new_rule
yield from self._extract_subscope_rules_rec(self.statement)

def evaluate(self, features: FeatureSet, short_circuit=True):
capa.perf.counters["evaluate.feature"] += 1

@@ -778,7 +779,7 @@ class Rule:
# on Windows, get WHLs from pyyaml.org/pypi
logger.debug("using libyaml CLoader.")
return yaml.CLoader
except:
except Exception:
logger.debug("unable to import libyaml CLoader, falling back to Python yaml parser.")
logger.debug("this will be slower to load rules.")
return yaml.Loader
@@ -823,7 +824,7 @@ class Rule:

@classmethod
def from_yaml_file(cls, path, use_ruamel=False) -> "Rule":
with open(path, "rb") as f:
with Path(path).open("rb") as f:
try:
rule = cls.from_yaml(f.read().decode("utf-8"), use_ruamel=use_ruamel)
# import here to avoid circular dependency

@@ -950,7 +951,7 @@ def get_rules_with_scope(rules, scope) -> List[Rule]:
from the given collection of rules, select those with the given scope.
`scope` is one of the capa.rules.*_SCOPE constants.
"""
return list(rule for rule in rules if rule.scope == scope)
return [rule for rule in rules if rule.scope == scope]


def get_rules_and_dependencies(rules: List[Rule], rule_name: str) -> Iterator[Rule]:

@@ -961,7 +962,7 @@ def get_rules_and_dependencies(rules: List[Rule], rule_name: str) -> Iterator[Ru
rules = list(rules)
namespaces = index_rules_by_namespace(rules)
rules_by_name = {rule.name: rule for rule in rules}
wanted = set([rule_name])
wanted = {rule_name}

def rec(rule):
wanted.add(rule.name)

@@ -976,7 +977,7 @@ def get_rules_and_dependencies(rules: List[Rule], rule_name: str) -> Iterator[Ru


def ensure_rules_are_unique(rules: List[Rule]) -> None:
seen = set([])
seen = set()
for rule in rules:
if rule.name in seen:
raise InvalidRule("duplicate rule name: " + rule.name)

@@ -1041,7 +1042,7 @@ def topologically_order_rules(rules: List[Rule]) -> List[Rule]:
rules = list(rules)
namespaces = index_rules_by_namespace(rules)
rules_by_name = {rule.name: rule for rule in rules}
seen = set([])
seen = set()
ret = []

def rec(rule):

@@ -1190,7 +1191,6 @@ class RuleSet:
# so thats not helpful to decide how to downselect.
#
# and, a global rule will never be the sole selector in a rule.
# TODO: probably want a lint for this.
pass
else:
# easy feature: hash lookup

@@ -1247,7 +1247,7 @@ class RuleSet:
# the set of subtypes of type A is unbounded,
# because any user might come along and create a new subtype B,
# so mypy can't reason about this set of types.
assert False, f"Unhandled value: {node} ({type(node).__name__})"
assert_never(node)
else:
# programming error
assert_never(node)

@@ -1284,7 +1284,7 @@ class RuleSet:
don't include auto-generated "subscope" rules.
we want to include general "lib" rules here - even if they are not dependencies of other rules, see #398
"""
scope_rules: Set[Rule] = set([])
scope_rules: Set[Rule] = set()

# we need to process all rules, not just rules with the given scope.
# this is because rules with a higher scope, e.g. file scope, may have subscope rules

@@ -1329,7 +1329,7 @@ class RuleSet:
TODO support -t=metafield <k>
"""
rules = list(self.rules.values())
rules_filtered = set([])
rules_filtered = set()
for rule in rules:
for k, v in rule.meta.items():
if isinstance(v, str) and tag in v:
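A recurring micro-change across these hunks replaces set([...]) constructor calls with set literals or bare set(). A trivial, self-contained check that the forms are equivalent; the sample value is illustrative only:

rule_name = "create process"  # illustrative value, not from the diff

# a one-element literal is equivalent to the constructor form
assert set([rule_name]) == {rule_name}
# and an empty set() is equivalent to set([]) -- note {} would be a dict
assert set([]) == set()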
@@ -1,10 +1,18 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import os
import sys
import zlib
import pickle
import hashlib
import logging
import os.path
from typing import List, Optional
from pathlib import Path
from dataclasses import dataclass

import capa.rules

@@ -28,7 +36,7 @@ def compute_cache_identifier(rule_content: List[bytes]) -> CacheIdentifier:
hash.update(version.encode("utf-8"))
hash.update(b"\x00")

rule_hashes = list(sorted([hashlib.sha256(buf).hexdigest() for buf in rule_content]))
rule_hashes = sorted([hashlib.sha256(buf).hexdigest() for buf in rule_content])
for rule_hash in rule_hashes:
hash.update(rule_hash.encode("ascii"))
hash.update(b"\x00")

@@ -36,7 +44,7 @@ def compute_cache_identifier(rule_content: List[bytes]) -> CacheIdentifier:
return hash.hexdigest()
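The identifier scheme visible in this hunk hashes the capa version, then each rule's SHA-256 digest in sorted order, all NUL-separated, so the identifier does not depend on rule ordering. A rough standalone reconstruction; the choice of SHA-256 for the outer hash is an assumption here, since only the per-rule hashing appears verbatim above:

import hashlib
from typing import List

def sketch_cache_identifier(version: str, rule_content: List[bytes]) -> str:
    h = hashlib.sha256()  # outer hash algorithm assumed, not shown in the hunk
    h.update(version.encode("utf-8"))
    h.update(b"\x00")

    # sort the per-rule digests so the identifier ignores rule ordering
    for rule_hash in sorted(hashlib.sha256(buf).hexdigest() for buf in rule_content):
        h.update(rule_hash.encode("ascii"))
        h.update(b"\x00")

    return h.hexdigest()

print(sketch_cache_identifier("6.0.0a3", [b"rule: ...", b"rule: ..."]))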
def get_default_cache_directory() -> str:
def get_default_cache_directory() -> Path:
# ref: https://github.com/mandiant/capa/issues/1212#issuecomment-1361259813
#
# Linux: $XDG_CACHE_HOME/capa/

@@ -45,22 +53,22 @@ def get_default_cache_directory() -> str:

# ref: https://stackoverflow.com/a/8220141/87207
if sys.platform == "linux" or sys.platform == "linux2":
directory = os.environ.get("XDG_CACHE_HOME", os.path.join(os.environ["HOME"], ".cache", "capa"))
directory = Path(os.environ.get("XDG_CACHE_HOME", Path.home() / ".cache" / "capa"))
elif sys.platform == "darwin":
directory = os.path.join(os.environ["HOME"], "Library", "Caches", "capa")
directory = Path.home() / "Library" / "Caches" / "capa"
elif sys.platform == "win32":
directory = os.path.join(os.environ["LOCALAPPDATA"], "flare", "capa", "cache")
directory = Path(os.environ["LOCALAPPDATA"]) / "flare" / "capa" / "cache"
else:
raise NotImplementedError(f"unsupported platform: {sys.platform}")

os.makedirs(directory, exist_ok=True)
directory.mkdir(parents=True, exist_ok=True)

return directory


def get_cache_path(cache_dir: str, id: CacheIdentifier) -> str:
def get_cache_path(cache_dir: Path, id: CacheIdentifier) -> Path:
filename = "capa-" + id[:8] + ".cache"
return os.path.join(cache_dir, filename)
return cache_dir / filename


MAGIC = b"capa"

@@ -102,7 +110,7 @@ def compute_ruleset_cache_identifier(ruleset: capa.rules.RuleSet) -> CacheIdenti
return compute_cache_identifier(rule_contents)


def cache_ruleset(cache_dir: str, ruleset: capa.rules.RuleSet):
def cache_ruleset(cache_dir: Path, ruleset: capa.rules.RuleSet):
"""
cache the given ruleset to disk, using the given cache directory.
this can subsequently be reloaded via `load_cached_ruleset`,

@@ -113,19 +121,18 @@ def cache_ruleset(cache_dir: str, ruleset: capa.rules.RuleSet):
"""
id = compute_ruleset_cache_identifier(ruleset)
path = get_cache_path(cache_dir, id)
if os.path.exists(path):
if path.exists():
logger.debug("rule set already cached to %s", path)
return

cache = RuleCache(id, ruleset)
with open(path, "wb") as f:
f.write(cache.dump())
path.write_bytes(cache.dump())

logger.debug("rule set cached to %s", path)
return


def load_cached_ruleset(cache_dir: str, rule_contents: List[bytes]) -> Optional[capa.rules.RuleSet]:
def load_cached_ruleset(cache_dir: Path, rule_contents: List[bytes]) -> Optional[capa.rules.RuleSet]:
"""
load a cached ruleset from disk, using the given cache directory.
the raw rule contents are required here to prove that the rules haven't changed

@@ -136,20 +143,19 @@ def load_cached_ruleset(cache_dir: str, rule_contents: List[bytes]) -> Optional[
"""
id = compute_cache_identifier(rule_contents)
path = get_cache_path(cache_dir, id)
if not os.path.exists(path):
if not path.exists():
logger.debug("rule set cache does not exist: %s", path)
return None

logger.debug("loading rule set from cache: %s", path)
with open(path, "rb") as f:
buf = f.read()
buf = path.read_bytes()

try:
cache = RuleCache.load(buf)
except AssertionError:
logger.debug("rule set cache is invalid: %s", path)
# delete the cache that seems to be invalid.
os.remove(path)
path.unlink()
return None
else:
return cache.ruleset
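With the pathlib migration in place, the cache round trip reads roughly as follows. A hedged usage sketch: the function names are as shown in the hunks above, while the ruleset and raw rule bytes are assumed inputs that are not constructed here:

from pathlib import Path

import capa.rules.cache

# assuming: ruleset is a capa.rules.RuleSet built from rule_contents,
# the list of raw rule file bytes
cache_dir: Path = capa.rules.cache.get_default_cache_directory()
capa.rules.cache.cache_ruleset(cache_dir, ruleset)

# reloading recomputes the identifier from the raw contents,
# proving the rules haven't changed since caching
cached = capa.rules.cache.load_cached_ruleset(cache_dir, rule_contents)
if cached is not None:
    ruleset = cached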
@@ -1,4 +1,11 @@
__version__ = "5.1.0"
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
__version__ = "6.0.0a3"


def get_major_version():
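The bump to the pre-release string "6.0.0a3" still yields a major version of 6. The body of get_major_version() is not shown in this hunk; a plausible sketch of such a helper, hypothetical implementation only:

__version__ = "6.0.0a3"

def get_major_version() -> int:
    # hypothetical body; the diff only shows the def line
    return int(__version__.partition(".")[0])

assert get_major_version() == 6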
@@ -93,28 +93,43 @@ For more details about creating and using virtual environments, check out the [v

We use the following tools to ensure consistent code style and formatting:
- [black](https://github.com/psf/black) code formatter
- [isort 5](https://pypi.org/project/isort/) code formatter
- [dos2unix](https://linux.die.net/man/1/dos2unix) for UNIX-style LF newlines
- [isort](https://pypi.org/project/isort/) code formatter
- [ruff](https://beta.ruff.rs/docs/) code linter
- [flake8](https://flake8.pycqa.org/en/latest/) code linter
- [mypy](https://mypy-lang.org/) type checking
- [capafmt](https://github.com/mandiant/capa/blob/master/scripts/capafmt.py) rule formatter

To install these development dependencies, run:

`$ pip install -e /local/path/to/src[dev]`

To check the code style, formatting and run the tests you can run the script `scripts/ci.sh`.
You can run it with the argument `no_tests` to skip the tests and only run the code style and formatting: `scripts/ci.sh no_tests`

##### Setup hooks [optional]

If you plan to contribute to capa, you may want to setup the provided hooks.
Run `scripts/setup-hooks.sh` to set the following hooks up:
- The `pre-commit` hook runs checks before every `git commit`.
  It runs `scripts/ci.sh no_tests` aborting the commit if there are code style or rule linter offenses you need to fix.
- The `pre-push` hook runs checks before every `git push`.
  It runs `scripts/ci.sh` aborting the push if there are code style or rule linter offenses or if the tests fail.
This way you can ensure everything is alright before sending a pull request.
You can skip the checks by using the `-n`/`--no-verify` git option.

We use [pre-commit](https://pre-commit.com/) so that it's trivial to run the same linters & configuration locally as in CI.

Run all linters like:

❯ pre-commit run --all-files
isort....................................................................Passed
black....................................................................Passed
ruff.....................................................................Passed
flake8...................................................................Passed
mypy.....................................................................Passed

Or run a single linter like:

❯ pre-commit run --all-files isort
isort....................................................................Passed

Importantly, you can configure pre-commit to run automatically before every commit by running:

❯ pre-commit install --hook-type pre-commit
pre-commit installed at .git/hooks/pre-commit

❯ pre-commit install --hook-type pre-push
pre-commit installed at .git/hooks/pre-push

This way you can ensure that you don't commit code style or formatting offenses.
You can always temporarily skip the checks by using the `-n`/`--no-verify` git option.

### 3. Compile binary using PyInstaller
We compile capa standalone binaries using PyInstaller. To reproduce the build process check out the source code as described above and follow the following steps.
pyproject.toml (new file)
@@ -0,0 +1,112 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.

[build-system]
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"

[project]
name = "flare-capa"
authors = [
{name = "Willi Ballenthin", email = "william.ballenthin@mandiant.com"},
{name = "Moritz Raabe", email = "moritz.raabe@mandiant.com"},
{name = "Mike Hunhoff", email = "michael.hunhoff@mandiant.com"},
]
description = "The FLARE team's open-source tool to identify capabilities in executable files."
license = {file = "LICENSE.txt"}
requires-python = ">=3.8"
keywords = ["malware analysis", "reverse engineering", "capability detection", "software behaviors", "capa", "FLARE"]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Programming Language :: Python :: 3",
"Topic :: Security",
]
dependencies = [
"tqdm==4.65.0",
"pyyaml==6.0",
"tabulate==0.9.0",
"colorama==0.4.6",
"termcolor==2.3.0",
"wcwidth==0.2.6",
"ida-settings==2.1.0",
"viv-utils[flirt]==0.7.9",
"halo==0.0.31",
"networkx==3.1",
"ruamel.yaml==0.17.32",
"vivisect==1.1.1",
"pefile==2023.2.7",
"pyelftools==0.29",
"dnfile==0.13.0",
"dncil==1.0.2",
"pydantic==1.10.9",
"protobuf==4.23.4",
]
dynamic = ["version", "readme"]

[tool.setuptools.dynamic]
version = {attr = "capa.version.__version__"}
readme = {file = "README.md"}

[tool.setuptools]
packages = ["capa"]

[project.optional-dependencies]
dev = [
"pre-commit==3.3.3",
"pytest==7.4.0",
"pytest-sugar==0.9.7",
"pytest-instafail==0.5.0",
"pytest-cov==4.1.0",
"flake8==6.0.0",
"flake8-bugbear==23.7.10",
"flake8-encodings==0.5.0.post1",
"flake8-comprehensions==3.14.0",
"flake8-logging-format==0.9.0",
"flake8-no-implicit-concat==0.3.4",
"flake8-print==5.0.0",
"flake8-todos==0.3.0",
"flake8-simplify==0.20.0",
"flake8-use-pathlib==0.3.0",
"flake8-copyright==0.2.4",
"ruff==0.0.278",
"black==23.7.0",
"isort==5.11.4",
"mypy==1.4.1",
"psutil==5.9.2",
"stix2==3.0.1",
"requests==2.31.0",
"mypy-protobuf==3.4.0",
# type stubs for mypy
"types-backports==0.1.3",
"types-colorama==0.4.15.11",
"types-PyYAML==6.0.8",
"types-tabulate==0.9.0.1",
"types-termcolor==1.1.4",
"types-psutil==5.8.23",
"types_requests==2.31.0.1",
"types-protobuf==4.23.0.1",
]
build = [
"pyinstaller==5.10.1",
"setuptools==68.0.0",
"build==0.10.0"
]

[project.urls]
Homepage = "https://github.com/mandiant/capa"
Repository = "https://github.com/mandiant/capa.git"
Documentation = "https://github.com/mandiant/capa/tree/master/doc"
Rules = "https://github.com/mandiant/capa-rules"
"Rules Documentation" = "https://github.com/mandiant/capa-rules/tree/master/doc"

[project.scripts]
capa = "capa.main:main"
Submodule rules updated: a10ccf3fd8...85a980a6cc
@@ -1,4 +1,11 @@
#!/usr/bin/env python
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
"""
bulk-process

@@ -47,7 +54,7 @@ usage:
parallelism factor
--no-mp disable subprocesses

Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -59,10 +66,10 @@ import os
import sys
import json
import logging
import os.path
import argparse
import multiprocessing
import multiprocessing.pool
from pathlib import Path

import capa
import capa.main

@@ -131,8 +138,10 @@ def get_capa_results(args):

meta = capa.main.collect_metadata([], path, format, os_, [], extractor)
capabilities, counts = capa.main.find_capabilities(rules, extractor, disable_progress=True)
meta["analysis"].update(counts)
meta["analysis"]["layout"] = capa.main.compute_layout(rules, extractor, capabilities)

meta.analysis.feature_counts = counts["feature_counts"]
meta.analysis.library_functions = counts["library_functions"]
meta.analysis.layout = capa.main.compute_layout(rules, extractor, capabilities)

doc = rd.ResultDocument.from_capa(meta, rules, capabilities)

@@ -167,15 +176,16 @@ def main(argv=None):
return -1

samples = []
for base, directories, files in os.walk(args.input):
for file in files:
samples.append(os.path.join(base, file))
for file in Path(args.input).rglob("*"):
samples.append(file)

def pmap(f, args, parallelism=multiprocessing.cpu_count()):
cpu_count = multiprocessing.cpu_count()

def pmap(f, args, parallelism=cpu_count):
"""apply the given function f to the given args using subprocesses"""
return multiprocessing.Pool(parallelism).imap(f, args)

def tmap(f, args, parallelism=multiprocessing.cpu_count()):
def tmap(f, args, parallelism=cpu_count):
"""apply the given function f to the given args using threads"""
return multiprocessing.pool.ThreadPool(parallelism).imap(f, args)
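The default-argument change above deduplicates the multiprocessing.cpu_count() call: it is now evaluated once and bound to a name shared by both helpers, instead of appearing in each function's default-argument list (defaults are evaluated at definition time either way, so behavior is unchanged). The same pattern in isolation, with a made-up worker standing in for get_capa_results, runnable on Python 3.8+:

import multiprocessing
import multiprocessing.pool

cpu_count = multiprocessing.cpu_count()

def square(x: int) -> int:
    # stand-in worker; any picklable function works here
    return x * x

def pmap(f, args, parallelism=cpu_count):
    """apply the given function f to the given args using subprocesses"""
    return multiprocessing.Pool(parallelism).imap(f, args)

if __name__ == "__main__":
    print(list(pmap(square, range(8))))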
@@ -204,7 +214,7 @@ def main(argv=None):
if result["status"] == "error":
logger.warning(result["error"])
elif result["status"] == "ok":
results[result["path"]] = rd.ResultDocument.parse_obj(result["ok"]).json(exclude_none=True)
results[result["path"].as_posix()] = rd.ResultDocument.parse_obj(result["ok"]).json(exclude_none=True)
else:
raise ValueError(f"unexpected status: {result['status']}")
@@ -1,6 +1,6 @@
"""
Create a cache of the given rules.
This is only really intended to be used by CI to pre-cache rulesets
This is only really intended to be used by CI to pre-cache rulesets
that will be distributed within PyInstaller binaries.

Usage:

@@ -15,11 +15,10 @@ Unless required by applicable law or agreed to in writing, software distributed
is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
"""
import os
import sys
import time
import logging
import argparse
from pathlib import Path

import capa.main
import capa.rules

@@ -48,8 +47,9 @@ def main(argv=None):
logging.getLogger("capa").setLevel(logging.ERROR)

try:
os.makedirs(args.cache, exist_ok=True)
rules = capa.main.get_rules(args.rules, cache_dir=args.cache)
cache_dir = Path(args.cache)
cache_dir.mkdir(parents=True, exist_ok=True)
rules = capa.main.get_rules(args.rules, cache_dir)
logger.info("successfully loaded %s rules", len(rules))
except (IOError, capa.rules.InvalidRule, capa.rules.InvalidRuleSet) as e:
logger.error("%s", str(e))

@@ -57,9 +57,9 @@ def main(argv=None):

content = capa.rules.cache.get_ruleset_content(rules)
id = capa.rules.cache.compute_cache_identifier(content)
path = capa.rules.cache.get_cache_path(args.cache, id)
path = capa.rules.cache.get_cache_path(cache_dir, id)

assert os.path.exists(path)
assert path.exists()
logger.info("cached to: %s", path)
@@ -1,3 +1,10 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
"""
Convert capa rules to YARA rules (where this is possible)

@@ -37,6 +44,7 @@ import logging
import argparse
import datetime
import itertools
from pathlib import Path

import capa.main
import capa.rules

@@ -54,12 +62,13 @@ var_names = ["".join(letters) for letters in itertools.product(string.ascii_lowe

# this have to be the internal names used by capa.py which are sometimes different to the ones written out in the rules, e.g. "2 or more" is "Some", count is Range
unsupported = ["characteristic", "mnemonic", "offset", "subscope", "Range"]
# TODO shorten this list, possible stuff:
# further idea: shorten this list, possible stuff:
# - 2 or more strings: e.g.
# -- https://github.com/mandiant/capa-rules/blob/master/collection/file-managers/gather-direct-ftp-information.yml
# -- https://github.com/mandiant/capa-rules/blob/master/collection/browser/gather-firefox-profile-information.yml
# - count(string (1 rule: /executable/subfile/pe/contain-an-embedded-pe-file.yml)
# - count(match( could be done by creating the referenced rule a 2nd time with the condition, that it hits x times (only 1 rule: ./anti-analysis/anti-disasm/contain-anti-disasm-techniques.yml)
# - count(match( could be done by creating the referenced rule a 2nd time with the condition, that it hits x times
# (only 1 rule: ./anti-analysis/anti-disasm/contain-anti-disasm-techniques.yml)
# - it would be technically possible to get the "basic blocks" working, but the rules contain mostly other non supported statements in there => not worth the effort.

# collect all converted rules to be able to check if we have needed sub rules for match:

@@ -70,8 +79,8 @@ default_tags = "CAPA "
# minimum number of rounds to do be able to convert rules which depend on referenced rules in several levels of depth
min_rounds = 5

unsupported_capa_rules = open("unsupported_capa_rules.yml", "wb")
unsupported_capa_rules_names = open("unsupported_capa_rules.txt", "wb")
unsupported_capa_rules = Path("unsupported_capa_rules.yml").open("wb")
unsupported_capa_rules_names = Path("unsupported_capa_rules.txt").open("wb")
unsupported_capa_rules_list = []

condition_header = """

@@ -93,7 +102,7 @@ private rule capa_pe_file : CAPA {

def check_feature(statement, rulename):
if statement in unsupported:
logger.info("unsupported: " + statement + " in rule: " + rulename)
logger.info("unsupported: %s in rule: %s", statement, rulename)
return True
else:
return False
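This is the first of many hunks in this script converting string concatenation in log calls to %-style lazy formatting: the arguments are interpolated only if a handler actually emits the record, and non-string values no longer need explicit str()/repr() wrapping. A self-contained illustration of the difference; the logger name and sample values are arbitrary:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("capa2yara")

statement, rulename = "characteristic", "some rule"

# eager: the message string is built even if INFO is disabled
logger.info("unsupported: " + statement + " in rule: " + rulename)

# lazy: interpolation is deferred until the record is emitted
logger.info("unsupported: %s in rule: %s", statement, rulename)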
@@ -111,7 +120,7 @@ def convert_capa_number_to_yara_bytes(number):
sys.exit()

number = re.sub(r"^0[xX]", "", number)
logger.info("number ok: " + repr(number))
logger.info("number ok: %r", number)

# include spaces every 2 hex
bytesv = re.sub(r"(..)", r"\1 ", number)
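The re.sub(r"(..)", r"\1 ", number) call above splits a bare hex string into space-separated byte pairs for YARA's { ... } byte-pattern syntax. A quick check of what it produces; the input value is illustrative:

import re

number = "6a40688000"  # hypothetical value, 0x prefix already stripped
bytesv = re.sub(r"(..)", r"\1 ", number)
print(repr(bytesv))  # '6a 40 68 80 00 '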
@@ -128,7 +137,8 @@ def convert_capa_number_to_yara_bytes(number):


def convert_rule_name(rule_name):
# yara rule names: "Identifiers must follow the same lexical conventions of the C programming language, they can contain any alphanumeric character and the underscore character, but the first character cannot be a digit. Rule identifiers are case sensitive and cannot exceed 128 characters." so we replace any non-alphanum with _
# yara rule names: "Identifiers must follow the same lexical conventions of the C programming language, they can contain any alphanumeric character and the underscore character
# but the first character cannot be a digit. Rule identifiers are case sensitive and cannot exceed 128 characters." so we replace any non-alphanum with _
rule_name = re.sub(r"\W", "_", rule_name)
rule_name = "capa_" + rule_name

@@ -140,9 +150,9 @@ def convert_description(statement):
desc = statement.description
if desc:
yara_desc = " // " + desc
logger.info("using desc: " + repr(yara_desc))
logger.info("using desc: %r", yara_desc)
return yara_desc
except:
except Exception:
# no description
pass

@@ -151,7 +161,7 @@

def convert_rule(rule, rulename, cround, depth):
depth += 1
logger.info("recursion depth: " + str(depth))
logger.info("recursion depth: %d", depth)

global var_names

@@ -162,7 +172,7 @@ def convert_rule(rule, rulename, cround, depth):
return "BREAK", s_type
elif s_type == "string":
string = kid.value
logger.info("doing string: " + repr(string))
logger.info("doing string: %r", string)
string = string.replace("\\", "\\\\")
string = string.replace("\n", "\\n")
string = string.replace("\t", "\\t")

@@ -170,14 +180,16 @@ def convert_rule(rule, rulename, cround, depth):
yara_strings += "\t$" + var_name + ' = "' + string + '" ascii wide' + convert_description(kid) + "\n"
yara_condition += "\t$" + var_name + " "
elif s_type == "api" or s_type == "import":
# TODO: is it possible in YARA to make a difference between api & import?
# research needed to decide if its possible in YARA to make a difference between api & import?

# https://github.com/mandiant/capa-rules/blob/master/doc/format.md#api
api = kid.value
logger.info("doing api: " + repr(api))
logger.info("doing api: %r", api)

# e.g. kernel32.CreateNamedPipe => look for kernel32.dll and CreateNamedPipe
# TODO: improve .NET API call handling
#
# note: the handling of .NET API calls could be improved here.
# once we have a motivation and some examples, lets do that.
if "::" in api:
mod, api = api.split("::")

@@ -197,24 +209,25 @@ def convert_rule(rule, rulename, cround, depth):

# even looking for empty string in dll_regex doesn't work for some files (list below) with pe.imports so do just a string search
# yara_condition += '\tpe.imports(/.{0,30}/i, /' + api + '/) '
# 5fbbfeed28b258c42e0cfeb16718b31c, 2D3EDC218A90F03089CC01715A9F047F, 7EFF498DE13CC734262F87E6B3EF38AB, C91887D861D9BD4A5872249B641BC9F9, a70052c45e907820187c7e6bcdc7ecca, 0596C4EA5AA8DEF47F22C85D75AACA95
# 5fbbfeed28b258c42e0cfeb16718b31c, 2D3EDC218A90F03089CC01715A9F047F, 7EFF498DE13CC734262F87E6B3EF38AB,
# C91887D861D9BD4A5872249B641BC9F9, a70052c45e907820187c7e6bcdc7ecca, 0596C4EA5AA8DEF47F22C85D75AACA95
var_name = "api_" + var_names.pop(0)

# limit regex with word boundary \b but also search for appended A and W
# TODO: better use something like /(\\x00|\\x01|\\x02|\\x03|\\x04)' + api + '(A|W)?\\x00/ ???
# alternatively: use something like /(\\x00|\\x01|\\x02|\\x03|\\x04)' + api + '(A|W)?\\x00/ ???
yara_strings += "\t$" + var_name + " = /\\b" + api + "(A|W)?\\b/ ascii wide\n"
yara_condition += "\t$" + var_name + " "

elif s_type == "export":
export = kid.value
logger.info("doing export: " + repr(export))
logger.info("doing export: %r", export)

yara_condition += '\tpe.exports("' + export + '") '

elif s_type == "section":
# https://github.com/mandiant/capa-rules/blob/master/doc/format.md#section
section = kid.value
logger.info("doing section: " + repr(section))
logger.info("doing section: %r", section)

# e.g. - section: .rsrc
var_name_sec = var_names.pop(0)

@@ -226,14 +239,14 @@ def convert_rule(rule, rulename, cround, depth):
elif s_type == "match":
# https://github.com/mandiant/capa-rules/blob/master/doc/format.md#matching-prior-rule-matches-and-namespaces
match = kid.value
logger.info("doing match: " + repr(match))
logger.info("doing match: %r", match)

# e.g. - match: create process
# - match: host-interaction/file-system/write
match_rule_name = convert_rule_name(match)

if match.startswith(rulename + "/"):
logger.info("Depending on myself = basic block: " + match)
logger.info("Depending on myself = basic block: %s", match)
return "BREAK", "Depending on myself = basic block"

if match_rule_name in converted_rules:

@@ -241,14 +254,14 @@ def convert_rule(rule, rulename, cround, depth):
else:
# don't complain in the early rounds as there should be 3+ rounds (if all rules are converted)
if cround > min_rounds - 2:
logger.info("needed sub-rule not converted (yet, maybe in next round): " + repr(match))
logger.info("needed sub-rule not converted (yet, maybe in next round): %r", match)
return "BREAK", "needed sub-rule not converted"
else:
return "BREAK", "NOLOG"

elif s_type == "bytes":
bytesv = kid.get_value_str()
logger.info("doing bytes: " + repr(bytesv))
logger.info("doing bytes: %r", bytesv)
var_name = var_names.pop(0)

yara_strings += "\t$" + var_name + " = { " + bytesv + " }" + convert_description(kid) + "\n"
@@ -256,19 +269,19 @@ def convert_rule(rule, rulename, cround, depth):

elif s_type == "number":
number = kid.get_value_str()
logger.info("doing number: " + repr(number))
logger.info("doing number: %r", number)

if len(number) < 10:
logger.info("too short for byte search (until I figure out how to do it properly)" + repr(number))
logger.info("too short for byte search (until I figure out how to do it properly): %r", number)
return "BREAK", "Number too short"

# there's just one rule which contains 0xFFFFFFF but yara gives a warning if if used
if number == "0xFFFFFFFF":
return "BREAK", "slow byte pattern for YARA search"

logger.info("number ok: " + repr(number))
logger.info("number ok: %r", number)
number = convert_capa_number_to_yara_bytes(number)
logger.info("number ok: " + repr(number))
logger.info("number ok: %r", number)

var_name = "num_" + var_names.pop(0)
yara_strings += "\t$" + var_name + " = { " + number + "}" + convert_description(kid) + "\n"

@@ -276,7 +289,7 @@ def convert_rule(rule, rulename, cround, depth):

elif s_type == "regex":
regex = kid.get_value_str()
logger.info("doing regex: " + repr(regex))
logger.info("doing regex: %r", regex)

# change capas /xxx/i to yaras /xxx/ nocase, count will be used later to decide appending 'nocase'
regex, count = re.subn(r"/i$", "/", regex)

@@ -286,7 +299,8 @@ def convert_rule(rule, rulename, cround, depth):

# all .* in the regexes of capa look like they should be maximum 100 chars so take 1000 to speed up rules and prevent yara warnings on poor performance
regex = regex.replace(".*", ".{,1000}")
# strange: capa accepts regexes with unescaped / like - string: /com/exe4j/runtime/exe4jcontroller/i in capa-rules/compiler/exe4j/compiled-with-exe4j.yml, needs a fix for yara:
# strange: capa accepts regexes with unescaped /
# like - string: /com/exe4j/runtime/exe4jcontroller/i in capa-rules/compiler/exe4j/compiled-with-exe4j.yml, needs a fix for yara:
# would assume that get_value_str() gives the raw string
regex = re.sub(r"(?<!\\)/", r"\/", regex)

@@ -294,7 +308,8 @@ def convert_rule(rule, rulename, cround, depth):
# /reg(|.exe)/ => /reg(.exe)?/
regex = re.sub(r"\(\|([^\)]+)\)", r"(\1)?", regex)

# change beginning of line to null byte, e.g. /^open => /\x00open (not word boundary because we're not looking for the beginning of a word in a text but usually a function name if there's ^ in a capa rule)
# change beginning of line to null byte, e.g. /^open => /\x00open
# (not word boundary because we're not looking for the beginning of a word in a text but usually a function name if there's ^ in a capa rule)
regex = re.sub(r"^\^", r"\\x00", regex)

# regex = re.sub(r"^\^", r"\\b", regex)

@@ -310,7 +325,7 @@ def convert_rule(rule, rulename, cround, depth):
elif s_type == "Not" or s_type == "And" or s_type == "Or":
pass
else:
logger.info("something unhandled: " + repr(s_type))
logger.info("something unhandled: %r", s_type)
sys.exit()

return yara_strings, yara_condition

@@ -324,7 +339,7 @@ def convert_rule(rule, rulename, cround, depth):

statement = rule.name

logger.info("doing statement: " + statement)
logger.info("doing statement: %s", statement)

if check_feature(statement, rulename):
return "BREAK", statement, rule_comment, incomplete

@@ -332,18 +347,18 @@ def convert_rule(rule, rulename, cround, depth):
if statement == "And" or statement == "Or":
desc = convert_description(rule)
if desc:
logger.info("description of bool statement: " + repr(desc))
logger.info("description of bool statement: %r", desc)
yara_strings_list.append("\t" * depth + desc + "\n")
elif statement == "Not":
logger.info("one of those seldom nots: " + rule.name)
logger.info("one of those seldom nots: %s", rule.name)

# check for nested statements
try:
kids = rule.children
num_kids = len(kids)
logger.info("kids: " + kids)
except:
logger.info("no kids in rule: " + rule.name)
logger.info("kids: %s", kids)
except Exception:
logger.info("no kids in rule: %s", rule.name)

try:
# maybe it's "Not" = only one child:
@@ -351,31 +366,31 @@ def convert_rule(rule, rulename, cround, depth):
kids = [kid]
num_kids = 1
logger.info("kid: %s", kids)
except:
except Exception:
logger.info("no kid in rule: %s", rule.name)

# just a single statement without 'and' or 'or' before it in this rule
if "kids" not in locals().keys():
logger.info("no kids: " + rule.name)
logger.info("no kids: %s", rule.name)

yara_strings_sub, yara_condition_sub = do_statement(statement, rule)

if yara_strings_sub == "BREAK":
logger.info("Unknown feature at1: " + rule.name)
logger.info("Unknown feature at1: %s", rule.name)
return "BREAK", yara_condition_sub, rule_comment, incomplete
yara_strings_list.append(yara_strings_sub)
yara_condition_list.append(yara_condition_sub)

else:
x = 0
logger.info("doing kids: %r - len: %s", kids, num_kids)
logger.info("doing kids: %r - len: %d", kids, num_kids)
for kid in kids:
s_type = kid.name
logger.info("doing type: " + s_type + " kidnum: " + str(x))
logger.info("doing type: %s kidnum: %d", s_type, x)

if s_type == "Some":
cmin = kid.count
logger.info("Some type with minimum: " + str(cmin))
logger.info("Some type with minimum: %d", cmin)

if not cmin:
logger.info("this is optional: which means, we can just ignore it")

@@ -389,9 +404,9 @@ def convert_rule(rule, rulename, cround, depth):
# this is "x or more". could be coded for strings TODO
return "BREAK", "Some aka x or more (TODO)", rule_comment, incomplete

if s_type == "And" or s_type == "Or" or s_type == "Not" and not kid.name == "Some":
logger.info("doing bool with recursion: " + repr(kid))
logger.info("kid coming: " + repr(kid.name))
if s_type == "And" or s_type == "Or" or s_type == "Not" and kid.name != "Some":
logger.info("doing bool with recursion: %r", kid)
logger.info("kid coming: %r", kid.name)
# logger.info("grandchildren: " + repr(kid.children))

#

@@ -401,26 +416,29 @@ def convert_rule(rule, rulename, cround, depth):
kid, rulename, cround, depth
)

logger.info("coming out of this recursion, depth: " + repr(depth) + " s_type: " + s_type)
logger.info("coming out of this recursion, depth: %d s_type: %s", depth, s_type)

if yara_strings_sub == "BREAK":
logger.info(
"Unknown feature at2: " + rule.name + " - s_type: " + s_type + " - depth: " + str(depth)
"Unknown feature at2: %s - s_type: %s - depth: %d",
rule.name,
s_type,
depth,
)

# luckily this is only a killer, if we're inside an 'And', inside 'Or' we're just missing some coverage
# only accept incomplete rules in rounds > 3 because the reason might be a reference to another rule not converted yet because of missing dependencies
logger.info("rule.name, depth, cround: " + rule.name + ", " + str(depth) + ", " + str(cround))
logger.info("rule.name, depth, cround: %s, %d, %d", rule.name, depth, cround)
if rule.name == "Or" and depth == 1 and cround > min_rounds - 1:
logger.info(
"Unknown feature, just ignore this branch and keep the rest bec we're in Or (1): "
+ s_type
+ " - depth: "
+ str(depth)
"Unknown feature, just ignore this branch and keep the rest bec we're in Or (1): %s - depth: %s",
s_type,
depth,
)
# remove last 'or'
# yara_condition = re.sub(r'\sor $', ' ', yara_condition)
rule_comment += "This rule is incomplete because a branch inside an Or-statement had an unsupported feature and was skipped => coverage is reduced compared to the original capa rule. "
rule_comment += "This rule is incomplete because a branch inside an Or-statement had an unsupported feature and was skipped "
rule_comment += "=> coverage is reduced compared to the original capa rule. "
x += 1
incomplete = 1
continue
@@ -436,17 +454,17 @@ def convert_rule(rule, rulename, cround, depth):
yara_strings_sub, yara_condition_sub = do_statement(s_type, kid)

if yara_strings_sub == "BREAK":
logger.info("Unknown feature at3: " + rule.name)
logger.info("rule.name, depth, cround: " + rule.name + ", " + str(depth) + ", " + str(cround))
logger.info("Unknown feature at3: %s", rule.name)
logger.info("rule.name, depth, cround: %s, %d, %d", rule.name, depth, cround)
if rule.name == "Or" and depth == 1 and cround > min_rounds - 1:
logger.info(
"Unknown feature, just ignore this branch and keep the rest bec we're in Or (2): "
+ s_type
+ " - depth: "
+ str(depth)
"Unknown feature, just ignore this branch and keep the rest bec we're in Or (2): %s - depth: %d",
s_type,
depth,
)

rule_comment += "This rule is incomplete because a branch inside an Or-statement had an unsupported feature and was skipped => coverage is reduced compared to the original capa rule. "
rule_comment += "This rule is incomplete because a branch inside an Or-statement had an unsupported feature and was skipped"
rule_comment += "=> coverage is reduced compared to the original capa rule. "
x += 1
incomplete = 1
continue

@@ -480,7 +498,7 @@ def convert_rule(rule, rulename, cround, depth):

elif statement == "Some":
cmin = rule.count
logger.info("Some type with minimum at2: " + str(cmin))
logger.info("Some type with minimum at2: %d", cmin)

if not cmin:
logger.info("this is optional: which means, we can just ignore it")

@@ -493,7 +511,7 @@ def convert_rule(rule, rulename, cround, depth):
yara_condition = "not " + "".join(yara_condition_list) + " "
else:
if len(yara_condition_list) != 1:
logger.info("something wrong around here" + repr(yara_condition_list) + " - " + statement)
logger.info("something wrong around here %r - %s", yara_condition_list, statement)
sys.exit()

# strings might be empty with only conditions

@@ -502,8 +520,10 @@ def convert_rule(rule, rulename, cround, depth):

yara_condition = "\n\t" + yara_condition_list[0]

logger.info(f"# end of convert_rule() #strings: {len(yara_strings_list)} #conditions: {len(yara_condition_list)}")
logger.info(f"strings: {yara_strings} conditions: {yara_condition}")
logger.info(
"# end of convert_rule() #strings: %d #conditions: %d", len(yara_strings_list), len(yara_condition_list)
)
logger.info("strings: %s conditions: %s", yara_strings, yara_condition)

return yara_strings, yara_condition, rule_comment, incomplete

@@ -515,7 +535,7 @@ def output_yar(yara):
def output_unsupported_capa_rules(yaml, capa_rulename, url, reason):
if reason != "NOLOG":
if capa_rulename not in unsupported_capa_rules_list:
logger.info("unsupported: " + capa_rulename + " - reason: " + reason + " - url: " + url)
logger.info("unsupported: %s - reason: %s, - url: %s", capa_rulename, reason, url)

unsupported_capa_rules_list.append(capa_rulename)
unsupported_capa_rules.write(yaml.encode("utf-8") + b"\n")
@@ -539,32 +559,32 @@ def convert_rules(rules, namespaces, cround, make_priv):
rule_name = convert_rule_name(rule.name)

if rule.is_subscope_rule():
logger.info("skipping sub scope rule capa: " + rule.name)
logger.info("skipping sub scope rule capa: %s", rule.name)
continue

if rule_name in converted_rules:
logger.info("skipping already converted rule capa: " + rule.name + " - yara rule: " + rule_name)
logger.info("skipping already converted rule capa: %s - yara rule: %s", rule.name, rule_name)
continue

logger.info("-------------------------- DOING RULE CAPA: " + rule.name + " - yara rule: " + rule_name)
logger.info("-------------------------- DOING RULE CAPA: %s - yara rule: %s", rule.name, rule_name)
if "capa/path" in rule.meta:
url = get_rule_url(rule.meta["capa/path"])
else:
url = "no url"

logger.info("URL: " + url)
logger.info("statements: " + repr(rule.statement))
logger.info("URL: %s", url)
logger.info("statements: %r", rule.statement)

# don't really know what that passed empty string is good for :)
dependencies = rule.get_dependencies(namespaces)

if len(dependencies):
logger.info("Dependencies at4: " + rule.name + " - dep: " + str(dependencies))
logger.info("Dependencies at4: %s - dep: %s", rule.name, dependencies)

for dep in dependencies:
logger.info("Dependencies at44: " + dep)
logger.info("Dependencies at44: %s", dep)
if not dep.startswith(rule.name + "/"):
logger.info("Depending on another rule: " + dep)
logger.info("Depending on another rule: %s", dep)
continue

yara_strings, yara_condition, rule_comment, incomplete = convert_rule(rule.statement, rule.name, cround, 0)

@@ -573,7 +593,7 @@ def convert_rules(rules, namespaces, cround, make_priv):
# only give up if in final extra round #9000
if cround == 9000:
output_unsupported_capa_rules(rule.to_yaml(), rule.name, url, yara_condition)
logger.info("Unknown feature at5: " + rule.name)
logger.info("Unknown feature at5: %s", rule.name)
else:
yara_meta = ""
metas = rule.meta

@@ -589,24 +609,24 @@ def convert_rules(rules, namespaces, cround, make_priv):
if meta_name == "att&ck":
meta_name = "attack"
for attack in list(metas[meta]):
logger.info("attack:" + attack)
logger.info("attack: %s", attack)
# cut out tag in square brackets, e.g. Defense Evasion::Obfuscated Files or Information [T1027] => T1027
r = re.search(r"\[(T[^\]]*)", attack)
if r:
tag = r.group(1)
logger.info("attack tag:" + tag)
logger.info("attack tag: %s", tag)
tag = re.sub(r"\W", "_", tag)
rule_tags += tag + " "
# also add a line "attack = ..." to yaras 'meta:' to keep the long description:
yara_meta += '\tattack = "' + attack + '"\n'
elif meta_name == "mbc":
for mbc in list(metas[meta]):
logger.info("mbc:" + mbc)
logger.info("mbc: %s", mbc)
# cut out tag in square brackets, e.g. Cryptography::Encrypt Data::RC6 [C0027.010] => C0027.010
r = re.search(r"\[(.[^\]]*)", mbc)
if r:
tag = r.group(1)
logger.info("mbc tag:" + tag)
logger.info("mbc tag: %s", tag)
tag = re.sub(r"\W", "_", tag)
rule_tags += tag + " "

@@ -669,7 +689,6 @@ def convert_rules(rules, namespaces, cround, make_priv):

yara += " condition:" + condition_header + yara_condition + "\n}"

# TODO: now the rule is finished and could be automatically checked with the capa-testfile(s) named in meta (doing it for all of them using yara-ci upload at the moment)
output_yar(yara)
converted_rules.append(rule_name)
count_incomplete += incomplete
@@ -703,12 +722,12 @@ def main(argv=None):
     logging.getLogger("capa2yara").setLevel(level)

     try:
-        rules = capa.main.get_rules([args.rules])
+        rules = capa.main.get_rules([Path(args.rules)])
         namespaces = capa.rules.index_rules_by_namespace(list(rules.rules.values()))
-        logger.info("successfully loaded %s rules (including subscope rules which will be ignored)", len(rules))
+        logger.info("successfully loaded %d rules (including subscope rules which will be ignored)", len(rules))
         if args.tag:
             rules = rules.filter_rules_by_meta(args.tag)
-            logger.debug("selected %s rules", len(rules))
+            logger.debug("selected %d rules", len(rules))
             for i, r in enumerate(rules.rules, 1):
                 logger.debug(" %d. %s", i, r)
     except (IOError, capa.rules.InvalidRule, capa.rules.InvalidRuleSet) as e:
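get_rules now expects pathlib.Path objects, matching the repo-wide pathlib migration visible throughout this diff. A minimal calling sketch, assuming a capa-rules checkout at ./capa-rules:

from pathlib import Path

import capa.main

# hypothetical location of a local capa-rules checkout
rules_dir = Path("./capa-rules")
rules = capa.main.get_rules([rules_dir])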
@@ -719,7 +738,7 @@ def main(argv=None):
         "// Rules from Mandiant's https://github.com/mandiant/capa-rules converted to YARA using https://github.com/mandiant/capa/blob/master/scripts/capa2yara.py by Arnim Rupp"
     )
     output_yar(
-        "// Beware: These are less rules than capa (because not all fit into YARA, stats at EOF) and is less precise because e.g. capas function scopes are applied to the whole file"
+        "// Beware: These are less rules than capa (because not all fit into YARA, stats at EOF) and is less precise e.g. capas function scopes are applied to the whole file"
     )
     output_yar(
         '// Beware: Some rules are incomplete because an optional branch was not supported by YARA. These rules are marked in a comment in meta: (search for "incomplete")'
@@ -740,7 +759,7 @@ def main(argv=None):
     count_incomplete = 0
     while num_rules != len(converted_rules) or cround < min_rounds:
         cround += 1
-        logger.info("doing convert_rules(), round: " + str(cround))
+        logger.info("doing convert_rules(), round: %d", cround)
         num_rules = len(converted_rules)
         count_incomplete += convert_rules(rules, namespaces, cround, make_priv)

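The while loop above re-runs convert_rules() until a round converts nothing new (and at least min_rounds rounds have run), because a rule that references another rule can only be converted after its dependency has been. A stripped-down, hypothetical sketch of that fixed-point pattern:

from dataclasses import dataclass, field

@dataclass
class Item:
    name: str
    deps: set = field(default_factory=set)

def convert_all(items, min_rounds=2):
    converted: set = set()
    num_done = -1
    cround = 0
    # keep looping until a full round adds nothing new (a fixed point)
    while num_done != len(converted) or cround < min_rounds:
        cround += 1
        num_done = len(converted)
        for item in items:
            if item.deps <= converted:  # all dependencies already converted?
                converted.add(item.name)
    return converted

# "b" depends on "a", so it only converts in the second round
print(convert_all([Item("b", {"a"}), Item("a")]))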
@@ -750,7 +769,7 @@ def main(argv=None):
     stats = "\n// converted rules : " + str(len(converted_rules))
     stats += "\n// among those are incomplete : " + str(count_incomplete)
     stats += "\n// unconverted rules : " + str(len(unsupported_capa_rules_list)) + "\n"
-    logger.info(stats)
+    logger.info("%s", stats)
     output_yar(stats)

     return 0
scripts/capa_as_library.py:

@@ -1,8 +1,16 @@
 #!/usr/bin/env python3
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.

 import json
 import collections
-from typing import Any, Dict
+from typing import Any, Set, Dict
+from pathlib import Path

 import capa.main
 import capa.rules
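The new Set import is what makes the -> Set[str] annotation on find_subrule_matches (below) resolvable on Python versions that still require the typing generics; a minimal illustration:

from typing import Set

def matched_rule_names() -> Set[str]:
    # annotation needs typing.Set on older interpreters
    return {"create process", "write file"}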
@@ -13,7 +21,6 @@ import capa.render.utils as rutils
 import capa.render.default
 import capa.render.result_document as rd
 import capa.features.freeze.features as frzf
-from capa.engine import *
 from capa.features.common import OS_AUTO, FORMAT_AUTO


@@ -30,7 +37,7 @@ def find_subrule_matches(doc: rd.ResultDocument) -> Set[str]:
     collect the rule names that have been matched as a subrule match.
     this way we can avoid displaying entries for things that are too specific.
     """
-    matches = set([])
+    matches = set()

     def rec(node: rd.Match):
         if not node.success:
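set([]) built a throwaway list just to make an empty set; set() says it directly, matching the {} and [] literals used elsewhere in this diff. The rec helper it feeds is a standard recursive walk over the match tree; a self-contained sketch with a hypothetical node shape (not capa's real Match model):

from dataclasses import dataclass, field
from typing import List, Set

@dataclass
class Match:
    success: bool
    rule_name: str = ""
    children: List["Match"] = field(default_factory=list)

def find_subrule_matches(root: Match) -> Set[str]:
    matches: Set[str] = set()

    def rec(node: Match) -> None:
        if not node.success:
            # failed branches cannot contribute matches
            return
        if node.rule_name:
            matches.add(node.rule_name)
        for child in node.children:
            rec(child)

    rec(root)
    return matches

tree = Match(True, "", [Match(True, "child rule"), Match(False, "missed rule")])
print(find_subrule_matches(tree))  # {'child rule'}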
@@ -66,7 +73,7 @@ def render_capabilities(doc: rd.ResultDocument, result):
     """
     subrule_matches = find_subrule_matches(doc)

-    result["CAPABILITY"] = dict()
+    result["CAPABILITY"] = {}
     for rule in rutils.capability_rules(doc):
         if rule.meta.name in subrule_matches:
             # rules that are also matched by other rules should not get rendered by default.
@@ -80,7 +87,7 @@ def render_capabilities(doc: rd.ResultDocument, result):
         else:
             capability = f"{rule.meta.name} ({count} matches)"

-        result["CAPABILITY"].setdefault(rule.meta.namespace, list())
+        result["CAPABILITY"].setdefault(rule.meta.namespace, [])
         result["CAPABILITY"][rule.meta.namespace].append(capability)


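setdefault(ns, []) keeps the grouping idiom while dropping the needless list() call. The same pattern in isolation, with sample capa-style namespaces:

result = {}
capabilities = [
    ("communication/http", "send HTTP request"),
    ("communication/http", "receive HTTP response"),
    ("host-interaction/file-system", "write file"),
]

for namespace, capability in capabilities:
    # create the bucket on first sight, then append either way
    result.setdefault(namespace, [])
    result[namespace].append(capability)

print(result["communication/http"])  # ['send HTTP request', 'receive HTTP response']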
@@ -97,7 +104,7 @@ def render_attack(doc, result):
         'EXECUTION': ['Shared Modules [T1129]']}
     }
     """
-    result["ATTCK"] = dict()
+    result["ATTCK"] = {}
     tactics = collections.defaultdict(set)
     for rule in rutils.capability_rules(doc):
         if not rule.meta.attack:
@@ -130,7 +137,7 @@ def render_mbc(doc, result):
         '[C0021.004]']}
     }
     """
-    result["MBC"] = dict()
+    result["MBC"] = {}
     objectives = collections.defaultdict(set)
     for rule in rutils.capability_rules(doc):
         if not rule.meta.mbc:
@@ -150,7 +157,7 @@ def render_mbc(doc, result):


 def render_dictionary(doc: rd.ResultDocument) -> Dict[str, Any]:
-    result: Dict[str, Any] = dict()
+    result: Dict[str, Any] = {}
     render_meta(doc, result)
     render_attack(doc, result)
     render_mbc(doc, result)
@@ -160,7 +167,7 @@ def render_dictionary(doc: rd.ResultDocument) -> Dict[str, Any]:


 # ==== render dictionary helpers
-def capa_details(rules_path, file_path, output_format="dictionary"):
+def capa_details(rules_path: Path, file_path: Path, output_format="dictionary"):
     # load rules from disk
     rules = capa.main.get_rules([rules_path])

@@ -171,11 +178,14 @@ def capa_details(rules_path, file_path, output_format="dictionary"):
     capabilities, counts = capa.main.find_capabilities(rules, extractor, disable_progress=True)

     # collect metadata (used only to make rendering more complete)
-    meta = capa.main.collect_metadata([], file_path, FORMAT_AUTO, OS_AUTO, rules_path, extractor)
-    meta["analysis"].update(counts)
-    meta["analysis"]["layout"] = capa.main.compute_layout(rules, extractor, capabilities)
+    meta = capa.main.collect_metadata([], file_path, FORMAT_AUTO, OS_AUTO, [rules_path], extractor)
+
+    meta.analysis.feature_counts = counts["feature_counts"]
+    meta.analysis.library_functions = counts["library_functions"]
+    meta.analysis.layout = capa.main.compute_layout(rules, extractor, capabilities)
+
     capa_output: Any = False

     if output_format == "dictionary":
         # ...as python dictionary, simplified as textable but in dictionary
         doc = rd.ResultDocument.from_capa(meta, rules, capabilities)
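This hunk tracks an upstream API change: collect_metadata now takes a list of rule paths, and the counts land on typed attributes of the result-document model rather than in a nested dict. A rough sketch of the shape difference, using stand-in dataclasses (not capa's real model):

from dataclasses import dataclass, field

@dataclass
class Analysis:
    feature_counts: dict = field(default_factory=dict)
    library_functions: tuple = ()
    layout: object = None

@dataclass
class Metadata:
    analysis: Analysis = field(default_factory=Analysis)

# before: meta["analysis"].update(counts) on a plain dict
# after: explicit, named attributes on a typed object
meta = Metadata()
counts = {"feature_counts": {"file": 10}, "library_functions": ()}
meta.analysis.feature_counts = counts["feature_counts"]
meta.analysis.library_functions = counts["library_functions"]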
@@ -193,18 +203,18 @@ def capa_details(rules_path, file_path, output_format="dictionary"):

 if __name__ == "__main__":
     import sys
-    import os.path
     import argparse

-    RULES_PATH = os.path.join(os.path.dirname(__file__), "..", "rules")
+    RULES_PATH = capa.main.get_default_root() / "rules"

     parser = argparse.ArgumentParser(description="Extract capabilities from a file")
     parser.add_argument("file", help="file to extract capabilities from")
-    parser.add_argument("--rules", help="path to rules directory", default=os.path.abspath(RULES_PATH))
+    parser.add_argument("--rules", help="path to rules directory", default=RULES_PATH)
     parser.add_argument(
         "--output", help="output format", choices=["dictionary", "json", "texttable"], default="dictionary"
     )
     args = parser.parse_args()

-    print(capa_details(args.rules, args.file, args.output))
+    if args.rules != RULES_PATH:
+        args.rules = Path(args.rules)
+    print(capa_details(args.rules, Path(args.file), args.output))
     sys.exit(0)
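With these changes the script is usable both from the command line and as a library; a hedged usage sketch, assuming the module is importable as capa_as_library and that the rules directory and sample exist at the given paths:

from pathlib import Path

from capa_as_library import capa_details  # assumes scripts/ is on the import path

rules = Path("rules")
sample = Path("suspicious.exe")

# the default "dictionary" format returns CAPABILITY, ATTCK and MBC sections
result = capa_details(rules, sample, output_format="dictionary")
print(sorted(result.keys()))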
Some files were not shown because too many files have changed in this diff.