mirror of https://github.com/mandiant/capa.git
synced 2025-12-19 18:45:00 -08:00

Compare commits: v5.1.0...arm-suppor (633 commits)
.devcontainer/devcontainer.json

@@ -41,7 +41,7 @@
     // "forwardPorts": [],

     // Use 'postCreateCommand' to run commands after the container is created.
-    "postCreateCommand": "git submodule update --init && pip3 install --user -e .[dev]",
+    "postCreateCommand": "git submodule update --init && pip3 install --user -e .[dev] && pre-commit install",

    // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
    "remoteUser": "vscode",
.github/CONTRIBUTING.md (13 changes, vendored)

@@ -159,12 +159,25 @@ The process described here has several goals:

 Please follow these steps to have your contribution considered by the maintainers:

+0. Sign the [Contributor License Agreement](#contributor-license-agreement)
 1. Follow the [styleguides](#styleguides)
 2. Update the CHANGELOG and add tests and documentation. In case they are not needed, indicate it in [the PR template](pull_request_template.md).
 3. After you submit your pull request, verify that all [status checks](https://help.github.com/articles/about-status-checks/) are passing <details><summary>What if the status checks are failing? </summary>If a status check is failing, and you believe that the failure is unrelated to your change, please leave a comment on the pull request explaining why you believe the failure is unrelated. A maintainer will re-run the status check for you. If we conclude that the failure was a false positive, then we will open an issue to track that problem with our status check suite.</details>

 While the prerequisites above must be satisfied prior to having your pull request reviewed, the reviewer(s) may ask you to complete additional design work, tests, or other changes before your pull request can be ultimately accepted.

+### Contributor License Agreement
+
+Contributions to this project must be accompanied by a Contributor License
+Agreement. You (or your employer) retain the copyright to your contribution,
+this simply gives us permission to use and redistribute your contributions as
+part of the project. Head over to <https://cla.developers.google.com/> to see
+your current agreements on file or to sign a new one.
+
+You generally only need to submit a CLA once, so if you've already submitted one
+(even if it was for a different project), you probably don't need to do it
+again.
+
 ## Styleguides

 ### Git Commit Messages
.github/flake8.ini (41 changes, vendored, new file)

@@ -0,0 +1,41 @@
+[flake8]
+max-line-length = 120
+
+extend-ignore =
+    # E203: whitespace before ':' (black does this)
+    E203,
+    # F401: `foo` imported but unused (prefer ruff)
+    F401,
+    # F811 Redefinition of unused `foo` (prefer ruff)
+    F811,
+    # E501 line too long (prefer black)
+    E501,
+    # B010 Do not call setattr with a constant attribute value
+    B010,
+    # G200 Logging statement uses exception in arguments
+    G200,
+    # SIM102 Use a single if-statement instead of nested if-statements
+    # doesn't provide a space for commenting or logical separation of conditions
+    SIM102,
+    # SIM114 Use logical or and a single body
+    # makes logic trees too complex
+    SIM114,
+    # SIM117 Use 'with Foo, Bar:' instead of multiple with statements
+    # makes lines too long
+    SIM117
+
+per-file-ignores =
+    # T201 print found.
+    #
+    # scripts are meant to print output
+    scripts/*: T201
+    # capa.exe is meant to print output
+    capa/main.py: T201
+    # IDA tests emit results to output window so need to print
+    tests/test_ida_features.py: T201
+    # utility used to find the Binary Ninja API via invoking python.exe
+    capa/features/extractors/binja/find_binja_api.py: T201
+
+copyright-check = True
+copyright-min-file-size = 1
+copyright-regexp = Copyright \(C\) 2023 Mandiant, Inc. All Rights Reserved.
.github/mypy/mypy.ini (6 changes, vendored)

@@ -42,6 +42,9 @@ ignore_missing_imports = True
 [mypy-idautils.*]
 ignore_missing_imports = True

+[mypy-ida_auto.*]
+ignore_missing_imports = True
+
 [mypy-ida_bytes.*]
 ignore_missing_imports = True

@@ -83,3 +86,6 @@ ignore_missing_imports = True

 [mypy-netnode.*]
 ignore_missing_imports = True
+
+[mypy-ghidra.*]
+ignore_missing_imports = True
.github/pyinstaller/hooks/hook-vivisect.py (43 changes, vendored)

@@ -38,39 +38,36 @@ hiddenimports = [
     "vivisect",
     "vivisect.analysis",
     "vivisect.analysis.amd64",
-    "vivisect.analysis.amd64",
     "vivisect.analysis.amd64.emulation",
     "vivisect.analysis.amd64.golang",
     "vivisect.analysis.crypto",
-    "vivisect.analysis.crypto",
     "vivisect.analysis.crypto.constants",
     "vivisect.analysis.elf",
     "vivisect.analysis.elf.elfplt",
+    "vivisect.analysis.elf.elfplt_late",
     "vivisect.analysis.elf.libc_start_main",
     "vivisect.analysis.generic",
-    "vivisect.analysis.generic",
     "vivisect.analysis.generic.codeblocks",
     "vivisect.analysis.generic.emucode",
     "vivisect.analysis.generic.entrypoints",
     "vivisect.analysis.generic.funcentries",
     "vivisect.analysis.generic.impapi",
     "vivisect.analysis.generic.linker",
     "vivisect.analysis.generic.mkpointers",
     "vivisect.analysis.generic.noret",
     "vivisect.analysis.generic.pointers",
     "vivisect.analysis.generic.pointertables",
     "vivisect.analysis.generic.relocations",
     "vivisect.analysis.generic.strconst",
     "vivisect.analysis.generic.switchcase",
     "vivisect.analysis.generic.symswitchcase",
     "vivisect.analysis.generic.thunks",
-    "vivisect.analysis.generic.noret",
     "vivisect.analysis.i386",
-    "vivisect.analysis.i386",
     "vivisect.analysis.i386.calling",
     "vivisect.analysis.i386.golang",
     "vivisect.analysis.i386.importcalls",
+    "vivisect.analysis.i386.instrhook",
     "vivisect.analysis.i386.thunk_bx",
-    "vivisect.analysis.ms",
+    "vivisect.analysis.i386.thunk_reg",
     "vivisect.analysis.ms",
     "vivisect.analysis.ms.hotpatch",
     "vivisect.analysis.ms.localhints",

@@ -81,8 +78,40 @@ hiddenimports = [
     "vivisect.impapi.posix.amd64",
     "vivisect.impapi.posix.i386",
     "vivisect.impapi.windows",
+    "vivisect.impapi.windows.advapi_32",
+    "vivisect.impapi.windows.advapi_64",
     "vivisect.impapi.windows.amd64",
+    "vivisect.impapi.windows.gdi_32",
+    "vivisect.impapi.windows.gdi_64",
     "vivisect.impapi.windows.i386",
+    "vivisect.impapi.windows.kernel_32",
+    "vivisect.impapi.windows.kernel_64",
+    "vivisect.impapi.windows.msvcr100_32",
+    "vivisect.impapi.windows.msvcr100_64",
+    "vivisect.impapi.windows.msvcr110_32",
+    "vivisect.impapi.windows.msvcr110_64",
+    "vivisect.impapi.windows.msvcr120_32",
+    "vivisect.impapi.windows.msvcr120_64",
+    "vivisect.impapi.windows.msvcr71_32",
+    "vivisect.impapi.windows.msvcr80_32",
+    "vivisect.impapi.windows.msvcr80_64",
+    "vivisect.impapi.windows.msvcr90_32",
+    "vivisect.impapi.windows.msvcr90_64",
+    "vivisect.impapi.windows.msvcrt_32",
+    "vivisect.impapi.windows.msvcrt_64",
+    "vivisect.impapi.windows.ntdll_32",
+    "vivisect.impapi.windows.ntdll_64",
+    "vivisect.impapi.windows.ole_32",
+    "vivisect.impapi.windows.ole_64",
+    "vivisect.impapi.windows.rpcrt4_32",
+    "vivisect.impapi.windows.rpcrt4_64",
+    "vivisect.impapi.windows.shell_32",
+    "vivisect.impapi.windows.shell_64",
+    "vivisect.impapi.windows.user_32",
+    "vivisect.impapi.windows.user_64",
+    "vivisect.impapi.windows.ws2plus_32",
+    "vivisect.impapi.windows.ws2plus_64",
+    "vivisect.impapi.winkern",
     "vivisect.impapi.winkern.i386",
     "vivisect.impapi.winkern.amd64",
     "vivisect.parsers.blob",
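The hook above must enumerate vivisect's analysis and impapi modules by hand because vivisect imports them by dotted name at runtime, a pattern PyInstaller's static scan cannot follow. A toy sketch of the pattern (the loader function here is hypothetical, not vivisect's actual API):

```python
# vivisect-style dynamic loading: nothing below names a module statically,
# so a frozen build would omit it unless the PyInstaller hook lists it in
# hiddenimports. `load_analysis_module` is a made-up illustration.
import importlib

def load_analysis_module(name: str):
    # e.g. name = "vivisect.analysis.generic.entrypoints"
    return importlib.import_module(name)
```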
.github/ruff.toml (43 changes, vendored, new file)

@@ -0,0 +1,43 @@
+# Enable the pycodestyle (`E`) and Pyflakes (`F`) rules by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E", "F"]
+
+# Allow autofix for all enabled rules (when `--fix`) is provided.
+fixable = ["ALL"]
+unfixable = []
+
+# E402 module level import not at top of file
+# E722 do not use bare 'except'
+# E501 line too long
+ignore = ["E402", "E722", "E501"]
+
+line-length = 120
+
+exclude = [
+    # Exclude a variety of commonly ignored directories.
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "venv",
+    # protobuf generated files
+    "*_pb2.py",
+    "*_pb2.pyi"
+]
.github/tox.ini (10 changes, vendored, deleted)

@@ -1,10 +0,0 @@
-[pycodestyle]
-; E402: module level import not at top of file
-; W503: line break before binary operator
-; E231 missing whitespace after ',' (emitted by black)
-; E203 whitespace before ':' (emitted by black)
-ignore = E402,W503,E203,E231
-max-line-length = 160
-statistics = True
-count = True
-exclude = .*
.github/workflows/build.yml (3 changes, vendored)

@@ -6,6 +6,9 @@ on:
   release:
     types: [edited, published]

+permissions:
+  contents: write
+
 jobs:
   build:
     name: PyInstaller for ${{ matrix.os }}
.github/workflows/changelog.yml (2 changes, vendored)

@@ -7,6 +7,8 @@ on:
   pull_request_target:
     types: [opened, edited, synchronize]

+permissions: read-all
+
 jobs:
   check_changelog:
     # no need to check for dependency updates via dependabot
.github/workflows/publish.yml (38 changes, vendored)

@@ -1,29 +1,41 @@
-# This workflows will upload a Python Package using Twine when a release is created
-# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
+# use PyPI trusted publishing, as described here:
+# https://blog.trailofbits.com/2023/05/23/trusted-publishing-a-new-benchmark-for-packaging-security/
 name: publish to pypi

 on:
   release:
     types: [published]

 permissions:
   contents: write

 jobs:
-  deploy:
-    runs-on: ubuntu-20.04
+  pypi-publish:
+    runs-on: ubuntu-latest
+    environment:
+      name: release
+    permissions:
+      id-token: write
     steps:
    - uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
    - name: Set up Python
      uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
      with:
-        python-version: '3.7'
+        python-version: '3.8'
    - name: Install dependencies
      run: |
        python -m pip install --upgrade pip
-        pip install setuptools wheel twine
-    - name: Build and publish
-      env:
-        TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
-        TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
-      run: |
-        python setup.py sdist bdist_wheel
-        twine upload --skip-existing dist/*
+        pip install -e .[build]
+    - name: build package
+      run: |
+        python -m build
+    - name: upload package artifacts
+      uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
+      with:
+        path: dist/*
+    - name: publish package
+      uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 # release/v1
+      with:
+        skip-existing: true
+        verbose: true
+        print-hash: true
.github/workflows/tag.yml (2 changes, vendored)

@@ -4,6 +4,8 @@ on:
   release:
     types: [published]

+permissions: read-all
+
 jobs:
   tag:
     name: Tag capa rules
.github/workflows/tests.yml (103 changes, vendored)

@@ -6,6 +6,8 @@ on:
   pull_request:
     branches: [ master ]

+permissions: read-all
+
 # save workspaces to speed up testing
 env:
   CAPA_SAVE_WORKSPACE: "True"

@@ -27,20 +29,23 @@ jobs:
     steps:
     - name: Checkout capa
       uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
-    - name: Set up Python 3.8
+    # use latest available python to take advantage of best performance
+    - name: Set up Python 3.11
       uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
       with:
-        python-version: "3.8"
+        python-version: "3.11"
     - name: Install dependencies
       run: pip install -e .[dev]
+    - name: Lint with ruff
+      run: pre-commit run ruff
     - name: Lint with isort
-      run: isort --profile black --length-sort --line-width 120 --skip-glob "*_pb2.py" -c .
+      run: pre-commit run isort
     - name: Lint with black
-      run: black -l 120 --extend-exclude ".*_pb2.py" --check .
-    - name: Lint with pycodestyle
-      run: pycodestyle --exclude="*_pb2.py" --show-source capa/ scripts/ tests/
+      run: pre-commit run black
+    - name: Lint with flake8
+      run: pre-commit run flake8
     - name: Check types with mypy
-      run: mypy --config-file .github/mypy/mypy.ini --check-untyped-defs capa/ scripts/ tests/
+      run: pre-commit run mypy

   rule_linter:
     runs-on: ubuntu-20.04

@@ -49,12 +54,12 @@ jobs:
       uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
       with:
         submodules: recursive
-    - name: Set up Python 3.8
+    - name: Set up Python 3.11
       uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
       with:
-        python-version: "3.8"
+        python-version: "3.11"
     - name: Install capa
-      run: pip install -e .
+      run: pip install -e .[dev]
     - name: Run rule linter
       run: python scripts/lint.py rules/

@@ -67,7 +72,7 @@ jobs:
      matrix:
        os: [ubuntu-20.04, windows-2019, macos-11]
        # across all operating systems
-       python-version: ["3.7", "3.11"]
+       python-version: ["3.8", "3.11"]
       include:
         # on Ubuntu run these as well
         - os: ubuntu-20.04

@@ -94,36 +99,102 @@ jobs:
         run: pytest -v tests/

   binja-tests:
-    name: Binary Ninja tests for ${{ matrix.python-version }} on ${{ matrix.os }}
+    name: Binary Ninja tests for ${{ matrix.python-version }}
+    env:
+      BN_SERIAL: ${{ secrets.BN_SERIAL }}
     runs-on: ubuntu-20.04
     needs: [code_style, rule_linter]
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7", "3.11"]
+        python-version: ["3.8", "3.11"]
     steps:
     - name: Checkout capa with submodules
+      # do only run if BN_SERIAL is available, have to do this in every step, see https://github.com/orgs/community/discussions/26726#discussioncomment-3253118
+      if: ${{ env.BN_SERIAL != 0 }}
       uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
       with:
         submodules: recursive
     - name: Set up Python ${{ matrix.python-version }}
+      if: ${{ env.BN_SERIAL != 0 }}
       uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
       with:
         python-version: ${{ matrix.python-version }}
     - name: Install pyyaml
+      if: ${{ env.BN_SERIAL != 0 }}
       run: sudo apt-get install -y libyaml-dev
     - name: Install capa
+      if: ${{ env.BN_SERIAL != 0 }}
       run: pip install -e .[dev]
     - name: install Binary Ninja
-      env:
-        BN_SERIAL: ${{ secrets.BN_SERIAL }}
+      if: ${{ env.BN_SERIAL != 0 }}
       run: |
         mkdir ./.github/binja
         curl "https://raw.githubusercontent.com/Vector35/binaryninja-api/6812c97/scripts/download_headless.py" -o ./.github/binja/download_headless.py
-        python ./.github/binja/download_headless.py --serial $BN_SERIAL --output .github/binja/BinaryNinja-headless.zip
+        python ./.github/binja/download_headless.py --serial ${{ env.BN_SERIAL }} --output .github/binja/BinaryNinja-headless.zip
         unzip .github/binja/BinaryNinja-headless.zip -d .github/binja/
         python .github/binja/binaryninja/scripts/install_api.py --install-on-root --silent
     - name: Run tests
+      if: ${{ env.BN_SERIAL != 0 }}
       env:
         BN_LICENSE: ${{ secrets.BN_LICENSE }}
       run: pytest -v tests/test_binja_features.py # explicitly refer to the binja tests for performance. other tests run above.

+  ghidra-tests:
+    name: Ghidra tests for ${{ matrix.python-version }}
+    runs-on: ubuntu-20.04
+    needs: [code_style, rule_linter]
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ["3.8", "3.11"]
+        java-version: ["17"]
+        gradle-version: ["7.3"]
+        ghidra-version: ["10.3"]
+        public-version: ["PUBLIC_20230510"] # for ghidra releases
+        jep-version: ["4.1.1"]
+        ghidrathon-version: ["3.0.0"]
+    steps:
+    - name: Checkout capa with submodules
+      uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
+      with:
+        submodules: true
+    - name: Set up Python ${{ matrix.python-version }}
+      uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
+      with:
+        python-version: ${{ matrix.python-version }}
+    - name: Set up Java ${{ matrix.java-version }}
+      uses: actions/setup-java@5ffc13f4174014e2d4d4572b3d74c3fa61aeb2c2 # v3
+      with:
+        distribution: 'temurin'
+        java-version: ${{ matrix.java-version }}
+    - name: Set up Gradle ${{ matrix.gradle-version }}
+      uses: gradle/gradle-build-action@40b6781dcdec2762ad36556682ac74e31030cfe2 # v2.5.1
+      with:
+        gradle-version: ${{ matrix.gradle-version }}
+    - name: Install Jep ${{ matrix.jep-version }}
+      run: pip install jep==${{ matrix.jep-version }}
+    - name: Install Ghidra ${{ matrix.ghidra-version }}
+      run: |
+        mkdir ./.github/ghidra
+        wget "https://github.com/NationalSecurityAgency/ghidra/releases/download/Ghidra_${{ matrix.ghidra-version }}_build/ghidra_${{ matrix.ghidra-version }}_${{ matrix.public-version }}.zip" -O ./.github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC.zip
+        unzip .github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC.zip -d .github/ghidra/
+    - name: Install Ghidrathon
+      run: |
+        mkdir ./.github/ghidrathon
+        curl -o ./.github/ghidrathon/ghidrathon-${{ matrix.ghidrathon-version }}.zip "https://codeload.github.com/mandiant/Ghidrathon/zip/refs/tags/v${{ matrix.ghidrathon-version }}"
+        unzip .github/ghidrathon/ghidrathon-${{ matrix.ghidrathon-version }}.zip -d .github/ghidrathon/
+        gradle -p ./.github/ghidrathon/Ghidrathon-${{ matrix.ghidrathon-version }}/ -PGHIDRA_INSTALL_DIR=$(pwd)/.github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC
+        unzip .github/ghidrathon/Ghidrathon-${{ matrix.ghidrathon-version }}/dist/*.zip -d .github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC/Ghidra/Extensions
+    - name: Install pyyaml
+      run: sudo apt-get install -y libyaml-dev
+    - name: Install capa
+      run: pip install -e .[dev]
+    - name: Run tests
+      run: |
+        mkdir ./.github/ghidra/project
+        .github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC/support/analyzeHeadless .github/ghidra/project ghidra_test -Import ./tests/data/mimikatz.exe_ -ScriptPath ./tests/ -PostScript test_ghidra_features.py > ../output.log
+        cat ../output.log
+        exit_code=$(cat ../output.log | grep exit | awk '{print $NF}')
+        exit $exit_code
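Note how the Ghidra job recovers the test result: analyzeHeadless does not propagate the post-script's exit status, so the workflow greps the log for a line containing "exit" and takes its last whitespace-separated token. A hedged Python equivalent of that shell pipeline (log path and line format assumed from the commands above):

```python
# mirror of: exit_code=$(cat ../output.log | grep exit | awk '{print $NF}')
import sys
from pathlib import Path

code = 1  # fail closed if no "exit" line appears in the log
for line in Path("../output.log").read_text().splitlines():
    if "exit" in line:
        code = int(line.split()[-1])  # last field, like awk's $NF

sys.exit(code)
```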
.gitignore (9 changes, vendored)

@@ -108,13 +108,10 @@ venv.bak/
 *.viv
 *.idb
 *.i64
-.vscode
-
-!rules/lib
-
 # hooks/ci.sh output
 isort-output.log
 black-output.log
 rule-linter-output.log
+.vscode
 scripts/perf/*.txt
 scripts/perf/*.svg
 scripts/perf/*.zip

@@ -127,3 +124,5 @@ Pipfile
 Pipfile.lock
 /cache/
 .github/binja/binaryninja
+.github/binja/download_headless.py
+.github/binja/BinaryNinja-headless.zip
.pre-commit-config.yaml (111 changes, new file)

@@ -0,0 +1,111 @@
+# install the pre-commit hooks:
+#
+#     ❯ pre-commit install --hook-type pre-commit
+#     pre-commit installed at .git/hooks/pre-commit
+#
+#     ❯ pre-commit install --hook-type pre-push
+#     pre-commit installed at .git/hooks/pre-push
+#
+# run all linters liks:
+#
+#     ❯ pre-commit run --all-files
+#     isort....................................................................Passed
+#     black....................................................................Passed
+#     ruff.....................................................................Passed
+#     flake8...................................................................Passed
+#     mypy.....................................................................Passed
+#
+# run a single linter like:
+#
+#     ❯ pre-commit run --all-files isort
+#     isort....................................................................Passed
+
+repos:
+  - repo: local
+    hooks:
+      - id: isort
+        name: isort
+        stages: [commit, push]
+        language: system
+        entry: isort
+        args:
+          - "--length-sort"
+          - "--profile"
+          - "black"
+          - "--line-length=120"
+          - "--skip-glob"
+          - "*_pb2.py"
+          - "capa/"
+          - "scripts/"
+          - "tests/"
+        always_run: true
+        pass_filenames: false
+
+  - repo: local
+    hooks:
+      - id: black
+        name: black
+        stages: [commit, push]
+        language: system
+        entry: black
+        args:
+          - "--line-length=120"
+          - "--extend-exclude"
+          - ".*_pb2.py"
+          - "capa/"
+          - "scripts/"
+          - "tests/"
+        always_run: true
+        pass_filenames: false
+
+  - repo: local
+    hooks:
+      - id: ruff
+        name: ruff
+        stages: [commit, push]
+        language: system
+        entry: ruff
+        args:
+          - "check"
+          - "--config"
+          - ".github/ruff.toml"
+          - "capa/"
+          - "scripts/"
+          - "tests/"
+        always_run: true
+        pass_filenames: false
+
+  - repo: local
+    hooks:
+      - id: flake8
+        name: flake8
+        stages: [commit, push]
+        language: system
+        entry: flake8
+        args:
+          - "--config"
+          - ".github/flake8.ini"
+          - "--extend-exclude"
+          - "capa/render/proto/capa_pb2.py"
+          - "capa/"
+          - "scripts/"
+          - "tests/"
+        always_run: true
+        pass_filenames: false
+
+  - repo: local
+    hooks:
+      - id: mypy
+        name: mypy
+        stages: [commit, push]
+        language: system
+        entry: mypy
+        args:
+          - "--check-untyped-defs"
+          - "--ignore-missing-imports"
+          - "--config-file=.github/mypy/mypy.ini"
+          - "capa/"
+          - "scripts/"
+          - "tests/"
+        always_run: true
+        pass_filenames: false
CHANGELOG.md (139 changes)

@@ -3,22 +3,151 @@

 ## master (unreleased)

 ### New Features
+- ghidra: add Ghidra feature extractor and supporting code #1770 @colton-gabertan
+- ghidra: add entry script helping users run capa against a loaded Ghidra database #1767 @mike-hunhoff
+- binja: add support for forwarded exports #1646 @xusheng6
+- binja: add support for symtab names #1504 @xusheng6

 ### Breaking Changes

-### New Rules (0)
+### New Rules (1)

+- nursery/get-ntoskrnl-base-address @mr-tz
--

 ### Bug Fixes
+- ghidra: fix ints_to_bytes performance #1761 @mike-hunhoff
+- binja: improve function call site detection @xusheng6
+- binja: use binaryninja.load to open files @xusheng6
+- binja: bump binja version to 3.5 #1789 @xusheng6

 ### capa explorer IDA Pro plugin

 ### Development

 ### Raw diffs
-- [capa v5.1.0...master](https://github.com/mandiant/capa/compare/v5.1.0...master)
-- [capa-rules v5.1.0...master](https://github.com/mandiant/capa-rules/compare/v5.1.0...master)
+- [capa v6.1.0...master](https://github.com/mandiant/capa/compare/v6.1.0...master)
+- [capa-rules v6.1.0...master](https://github.com/mandiant/capa-rules/compare/v6.1.0...master)
+
+## v6.1.0
+
+capa v6.1.0 is a bug fix release, most notably fixing unhandled exceptions in the capa explorer IDA Pro plugin.
+@Aayush-Goel-04 put a lot of effort into improving code quality and adding a script for rule authors.
+The script shows which features are present in a sample but not referenced by any existing rule.
+You could use this script to find opportunities for new rules.
+
+Speaking of new rules, we have eight additions, coming from Ronnie, Jakub, Moritz, Ervin, and still@teamt5.org!
+
+### New Features
+- ELF: implement import and export name extractor #1607 #1608 @Aayush-Goel-04
+- bump pydantic from 1.10.9 to 2.1.1 #1582 @Aayush-Goel-04
+- develop script to highlight features not used during matching #331 @Aayush-Goel-04
+
+### New Rules (8)
+
+- executable/pe/export/forwarded-export ronnie.salomonsen@mandiant.com
+- host-interaction/bootloader/get-uefi-variable jakub.jozwiak@mandiant.com
+- host-interaction/bootloader/set-uefi-variable jakub.jozwiak@mandiant.com
+- nursery/enumerate-device-drivers-on-linux @mr-tz
+- anti-analysis/anti-vm/vm-detection/check-for-foreground-window-switch ervin.ocampo@mandiant.com
+- linking/static/sqlite3/linked-against-cppsqlite3 still@teamt5.org
+- linking/static/sqlite3/linked-against-sqlite3 still@teamt5.org
+
+### Bug Fixes
+
+- rules: fix forwarded export characteristic #1656 @RonnieSalomonsen
+- Binary Ninja: Fix stack string detection #1473 @xusheng6
+- linter: skip native API check for NtProtectVirtualMemory #1675 @williballenthin
+- OS: detect Android ELF files #1705 @williballenthin
+- ELF: fix parsing of symtab #1704 @williballenthin
+- result document: don't use deprecated pydantic functions #1718 @williballenthin
+- pytest: don't mark IDA tests as pytest tests #1719 @williballenthin
+
+### capa explorer IDA Pro plugin
+- fix unhandled exception when resolving rule path #1693 @mike-hunhoff
+
+### Raw diffs
+- [capa v6.0.0...v6.1.0](https://github.com/mandiant/capa/compare/v6.0.0...v6.1.0)
+- [capa-rules v6.0.0...v6.1.0](https://github.com/mandiant/capa-rules/compare/v6.0.0...v6.1.0)
+
+## v6.0.0
+
+capa v6.0 brings many bug fixes and quality improvements, including 64 rule updates and 26 new rules. We're now publishing to PyPI via [Trusted Publishing](https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/) and have migrated to using a `pyproject.toml` file. @Aayush-Goel-04 contributed a lot of new code across many files, so please welcome them to the project, along with @anders-v @crowface28 @dkelly2e @RonnieSalomonsen and @ejfocampo as first-time rule contributors!
+
+For those that use capa as a library, we've introduced some limited breaking changes that better represent data types (versus less-structured data like dictionaries and strings). With the recent deprecation, we've also dropped support for Python 3.7.
+
+### New Features
+- add script to detect feature overlap between new and existing capa rules [#1451](https://github.com/mandiant/capa/issues/1451) [@Aayush-Goel-04](https://github.com/aayush-goel-04)
+- extract forwarded exports from PE files #1624 @williballenthin
+- extract function and API names from ELF symtab entries @yelhamer https://github.com/mandiant/capa-rules/issues/736
+- use fancy box drawing characters for default output #1586 @williballenthin
+
+### Breaking Changes
+- use a class to represent Metadata (not dict) #1411 @Aayush-Goel-04 @manasghandat
+- use pathlib.Path to represent file paths #1534 @Aayush-Goel-04
+- Python 3.8 is now the minimum supported Python version #1578 @williballenthin
+- Require a Contributor License Agreement (CLA) for PRs going forward #1642 @williballenthin
+
+### New Rules (26)
+
+- load-code/shellcode/execute-shellcode-via-windows-callback-function ervin.ocampo@mandiant.com jakub.jozwiak@mandiant.com
+- nursery/execute-shellcode-via-indirect-call ronnie.salomonsen@mandiant.com
+- data-manipulation/encryption/aes/encrypt-data-using-aes-mixcolumns-step @mr-tz
+- linking/static/aplib/linked-against-aplib still@teamt5.org
+- communication/mailslot/read-from-mailslot nick.simonian@mandiant.com
+- nursery/hash-data-using-sha512managed-in-dotnet jonathanlepore@google.com
+- nursery/compiled-with-exescript jonathanlepore@google.com
+- nursery/check-for-sandbox-via-mac-address-ouis-in-dotnet jonathanlepore@google.com
+- host-interaction/hardware/enumerate-devices-by-category @mr-tz
+- host-interaction/service/continue-service @mr-tz
+- host-interaction/service/pause-service @mr-tz
+- persistence/exchange/act-as-exchange-transport-agent jakub.jozwiak@mandiant.com
+- host-interaction/file-system/create-virtual-file-system-in-dotnet jakub.jozwiak@mandiant.com
+- compiler/cx_freeze/compiled-with-cx_freeze @mr-tz jakub.jozwiak@mandiant.com
+- communication/socket/create-vmci-socket jakub.jozwiak@mandiant.com
+- persistence/office/act-as-excel-xll-add-in jakub.jozwiak@mandiant.com
+- persistence/office/act-as-office-com-add-in jakub.jozwiak@mandiant.com
+- persistence/office/act-as-word-wll-add-in jakub.jozwiak@mandiant.com
+- anti-analysis/anti-debugging/debugger-evasion/hide-thread-from-debugger michael.hunhoff@mandiant.com jakub.jozwiak@mandiant.com
+- host-interaction/memory/create-new-application-domain-in-dotnet jakub.jozwiak@mandiant.com
+- host-interaction/gui/switch-active-desktop jakub.jozwiak@mandiant.com
+- host-interaction/service/query-service-configuration @mr-tz
+- anti-analysis/anti-av/patch-event-tracing-for-windows-function jakub.jozwiak@mandiant.com
+- data-manipulation/encoding/xor/covertly-decode-and-write-data-to-windows-directory-using-indirect-calls dan.kelly@mandiant.com
+- linking/runtime-linking/resolve-function-by-brute-ratel-badger-hash jakub.jozwiak@mandiant.com
+
+### Bug Fixes
+- extractor: add a Binary Ninja test that asserts its version #1487 @xusheng6
+- extractor: update Binary Ninja stack string detection after the new constant outlining feature #1473 @xusheng6
+- extractor: update vivisect Arch extraction #1334 @mr-tz
+- extractor: avoid Binary Ninja exception when analyzing certain files #1441 @xusheng6
+- symtab: fix struct.unpack() format for 64-bit ELF files @yelhamer
+- symtab: safeguard against ZeroDivisionError for files containing a symtab with a null entry size @yelhamer
+- improve ELF strtab and needed parsing @mr-tz
+- better handle exceptional cases when parsing ELF files #1458 @Aayush-Goel-04
+- improved testing coverage for Binary Ninja backend #1446 @Aayush-Goel-04
+- add logging and print redirect to tqdm for capa main #749 @Aayush-Goel-04
+- extractor: fix binja installation path detection does not work with Python 3.11
+- tests: refine the IDA test runner script #1513 @williballenthin
+- output: don't leave behind traces of progress bar @williballenthin
+- import-to-ida: fix bug introduced with JSON report changes in v5 #1584 @williballenthin
+- main: don't show spinner when emitting debug messages #1636 @williballenthin
+- rules: add forwarded export characteristics to rule syntax file scope #1653 @RonnieSalomonsen
+
+### capa explorer IDA Pro plugin
+
+### Development
+- update ATT&CK/MBC data for linting #1568 @mr-tz
+- log time taken to analyze each function #1290 @williballenthin
+- tests: make fixture available via conftest.py #1592 @williballenthin
+- publish via PyPI trusted publishing #1491 @williballenthin
+- migrate to pyproject.toml #1301 @williballenthin
+- use [pre-commit](https://pre-commit.com/) to invoke linters #1579 @williballenthin
+
+### Raw diffs
+- [capa v5.1.0...v6.0.0](https://github.com/mandiant/capa/compare/v5.1.0...v6.0.0)
+- [capa-rules v5.1.0...v6.0.0](https://github.com/mandiant/capa-rules/compare/v5.1.0...v6.0.0)

 ## v5.1.0
 capa version 5.1.0 adds a Protocol Buffers (protobuf) format for result documents. Additionally, the [Vector35](https://vector35.com/) team contributed a new feature extractor using Binary Ninja. Other new features are a new CLI flag to override the detected operating system, functionality to read and render existing result documents, and a output color format that's easier to read.

@@ -65,12 +194,14 @@ Thanks for all the support, especially to @xusheng6, @captainGeech42, @ggold7046
 - nursery/contain-a-thread-local-storage-tls-section-in-dotnet michael.hunhoff@mandiant.com

 ### Bug Fixes
 - extractor: interface of cache modified to prevent extracting file and global features multiple times @stevemk14ebr
 - extractor: removed '.dynsym' as the library name for ELF imports #1318 @stevemk14ebr
 - extractor: fix vivisect loop detection corner case #1310 @mr-tz
 - match: extend OS characteristic to match OS_ANY to all supported OSes #1324 @mike-hunhoff
-- extractor: fix IDA and vivisect string and bytes features overlap and tests #1327 #1336 @xusheng6
 - extractor: fix IDA and vivisect string and bytes features overlap and tests #1327 #1336 @xusheng6

 ### capa explorer IDA Pro plugin
 - rule generator plugin now loads faster when jumping between functions @stevemk14ebr
 - fix exception when plugin loaded in IDA hosted under idat #1341 @mike-hunhoff
 - improve embedded PE detection performance and reduce FP potential #1344 @mike-hunhoff
LICENSE.txt

@@ -187,7 +187,7 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.

-   Copyright (C) 2020 Mandiant, Inc.
+   Copyright (C) 2023 Mandiant, Inc.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
README.md

@@ -2,7 +2,7 @@
 [](https://pypi.org/project/flare-capa)
 [](https://github.com/mandiant/capa/releases)
-[](https://github.com/mandiant/capa-rules)
+[](https://github.com/mandiant/capa-rules)
 [](https://github.com/mandiant/capa/actions?query=workflow%3ACI+event%3Apush+branch%3Amaster)
 [](https://github.com/mandiant/capa/releases)
 [](LICENSE.txt)

@@ -170,6 +170,8 @@ capa explorer helps you identify interesting areas of a program and build new ca
 

+If you use Ghidra, you can use the Python 3 [Ghidra feature extractor](/capa/ghidra/). This integration enables capa to extract features directly from your Ghidra database, which can help you identify capabilities in programs that you analyze using Ghidra.
+
 # further information
 ## capa
 - [Installation](https://github.com/mandiant/capa/blob/master/doc/installation.md)
capa/engine.py

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -8,7 +8,7 @@

 import copy
 import collections
-from typing import TYPE_CHECKING, Set, Dict, List, Tuple, Union, Mapping, Iterable, Iterator, cast
+from typing import TYPE_CHECKING, Set, Dict, List, Tuple, Union, Mapping, Iterable, Iterator

 import capa.perf
 import capa.features.common

@@ -71,7 +71,7 @@ class Statement:
                 yield child

         if hasattr(self, "children"):
-            for child in getattr(self, "children"):
+            for child in self.children:
                 assert isinstance(child, (Statement, Feature))
                 yield child

@@ -83,7 +83,7 @@ class Statement:
             self.child = new

         if hasattr(self, "children"):
-            children = getattr(self, "children")
+            children = self.children
             for i, child in enumerate(children):
                 if child is existing:
                     children[i] = new
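The engine change above swaps getattr(self, "children") for plain attribute access: once hasattr() has confirmed the attribute exists, the string-based lookup adds nothing. A minimal sketch of the same guard-then-traverse pattern (Node is a hypothetical stand-in for capa's Statement):

```python
class Node:
    def __init__(self, *children: "Node"):
        if children:  # like capa statements, leaf nodes never get the attribute
            self.children = list(children)

    def walk(self):
        yield self
        if hasattr(self, "children"):
            for child in self.children:  # direct access, not getattr(self, "children")
                yield from child.walk()

tree = Node(Node(), Node(Node()))
assert sum(1 for _ in tree.walk()) == 4
```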
capa/exceptions.py

@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 class UnsupportedRuntimeError(RuntimeError):
     pass
@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 import abc
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
capa/features/common.py

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -100,7 +100,10 @@ class Result:
         return self.success


-class Feature(abc.ABC):
+class Feature(abc.ABC):  # noqa: B024
+    # this is an abstract class, since we don't want anyone to instantiate it directly,
+    # but it doesn't have any abstract methods.
+
     def __init__(
         self,
         value: Union[str, int, float, bytes],

@@ -124,12 +127,17 @@ class Feature(abc.ABC):
         return self.name == other.name and self.value == other.value

     def __lt__(self, other):
-        # TODO: this is a huge hack!
+        # implementing sorting by serializing to JSON is a huge hack.
+        # its slow, inelegant, and probably doesn't work intuitively;
+        # however, we only use it for deterministic output, so it's good enough for now.
+
+        # circular import
+        # we should fix if this wasn't already a huge hack.
         import capa.features.freeze.features

         return (
-            capa.features.freeze.features.feature_from_capa(self).json()
-            < capa.features.freeze.features.feature_from_capa(other).json()
+            capa.features.freeze.features.feature_from_capa(self).model_dump_json()
+            < capa.features.freeze.features.feature_from_capa(other).model_dump_json()
         )

     def get_name_str(self) -> str:

@@ -267,7 +275,7 @@ class _MatchedSubstring(Substring):
         self.matches = matches

     def __str__(self):
-        matches = ", ".join(map(lambda s: '"' + s + '"', (self.matches or {}).keys()))
+        matches = ", ".join(f'"{s}"' for s in (self.matches or {}).keys())
         assert isinstance(self.value, str)
         return f'substring("{self.value}", matches = {matches})'

@@ -359,7 +367,7 @@ class _MatchedRegex(Regex):
         self.matches = matches

     def __str__(self):
-        matches = ", ".join(map(lambda s: '"' + s + '"', (self.matches or {}).keys()))
+        matches = ", ".join(f'"{s}"' for s in (self.matches or {}).keys())
         assert isinstance(self.value, str)
         return f"regex(string =~ {self.value}, matches = {matches})"
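The reworked comment in Feature.__lt__ spells out the trade-off: ordering objects by their serialized JSON is slow and unintuitive, but deterministic, which is all capa needs for stable output. A standalone sketch of the idea (Fact is a hypothetical stand-in for the pydantic feature models):

```python
import json
from dataclasses import dataclass, asdict

@dataclass
class Fact:
    name: str
    value: int

    def __lt__(self, other: "Fact") -> bool:
        # sort_keys keeps the serialization, and thus the order, stable
        return json.dumps(asdict(self), sort_keys=True) < json.dumps(asdict(other), sort_keys=True)

# deterministic but not intuitive: a value of 10 sorts before 2, string-wise
print(sorted([Fact("a", 2), Fact("a", 10), Fact("b", 1)]))
```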
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
capa/features/extractors/binja/basicblock.py

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -6,15 +6,16 @@
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.

-import sys
 import string
 import struct
 from typing import Tuple, Iterator

-from binaryninja import Function
+from binaryninja import Function, Settings
 from binaryninja import BasicBlock as BinjaBasicBlock
 from binaryninja import (
+    BinaryView,
+    SymbolType,
     RegisterValueType,
     VariableSourceType,
     MediumLevelILSetVar,
     MediumLevelILOperation,

@@ -23,11 +24,72 @@ from binaryninja import (
 )

 from capa.features.common import Feature, Characteristic
-from capa.features.address import Address, AbsoluteVirtualAddress
+from capa.features.address import Address
 from capa.features.basicblock import BasicBlock
 from capa.features.extractors.helpers import MIN_STACKSTRING_LEN
 from capa.features.extractors.base_extractor import BBHandle, FunctionHandle

+use_const_outline: bool = False
+settings: Settings = Settings()
+if settings.contains("analysis.outlining.builtins") and settings.get_bool("analysis.outlining.builtins"):
+    use_const_outline = True
+
+
+def get_printable_len_ascii(s: bytes) -> int:
+    """Return string length if all operand bytes are ascii or utf16-le printable"""
+    count = 0
+    for c in s:
+        if c == 0:
+            return count
+        if c < 127 and chr(c) in string.printable:
+            count += 1
+    return count
+
+
+def get_printable_len_wide(s: bytes) -> int:
+    """Return string length if all operand bytes are ascii or utf16-le printable"""
+    if all(c == 0x00 for c in s[1::2]):
+        return get_printable_len_ascii(s[::2])
+    return 0
+
+
+def get_stack_string_len(f: Function, il: MediumLevelILInstruction) -> int:
+    bv: BinaryView = f.view
+
+    if il.operation != MediumLevelILOperation.MLIL_CALL:
+        return 0
+
+    target = il.dest
+    if target.operation not in [MediumLevelILOperation.MLIL_CONST, MediumLevelILOperation.MLIL_CONST_PTR]:
+        return 0
+
+    addr = target.value.value
+    sym = bv.get_symbol_at(addr)
+    if not sym or sym.type != SymbolType.LibraryFunctionSymbol:
+        return 0
+
+    if sym.name not in ["__builtin_strncpy", "__builtin_strcpy", "__builtin_wcscpy"]:
+        return 0
+
+    if len(il.params) < 2:
+        return 0
+
+    dest = il.params[0]
+    if dest.operation in [MediumLevelILOperation.MLIL_ADDRESS_OF, MediumLevelILOperation.MLIL_VAR]:
+        var = dest.src
+    else:
+        return 0
+
+    if var.source_type != VariableSourceType.StackVariableSourceType:
+        return 0
+
+    src = il.params[1]
+    if src.value.type != RegisterValueType.ConstantDataAggregateValue:
+        return 0
+
+    s = f.get_constant_data(RegisterValueType.ConstantDataAggregateValue, src.value.value)
+    return max(get_printable_len_ascii(bytes(s)), get_printable_len_wide(bytes(s)))
+
+
 def get_printable_len(il: MediumLevelILSetVar) -> int:
     """Return string length if all operand bytes are ascii or utf16-le printable"""

@@ -69,7 +131,7 @@ def is_mov_imm_to_stack(il: MediumLevelILInstruction) -> bool:
     if il.src.operation != MediumLevelILOperation.MLIL_CONST:
         return False

-    if not il.dest.source_type == VariableSourceType.StackVariableSourceType:
+    if il.dest.source_type != VariableSourceType.StackVariableSourceType:
         return False

     return True

@@ -82,8 +144,11 @@ def bb_contains_stackstring(f: Function, bb: MediumLevelILBasicBlock) -> bool:
     """
     count = 0
     for il in bb:
-        if is_mov_imm_to_stack(il):
-            count += get_printable_len(il)
+        if use_const_outline:
+            count += get_stack_string_len(f, il)
+        else:
+            if is_mov_imm_to_stack(il):
+                count += get_printable_len(il)

     if count > MIN_STACKSTRING_LEN:
         return True
@@ -117,30 +182,3 @@ BASIC_BLOCK_HANDLERS = (
|
||||
extract_bb_tight_loop,
|
||||
extract_bb_stackstring,
|
||||
)
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) < 2:
|
||||
return
|
||||
|
||||
from binaryninja import BinaryViewType
|
||||
|
||||
from capa.features.extractors.binja.extractor import BinjaFeatureExtractor
|
||||
|
||||
bv: BinaryView = BinaryViewType.get_view_of_file(sys.argv[1])
|
||||
if bv is None:
|
||||
return
|
||||
|
||||
features = []
|
||||
extractor = BinjaFeatureExtractor(bv)
|
||||
for fh in extractor.get_functions():
|
||||
for bbh in extractor.get_basic_blocks(fh):
|
||||
features.extend(list(extract_features(fh, bbh)))
|
||||
|
||||
import pprint
|
||||
|
||||
pprint.pprint(features)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
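The two new printable-length helpers count a leading run of printable bytes and treat UTF-16LE as ASCII interleaved with NULs. A standalone re-statement, trimmed of the Binary Ninja types, shows the behavior on concrete buffers:

    import string

    def printable_ascii_len(s: bytes) -> int:
        # mirrors get_printable_len_ascii above: count printable bytes up to the first NUL
        n = 0
        for c in s:
            if c == 0:
                return n
            if c < 127 and chr(c) in string.printable:
                n += 1
        return n

    def printable_wide_len(s: bytes) -> int:
        # mirrors get_printable_len_wide above: UTF-16LE is ASCII interleaved with NULs
        return printable_ascii_len(s[::2]) if all(c == 0 for c in s[1::2]) else 0

    assert printable_ascii_len(b"ACR > \x00junk") == 6
    assert printable_wide_len("host".encode("utf-16-le") + b"\x00\x00") == 4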
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -53,9 +53,7 @@ class BinjaFeatureExtractor(FeatureExtractor):
             mlil_lookup[mlil_bb.source_block.start] = mlil_bb

         for bb in f.basic_blocks:
-            mlil_bb = None
-            if bb.start in mlil_lookup:
-                mlil_bb = mlil_lookup[bb.start]
+            mlil_bb = mlil_lookup.get(bb.start)

             yield BBHandle(address=AbsoluteVirtualAddress(bb.start), inner=(bb, mlil_bb))
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,11 +6,10 @@
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.

 import sys
 import struct
 from typing import Tuple, Iterator

-from binaryninja import Symbol, Segment, BinaryView, SymbolType, SymbolBinding
+from binaryninja import Segment, BinaryView, SymbolType, SymbolBinding

 import capa.features.extractors.common
 import capa.features.extractors.helpers
@@ -18,7 +17,7 @@ import capa.features.extractors.strings
 from capa.features.file import Export, Import, Section, FunctionName
 from capa.features.common import FORMAT_PE, FORMAT_ELF, Format, String, Feature, Characteristic
 from capa.features.address import NO_ADDRESS, Address, FileOffsetAddress, AbsoluteVirtualAddress
-from capa.features.extractors.binja.helpers import unmangle_c_name
+from capa.features.extractors.binja.helpers import read_c_string, unmangle_c_name


 def check_segment_for_pe(bv: BinaryView, seg: Segment) -> Iterator[Tuple[int, int]]:
@@ -83,6 +82,24 @@ def extract_file_export_names(bv: BinaryView) -> Iterator[Tuple[Feature, Address
         if name != unmangled_name:
             yield Export(unmangled_name), AbsoluteVirtualAddress(sym.address)

+    for sym in bv.get_symbols_of_type(SymbolType.DataSymbol):
+        if sym.binding not in [SymbolBinding.GlobalBinding]:
+            continue
+
+        name = sym.short_name
+        if not name.startswith("__forwarder_name"):
+            continue
+
+        # Due to https://github.com/Vector35/binaryninja-api/issues/4641, in binja version 3.5, the symbol's name
+        # does not contain the DLL name. As a workaround, we read the C string at the symbol's address, which contains
+        # both the DLL name and the function name.
+        # Once the above issue is closed in the next binja stable release, we can update the code here to use the
+        # symbol name directly.
+        name = read_c_string(bv, sym.address, 1024)
+        forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(name)
+        yield Export(forwarded_name), AbsoluteVirtualAddress(sym.address)
+        yield Characteristic("forwarded export"), AbsoluteVirtualAddress(sym.address)
+

 def extract_file_import_names(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
     """extract function imports
@@ -126,15 +143,17 @@ def extract_file_function_names(bv: BinaryView) -> Iterator[Tuple[Feature, Addre
     """
     for sym_name in bv.symbols:
         for sym in bv.symbols[sym_name]:
-            if sym.type == SymbolType.LibraryFunctionSymbol:
-                name = sym.short_name
-                yield FunctionName(name), sym.address
-                if name.startswith("_"):
-                    # some linkers may prefix linked routines with a `_` to avoid name collisions.
-                    # extract features for both the mangled and un-mangled representations.
-                    # e.g. `_fwrite` -> `fwrite`
-                    # see: https://stackoverflow.com/a/2628384/87207
-                    yield FunctionName(name[1:]), sym.address
+            if sym.type not in [SymbolType.LibraryFunctionSymbol, SymbolType.FunctionSymbol]:
+                continue
+
+            name = sym.short_name
+            yield FunctionName(name), sym.address
+            if name.startswith("_"):
+                # some linkers may prefix linked routines with a `_` to avoid name collisions.
+                # extract features for both the mangled and un-mangled representations.
+                # e.g. `_fwrite` -> `fwrite`
+                # see: https://stackoverflow.com/a/2628384/87207
+                yield FunctionName(name[1:]), sym.address


 def extract_file_format(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
@@ -166,23 +185,3 @@ FILE_HANDLERS = (
     extract_file_function_names,
     extract_file_format,
 )
-
-
-def main():
-    """ """
-    if len(sys.argv) < 2:
-        return
-
-    from binaryninja import BinaryViewType
-
-    bv: BinaryView = BinaryViewType.get_view_of_file(sys.argv[1])
-    if bv is None:
-        return
-
-    import pprint
-
-    pprint.pprint(list(extract_features(bv)))
-
-
-if __name__ == "__main__":
-    main()
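reformat_forwarded_export_name() comes from capa.features.extractors.helpers; a plausible sketch of what such a normalization does (a hypothetical reimplementation for illustration, the real helper may differ):

    def reformat_forwarded_export_name(name: str) -> str:
        # forwarded exports read like "NTDLL.RtlAllocateHeap"; normalize the
        # module part so features compare case-insensitively (assumption)
        module, _, routine = name.partition(".")
        return f"{module.lower()}.{routine}"

    assert reformat_forwarded_export_name("NTDLL.RtlAllocateHeap") == "ntdll.RtlAllocateHeap"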
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,6 +6,7 @@
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
 import subprocess
+from pathlib import Path

 # When the script gets executed as a standalone executable (via PyInstaller), `import binaryninja` does not work because
 # we have excluded the binaryninja module in `pyinstaller.spec`. The trick here is to call the system Python and try
@@ -15,8 +16,8 @@ import subprocess
 # binaryninja module is extracted by the PyInstaller.
 code = r"""
 from pathlib import Path
-import importlib
-spec = importlib.util.find_spec('binaryninja')
+from importlib import util
+spec = util.find_spec('binaryninja')
 if spec is not None:
     if len(spec.submodule_search_locations) > 0:
         path = Path(spec.submodule_search_locations[0])
@@ -25,9 +26,9 @@ if spec is not None:
 """


-def find_binja_path() -> str:
+def find_binja_path() -> Path:
     raw_output = subprocess.check_output(["python", "-c", code]).decode("ascii").strip()
-    return bytes.fromhex(raw_output).decode("utf8")
+    return Path(bytes.fromhex(raw_output).decode("utf8"))


 if __name__ == "__main__":
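The module runs a small probe under the system Python and passes the discovered path back hex-encoded over stdout, which keeps arbitrary path bytes safe to transport. The same round-trip in isolation (the path is made up, and `python` is assumed to be on PATH):

    import subprocess
    from pathlib import Path

    probe = "import binascii; print(binascii.hexlify(b'/opt/binaryninja').decode())"
    raw = subprocess.check_output(["python", "-c", probe]).decode("ascii").strip()
    assert Path(bytes.fromhex(raw).decode("utf8")) == Path("/opt/binaryninja")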
@@ -1,15 +1,15 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
 # Unless required by applicable law or agreed to in writing, software distributed under the License
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
-import sys
 from typing import Tuple, Iterator

-from binaryninja import Function, BinaryView, LowLevelILOperation
+from binaryninja import Function, BinaryView, SymbolType, RegisterValueType, LowLevelILOperation

+from capa.features.file import FunctionName
 from capa.features.common import Feature, Characteristic
 from capa.features.address import Address, AbsoluteVirtualAddress
 from capa.features.extractors import loops
@@ -19,19 +19,32 @@ from capa.features.extractors.base_extractor import FunctionHandle
 def extract_function_calls_to(fh: FunctionHandle):
     """extract callers to a function"""
     func: Function = fh.inner
     bv: BinaryView = func.view

     for caller in func.caller_sites:
         # Everything that is a code reference to the current function is considered a caller, which actually includes
         # many other references that are NOT a caller. For example, an instruction `push function_start` will also be
         # considered a caller to the function
-        if caller.llil.operation in [
+        llil = caller.llil
+        if (llil is None) or llil.operation not in [
             LowLevelILOperation.LLIL_CALL,
             LowLevelILOperation.LLIL_CALL_STACK_ADJUST,
             LowLevelILOperation.LLIL_JUMP,
             LowLevelILOperation.LLIL_TAILCALL,
         ]:
-            yield Characteristic("calls to"), AbsoluteVirtualAddress(caller.address)
+            continue
+
+        if llil.dest.value.type not in [
+            RegisterValueType.ImportedAddressValue,
+            RegisterValueType.ConstantValue,
+            RegisterValueType.ConstantPointerValue,
+        ]:
+            continue
+
+        address = llil.dest.value.value
+        if address != func.start:
+            continue
+
+        yield Characteristic("calls to"), AbsoluteVirtualAddress(caller.address)


 def extract_function_loop(fh: FunctionHandle):
@@ -61,37 +74,31 @@ def extract_recursive_call(fh: FunctionHandle):
         yield Characteristic("recursive call"), fh.address


+def extract_function_name(fh: FunctionHandle):
+    """extract function names (e.g., symtab names)"""
+    func: Function = fh.inner
+    bv: BinaryView = func.view
+    if bv is None:
+        return
+
+    for sym in bv.get_symbols(func.start):
+        if sym.type not in [SymbolType.LibraryFunctionSymbol, SymbolType.FunctionSymbol]:
+            continue
+
+        name = sym.short_name
+        yield FunctionName(name), sym.address
+        if name.startswith("_"):
+            # some linkers may prefix linked routines with a `_` to avoid name collisions.
+            # extract features for both the mangled and un-mangled representations.
+            # e.g. `_fwrite` -> `fwrite`
+            # see: https://stackoverflow.com/a/2628384/87207
+            yield FunctionName(name[1:]), sym.address
+
+
 def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
     for func_handler in FUNCTION_HANDLERS:
         for feature, addr in func_handler(fh):
             yield feature, addr


-FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call)
-
-
-def main():
-    """ """
-    if len(sys.argv) < 2:
-        return
-
-    from binaryninja import BinaryViewType
-
-    from capa.features.extractors.binja.extractor import BinjaFeatureExtractor
-
-    bv: BinaryView = BinaryViewType.get_view_of_file(sys.argv[1])
-    if bv is None:
-        return
-
-    features = []
-    extractor = BinjaFeatureExtractor(bv)
-    for fh in extractor.get_functions():
-        features.extend(list(extract_features(fh)))
-
-    import pprint
-
-    pprint.pprint(features)
-
-
-if __name__ == "__main__":
-    main()
+FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call, extract_function_name)
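The stricter extract_function_calls_to only yields "calls to" when the reference is a call-like LLIL operation whose resolved destination equals the function start. The filtering logic in miniature, over toy data rather than the Binary Ninja API:

    CALL_LIKE = {"call", "call_stack_adjust", "jump", "tailcall"}

    def calls_to(xrefs, func_start):
        # xrefs: (caller_address, llil_operation, resolved_target) triples
        for addr, op, target in xrefs:
            if op in CALL_LIKE and target == func_start:
                yield addr

    refs = [(0x401000, "call", 0x402000), (0x401005, "push", 0x402000)]
    assert list(calls_to(refs, 0x402000)) == [0x401000]  # the `push function_start` xref is dropped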
@@ -1,10 +1,15 @@
 # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
 # Unless required by applicable law or agreed to in writing, software distributed under the License
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
+import logging
+import contextlib
 from typing import Tuple, Iterator

 from binaryninja import BinaryView

+import capa.features.extractors.elf
 from capa.features.common import OS, OS_MACOS, ARCH_I386, ARCH_AMD64, OS_WINDOWS, Arch, Feature
 from capa.features.address import NO_ADDRESS, Address
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,7 +9,7 @@ import re
 from typing import List, Callable
 from dataclasses import dataclass

-from binaryninja import LowLevelILInstruction
+from binaryninja import BinaryView, LowLevelILInstruction
 from binaryninja.architecture import InstructionTextToken


@@ -41,10 +41,29 @@ def unmangle_c_name(name: str) -> str:
     # _lstrlenWStub@4

     # A small optimization to avoid running the regex too many times
-    # TODO: this still increases the unit test execution time from 170s to 200s, should be able to accelerate it
+    # this still increases the unit test execution time from 170s to 200s, should be able to accelerate it
+    #
+    # TODO(xusheng): performance optimizations to improve test execution time
+    # https://github.com/mandiant/capa/issues/1610
     if name[0] in ["@", "_"]:
         match = re.match(r"^[@|_](.*?)(Stub)?(@\d+)?$", name)
         if match:
             return match.group(1)

     return name


+def read_c_string(bv: BinaryView, offset: int, max_len: int) -> str:
+    s: List[str] = []
+    while len(s) < max_len:
+        try:
+            c = bv.read(offset + len(s), 1)[0]
+        except Exception:
+            break
+
+        if c == 0:
+            break
+
+        s.append(chr(c))
+
+    return "".join(s)
@@ -1,12 +1,11 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
 # Unless required by applicable law or agreed to in writing, software distributed under the License
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.
-import sys
-from typing import Any, Dict, List, Tuple, Iterator, Optional
+from typing import Any, List, Tuple, Iterator, Optional

 from binaryninja import Function
 from binaryninja import BasicBlock as BinjaBasicBlock
@@ -18,12 +17,11 @@ from binaryninja import (
     RegisterValueType,
     LowLevelILOperation,
     LowLevelILInstruction,
-    InstructionTextTokenType,
 )

 import capa.features.extractors.helpers
 from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
-from capa.features.common import MAX_BYTES_FEATURE_SIZE, THUNK_CHAIN_DEPTH_DELTA, Bytes, String, Feature, Characteristic
+from capa.features.common import MAX_BYTES_FEATURE_SIZE, Bytes, String, Feature, Characteristic
 from capa.features.address import Address, AbsoluteVirtualAddress
 from capa.features.extractors.binja.helpers import DisassemblyInstruction, visit_llil_exprs
 from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
@@ -73,7 +71,6 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle)
     example:
         call dword [0x00473038]
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner
     bv: BinaryView = func.view

@@ -97,28 +94,32 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle)
         candidate_addrs.append(stub_addr)

     for address in candidate_addrs:
-        sym = func.view.get_symbol_at(address)
-        if sym is None or sym.type not in [SymbolType.ImportAddressSymbol, SymbolType.ImportedFunctionSymbol]:
-            continue
+        for sym in func.view.get_symbols(address):
+            if sym is None or sym.type not in [
+                SymbolType.ImportAddressSymbol,
+                SymbolType.ImportedFunctionSymbol,
+                SymbolType.FunctionSymbol,
+            ]:
+                continue

-        sym_name = sym.short_name
+            sym_name = sym.short_name

-        lib_name = ""
-        import_lib = bv.lookup_imported_object_library(sym.address)
-        if import_lib is not None:
-            lib_name = import_lib[0].name
-            if lib_name.endswith(".dll"):
-                lib_name = lib_name[:-4]
-            elif lib_name.endswith(".so"):
-                lib_name = lib_name[:-3]
+            lib_name = ""
+            import_lib = bv.lookup_imported_object_library(sym.address)
+            if import_lib is not None:
+                lib_name = import_lib[0].name
+                if lib_name.endswith(".dll"):
+                    lib_name = lib_name[:-4]
+                elif lib_name.endswith(".so"):
+                    lib_name = lib_name[:-3]

-        for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name):
-            yield API(name), ih.address
+            for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name):
+                yield API(name), ih.address

-        if sym_name.startswith("_"):
-            for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name[1:]):
-                yield API(name), ih.address
+            if sym_name.startswith("_"):
+                for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name[1:]):
+                    yield API(name), ih.address
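generate_symbols() (from capa.features.extractors.helpers) expands a library/API pair into the name variants capa rules may reference. Roughly, as a sketch of the observable behavior rather than the exact implementation:

    def generate_symbols(lib: str, api: str):
        # e.g. ("kernel32", "CreateFileA") -> "kernel32.CreateFileA" and "CreateFileA" (assumption)
        if lib:
            yield f"{lib}.{api}"
        yield api

    assert list(generate_symbols("kernel32", "CreateFileA")) == ["kernel32.CreateFileA", "CreateFileA"]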
 def extract_insn_number_features(
     fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
@@ -128,12 +129,9 @@ def extract_insn_number_features(
     example:
         push 3136B0h ; dwControlCode
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner
-    bv: BinaryView = func.view

     results: List[Tuple[Any[Number, OperandNumber], Address]] = []
     address_size = func.view.arch.address_size * 8

     def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
         if il.operation == LowLevelILOperation.LLIL_LOAD:
@@ -161,8 +159,7 @@ def extract_insn_number_features(
     for llil in func.get_llils_at(ih.address):
         visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results
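yield from is the behavior-preserving spelling of the two-line loop it replaces throughout this file:

    def f_loop(results):
        for result in results:
            yield result

    def f_delegate(results):
        yield from results

    assert list(f_loop([1, 2])) == list(f_delegate([1, 2])) == [1, 2]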
 def extract_insn_bytes_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
@@ -171,7 +168,6 @@ def extract_insn_bytes_features(fh: FunctionHandle, bbh: BBHandl
     example:
         push offset iid_004118d4_IShellLinkA ; riid
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner
     bv: BinaryView = func.view

@@ -220,7 +216,6 @@ def extract_insn_string_features(
     example:
         push offset aAcr ; "ACR > "
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner
     bv: BinaryView = func.view

@@ -278,7 +273,6 @@ def extract_insn_offset_features(
     example:
         .text:0040112F cmp [esi+4], ebx
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner

     results: List[Tuple[Any[Offset, OperandOffset], Address]] = []
@@ -327,13 +321,13 @@ def extract_insn_offset_features(
     for llil in func.get_llils_at(ih.address):
         visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results


 def is_nzxor_stack_cookie(f: Function, bb: BinjaBasicBlock, llil: LowLevelILInstruction) -> bool:
     """check if nzxor exists within stack cookie delta"""
-    # TODO: we can do a much accurate analysi using LLIL SSA
+    # TODO(xusheng): use LLIL SSA to do more accurate analysis
+    # https://github.com/mandiant/capa/issues/1609

     reg_names = []
     if llil.left.operation == LowLevelILOperation.LLIL_REG:
@@ -364,7 +358,6 @@ def extract_insn_nzxor_characteristic_features(
     parse instruction non-zeroing XOR instruction
     ignore expected non-zeroing XORs, e.g. security cookies
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner

     results = []
@@ -384,8 +377,7 @@ def extract_insn_nzxor_characteristic_features(
     for llil in func.get_llils_at(ih.address):
         visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results


 def extract_insn_mnemonic_features(
@@ -414,7 +406,6 @@ def extract_insn_peb_access_characteristic_features(

     fs:[0x30] on x86, gs:[0x60] on x64
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner

     results = []
@@ -439,7 +430,7 @@ def extract_insn_peb_access_characteristic_features(
             return True

         value = right.value.value
-        if not (reg, value) in (("fsbase", 0x30), ("gsbase", 0x60)):
+        if (reg, value) not in (("fsbase", 0x30), ("gsbase", 0x60)):
             return True

         results.append((Characteristic("peb access"), ih.address))
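The tuple-membership rewrite keeps the same predicate: the PEB pointer is read from fs:[0x30] on x86 and gs:[0x60] on x64, so anything else is not a PEB access:

    def is_peb_access(reg: str, value: int) -> bool:
        return (reg, value) in (("fsbase", 0x30), ("gsbase", 0x60))

    assert is_peb_access("gsbase", 0x60) and not is_peb_access("fsbase", 0x60)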
@@ -448,15 +439,13 @@ def extract_insn_peb_access_characteristic_features(
     for llil in func.get_llils_at(ih.address):
         visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results


 def extract_insn_segment_access_features(
     fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
 ) -> Iterator[Tuple[Feature, Address]]:
     """parse instruction fs or gs access"""
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner

     results = []
@@ -477,15 +466,13 @@ def extract_insn_segment_access_features(
     for llil in func.get_llils_at(ih.address):
         visit_llil_exprs(llil, llil_checker)

-    for result in results:
-        yield result
+    yield from results


 def extract_insn_cross_section_cflow(
     fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
 ) -> Iterator[Tuple[Feature, Address]]:
     """inspect the instruction for a CALL or JMP that crosses section boundaries"""
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner
     bv: BinaryView = func.view

@@ -509,7 +496,6 @@ def extract_function_calls_from(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandl

     most relevant at the function scope, however, its most efficient to extract at the instruction scope
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner
     bv: BinaryView = func.view

@@ -555,7 +541,6 @@ def extract_function_indirect_call_characteristic_features(
     most relevant at the function or basic block scope;
     however, its most efficient to extract at the instruction scope
     """
-    insn: DisassemblyInstruction = ih.inner
     func: Function = fh.inner

     llil = func.get_llil_at(ih.address)
@@ -599,32 +584,3 @@ INSTRUCTION_HANDLERS = (
     extract_function_calls_from,
     extract_function_indirect_call_characteristic_features,
 )
-
-
-def main():
-    """ """
-    if len(sys.argv) < 2:
-        return
-
-    from binaryninja import BinaryViewType
-
-    from capa.features.extractors.binja.extractor import BinjaFeatureExtractor
-
-    bv: BinaryView = BinaryViewType.get_view_of_file(sys.argv[1])
-    if bv is None:
-        return
-
-    features = []
-    extractor = BinjaFeatureExtractor(bv)
-    for fh in extractor.get_functions():
-        for bbh in extractor.get_basic_blocks(fh):
-            for insn in extractor.get_instructions(fh, bbh):
-                features.extend(list(extract_features(fh, bbh, insn)))
-
-    import pprint
-
-    pprint.pprint(features)
-
-
-if __name__ == "__main__":
-    main()
@@ -1,3 +1,10 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 import io
 import logging
 import binascii
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,6 +9,7 @@
 from __future__ import annotations

 from typing import Dict, List, Tuple, Union, Iterator, Optional
+from pathlib import Path

 import dnfile
 from dncil.cil.opcode import OpCodes
@@ -52,25 +53,25 @@ class DnFileFeatureExtractorCache:
             self.types[type_.token] = type_

     def get_import(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.imports.get(token, None)
+        return self.imports.get(token)

     def get_native_import(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.native_imports.get(token, None)
+        return self.native_imports.get(token)

     def get_method(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.methods.get(token, None)
+        return self.methods.get(token)

     def get_field(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.fields.get(token, None)
+        return self.fields.get(token)

     def get_type(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
-        return self.types.get(token, None)
+        return self.types.get(token)


 class DnfileFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
         super().__init__()
-        self.pe: dnfile.dnPE = dnfile.dnPE(path)
+        self.pe: dnfile.dnPE = dnfile.dnPE(str(path))

         # pre-compute .NET token lookup tables; each .NET method has access to this cache for feature extraction
         # most relevant at instruction scope
@@ -119,7 +120,7 @@ class DnfileFeatureExtractor(FeatureExtractor):
         address: DNTokenAddress = DNTokenAddress(insn.operand.value)

         # record call to destination method; note: we only consider MethodDef methods for destinations
-        dest: Optional[FunctionHandle] = methods.get(address, None)
+        dest: Optional[FunctionHandle] = methods.get(address)
         if dest is not None:
             dest.ctx["calls_to"].add(fh.address)
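Dropping the explicit None throughout is safe because dict.get already defaults to None:

    d = {"k": 1}
    assert d.get("missing") is None          # same as d.get("missing", None)
    assert d.get("k", None) == d.get("k") == 1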
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -52,7 +52,7 @@ def resolve_dotnet_token(pe: dnfile.dnPE, token: Token) -> Union[dnfile.base.MDT
             return InvalidToken(token.value)
         return user_string

-    table: Optional[dnfile.base.ClrMetaDataTable] = pe.net.mdtables.tables.get(token.table, None)
+    table: Optional[dnfile.base.ClrMetaDataTable] = pe.net.mdtables.tables.get(token.table)
     if table is None:
         # table index is not valid
         return InvalidToken(token.value)
@@ -204,7 +204,7 @@ def get_dotnet_managed_methods(pe: dnfile.dnPE) -> Iterator[DnType]:
             continue

         token: int = calculate_dotnet_token_value(method.table.number, method.row_index)
-        access: Optional[str] = accessor_map.get(token, None)
+        access: Optional[str] = accessor_map.get(token)

         method_name: str = method.row.Name
         if method_name.startswith(("get_", "set_")):
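calculate_dotnet_token_value() packs a metadata token from its table and row. .NET tokens put the table index in the top byte and the record id (RID) in the lower three bytes, so a sketch consistent with that layout is:

    def calculate_dotnet_token_value(table: int, rid: int) -> int:
        # token = table index in the high byte, RID below (standard .NET metadata layout)
        return (table << 24) | rid

    assert hex(calculate_dotnet_token_value(0x06, 0x25)) == "0x6000025"  # MethodDef table, row 0x25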
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,7 +9,7 @@
 from __future__ import annotations

 import logging
-from typing import TYPE_CHECKING, Any, Dict, Tuple, Union, Iterator, Optional
+from typing import TYPE_CHECKING, Tuple, Union, Iterator, Optional

 if TYPE_CHECKING:
     from capa.features.extractors.dnfile.extractor import DnFileFeatureExtractorCache
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,11 +6,10 @@
 # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and limitations under the License.

 from enum import Enum
-from typing import Union, Optional
+from typing import Optional


-class DnType(object):
+class DnType:
     def __init__(self, token: int, class_: str, namespace: str = "", member: str = "", access: Optional[str] = None):
         self.token: int = token
         self.access: Optional[str] = access
@@ -1,5 +1,13 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 import logging
 from typing import Tuple, Iterator
+from pathlib import Path

 import dnfile
 import pefile
@@ -74,10 +82,10 @@ GLOBAL_HANDLERS = (


 class DnfileFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
         super().__init__()
-        self.path: str = path
-        self.pe: dnfile.dnPE = dnfile.dnPE(path)
+        self.path: Path = path
+        self.pe: dnfile.dnPE = dnfile.dnPE(str(path))

     def get_base_address(self) -> AbsoluteVirtualAddress:
         return AbsoluteVirtualAddress(0x0)
@@ -1,5 +1,13 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
 import logging
-from typing import Tuple, Iterator, cast
+from typing import Tuple, Iterator
+from pathlib import Path

 import dnfile
 import pefile
@@ -158,10 +166,10 @@ GLOBAL_HANDLERS = (


 class DotnetFileFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
         super().__init__()
-        self.path: str = path
-        self.pe: dnfile.dnPE = dnfile.dnPE(path)
+        self.path: Path = path
+        self.pe: dnfile.dnPE = dnfile.dnPE(str(path))

     def get_base_address(self):
         return NO_ADDRESS
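The str -> Path migration running through these extractors trades open()/read() boilerplate for pathlib, while str(path) bridges to libraries such as dnfile that still expect a string. The pattern in isolation:

    from pathlib import Path

    def load(path: Path) -> bytes:
        # replaces: with open(path, "rb") as f: buf = f.read()
        return path.read_bytes()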
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -13,6 +13,8 @@ from enum import Enum
 from typing import Set, Dict, List, Tuple, BinaryIO, Iterator, Optional
 from dataclasses import dataclass

+import Elf  # from vivisect
+
 logger = logging.getLogger(__name__)


@@ -24,7 +26,7 @@ def align(v, alignment):
     return v + (alignment - remainder)


-def read_cstr(buf, offset):
+def read_cstr(buf, offset) -> str:
     s = buf[offset:]
     s, _, _ = s.partition(b"\x00")
     return s.decode("utf-8")
@@ -54,6 +56,7 @@ class OS(str, Enum):
     CLOUD = "cloud"
     SYLLABLE = "syllable"
     NACL = "nacl"
+    ANDROID = "android"


 # via readelf: https://github.com/bminor/binutils-gdb/blob/c0e94211e1ac05049a4ce7c192c9d14d1764eb3e/binutils/readelf.c#L19635-L19658
@@ -91,6 +94,20 @@ class Shdr:
     entsize: int
     buf: bytes

+    @classmethod
+    def from_viv(cls, section, buf: bytes) -> "Shdr":
+        return cls(
+            section.sh_name,
+            section.sh_type,
+            section.sh_flags,
+            section.sh_addr,
+            section.sh_offset,
+            section.sh_size,
+            section.sh_link,
+            section.sh_entsize,
+            buf,
+        )
+

 class ELF:
     def __init__(self, f: BinaryIO):
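read_cstr(), now annotated to return str, splits at the first NUL, which is exactly how names are pulled out of an ELF string table blob:

    def read_cstr(buf, offset) -> str:
        s = buf[offset:]
        s, _, _ = s.partition(b"\x00")
        return s.decode("utf-8")

    assert read_cstr(b"libc.so.6\x00libm.so.6\x00", 10) == "libm.so.6"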
@@ -397,7 +414,7 @@ class ELF:
         # there should be vn_cnt of these.
         # each entry describes an ABI name required by the shared object.
         vna_offset = vn_offset + vn_aux
-        for i in range(vn_cnt):
+        for _ in range(vn_cnt):
             # ElfXX_Vernaux layout is the same on 32 and 64 bit
             _, _, _, vna_name, vna_next = struct.unpack_from(self.endian + "IHHII", shdr.buf, vna_offset)

@@ -458,10 +475,12 @@ class ELF:
         for d_tag, d_val in self.dynamic_entries:
             if d_tag == DT_STRTAB:
                 strtab_addr = d_val
+                break

         for d_tag, d_val in self.dynamic_entries:
             if d_tag == DT_STRSZ:
                 strtab_size = d_val
+                break

         if strtab_addr is None:
             return None
@@ -471,8 +490,10 @@ class ELF:

         strtab_offset = None
         for shdr in self.section_headers:
-            if shdr.addr <= strtab_addr < shdr.addr + shdr.size:
+            # the section header address should be defined
+            if shdr.addr and shdr.addr <= strtab_addr < shdr.addr + shdr.size:
                 strtab_offset = shdr.offset + (strtab_addr - shdr.addr)
                 break

         if strtab_offset is None:
             return None
@@ -501,7 +522,10 @@ class ELF:
             if d_tag != DT_NEEDED:
                 continue

-            yield read_cstr(strtab, d_val)
+            try:
+                yield read_cstr(strtab, d_val)
+            except UnicodeDecodeError as e:
+                logger.warning("failed to read DT_NEEDED entry: %s", str(e))

     @property
     def symtab(self) -> Optional[Tuple[Shdr, Shdr]]:
@@ -651,6 +675,9 @@ class SymTab:
         return the symbol's information in
         the order specified by sys/elf32.h
         """
+        if self.symtab.entsize == 0:
+            return
+
         for i in range(int(len(self.symtab.buf) / self.symtab.entsize)):
             if bitness == 32:
                 name_offset, value, size, info, other, shndx = struct.unpack_from(
@@ -658,7 +685,7 @@ class SymTab:
                 )
             elif bitness == 64:
                 name_offset, info, other, shndx, value, size = struct.unpack_from(
-                    endian + "IBBBQQ", symtab_buf, i * self.symtab.entsize
+                    endian + "IBBHQQ", symtab_buf, i * self.symtab.entsize
                 )

             self.symbols.append(Symbol(name_offset, value, size, info, other, shndx))
@@ -682,8 +709,30 @@ class SymTab:
         return a tuple: (name, value, size, info, other, shndx)
         for each symbol contained in the symbol table
         """
-        for symbol in self.symbols:
-            yield symbol
+        yield from self.symbols
+
+    @classmethod
+    def from_viv(cls, elf: Elf.Elf) -> Optional["SymTab"]:
+        endian = "<" if elf.getEndian() == 0 else ">"
+        bitness = elf.bits
+
+        SHT_SYMTAB = 0x2
+        for section in elf.sections:
+            if section.sh_type == SHT_SYMTAB:
+                strtab_section = elf.sections[section.sh_link]
+                sh_symtab = Shdr.from_viv(section, elf.readAtOffset(section.sh_offset, section.sh_size))
+                sh_strtab = Shdr.from_viv(
+                    strtab_section, elf.readAtOffset(strtab_section.sh_offset, strtab_section.sh_size)
+                )
+
+                try:
+                    return cls(endian, bitness, sh_symtab, sh_strtab)
+                except NameError:
+                    return None
+                except Exception:
+                    # all exceptions that could be encountered by
+                    # cls._parse() imply a faulty symbol's table.
+                    raise CorruptElfFile("malformed symbol's table")
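The IBBBQQ -> IBBHQQ fix matters because Elf64_Sym is st_name (u32), st_info (u8), st_other (u8), st_shndx (u16), st_value (u64), st_size (u64), for 24 bytes total; the old format string read st_shndx as a single byte:

    import struct

    assert struct.calcsize("<IBBHQQ") == 24  # matches sizeof(Elf64_Sym)
    assert struct.calcsize("<IBBBQQ") == 23  # one byte short, skewing every later field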
 def guess_os_from_osabi(elf: ELF) -> Optional[OS]:
@@ -718,6 +767,11 @@ def guess_os_from_ph_notes(elf: ELF) -> Optional[OS]:
     elif note.name == "FreeBSD":
         logger.debug("note owner: %s", "FREEBSD")
         return OS.FREEBSD
+    elif note.name == "Android":
+        logger.debug("note owner: %s", "Android")
+        # see the following for parsing the structure:
+        # https://android.googlesource.com/platform/ndk/+/master/parse_elfnote.py
+        return OS.ANDROID
     elif note.name == "GNU":
         abi_tag = note.abi_tag
         if abi_tag:
@@ -777,7 +831,7 @@ def guess_os_from_abi_versions_needed(elf: ELF) -> Optional[OS]:
     # this will let us guess about linux/hurd in some cases.

     versions_needed = elf.versions_needed
-    if any(map(lambda abi: abi.startswith("GLIBC"), itertools.chain(*versions_needed.values()))):
+    if any(abi.startswith("GLIBC") for abi in itertools.chain(*versions_needed.values())):
         # there are any GLIBC versions needed

         if elf.e_machine != "i386":
@@ -809,6 +863,8 @@ def guess_os_from_needed_dependencies(elf: ELF) -> Optional[OS]:
             return OS.HURD
         if needed.startswith("libhurduser.so"):
             return OS.HURD
+        if needed.startswith("libandroid.so"):
+            return OS.ANDROID

     return None

@@ -834,7 +890,7 @@ def guess_os_from_symtab(elf: ELF) -> Optional[OS]:
         sym_name = symtab.get_name(symbol)

         for os, hints in keywords.items():
-            if any(map(lambda x: x in sym_name, hints)):
+            if any(hint in sym_name for hint in hints):
                 return os

     return None
@@ -842,30 +898,62 @@ def guess_os_from_symtab(elf: ELF) -> Optional[OS]:

 def detect_elf_os(f) -> str:
     """
-    f: type Union[BinaryIO, IDAIO]
+    f: type Union[BinaryIO, IDAIO, GHIDRAIO]
     """
-    elf = ELF(f)
+    try:
+        elf = ELF(f)
+    except Exception as e:
+        logger.warning("Error parsing ELF file: %s", e)
+        return "unknown"

-    osabi_guess = guess_os_from_osabi(elf)
-    logger.debug("guess: osabi: %s", osabi_guess)
+    try:
+        osabi_guess = guess_os_from_osabi(elf)
+        logger.debug("guess: osabi: %s", osabi_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from OSABI: %s", e)
+        osabi_guess = None

-    ph_notes_guess = guess_os_from_ph_notes(elf)
-    logger.debug("guess: ph notes: %s", ph_notes_guess)
+    try:
+        ph_notes_guess = guess_os_from_ph_notes(elf)
+        logger.debug("guess: ph notes: %s", ph_notes_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from program header notes: %s", e)
+        ph_notes_guess = None

-    sh_notes_guess = guess_os_from_sh_notes(elf)
-    logger.debug("guess: sh notes: %s", sh_notes_guess)
+    try:
+        sh_notes_guess = guess_os_from_sh_notes(elf)
+        logger.debug("guess: sh notes: %s", sh_notes_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from section header notes: %s", e)
+        sh_notes_guess = None

-    linker_guess = guess_os_from_linker(elf)
-    logger.debug("guess: linker: %s", linker_guess)
+    try:
+        linker_guess = guess_os_from_linker(elf)
+        logger.debug("guess: linker: %s", linker_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from linker: %s", e)
+        linker_guess = None

-    abi_versions_needed_guess = guess_os_from_abi_versions_needed(elf)
-    logger.debug("guess: ABI versions needed: %s", abi_versions_needed_guess)
+    try:
+        abi_versions_needed_guess = guess_os_from_abi_versions_needed(elf)
+        logger.debug("guess: ABI versions needed: %s", abi_versions_needed_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from ABI versions needed: %s", e)
+        abi_versions_needed_guess = None

-    needed_dependencies_guess = guess_os_from_needed_dependencies(elf)
-    logger.debug("guess: needed dependencies: %s", needed_dependencies_guess)
+    try:
+        needed_dependencies_guess = guess_os_from_needed_dependencies(elf)
+        logger.debug("guess: needed dependencies: %s", needed_dependencies_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from needed dependencies: %s", e)
+        needed_dependencies_guess = None

-    symtab_guess = guess_os_from_symtab(elf)
-    logger.debug("guess: pertinent symbol name: %s", symtab_guess)
+    try:
+        symtab_guess = guess_os_from_symtab(elf)
+        logger.debug("guess: pertinent symbol name: %s", symtab_guess)
+    except Exception as e:
+        logger.warning("Error guessing OS from symbol table: %s", e)
+        symtab_guess = None

     ret = None
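Each guesser is now isolated in its own try/except so one malformed structure cannot sink the whole OS detection. The repeated pattern condenses to a small helper (an illustrative sketch, not capa's API):

    import logging

    logger = logging.getLogger(__name__)

    def safe_guess(guesser, elf):
        try:
            result = guesser(elf)
            logger.debug("guess: %s: %s", guesser.__name__, result)
            return result
        except Exception as e:
            logger.warning("Error guessing OS via %s: %s", guesser.__name__, e)
            return None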
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,11 +8,13 @@
 import io
 import logging
 from typing import Tuple, Iterator
+from pathlib import Path

 from elftools.elf.elffile import ELFFile, SymbolTableSection
+from elftools.elf.relocation import RelocationSection

 import capa.features.extractors.common
-from capa.features.file import Import, Section
+from capa.features.file import Export, Import, Section
 from capa.features.common import OS, FORMAT_ELF, Arch, Format, Feature
 from capa.features.address import NO_ADDRESS, FileOffsetAddress, AbsoluteVirtualAddress
 from capa.features.extractors.base_extractor import FeatureExtractor
@@ -20,11 +22,8 @@ from capa.features.extractors.base_extractor import FeatureExtractor
 logger = logging.getLogger(__name__)


-def extract_file_import_names(elf, **kwargs):
-    # see https://github.com/eliben/pyelftools/blob/0664de05ed2db3d39041e2d51d19622a8ef4fb0f/scripts/readelf.py#L372
-    symbol_tables = [(idx, s) for idx, s in enumerate(elf.iter_sections()) if isinstance(s, SymbolTableSection)]
-
-    for _, section in symbol_tables:
+def extract_file_export_names(elf: ELFFile, **kwargs):
+    for section in elf.iter_sections():
+        if not isinstance(section, SymbolTableSection):
+            continue
@@ -34,14 +33,64 @@ def extract_file_import_names(elf, **kwargs):

         logger.debug("Symbol table '%s' contains %s entries:", section.name, section.num_symbols())

         for symbol in section.iter_symbols():
-            if symbol.name and symbol.entry.st_info.type == "STT_FUNC":
-                # TODO symbol address
-                # TODO symbol version info?
-                yield Import(symbol.name), FileOffsetAddress(0x0)
+            # The following conditions are based on the following article
+            # http://www.m4b.io/elf/export/binary/analysis/2015/05/25/what-is-an-elf-export.html
+            if not symbol.name:
+                continue
+            if symbol.entry.st_info.type not in ["STT_FUNC", "STT_OBJECT", "STT_IFUNC"]:
+                continue
+            if symbol.entry.st_value == 0:
+                continue
+            if symbol.entry.st_shndx == "SHN_UNDEF":
+                continue
+
+            yield Export(symbol.name), AbsoluteVirtualAddress(symbol.entry.st_value)
+
+
+def extract_file_import_names(elf: ELFFile, **kwargs):
+    # Create a dictionary to store symbol names by their index
+    symbol_names = {}
+
+    # Extract symbol names and store them in the dictionary
+    for section in elf.iter_sections():
+        if not isinstance(section, SymbolTableSection):
+            continue
+
+        for _, symbol in enumerate(section.iter_symbols()):
+            # The following conditions are based on the following article
+            # http://www.m4b.io/elf/export/binary/analysis/2015/05/25/what-is-an-elf-export.html
+            if not symbol.name:
+                continue
+            if symbol.entry.st_info.type not in ["STT_FUNC", "STT_OBJECT", "STT_IFUNC"]:
+                continue
+            if symbol.entry.st_value != 0:
+                continue
+            if symbol.entry.st_shndx != "SHN_UNDEF":
+                continue
+            if symbol.entry.st_name == 0:
+                continue
+
+            symbol_names[_] = symbol.name
+
+    for section in elf.iter_sections():
+        if not isinstance(section, RelocationSection):
+            continue
+
+        if section["sh_entsize"] == 0:
+            logger.debug("Symbol table '%s' has a sh_entsize of zero!", section.name)
+            continue
+
+        logger.debug("Symbol table '%s' contains %s entries:", section.name, section.num_relocations())
+
+        for relocation in section.iter_relocations():
+            # Extract the symbol name from the symbol table using the symbol index in the relocation
+            if relocation["r_info_sym"] not in symbol_names:
+                continue
+            yield Import(symbol_names[relocation["r_info_sym"]]), FileOffsetAddress(relocation["r_offset"])


-def extract_file_section_names(elf, **kwargs):
+def extract_file_section_names(elf: ELFFile, **kwargs):
     for section in elf.iter_sections():
         if section.name:
             yield Section(section.name), AbsoluteVirtualAddress(section.header.sh_addr)
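The new import extraction joins two passes: collect the names of undefined symbols, then resolve each relocation's r_info_sym index against that map. The join in miniature, with made-up data:

    symbol_names = {1: "malloc", 2: "memcpy"}  # symbol index -> undefined symbol name
    relocations = [
        {"r_info_sym": 2, "r_offset": 0x4018},
        {"r_info_sym": 9, "r_offset": 0x4020},  # no matching symbol; skipped
    ]

    imports = [
        (symbol_names[r["r_info_sym"]], r["r_offset"])
        for r in relocations
        if r["r_info_sym"] in symbol_names
    ]
    assert imports == [("memcpy", 0x4018)]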
@@ -53,7 +102,7 @@ def extract_file_strings(buf, **kwargs):
     yield from capa.features.extractors.common.extract_file_strings(buf)


-def extract_file_os(elf, buf, **kwargs):
+def extract_file_os(elf: ELFFile, buf, **kwargs):
     # our current approach does not always get an OS value, e.g. for packed samples
     # for file limitation purposes, we're more lax here
     try:
@@ -67,8 +116,7 @@ def extract_file_format(**kwargs):
     yield Format(FORMAT_ELF), NO_ADDRESS


-def extract_file_arch(elf, **kwargs):
-    # TODO merge with capa.features.extractors.elf.detect_elf_arch()
+def extract_file_arch(elf: ELFFile, **kwargs):
     arch = elf.get_machine_arch()
     if arch == "x86":
         yield Arch("i386"), NO_ADDRESS
@@ -85,7 +133,7 @@ def extract_file_features(elf: ELFFile, buf: bytes) -> Iterator[Tuple[Feature, i


 FILE_HANDLERS = (
-    # TODO extract_file_export_names,
+    extract_file_export_names,
     extract_file_import_names,
     extract_file_section_names,
     extract_file_strings,
@@ -107,11 +155,10 @@ GLOBAL_HANDLERS = (


 class ElfFeatureExtractor(FeatureExtractor):
-    def __init__(self, path: str):
+    def __init__(self, path: Path):
         super().__init__()
-        self.path = path
-        with open(self.path, "rb") as f:
-            self.elf = ELFFile(io.BytesIO(f.read()))
+        self.path: Path = path
+        self.elf = ELFFile(io.BytesIO(path.read_bytes()))

     def get_base_address(self):
         # virtual address of the first segment with type LOAD
@@ -120,15 +167,13 @@ class ElfFeatureExtractor(FeatureExtractor):
         return AbsoluteVirtualAddress(segment.header.p_vaddr)

     def extract_global_features(self):
-        with open(self.path, "rb") as f:
-            buf = f.read()
+        buf = self.path.read_bytes()

         for feature, addr in extract_global_features(self.elf, buf):
             yield feature, addr

     def extract_file_features(self):
-        with open(self.path, "rb") as f:
-            buf = f.read()
+        buf = self.path.read_bytes()

         for feature, addr in extract_file_features(self.elf, buf):
             yield feature, addr
0	capa/features/extractors/ghidra/__init__.py	Normal file
152	capa/features/extractors/ghidra/basicblock.py	Normal file
@@ -0,0 +1,152 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.

import string
import struct
from typing import Tuple, Iterator

import ghidra
from ghidra.program.model.lang import OperandType

import capa.features.extractors.ghidra.helpers
from capa.features.common import Feature, Characteristic
from capa.features.address import Address
from capa.features.basicblock import BasicBlock
from capa.features.extractors.helpers import MIN_STACKSTRING_LEN
from capa.features.extractors.base_extractor import BBHandle, FunctionHandle


def get_printable_len(op: ghidra.program.model.scalar.Scalar) -> int:
    """Return string length if all operand bytes are ascii or utf16-le printable"""
    op_bit_len = op.bitLength()
    op_byte_len = op_bit_len // 8
    op_val = op.getValue()

    if op_bit_len == 8:
        chars = struct.pack("<B", op_val & 0xFF)
    elif op_bit_len == 16:
        chars = struct.pack("<H", op_val & 0xFFFF)
    elif op_bit_len == 32:
        chars = struct.pack("<I", op_val & 0xFFFFFFFF)
    elif op_bit_len == 64:
        chars = struct.pack("<Q", op_val & 0xFFFFFFFFFFFFFFFF)
    else:
        raise ValueError(f"Unhandled operand data type 0x{op_bit_len:x}.")

    def is_printable_ascii(chars_: bytes):
        return all(c < 127 and chr(c) in string.printable for c in chars_)

    def is_printable_utf16le(chars_: bytes):
        if all(c == 0x00 for c in chars_[1::2]):
            return is_printable_ascii(chars_[::2])

    if is_printable_ascii(chars):
        return op_byte_len

    if is_printable_utf16le(chars):
        return op_byte_len

    return 0


def is_mov_imm_to_stack(insn: ghidra.program.database.code.InstructionDB) -> bool:
    """verify instruction moves immediate onto stack"""

    # Ghidra will Bitwise OR the OperandTypes to assign multiple
    # i.e., the first operand is a stackvar (dynamically allocated),
    # and the second is a scalar value (single int/char/float/etc.)
    mov_its_ops = [(OperandType.ADDRESS | OperandType.DYNAMIC), OperandType.SCALAR]
    found = False

    # MOV dword ptr [EBP + local_*], 0x65
    if insn.getMnemonicString().startswith("MOV"):
        found = all(insn.getOperandType(i) == mov_its_ops[i] for i in range(2))

    return found


def bb_contains_stackstring(bb: ghidra.program.model.block.CodeBlock) -> bool:
    """check basic block for stackstring indicators

    true if basic block contains enough moves of constant bytes to the stack
    """
    count = 0
    for insn in currentProgram().getListing().getInstructions(bb, True):  # type: ignore [name-defined] # noqa: F821
        if is_mov_imm_to_stack(insn):
            count += get_printable_len(insn.getScalar(1))
        if count > MIN_STACKSTRING_LEN:
            return True
    return False


def _bb_has_tight_loop(bb: ghidra.program.model.block.CodeBlock):
    """
    parse tight loops, true if last instruction in basic block branches to bb start
    """
    # Reverse Ordered, first InstructionDB
    last_insn = currentProgram().getListing().getInstructions(bb, False).next()  # type: ignore [name-defined] # noqa: F821

    if last_insn.getFlowType().isJump():
        return last_insn.getAddress(0) == bb.getMinAddress()

    return False


def extract_bb_stackstring(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
    """extract stackstring indicators from basic block"""
    bb: ghidra.program.model.block.CodeBlock = bbh.inner

    if bb_contains_stackstring(bb):
        yield Characteristic("stack string"), bbh.address


def extract_bb_tight_loop(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
    """check basic block for tight loop indicators"""
    bb: ghidra.program.model.block.CodeBlock = bbh.inner

    if _bb_has_tight_loop(bb):
        yield Characteristic("tight loop"), bbh.address


BASIC_BLOCK_HANDLERS = (
    extract_bb_tight_loop,
    extract_bb_stackstring,
)


def extract_features(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
    """
    extract features from the given basic block.

    args:
        bb: the basic block to process.

    yields:
        Tuple[Feature, int]: the features and their location found in this basic block.
    """
    yield BasicBlock(), bbh.address
    for bb_handler in BASIC_BLOCK_HANDLERS:
        for feature, addr in bb_handler(fh, bbh):
            yield feature, addr


def main():
    features = []
    from capa.features.extractors.ghidra.extractor import GhidraFeatureExtractor

    for fh in GhidraFeatureExtractor().get_functions():
        for bbh in capa.features.extractors.ghidra.helpers.get_function_blocks(fh):
            features.extend(list(extract_features(fh, bbh)))

    import pprint

    pprint.pprint(features)  # noqa: T203


if __name__ == "__main__":
    main()
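For orientation, here is a standalone sketch of the printable-length check that get_printable_len performs above: the immediate is packed little-endian and each byte is tested against string.printable (plain Python, no Ghidra required):

    import string
    import struct

    # 0x64636261 packs little-endian to b"abcd", so a stackstring scan
    # would credit this MOV-immediate with 4 printable bytes.
    chars = struct.pack("<I", 0x64636261)
    assert chars == b"abcd"
    assert all(c < 127 and chr(c) in string.printable for c in chars)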
75   capa/features/extractors/ghidra/extractor.py   Normal file
@@ -0,0 +1,75 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import List, Tuple, Iterator

import capa.features.extractors.ghidra.file
import capa.features.extractors.ghidra.insn
import capa.features.extractors.ghidra.global_
import capa.features.extractors.ghidra.function
import capa.features.extractors.ghidra.basicblock
from capa.features.common import Feature
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle, FeatureExtractor


class GhidraFeatureExtractor(FeatureExtractor):
    def __init__(self):
        super().__init__()
        import capa.features.extractors.ghidra.helpers as ghidra_helpers

        self.global_features: List[Tuple[Feature, Address]] = []
        self.global_features.extend(capa.features.extractors.ghidra.file.extract_file_format())
        self.global_features.extend(capa.features.extractors.ghidra.global_.extract_os())
        self.global_features.extend(capa.features.extractors.ghidra.global_.extract_arch())
        self.imports = ghidra_helpers.get_file_imports()
        self.externs = ghidra_helpers.get_file_externs()
        self.fakes = ghidra_helpers.map_fake_import_addrs()

    def get_base_address(self):
        return AbsoluteVirtualAddress(currentProgram().getImageBase().getOffset())  # type: ignore [name-defined] # noqa: F821

    def extract_global_features(self):
        yield from self.global_features

    def extract_file_features(self):
        yield from capa.features.extractors.ghidra.file.extract_features()

    def get_functions(self) -> Iterator[FunctionHandle]:
        import capa.features.extractors.ghidra.helpers as ghidra_helpers

        for fhandle in ghidra_helpers.get_function_symbols():
            fh: FunctionHandle = FunctionHandle(
                address=AbsoluteVirtualAddress(fhandle.getEntryPoint().getOffset()),
                inner=fhandle,
                ctx={"imports_cache": self.imports, "externs_cache": self.externs, "fakes_cache": self.fakes},
            )
            yield fh

    @staticmethod
    def get_function(addr: int) -> FunctionHandle:
        func = getFunctionContaining(toAddr(addr))  # type: ignore [name-defined] # noqa: F821
        return FunctionHandle(address=AbsoluteVirtualAddress(func.getEntryPoint().getOffset()), inner=func)

    def extract_function_features(self, fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
        yield from capa.features.extractors.ghidra.function.extract_features(fh)

    def get_basic_blocks(self, fh: FunctionHandle) -> Iterator[BBHandle]:
        import capa.features.extractors.ghidra.helpers as ghidra_helpers

        yield from ghidra_helpers.get_function_blocks(fh)

    def extract_basic_block_features(self, fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
        yield from capa.features.extractors.ghidra.basicblock.extract_features(fh, bbh)

    def get_instructions(self, fh: FunctionHandle, bbh: BBHandle) -> Iterator[InsnHandle]:
        import capa.features.extractors.ghidra.helpers as ghidra_helpers

        yield from ghidra_helpers.get_insn_in_range(bbh)

    def extract_insn_features(self, fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle):
        yield from capa.features.extractors.ghidra.insn.extract_features(fh, bbh, ih)
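GhidraFeatureExtractor relies on Ghidra's script-global builtins (currentProgram(), monitor(), and so on), so it only runs inside Ghidra's Python environment. A minimal sketch of driving it from a Ghidra script, mirroring the main() helpers elsewhere in this changeset:

    # minimal sketch; assumes execution inside Ghidra where currentProgram()
    # and friends are defined as script builtins
    from capa.features.extractors.ghidra.extractor import GhidraFeatureExtractor

    extractor = GhidraFeatureExtractor()
    print(extractor.get_base_address())
    for feature, addr in extractor.extract_file_features():
        print(feature, addr)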
202  capa/features/extractors/ghidra/file.py   Normal file
@@ -0,0 +1,202 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import re
import struct
from typing import List, Tuple, Iterator

from ghidra.program.model.symbol import SourceType, SymbolType

import capa.features.extractors.common
import capa.features.extractors.helpers
import capa.features.extractors.strings
import capa.features.extractors.ghidra.helpers
from capa.features.file import Export, Import, Section, FunctionName
from capa.features.common import FORMAT_PE, FORMAT_ELF, Format, String, Feature, Characteristic
from capa.features.address import NO_ADDRESS, Address, FileOffsetAddress, AbsoluteVirtualAddress

MAX_OFFSET_PE_AFTER_MZ = 0x200


def find_embedded_pe(block_bytez: bytes, mz_xor: List[Tuple[bytes, bytes, int]]) -> Iterator[Tuple[int, int]]:
    """check segment for embedded PE

    adapted for Ghidra from:
    https://github.com/vivisect/vivisect/blob/91e8419a861f49779f18316f155311967e696836/PE/carve.py#L25
    """
    todo = []

    for mzx, pex, i in mz_xor:
        for match in re.finditer(re.escape(mzx), block_bytez):
            todo.append((match.start(), mzx, pex, i))

    seg_max = len(block_bytez)  # type: ignore [name-defined] # noqa: F821
    while len(todo):
        off, mzx, pex, i = todo.pop()

        # MZ header has one field we will check: e_lfanew is at 0x3c
        e_lfanew = off + 0x3C

        if seg_max < e_lfanew + 4:
            continue

        e_lfanew_bytes = block_bytez[e_lfanew : e_lfanew + 4]
        newoff = struct.unpack("<I", capa.features.extractors.helpers.xor_static(e_lfanew_bytes, i))[0]

        # assume XOR'd "PE" bytes exist within threshold
        if newoff > MAX_OFFSET_PE_AFTER_MZ:
            continue

        peoff = off + newoff
        if seg_max < peoff + 2:
            continue

        pe_bytes = block_bytez[peoff : peoff + 2]
        if pe_bytes == pex:
            yield off, i


def extract_file_embedded_pe() -> Iterator[Tuple[Feature, Address]]:
    """extract embedded PE features"""

    # pre-compute XOR pairs
    mz_xor: List[Tuple[bytes, bytes, int]] = [
        (
            capa.features.extractors.helpers.xor_static(b"MZ", i),
            capa.features.extractors.helpers.xor_static(b"PE", i),
            i,
        )
        for i in range(256)
    ]

    for block in currentProgram().getMemory().getBlocks():  # type: ignore [name-defined] # noqa: F821
        if not all((block.isLoaded(), block.isInitialized(), "Headers" not in block.getName())):
            continue

        for off, _ in find_embedded_pe(capa.features.extractors.ghidra.helpers.get_block_bytes(block), mz_xor):
            # add offset back to block start
            ea: int = block.getStart().add(off).getOffset()

            yield Characteristic("embedded pe"), FileOffsetAddress(ea)


def extract_file_export_names() -> Iterator[Tuple[Feature, Address]]:
    """extract function exports"""
    st = currentProgram().getSymbolTable()  # type: ignore [name-defined] # noqa: F821
    for addr in st.getExternalEntryPointIterator():
        yield Export(st.getPrimarySymbol(addr).getName()), AbsoluteVirtualAddress(addr.getOffset())


def extract_file_import_names() -> Iterator[Tuple[Feature, Address]]:
    """extract function imports

    1. imports by ordinal:
     - modulename.#ordinal

    2. imports by name, results in two features to support importname-only
       matching:
     - modulename.importname
     - importname
    """

    for f in currentProgram().getFunctionManager().getExternalFunctions():  # type: ignore [name-defined] # noqa: F821
        for r in f.getSymbol().getReferences():
            if r.getReferenceType().isData():
                addr = r.getFromAddress().getOffset()  # gets pointer to fake external addr

        fstr = f.toString().split("::")  # format: MODULE.dll::import / MODULE::Ordinal_*
        if "Ordinal_" in fstr[1]:
            fstr[1] = f"#{fstr[1].split('_')[1]}"

        for name in capa.features.extractors.helpers.generate_symbols(fstr[0][:-4], fstr[1]):
            yield Import(name), AbsoluteVirtualAddress(addr)


def extract_file_section_names() -> Iterator[Tuple[Feature, Address]]:
    """extract section names"""

    for block in currentProgram().getMemory().getBlocks():  # type: ignore [name-defined] # noqa: F821
        yield Section(block.getName()), AbsoluteVirtualAddress(block.getStart().getOffset())


def extract_file_strings() -> Iterator[Tuple[Feature, Address]]:
    """extract ASCII and UTF-16 LE strings"""

    for block in currentProgram().getMemory().getBlocks():  # type: ignore [name-defined] # noqa: F821
        if block.isInitialized():
            p_bytes = capa.features.extractors.ghidra.helpers.get_block_bytes(block)

            for s in capa.features.extractors.strings.extract_ascii_strings(p_bytes):
                offset = block.getStart().getOffset() + s.offset
                yield String(s.s), FileOffsetAddress(offset)

            for s in capa.features.extractors.strings.extract_unicode_strings(p_bytes):
                offset = block.getStart().getOffset() + s.offset
                yield String(s.s), FileOffsetAddress(offset)


def extract_file_function_names() -> Iterator[Tuple[Feature, Address]]:
    """
    extract the names of statically-linked library functions.
    """

    for sym in currentProgram().getSymbolTable().getAllSymbols(True):  # type: ignore [name-defined] # noqa: F821
        # .isExternal() misses more than this config for the function symbols
        if sym.getSymbolType() == SymbolType.FUNCTION and sym.getSource() == SourceType.ANALYSIS and sym.isGlobal():
            name = sym.getName()  # starts to resolve names based on Ghidra's FidDB
            if name.startswith("FID_conflict:"):  # format: FID_conflict:<function-name>
                name = name[13:]
            addr = AbsoluteVirtualAddress(sym.getAddress().getOffset())
            yield FunctionName(name), addr
            if name.startswith("_"):
                # some linkers may prefix linked routines with a `_` to avoid name collisions.
                # extract features for both the mangled and un-mangled representations.
                # e.g. `_fwrite` -> `fwrite`
                # see: https://stackoverflow.com/a/2628384/87207
                yield FunctionName(name[1:]), addr


def extract_file_format() -> Iterator[Tuple[Feature, Address]]:
    ef = currentProgram().getExecutableFormat()  # type: ignore [name-defined] # noqa: F821
    if "PE" in ef:
        yield Format(FORMAT_PE), NO_ADDRESS
    elif "ELF" in ef:
        yield Format(FORMAT_ELF), NO_ADDRESS
    elif "Raw" in ef:
        # no file type to return when processing a binary file, but we want to continue processing
        return
    else:
        raise NotImplementedError(f"unexpected file format: {ef}")


def extract_features() -> Iterator[Tuple[Feature, Address]]:
    """extract file features"""
    for file_handler in FILE_HANDLERS:
        for feature, addr in file_handler():
            yield feature, addr


FILE_HANDLERS = (
    extract_file_embedded_pe,
    extract_file_export_names,
    extract_file_import_names,
    extract_file_section_names,
    extract_file_strings,
    extract_file_function_names,
    extract_file_format,
)


def main():
    """ """
    import pprint

    pprint.pprint(list(extract_features()))  # noqa: T203


if __name__ == "__main__":
    main()
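The embedded-PE carving above rests on one observation: a single-byte XOR key transforms "MZ" and "PE" consistently, so pre-computing all 256 pairs lets one regex pass over a block find every candidate. A self-contained sketch of that idea (xor_static re-implemented locally for illustration):

    def xor_static(data: bytes, key: int) -> bytes:
        # same idea as capa.features.extractors.helpers.xor_static
        return bytes(b ^ key for b in data)

    mz_xor = [(xor_static(b"MZ", i), xor_static(b"PE", i), i) for i in range(256)]
    assert mz_xor[0][0] == b"MZ"                       # key 0 is the plain header
    assert xor_static(mz_xor[0x42][0], 0x42) == b"MZ"  # any key round-trips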
73   capa/features/extractors/ghidra/function.py   Normal file
@@ -0,0 +1,73 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Tuple, Iterator

import ghidra
from ghidra.program.model.block import BasicBlockModel, SimpleBlockIterator

import capa.features.extractors.ghidra.helpers
from capa.features.common import Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors import loops
from capa.features.extractors.base_extractor import FunctionHandle


def extract_function_calls_to(fh: FunctionHandle):
    """extract callers to a function"""
    f: ghidra.program.database.function.FunctionDB = fh.inner
    for ref in f.getSymbol().getReferences():
        if ref.getReferenceType().isCall():
            yield Characteristic("calls to"), AbsoluteVirtualAddress(ref.getFromAddress().getOffset())


def extract_function_loop(fh: FunctionHandle):
    f: ghidra.program.database.function.FunctionDB = fh.inner

    edges = []
    for block in SimpleBlockIterator(BasicBlockModel(currentProgram()), f.getBody(), monitor()):  # type: ignore [name-defined] # noqa: F821
        dests = block.getDestinations(monitor())  # type: ignore [name-defined] # noqa: F821
        s_addrs = block.getStartAddresses()

        while dests.hasNext():  # For loop throws Python TypeError
            for addr in s_addrs:
                edges.append((addr.getOffset(), dests.next().getDestinationAddress().getOffset()))

    if loops.has_loop(edges):
        yield Characteristic("loop"), AbsoluteVirtualAddress(f.getEntryPoint().getOffset())


def extract_recursive_call(fh: FunctionHandle):
    f: ghidra.program.database.function.FunctionDB = fh.inner

    for func in f.getCalledFunctions(monitor()):  # type: ignore [name-defined] # noqa: F821
        if func.getEntryPoint().getOffset() == f.getEntryPoint().getOffset():
            yield Characteristic("recursive call"), AbsoluteVirtualAddress(f.getEntryPoint().getOffset())


def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
    for func_handler in FUNCTION_HANDLERS:
        for feature, addr in func_handler(fh):
            yield feature, addr


FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call)


def main():
    """ """
    features = []
    for fhandle in capa.features.extractors.ghidra.helpers.get_function_symbols():
        features.extend(list(extract_features(fhandle)))

    import pprint

    pprint.pprint(features)  # noqa: T203


if __name__ == "__main__":
    main()
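extract_function_loop reduces loop detection to cycle detection over (source, destination) edge pairs; the graph work is delegated to capa.features.extractors.loops.has_loop, which I believe is built on networkx. A standalone sketch of the equivalent check:

    import networkx as nx

    # a two-block loop: 0x401000 -> 0x401010 -> 0x401000
    edges = [(0x401000, 0x401010), (0x401010, 0x401000)]
    g = nx.DiGraph(edges)
    # a loop exists iff some strongly connected component spans >= 2 nodes
    # (or a block jumps to itself)
    assert any(len(c) >= 2 for c in nx.strongly_connected_components(g))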
67   capa/features/extractors/ghidra/global_.py   Normal file
@@ -0,0 +1,67 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
import contextlib
from typing import Tuple, Iterator

import capa.ghidra.helpers
import capa.features.extractors.elf
import capa.features.extractors.ghidra.helpers
from capa.features.common import OS, ARCH_I386, ARCH_AMD64, OS_WINDOWS, Arch, Feature
from capa.features.address import NO_ADDRESS, Address

logger = logging.getLogger(__name__)


def extract_os() -> Iterator[Tuple[Feature, Address]]:
    format_name: str = currentProgram().getExecutableFormat()  # type: ignore [name-defined] # noqa: F821

    if "PE" in format_name:
        yield OS(OS_WINDOWS), NO_ADDRESS

    elif "ELF" in format_name:
        with contextlib.closing(capa.ghidra.helpers.GHIDRAIO()) as f:
            os = capa.features.extractors.elf.detect_elf_os(f)

        yield OS(os), NO_ADDRESS

    else:
        # we likely end up here:
        # 1. handling shellcode, or
        # 2. handling a new file format (e.g. macho)
        #
        # for (1) we can't do much - its shellcode and all bets are off.
        # we could maybe accept a further CLI argument to specify the OS,
        # but i think this would be rarely used.
        # rules that rely on OS conditions will fail to match on shellcode.
        #
        # for (2), this logic will need to be updated as the format is implemented.
        logger.debug("unsupported file format: %s, will not guess OS", format_name)
        return


def extract_arch() -> Iterator[Tuple[Feature, Address]]:
    lang_id = currentProgram().getMetadata().get("Language ID")  # type: ignore [name-defined] # noqa: F821

    if "x86" in lang_id and "64" in lang_id:
        yield Arch(ARCH_AMD64), NO_ADDRESS

    elif "x86" in lang_id and "32" in lang_id:
        yield Arch(ARCH_I386), NO_ADDRESS

    elif "x86" not in lang_id:
        logger.debug("unsupported architecture: non-32-bit nor non-64-bit intel")
        return

    else:
        # we likely end up here:
        # 1. handling a new architecture (e.g. aarch64)
        #
        # for (1), this logic will need to be updated as the format is implemented.
        logger.debug("unsupported architecture: %s", lang_id)
        return
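extract_arch keys off Ghidra's "Language ID" metadata string, which encodes processor, endianness, and bit width. For reference, a plain-Python sketch of how the branches above classify representative values:

    # representative Ghidra language IDs (processor:endianness:bits:variant)
    for lang_id, expected in [("x86:LE:64:default", "amd64"), ("x86:LE:32:default", "i386")]:
        if "x86" in lang_id and "64" in lang_id:
            arch = "amd64"
        elif "x86" in lang_id and "32" in lang_id:
            arch = "i386"
        else:
            arch = None  # logged and skipped, e.g. ARM:LE:32:v8
        assert arch == expected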
277  capa/features/extractors/ghidra/helpers.py   Normal file
@@ -0,0 +1,277 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Dict, List, Iterator

import ghidra
import java.lang
from ghidra.program.model.lang import OperandType
from ghidra.program.model.block import BasicBlockModel, SimpleBlockIterator
from ghidra.program.model.symbol import SourceType, SymbolType
from ghidra.program.model.address import AddressSpace

import capa.features.extractors.helpers
from capa.features.common import THUNK_CHAIN_DEPTH_DELTA
from capa.features.address import AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle


def ints_to_bytes(bytez: List[int]) -> bytes:
    """convert Java signed ints to Python bytes

    args:
        bytez: list of Java signed ints
    """
    return bytes([b & 0xFF for b in bytez])


def find_byte_sequence(addr: ghidra.program.model.address.Address, seq: bytes) -> Iterator[int]:
    """yield all ea of a given byte sequence

    args:
        addr: start address
        seq: bytes to search e.g. b"\x01\x03"
    """
    seqstr = "".join([f"\\x{b:02x}" for b in seq])
    eas = findBytes(addr, seqstr, java.lang.Integer.MAX_VALUE, 1)  # type: ignore [name-defined] # noqa: F821

    yield from eas


def get_bytes(addr: ghidra.program.model.address.Address, length: int) -> bytes:
    """yield length bytes at addr

    args:
        addr: Address to begin pull from
        length: length of bytes to pull
    """
    try:
        return ints_to_bytes(getBytes(addr, length))  # type: ignore [name-defined] # noqa: F821
    except RuntimeError:
        return b""


def get_block_bytes(block: ghidra.program.model.mem.MemoryBlock) -> bytes:
    """yield all bytes in a given block

    args:
        block: MemoryBlock to pull from
    """
    return get_bytes(block.getStart(), block.getSize())


def get_function_symbols():
    """yield all non-external function symbols"""
    yield from currentProgram().getFunctionManager().getFunctionsNoStubs(True)  # type: ignore [name-defined] # noqa: F821


def get_function_blocks(fh: FunctionHandle) -> Iterator[BBHandle]:
    """yield BBHandle for each bb in a given function"""

    func: ghidra.program.database.function.FunctionDB = fh.inner
    for bb in SimpleBlockIterator(BasicBlockModel(currentProgram()), func.getBody(), monitor()):  # type: ignore [name-defined] # noqa: F821
        yield BBHandle(address=AbsoluteVirtualAddress(bb.getMinAddress().getOffset()), inner=bb)


def get_insn_in_range(bbh: BBHandle) -> Iterator[InsnHandle]:
    """yield InsnHandle for each insn in a given basicblock"""
    for insn in currentProgram().getListing().getInstructions(bbh.inner, True):  # type: ignore [name-defined] # noqa: F821
        yield InsnHandle(address=AbsoluteVirtualAddress(insn.getAddress().getOffset()), inner=insn)


def get_file_imports() -> Dict[int, List[str]]:
    """get all import names & addrs"""

    import_dict: Dict[int, List[str]] = {}

    for f in currentProgram().getFunctionManager().getExternalFunctions():  # type: ignore [name-defined] # noqa: F821
        for r in f.getSymbol().getReferences():
            if r.getReferenceType().isData():
                addr = r.getFromAddress().getOffset()  # gets pointer to fake external addr

        ex_loc = f.getExternalLocation().getAddress()  # map external locations as well (offset into module files)

        fstr = f.toString().split("::")  # format: MODULE.dll::import / MODULE::Ordinal_* / <EXTERNAL>::import
        if "Ordinal_" in fstr[1]:
            fstr[1] = f"#{fstr[1].split('_')[1]}"

        # <EXTERNAL> mostly shows up in ELF files, otherwise, strip '.dll' w/ [:-4]
        fstr[0] = "*" if "<EXTERNAL>" in fstr[0] else fstr[0][:-4]

        for name in capa.features.extractors.helpers.generate_symbols(fstr[0], fstr[1]):
            import_dict.setdefault(addr, []).append(name)
            if ex_loc:
                import_dict.setdefault(ex_loc.getOffset(), []).append(name)

    return import_dict


def get_file_externs() -> Dict[int, List[str]]:
    """
    Gets function names & addresses of statically-linked library functions

    Ghidra's external namespace is mostly reserved for dynamically-linked
    imports. Statically-linked functions are part of the global namespace.
    Filtering on the type, source, and namespace of the symbols yields more
    statically-linked library functions.

    Example: (PMA Lab 16-01.exe_) 7faafc7e4a5c736ebfee6abbbc812d80:0x407490
    - __aulldiv
    - Note: See Symbol Table labels
    """

    extern_dict: Dict[int, List[str]] = {}

    for sym in currentProgram().getSymbolTable().getAllSymbols(True):  # type: ignore [name-defined] # noqa: F821
        # .isExternal() misses more than this config for the function symbols
        if sym.getSymbolType() == SymbolType.FUNCTION and sym.getSource() == SourceType.ANALYSIS and sym.isGlobal():
            name = sym.getName()  # starts to resolve names based on Ghidra's FidDB
            if name.startswith("FID_conflict:"):  # format: FID_conflict:<function-name>
                name = name[13:]
            extern_dict.setdefault(sym.getAddress().getOffset(), []).append(name)
            if name.startswith("_"):
                # some linkers may prefix linked routines with a `_` to avoid name collisions.
                # extract features for both the mangled and un-mangled representations.
                # e.g. `_fwrite` -> `fwrite`
                # see: https://stackoverflow.com/a/2628384/87207
                extern_dict.setdefault(sym.getAddress().getOffset(), []).append(name[1:])

    return extern_dict


def map_fake_import_addrs() -> Dict[int, List[int]]:
    """
    Map Ghidra's fake import entrypoints to their
    real addresses

    Helps as many Ghidra Scripting API calls end up returning
    these external (fake) addresses.

    Undocumented but intended Ghidra behavior:
    - Import entryPoint fields are stored in the 'EXTERNAL:' AddressSpace.
      'getEntryPoint()' returns the entryPoint field, which is an offset
      from the beginning of the assigned AddressSpace. In the case of externals,
      they start from 1 and increment.
      https://github.com/NationalSecurityAgency/ghidra/blob/26d4bd9104809747c21f2528cab8aba9aef9acd5/Ghidra/Features/Base/src/test.slow/java/ghidra/program/database/function/ExternalFunctionDBTest.java#L90

    Example: (mimikatz.exe_) 5f66b82558ca92e54e77f216ef4c066c:0x473090
    - 0x473090 -> PTR_CreateServiceW_00473090
    - 'EXTERNAL:00000025' -> External Address (ghidra.program.model.address.SpecialAddress)
    """
    fake_dict: Dict[int, List[int]] = {}

    for f in currentProgram().getFunctionManager().getExternalFunctions():  # type: ignore [name-defined] # noqa: F821
        for r in f.getSymbol().getReferences():
            if r.getReferenceType().isData():
                fake_dict.setdefault(f.getEntryPoint().getOffset(), []).append(r.getFromAddress().getOffset())

    return fake_dict


def check_addr_for_api(
    addr: ghidra.program.model.address.Address,
    fakes: Dict[int, List[int]],
    imports: Dict[int, List[str]],
    externs: Dict[int, List[str]],
) -> bool:
    offset = addr.getOffset()

    fake = fakes.get(offset)
    if fake:
        return True

    imp = imports.get(offset)
    if imp:
        return True

    extern = externs.get(offset)
    if extern:
        return True

    return False


def is_call_or_jmp(insn: ghidra.program.database.code.InstructionDB) -> bool:
    return any(mnem in insn.getMnemonicString() for mnem in ["CALL", "J"])  # JMP, JNE, JNZ, etc


def is_sp_modified(insn: ghidra.program.database.code.InstructionDB) -> bool:
    for i in range(insn.getNumOperands()):
        if insn.getOperandType(i) == OperandType.REGISTER:
            return "SP" in insn.getRegister(i).getName() and insn.getOperandRefType(i).isWrite()
    return False


def is_stack_referenced(insn: ghidra.program.database.code.InstructionDB) -> bool:
    """generic catch-all for stack references"""
    for i in range(insn.getNumOperands()):
        if insn.getOperandType(i) == OperandType.REGISTER:
            if "BP" in insn.getRegister(i).getName():
                return True
            else:
                continue

    return any(ref.isStackReference() for ref in insn.getReferencesFrom())


def is_zxor(insn: ghidra.program.database.code.InstructionDB) -> bool:
    # assume XOR insn
    # XOR's against the same operand zero out
    ops = []
    operands = []
    for i in range(insn.getNumOperands()):
        ops.append(insn.getOpObjects(i))

    # Operands stored in a 2D array
    for j in range(len(ops)):
        for k in range(len(ops[j])):
            operands.append(ops[j][k])

    return all(n == operands[0] for n in operands)


def handle_thunk(addr: ghidra.program.model.address.Address):
    """Follow thunk chains down to a reasonable depth"""
    ref = addr
    for _ in range(THUNK_CHAIN_DEPTH_DELTA):
        thunk_jmp = getInstructionAt(ref)  # type: ignore [name-defined] # noqa: F821
        if thunk_jmp and is_call_or_jmp(thunk_jmp):
            if OperandType.isAddress(thunk_jmp.getOperandType(0)):
                ref = thunk_jmp.getAddress(0)
        else:
            thunk_dat = getDataContaining(ref)  # type: ignore [name-defined] # noqa: F821
            if thunk_dat and thunk_dat.isDefined() and thunk_dat.isPointer():
                ref = thunk_dat.getValue()
                break  # end of thunk chain reached
    return ref


def dereference_ptr(insn: ghidra.program.database.code.InstructionDB):
    addr_code = OperandType.ADDRESS | OperandType.CODE
    to_deref = insn.getAddress(0)
    dat = getDataContaining(to_deref)  # type: ignore [name-defined] # noqa: F821

    if insn.getOperandType(0) == addr_code:
        thfunc = getFunctionContaining(to_deref)  # type: ignore [name-defined] # noqa: F821
        if thfunc and thfunc.isThunk():
            return handle_thunk(to_deref)
        else:
            # if it doesn't point to a thunk, it's usually a jmp to a label
            return to_deref
    if not dat:
        return to_deref
    if dat.isDefined() and dat.isPointer():
        addr = dat.getValue()
        # now we need to check the addr space to see if it is truly resolvable
        # ghidra sometimes likes to hand us direct RAM addrs, which typically point
        # to api calls that we can't actually resolve as such
        if addr.getAddressSpace().getType() == AddressSpace.TYPE_RAM:
            return to_deref
        else:
            return addr
    else:
        return to_deref
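A quick standalone illustration of why ints_to_bytes masks with 0xFF: Ghidra's getBytes() crosses the Jython/Java boundary and hands back signed Java bytes, so 0xFF arrives as -1:

    def ints_to_bytes(bytez):
        # mask the sign extension away: -1 & 0xFF == 0xFF
        return bytes(b & 0xFF for b in bytez)

    assert ints_to_bytes([77, 90, -1, -128]) == b"MZ\xff\x80"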
521  capa/features/extractors/ghidra/insn.py   Normal file
@@ -0,0 +1,521 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Any, Dict, Tuple, Iterator

import ghidra
from ghidra.program.model.lang import OperandType
from ghidra.program.model.block import SimpleBlockModel

import capa.features.extractors.helpers
import capa.features.extractors.ghidra.helpers
from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
from capa.features.common import MAX_BYTES_FEATURE_SIZE, Bytes, String, Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle

# security cookie checks may perform non-zeroing XORs, these are expected within a certain
# byte range within the first and returning basic blocks, this helps to reduce FP features
SECURITY_COOKIE_BYTES_DELTA = 0x40


def get_imports(ctx: Dict[str, Any]) -> Dict[int, Any]:
    """Populate the import cache for this context"""
    if "imports_cache" not in ctx:
        ctx["imports_cache"] = capa.features.extractors.ghidra.helpers.get_file_imports()
    return ctx["imports_cache"]


def get_externs(ctx: Dict[str, Any]) -> Dict[int, Any]:
    """Populate the externs cache for this context"""
    if "externs_cache" not in ctx:
        ctx["externs_cache"] = capa.features.extractors.ghidra.helpers.get_file_externs()
    return ctx["externs_cache"]


def get_fakes(ctx: Dict[str, Any]) -> Dict[int, Any]:
    """Populate the fake import addrs cache for this context"""
    if "fakes_cache" not in ctx:
        ctx["fakes_cache"] = capa.features.extractors.ghidra.helpers.map_fake_import_addrs()
    return ctx["fakes_cache"]


def check_for_api_call(
    insn, externs: Dict[int, Any], fakes: Dict[int, Any], imports: Dict[int, Any], imp_or_ex: bool
) -> Iterator[Any]:
    """check instruction for API call

    params:
      externs - external library functions cache
      fakes - mapped fake import addresses cache
      imports - imported functions cache
      imp_or_ex - flag to check imports or externs

    yields:
      matched api calls
    """
    info = ()
    funcs = imports if imp_or_ex else externs

    # assume only CALLs or JMPs are passed
    ref_type = insn.getOperandType(0)
    addr_data = OperandType.ADDRESS | OperandType.DATA  # needs dereferencing
    addr_code = OperandType.ADDRESS | OperandType.CODE  # needs dereferencing

    if OperandType.isRegister(ref_type):
        if OperandType.isAddress(ref_type):
            # If it's an address in a register, check the mapped fake addrs
            # since they're dereferenced to their fake addrs
            op_ref = insn.getAddress(0).getOffset()
            ref = fakes.get(op_ref)  # obtain the real addr
            if not ref:
                return
        else:
            return
    elif ref_type in (addr_data, addr_code) or (OperandType.isIndirect(ref_type) and OperandType.isAddress(ref_type)):
        # we must dereference and check if the addr is a pointer to an api function
        addr_ref = capa.features.extractors.ghidra.helpers.dereference_ptr(insn)
        if not capa.features.extractors.ghidra.helpers.check_addr_for_api(addr_ref, fakes, imports, externs):
            return
        ref = addr_ref.getOffset()
    elif ref_type == OperandType.DYNAMIC | OperandType.ADDRESS or ref_type == OperandType.DYNAMIC:
        return  # cannot resolve dynamics statically
    else:
        # pure address does not need to get dereferenced/handled
        addr_ref = insn.getAddress(0)
        if not addr_ref:
            # If it returned null, it was an indirect
            # that had no address reference.
            # This check is faster than checking for (indirect and not address)
            return
        if not capa.features.extractors.ghidra.helpers.check_addr_for_api(addr_ref, fakes, imports, externs):
            return
        ref = addr_ref.getOffset()

    if isinstance(ref, list):  # ref from REG | ADDR
        for r in ref:
            info = funcs.get(r)  # type: ignore
            if info:
                yield info
    else:
        info = funcs.get(ref)  # type: ignore
        if info:
            yield info


def extract_insn_api_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
        return

    externs = get_externs(fh.ctx)
    fakes = get_fakes(fh.ctx)
    imports = get_imports(fh.ctx)

    # check calls to imported functions
    for api in check_for_api_call(insn, externs, fakes, imports, True):
        for imp in api:
            yield API(imp), ih.address

    # check calls to extern functions
    for api in check_for_api_call(insn, externs, fakes, imports, False):
        for ext in api:
            yield API(ext), ih.address


def extract_insn_number_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
    """
    parse instruction number features
    example:
        push    3136B0h         ; dwControlCode
    """
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if insn.getMnemonicString().startswith("RET"):
        # skip things like:
        #   .text:0042250E retn 8
        return

    if capa.features.extractors.ghidra.helpers.is_sp_modified(insn):
        # skip things like:
        #   .text:00401145 add esp, 0Ch
        return

    for i in range(insn.getNumOperands()):
        # Exceptions for LEA insn:
        # invalid operand encoding, considered numbers instead of offsets
        # see: mimikatz.exe_:0x4018C0
        if insn.getOperandType(i) == OperandType.DYNAMIC and insn.getMnemonicString().startswith("LEA"):
            # Additional check, avoid yielding "wide" values (ex. mimikatz.exe:0x471EE6 LEA EBX, [ECX + EAX*0x4])
            op_objs = insn.getOpObjects(i)
            if len(op_objs) == 3:  # ECX, EAX, 0x4
                continue

            if isinstance(op_objs[-1], ghidra.program.model.scalar.Scalar):
                const = op_objs[-1].getUnsignedValue()
                addr = ih.address

                yield Number(const), addr
                yield OperandNumber(i, const), addr
        elif not OperandType.isScalar(insn.getOperandType(i)):
            # skip things like:
            # references, void types
            continue
        else:
            const = insn.getScalar(i).getUnsignedValue()
            addr = ih.address

            yield Number(const), addr
            yield OperandNumber(i, const), addr

            if insn.getMnemonicString().startswith("ADD") and 0 < const < MAX_STRUCTURE_SIZE:
                # for pattern like:
                #
                #     add eax, 0x10
                #
                # assume 0x10 is also an offset (imagine eax is a pointer).
                yield Offset(const), addr
                yield OperandOffset(i, const), addr


def extract_insn_offset_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
    """
    parse instruction structure offset features

    example:
        .text:0040112F cmp [esi+4], ebx
    """
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if insn.getMnemonicString().startswith("LEA"):
        return

    # ignore any stack references
    if not capa.features.extractors.ghidra.helpers.is_stack_referenced(insn):
        # Ghidra stores operands in 2D arrays if they contain offsets
        for i in range(insn.getNumOperands()):
            if insn.getOperandType(i) == OperandType.DYNAMIC:  # e.g. [esi + 4]
                # manual extraction, since the default api calls only work on the 1st dimension of the array
                op_objs = insn.getOpObjects(i)
                if isinstance(op_objs[-1], ghidra.program.model.scalar.Scalar):
                    op_off = op_objs[-1].getValue()
                    yield Offset(op_off), ih.address
                    yield OperandOffset(i, op_off), ih.address
                else:
                    yield Offset(0), ih.address
                    yield OperandOffset(i, 0), ih.address


def extract_insn_bytes_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
    """
    parse referenced byte sequences
    example:
        push    offset iid_004118d4_IShellLinkA ; riid
    """
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
        return

    ref = insn.getAddress()  # init to insn addr
    for i in range(insn.getNumOperands()):
        if OperandType.isAddress(insn.getOperandType(i)):
            ref = insn.getAddress(i)  # pulls pointer if there is one

    if ref != insn.getAddress():  # bail out if there's no pointer
        ghidra_dat = getDataAt(ref)  # type: ignore [name-defined] # noqa: F821
        if (
            ghidra_dat and not ghidra_dat.hasStringValue() and not ghidra_dat.isPointer()
        ):  # avoid if the data itself is a pointer
            extracted_bytes = capa.features.extractors.ghidra.helpers.get_bytes(ref, MAX_BYTES_FEATURE_SIZE)
            if extracted_bytes and not capa.features.extractors.helpers.all_zeros(extracted_bytes):
                # don't extract byte features for obvious strings
                yield Bytes(extracted_bytes), ih.address


def extract_insn_string_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
    """
    parse instruction string features

    example:
        push offset aAcr     ; "ACR  > "
    """
    insn: ghidra.program.database.code.InstructionDB = ih.inner
    dyn_addr = OperandType.DYNAMIC | OperandType.ADDRESS

    ref = insn.getAddress()
    for i in range(insn.getNumOperands()):
        if OperandType.isScalarAsAddress(insn.getOperandType(i)):
            ref = insn.getAddress(i)
        # strings are also referenced dynamically via pointers & arrays, so we need to deref them
        if insn.getOperandType(i) == dyn_addr:
            ref = insn.getAddress(i)
            dat = getDataAt(ref)  # type: ignore [name-defined] # noqa: F821
            if dat and dat.isPointer():
                ref = dat.getValue()

    if ref != insn.getAddress():
        ghidra_dat = getDataAt(ref)  # type: ignore [name-defined] # noqa: F821
        if ghidra_dat and ghidra_dat.hasStringValue():
            yield String(ghidra_dat.getValue()), ih.address


def extract_insn_mnemonic_features(
    fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
    """parse instruction mnemonic features"""
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    yield Mnemonic(insn.getMnemonicString().lower()), ih.address


def extract_insn_obfs_call_plus_5_characteristic_features(
    fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
    """
    parse call $+5 instruction from the given instruction.
    """
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
        return

    code_ref = OperandType.ADDRESS | OperandType.CODE
    ref = insn.getAddress()
    for i in range(insn.getNumOperands()):
        if insn.getOperandType(i) == code_ref:
            ref = insn.getAddress(i)

    if insn.getAddress().add(5) == ref:
        yield Characteristic("call $+5"), ih.address


def extract_insn_segment_access_features(
    fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
    """parse instruction fs or gs access"""
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    insn_str = insn.toString()

    if "FS:" in insn_str:
        yield Characteristic("fs access"), ih.address

    if "GS:" in insn_str:
        yield Characteristic("gs access"), ih.address


def extract_insn_peb_access_characteristic_features(
    fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
    """parse instruction peb access

    fs:[0x30] on x86, gs:[0x60] on x64
    """
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    insn_str = insn.toString()
    if insn_str.startswith(("PUSH", "MOV")):
        if "FS:[0x30]" in insn_str or "GS:[0x60]" in insn_str:
            yield Characteristic("peb access"), ih.address


def extract_insn_cross_section_cflow(
    fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
    """inspect the instruction for a CALL or JMP that crosses section boundaries"""
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
        return

    externs = get_externs(fh.ctx)
    fakes = get_fakes(fh.ctx)
    imports = get_imports(fh.ctx)

    # OperandType to dereference
    addr_data = OperandType.ADDRESS | OperandType.DATA
    addr_code = OperandType.ADDRESS | OperandType.CODE

    ref_type = insn.getOperandType(0)

    # both OperandType flags must be present
    # bail on REGISTER alone
    if OperandType.isRegister(ref_type):
        if OperandType.isAddress(ref_type):
            ref = insn.getAddress(0)  # Ghidra dereferences REG | ADDR
            if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs):
                return
        else:
            return
    elif ref_type in (addr_data, addr_code) or (OperandType.isIndirect(ref_type) and OperandType.isAddress(ref_type)):
        # we must dereference and check if the addr is a pointer to an api function
        ref = capa.features.extractors.ghidra.helpers.dereference_ptr(insn)
        if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs):
            return
    elif ref_type == OperandType.DYNAMIC | OperandType.ADDRESS or ref_type == OperandType.DYNAMIC:
        return  # cannot resolve dynamics statically
    else:
        # pure address does not need to get dereferenced/handled
        ref = insn.getAddress(0)
        if not ref:
            # If it returned null, it was an indirect
            # that had no address reference.
            # This check is faster than checking for (indirect and not address)
            return
        if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs):
            return

    this_mem_block = getMemoryBlock(insn.getAddress())  # type: ignore [name-defined] # noqa: F821
    ref_block = getMemoryBlock(ref)  # type: ignore [name-defined] # noqa: F821
    if ref_block != this_mem_block:
        yield Characteristic("cross section flow"), ih.address


def extract_function_calls_from(
    fh: FunctionHandle,
    bb: BBHandle,
    ih: InsnHandle,
) -> Iterator[Tuple[Feature, Address]]:
    """extract function calls from features

    most relevant at the function scope, however, it's most efficient to extract at the instruction scope
    """
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if insn.getMnemonicString().startswith("CALL"):
        # This method of "dereferencing" addresses/pointers
        # is not as robust as methods in other functions,
        # but works just fine for this one
        reference = 0
        for ref in insn.getReferencesFrom():
            addr = ref.getToAddress()

            # avoid returning fake addrs
            if not addr.isExternalAddress():
                reference = addr.getOffset()

        # if a reference is < 0, then ghidra pulled an offset from a DYNAMIC | ADDR (usually a stackvar)
        # these cannot be resolved to actual addrs
        if reference > 0:
            yield Characteristic("calls from"), AbsoluteVirtualAddress(reference)


def extract_function_indirect_call_characteristic_features(
    fh: FunctionHandle,
    bb: BBHandle,
    ih: InsnHandle,
) -> Iterator[Tuple[Feature, Address]]:
    """extract indirect function calls (e.g., call eax or call dword ptr [edx+4])
    does not include calls like => call ds:dword_ABD4974

    most relevant at the function or basic block scope;
    however, it's most efficient to extract at the instruction scope
    """
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if insn.getMnemonicString().startswith("CALL"):
        if OperandType.isRegister(insn.getOperandType(0)):
            yield Characteristic("indirect call"), ih.address
        if OperandType.isIndirect(insn.getOperandType(0)):
            yield Characteristic("indirect call"), ih.address


def check_nzxor_security_cookie_delta(
    fh: ghidra.program.database.function.FunctionDB, insn: ghidra.program.database.code.InstructionDB
):
    """Get the function containing the insn
    Get the last block of the function that contains the insn

    Check the bb containing the insn
    Check the last bb of the function containing the insn
    """

    model = SimpleBlockModel(currentProgram())  # type: ignore [name-defined] # noqa: F821
    insn_addr = insn.getAddress()
    func_asv = fh.getBody()
    first_addr = func_asv.getMinAddress()
    last_addr = func_asv.getMaxAddress()

    if model.getFirstCodeBlockContaining(
        first_addr, monitor()  # type: ignore [name-defined] # noqa: F821
    ) == model.getFirstCodeBlockContaining(
        last_addr, monitor()  # type: ignore [name-defined] # noqa: F821
    ):
        if insn_addr < first_addr.add(SECURITY_COOKIE_BYTES_DELTA):
            return True
        else:
            return insn_addr > last_addr.add(SECURITY_COOKIE_BYTES_DELTA * -1)
    else:
        return False


def extract_insn_nzxor_characteristic_features(
    fh: FunctionHandle,
    bb: BBHandle,
    ih: InsnHandle,
) -> Iterator[Tuple[Feature, Address]]:
    f: ghidra.program.database.function.FunctionDB = fh.inner
    insn: ghidra.program.database.code.InstructionDB = ih.inner

    if "XOR" not in insn.getMnemonicString():
        return
    if capa.features.extractors.ghidra.helpers.is_stack_referenced(insn):
        return
    if capa.features.extractors.ghidra.helpers.is_zxor(insn):
        return
    if check_nzxor_security_cookie_delta(f, insn):
        return
    yield Characteristic("nzxor"), ih.address


def extract_features(
    fh: FunctionHandle,
    bb: BBHandle,
    insn: InsnHandle,
) -> Iterator[Tuple[Feature, Address]]:
    for insn_handler in INSTRUCTION_HANDLERS:
        for feature, addr in insn_handler(fh, bb, insn):
            yield feature, addr


INSTRUCTION_HANDLERS = (
    extract_insn_api_features,
    extract_insn_number_features,
    extract_insn_bytes_features,
    extract_insn_string_features,
    extract_insn_offset_features,
    extract_insn_nzxor_characteristic_features,
    extract_insn_mnemonic_features,
    extract_insn_obfs_call_plus_5_characteristic_features,
    extract_insn_peb_access_characteristic_features,
    extract_insn_cross_section_cflow,
    extract_insn_segment_access_features,
    extract_function_calls_from,
    extract_function_indirect_call_characteristic_features,
)


def main():
    """ """
    features = []
    from capa.features.extractors.ghidra.extractor import GhidraFeatureExtractor

    for fh in GhidraFeatureExtractor().get_functions():
        for bb in capa.features.extractors.ghidra.helpers.get_function_blocks(fh):
            for insn in capa.features.extractors.ghidra.helpers.get_insn_in_range(bb):
                features.extend(list(extract_features(fh, bb, insn)))

    import pprint

    pprint.pprint(features)  # noqa: T203


if __name__ == "__main__":
    main()
@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -70,6 +70,23 @@ def generate_symbols(dll: str, symbol: str) -> Iterator[str]:
        yield symbol[:-1]


def reformat_forwarded_export_name(forwarded_name: str) -> str:
    """
    a forwarded export has a DLL name/path and a symbol name.
    we want the former to be lowercase, and the latter to be verbatim.
"""
|
||||
|
||||
# use rpartition so we can split on separator between dll and name.
|
||||
# the dll name can be a full path, like in the case of
|
||||
# ef64d6d7c34250af8e21a10feb931c9b
|
||||
    # which I assume means the path can have embedded periods.
    # so we don't want the first period, we want the last.
    forwarded_dll, _, forwarded_symbol = forwarded_name.rpartition(".")
    forwarded_dll = forwarded_dll.lower()

    return f"{forwarded_dll}.{forwarded_symbol}"


def all_zeros(bytez: bytes) -> bool:
    return all(b == 0 for b in builtins.bytes(bytez))

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -104,19 +104,3 @@ BASIC_BLOCK_HANDLERS = (
    extract_bb_tight_loop,
    extract_bb_stackstring,
)


def main():
    features = []
    for fhandle in helpers.get_functions(skip_thunks=True, skip_libs=True):
        f: idaapi.func_t = fhandle.inner
        for bb in idaapi.FlowChart(f, flags=idaapi.FC_PREDS):
            features.extend(list(extract_features(fhandle, bb)))

    import pprint

    pprint.pprint(features)


if __name__ == "__main__":
    main()

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -12,6 +12,7 @@ from typing import Tuple, Iterator
import idc
import idaapi
import idautils
import ida_entry

import capa.features.extractors.common
import capa.features.extractors.helpers
@@ -83,8 +84,14 @@ def extract_file_embedded_pe() -> Iterator[Tuple[Feature, Address]]:

def extract_file_export_names() -> Iterator[Tuple[Feature, Address]]:
    """extract function exports"""
    for _, _, ea, name in idautils.Entries():
        yield Export(name), AbsoluteVirtualAddress(ea)
    for _, ordinal, ea, name in idautils.Entries():
        forwarded_name = ida_entry.get_entry_forwarder(ordinal)
        if forwarded_name is None:
            yield Export(name), AbsoluteVirtualAddress(ea)
        else:
            forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
            yield Export(forwarded_name), AbsoluteVirtualAddress(ea)
            yield Characteristic("forwarded export"), AbsoluteVirtualAddress(ea)


def extract_file_import_names() -> Iterator[Tuple[Feature, Address]]:
@@ -199,14 +206,3 @@ FILE_HANDLERS = (
    extract_file_function_names,
    extract_file_format,
)


def main():
    """ """
    import pprint

    pprint.pprint(list(extract_features()))


if __name__ == "__main__":
    main()

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -50,18 +50,3 @@ def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:


FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call)


def main():
    """ """
    features = []
    for fhandle in capa.features.extractors.ida.helpers.get_functions(skip_thunks=True, skip_libs=True):
        features.extend(list(extract_features(fhandle)))

    import pprint

    pprint.pprint(features)


if __name__ == "__main__":
    main()

@@ -1,3 +1,10 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
import contextlib
from typing import Tuple, Iterator

@@ -1,10 +1,11 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import functools
from typing import Any, Dict, Tuple, Iterator, Optional

import idc
@@ -27,7 +28,8 @@ def find_byte_sequence(start: int, end: int, seq: bytes) -> Iterator[int]:
    """
    seqstr = " ".join([f"{b:02x}" for b in seq])
    while True:
        # TODO find_binary: Deprecated. Please use ida_bytes.bin_search() instead.
        # TODO(mike-hunhoff): find_binary is deprecated. Please use ida_bytes.bin_search() instead.
        # https://github.com/mandiant/capa/issues/1606
        ea = idaapi.find_binary(start, end, seqstr, 0, idaapi.SEARCH_DOWN)
        if ea == idaapi.BADADDR:
            break
@@ -80,9 +82,22 @@ def get_segment_buffer(seg: idaapi.segment_t) -> bytes:
    return buff if buff else b""


def inspect_import(imports, library, ea, function, ordinal):
    if function and function.startswith("__imp_"):
        # handle mangled PE imports
        function = function[len("__imp_") :]

    if function and "@@" in function:
        # handle mangled ELF imports, like "fopen@@glibc_2.2.5"
        function, _, _ = function.partition("@@")

    imports[ea] = (library.lower(), function, ordinal)
    return True


def get_file_imports() -> Dict[int, Tuple[str, str, int]]:
    """get file imports"""
    imports = {}
    imports: Dict[int, Tuple[str, str, int]] = {}

    for idx in range(idaapi.get_import_module_qty()):
        library = idaapi.get_import_module_name(idx)
@@ -92,23 +107,13 @@ def get_file_imports() -> Dict[int, Tuple[str, str, int]]:

        # IDA uses section names for the library of ELF imports, like ".dynsym".
        # These are not useful to us, we may need to expand this list over time
        # TODO: exhaust this list, see #1419
        # TODO(williballenthin): find all section names used by IDA
        # https://github.com/mandiant/capa/issues/1419
        if library == ".dynsym":
            library = ""

        def inspect_import(ea, function, ordinal):
            if function and function.startswith("__imp_"):
                # handle mangled PE imports
                function = function[len("__imp_") :]

            if function and "@@" in function:
                # handle mangled ELF imports, like "fopen@@glibc_2.2.5"
                function, _, _ = function.partition("@@")

            imports[ea] = (library.lower(), function, ordinal)
            return True

        idaapi.enum_import_names(idx, inspect_import)
        cb = functools.partial(inspect_import, imports, library)
        idaapi.enum_import_names(idx, cb)

    return imports
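
Since the callback moved to module scope, `functools.partial` pre-binds the state the old closure captured; a rough sketch of the equivalence (illustration only, not part of the diff):

```python
import functools

cb = functools.partial(inspect_import, imports, library)
# when IDA invokes cb(ea, function, ordinal), it behaves like calling:
#   inspect_import(imports, library, ea, function, ordinal)
```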
@@ -117,7 +122,7 @@ def get_file_externs() -> Dict[int, Tuple[str, str, int]]:
    externs = {}

    for seg in get_segments(skip_header_segments=True):
        if not (seg.type == ida_segment.SEG_XTRN):
        if seg.type != ida_segment.SEG_XTRN:
            continue

        for ea in idautils.Functions(seg.start_ea, seg.end_ea):
@@ -270,20 +275,18 @@ def is_op_offset(insn: idaapi.insn_t, op: idaapi.op_t) -> bool:

def is_sp_modified(insn: idaapi.insn_t) -> bool:
    """determine if instruction modifies SP, ESP, RSP"""
    for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,)):
        if op.reg == idautils.procregs.sp.reg and is_op_write(insn, op):
            # register is stack and written
            return True
    return False
    return any(
        op.reg == idautils.procregs.sp.reg and is_op_write(insn, op)
        for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,))
    )


def is_bp_modified(insn: idaapi.insn_t) -> bool:
    """check if instruction modifies BP, EBP, RBP"""
    for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,)):
        if op.reg == idautils.procregs.bp.reg and is_op_write(insn, op):
            # register is base and written
            return True
    return False
    return any(
        op.reg == idautils.procregs.bp.reg and is_op_write(insn, op)
        for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,))
    )


def is_frame_register(reg: int) -> bool:
@@ -329,10 +332,7 @@ def mask_op_val(op: idaapi.op_t) -> int:

def is_function_recursive(f: idaapi.func_t) -> bool:
    """check if function is recursive"""
    for ref in idautils.CodeRefsTo(f.start_ea, True):
        if f.contains(ref):
            return True
    return False
    return any(f.contains(ref) for ref in idautils.CodeRefsTo(f.start_ea, True))


def is_basic_block_tight_loop(bb: idaapi.BasicBlock) -> bool:
@@ -381,8 +381,7 @@ def find_data_reference_from_insn(insn: idaapi.insn_t, max_depth: int = 10) -> i
def get_function_blocks(f: idaapi.func_t) -> Iterator[idaapi.BasicBlock]:
    """yield basic blocks contained in specified function"""
    # leverage idaapi.FC_NOEXT flag to ignore useless external blocks referenced by the function
    for block in idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT)):
        yield block
    yield from idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT))


def is_basic_block_return(bb: idaapi.BasicBlock) -> bool:

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -73,7 +73,7 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle)
    """
    insn: idaapi.insn_t = ih.inner

    if not insn.get_canon_mnem() in ("call", "jmp"):
    if insn.get_canon_mnem() not in ("call", "jmp"):
        return

    # check calls to imported functions
@@ -216,7 +216,7 @@ def extract_insn_offset_features(

        p_info = capa.features.extractors.ida.helpers.get_op_phrase_info(op)

        op_off = p_info.get("offset", None)
        op_off = p_info.get("offset")
        if op_off is None:
            continue

@@ -398,14 +398,16 @@ def extract_insn_peb_access_characteristic_features(
    if insn.itype not in (idaapi.NN_push, idaapi.NN_mov):
        return

    if all(map(lambda op: op.type != idaapi.o_mem, insn.ops)):
    if all(op.type != idaapi.o_mem for op in insn.ops):
        # try to optimize for only memory references
        return

    disasm = idc.GetDisasm(insn.ea)

    if " fs:30h" in disasm or " gs:60h" in disasm:
        # TODO: replace above with proper IDA
        # TODO(mike-hunhoff): use proper IDA API for fetching segment access
        # scanning the disassembly text is a hack.
        # https://github.com/mandiant/capa/issues/1605
        yield Characteristic("peb access"), ih.address


@@ -419,18 +421,22 @@ def extract_insn_segment_access_features(
    """
    insn: idaapi.insn_t = ih.inner

    if all(map(lambda op: op.type != idaapi.o_mem, insn.ops)):
    if all(op.type != idaapi.o_mem for op in insn.ops):
        # try to optimize for only memory references
        return

    disasm = idc.GetDisasm(insn.ea)

    if " fs:" in disasm:
        # TODO: replace above with proper IDA
        # TODO(mike-hunhoff): use proper IDA API for fetching segment access
        # scanning the disassembly text is a hack.
        # https://github.com/mandiant/capa/issues/1605
        yield Characteristic("fs access"), ih.address

    if " gs:" in disasm:
        # TODO: replace above with proper IDA
        # TODO(mike-hunhoff): use proper IDA API for fetching segment access
        # scanning the disassembly text is a hack.
        # https://github.com/mandiant/capa/issues/1605
        yield Characteristic("gs access"), ih.address


@@ -441,7 +447,7 @@ def extract_insn_cross_section_cflow(
    insn: idaapi.insn_t = ih.inner

    for ref in idautils.CodeRefsFrom(insn.ea, False):
        if ref in get_imports(fh.ctx).keys():
        if ref in get_imports(fh.ctx):
            # ignore API calls
            continue
        if not idaapi.getseg(ref):
@@ -501,20 +507,3 @@ INSTRUCTION_HANDLERS = (
    extract_function_calls_from,
    extract_function_indirect_call_characteristic_features,
)


def main():
    """ """
    features = []
    for f in capa.features.extractors.ida.helpers.get_functions(skip_thunks=True, skip_libs=True):
        for bb in idaapi.FlowChart(f, flags=idaapi.FC_PREDS):
            for insn in capa.features.extractors.ida.helpers.get_instructions_in_range(bb.start_ea, bb.end_ea):
                features.extend(list(extract_features(f, bb, insn)))

    import pprint

    pprint.pprint(features)


if __name__ == "__main__":
    main()

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,3 +1,10 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Dict, List, Tuple
from dataclasses import dataclass


@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -7,6 +7,7 @@
# See the License for the specific language governing permissions and limitations under the License.

import logging
from pathlib import Path

import pefile

@@ -39,8 +40,20 @@ def extract_file_export_names(pe, **kwargs):
            name = export.name.partition(b"\x00")[0].decode("ascii")
        except UnicodeDecodeError:
            continue
        va = base_address + export.address
        yield Export(name), AbsoluteVirtualAddress(va)

        if export.forwarder is None:
            va = base_address + export.address
            yield Export(name), AbsoluteVirtualAddress(va)

        else:
            try:
                forwarded_name = export.forwarder.partition(b"\x00")[0].decode("ascii")
            except UnicodeDecodeError:
                continue
            forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
            va = base_address + export.address
            yield Export(forwarded_name), AbsoluteVirtualAddress(va)
            yield Characteristic("forwarded export"), AbsoluteVirtualAddress(va)


def extract_file_import_names(pe, **kwargs):
@@ -173,23 +186,21 @@ GLOBAL_HANDLERS = (


class PefileFeatureExtractor(FeatureExtractor):
    def __init__(self, path: str):
    def __init__(self, path: Path):
        super().__init__()
        self.path = path
        self.pe = pefile.PE(path)
        self.path: Path = path
        self.pe = pefile.PE(str(path))

    def get_base_address(self):
        return AbsoluteVirtualAddress(self.pe.OPTIONAL_HEADER.ImageBase)

    def extract_global_features(self):
        with open(self.path, "rb") as f:
            buf = f.read()
        buf = Path(self.path).read_bytes()

        yield from extract_global_features(self.pe, buf)

    def extract_file_features(self):
        with open(self.path, "rb") as f:
            buf = f.read()
        buf = Path(self.path).read_bytes()

        yield from extract_file_features(self.pe, buf)
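
With this change the extractor is constructed with a `pathlib.Path` rather than a `str`; a hypothetical call site (sample path invented for illustration):

```python
from pathlib import Path

extractor = PefileFeatureExtractor(Path("/tmp/sample.exe_"))
for feature, addr in extractor.extract_file_features():
    print(feature, addr)
```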
@@ -1,6 +1,6 @@
# strings code from FLOSS, https://github.com/mandiant/flare-floss
#
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,6 +9,7 @@
# See the License for the specific language governing permissions and limitations under the License.

import re
import contextlib
from collections import namedtuple

ASCII_BYTE = r" !\"#\$%&\'\(\)\*\+,-\./0123456789:;<=>\?@ABCDEFGHIJKLMNOPQRSTUVWXYZ\[\]\^_`abcdefghijklmnopqrstuvwxyz\{\|\}\\\~\t".encode(
@@ -81,24 +82,5 @@ def extract_unicode_strings(buf, n=4):
    reg = b"((?:[%s]\x00){%d,})" % (ASCII_BYTE, n)
    r = re.compile(reg)
    for match in r.finditer(buf):
        try:
        with contextlib.suppress(UnicodeDecodeError):
            yield String(match.group().decode("utf-16"), match.start())
        except UnicodeDecodeError:
            pass
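
Here `contextlib.suppress` replaces the empty `except ... pass` idiom; the two forms below behave the same (sketch only, `risky()` is a hypothetical stand-in):

```python
import contextlib

try:
    risky()
except UnicodeDecodeError:
    pass

# ... is equivalent to:
with contextlib.suppress(UnicodeDecodeError):
    risky()
```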


def main():
    import sys

    with open(sys.argv[1], "rb") as f:
        b = f.read()

    for s in extract_ascii_strings(b):
        print("0x{:x}: {:s}".format(s.offset, s.s))

    for s in extract_unicode_strings(b):
        print("0x{:x}: {:s}".format(s.offset, s.s))


if __name__ == "__main__":
    main()

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -92,7 +92,6 @@ def is_mov_imm_to_stack(instr: envi.archs.i386.disasm.i386Opcode) -> bool:
    if not src.isImmed():
        return False

    # TODO what about 64-bit operands?
    if not isinstance(dst, envi.archs.i386.disasm.i386SibOper) and not isinstance(
        dst, envi.archs.i386.disasm.i386RegMemOper
    ):

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,7 +6,8 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import List, Tuple, Iterator
from typing import Any, Dict, List, Tuple, Iterator
from pathlib import Path

import viv_utils
import viv_utils.flirt
@@ -25,12 +26,11 @@ logger = logging.getLogger(__name__)


class VivisectFeatureExtractor(FeatureExtractor):
    def __init__(self, vw, path, os):
    def __init__(self, vw, path: Path, os):
        super().__init__()
        self.vw = vw
        self.path = path
        with open(self.path, "rb") as f:
            self.buf = f.read()
        self.buf = path.read_bytes()

        # pre-compute these because we'll yield them at *every* scope.
        self.global_features: List[Tuple[Feature, Address]] = []
@@ -49,8 +49,11 @@ class VivisectFeatureExtractor(FeatureExtractor):
        yield from capa.features.extractors.viv.file.extract_features(self.vw, self.buf)

    def get_functions(self) -> Iterator[FunctionHandle]:
        cache: Dict[str, Any] = {}
        for va in sorted(self.vw.getFunctions()):
            yield FunctionHandle(address=AbsoluteVirtualAddress(va), inner=viv_utils.Function(self.vw, va))
            yield FunctionHandle(
                address=AbsoluteVirtualAddress(va), inner=viv_utils.Function(self.vw, va), ctx={"cache": cache}
            )

    def extract_function_features(self, fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
        yield from capa.features.extractors.viv.function.extract_features(fh)

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,6 +8,7 @@
from typing import Tuple, Iterator

import PE.carve as pe_carve  # vivisect PE
import vivisect
import viv_utils
import viv_utils.flirt

@@ -16,7 +17,7 @@ import capa.features.extractors.common
import capa.features.extractors.helpers
import capa.features.extractors.strings
from capa.features.file import Export, Import, Section, FunctionName
from capa.features.common import String, Feature, Characteristic
from capa.features.common import Feature, Characteristic
from capa.features.address import Address, FileOffsetAddress, AbsoluteVirtualAddress


@@ -25,10 +26,35 @@ def extract_file_embedded_pe(buf, **kwargs) -> Iterator[Tuple[Feature, Address]]
    yield Characteristic("embedded pe"), FileOffsetAddress(offset)


def extract_file_export_names(vw, **kwargs) -> Iterator[Tuple[Feature, Address]]:
def get_first_vw_filename(vw: vivisect.VivWorkspace):
    # vivisect associates metadata with each file that is loaded into the workspace.
    # capa only loads a single file into each workspace.
    # so to access the metadata for the file in question, we can just take the first one.
    # otherwise, we'd have to pass around the module name of the file we're analyzing,
    # which is a pain.
    #
    # so this is a simplifying assumption.
    return next(iter(vw.filemeta.keys()))


def extract_file_export_names(vw: vivisect.VivWorkspace, **kwargs) -> Iterator[Tuple[Feature, Address]]:
    for va, _, name, _ in vw.getExports():
        yield Export(name), AbsoluteVirtualAddress(va)

    if vw.getMeta("Format") == "pe":
        pe = vw.parsedbin
        baseaddr = pe.IMAGE_NT_HEADERS.OptionalHeader.ImageBase
        for rva, _, forwarded_name in vw.getFileMeta(get_first_vw_filename(vw), "forwarders"):
            try:
                forwarded_name = forwarded_name.partition(b"\x00")[0].decode("ascii")
            except UnicodeDecodeError:
                continue

            forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
            va = baseaddr + rva
            yield Export(forwarded_name), AbsoluteVirtualAddress(va)
            yield Characteristic("forwarded export"), AbsoluteVirtualAddress(va)


def extract_file_import_names(vw, **kwargs) -> Iterator[Tuple[Feature, Address]]:
    """

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -11,9 +11,11 @@ import envi
import viv_utils
import vivisect.const

from capa.features.file import FunctionName
from capa.features.common import Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors import loops
from capa.features.extractors.elf import SymTab
from capa.features.extractors.base_extractor import FunctionHandle


@@ -30,6 +32,28 @@ def interface_extract_function_XXX(fh: FunctionHandle) -> Iterator[Tuple[Feature
    raise NotImplementedError


def extract_function_symtab_names(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
    if fh.inner.vw.metadata["Format"] == "elf":
        # the file's symbol table gets added to the metadata of the vivisect workspace.
        # this is in order to eliminate the computational overhead of refetching symtab each time.
        if "symtab" not in fh.ctx["cache"]:
            try:
                fh.ctx["cache"]["symtab"] = SymTab.from_viv(fh.inner.vw.parsedbin)
            except Exception:
                fh.ctx["cache"]["symtab"] = None

        symtab = fh.ctx["cache"]["symtab"]
        if symtab:
            for symbol in symtab.get_symbols():
                sym_name = symtab.get_name(symbol)
                sym_value = symbol.value
                sym_info = symbol.info

                STT_FUNC = 0x2
                if sym_value == fh.address and sym_info & STT_FUNC != 0:
                    yield FunctionName(sym_name), fh.address


def extract_function_calls_to(fhandle: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
    f: viv_utils.Function = fhandle.inner
    for src, _, _, _ in f.vw.getXrefsTo(f.va, rtype=vivisect.const.REF_CODE):
@@ -79,4 +103,8 @@ def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
        yield feature, addr


FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop)
FUNCTION_HANDLERS = (
    extract_function_symtab_names,
    extract_function_calls_to,
    extract_function_loop,
)

@@ -1,9 +1,13 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Tuple, Iterator

import envi.archs.i386
import envi.archs.amd64

from capa.features.common import ARCH_I386, ARCH_AMD64, Arch, Feature
from capa.features.address import NO_ADDRESS, Address

@@ -11,10 +15,11 @@ logger = logging.getLogger(__name__)


def extract_arch(vw) -> Iterator[Tuple[Feature, Address]]:
    if isinstance(vw.arch, envi.archs.amd64.Amd64Module):
    arch = vw.getMeta("Architecture")
    if arch == "amd64":
        yield Arch(ARCH_AMD64), NO_ADDRESS

    elif isinstance(vw.arch, envi.archs.i386.i386Module):
    elif arch == "i386":
        yield Arch(ARCH_I386), NO_ADDRESS

    else:

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -7,7 +7,7 @@
# See the License for the specific language governing permissions and limitations under the License.

import collections
from typing import Set, List, Deque, Tuple, Union, Optional
from typing import Set, List, Deque, Tuple, Optional

import envi
import vivisect.const
@@ -71,7 +71,7 @@ class NotFoundError(Exception):
    pass


def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Union[int, None]]:
def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Optional[int]]:
    """
    scan backwards from the given address looking for assignments to the given register.
    if a constant, return that value.
@@ -87,8 +87,8 @@ def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Union[int
    raises:
      NotFoundError: when the definition cannot be found.
    """
    q = collections.deque()  # type: Deque[int]
    seen = set([])  # type: Set[int]
    q: Deque[int] = collections.deque()
    seen: Set[int] = set()

    q.extend(get_previous_instructions(vw, va))
    while q:

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -22,6 +22,7 @@ import capa.features.extractors.viv.helpers
from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
from capa.features.common import MAX_BYTES_FEATURE_SIZE, THUNK_CHAIN_DEPTH_DELTA, Bytes, String, Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.elf import SymTab
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
from capa.features.extractors.viv.indirect_calls import NotFoundError, resolve_indirect_call

@@ -109,6 +110,26 @@ def extract_insn_api_features(fh: FunctionHandle, bb, ih: InsnHandle) -> Iterato
    if not target:
        return

    if f.vw.metadata["Format"] == "elf":
        if "symtab" not in fh.ctx["cache"]:
            # the symbol table is cached on the function handle's context so this
            # work is not repeated every time a call instruction is processed.
try:
|
||||
fh.ctx["cache"]["symtab"] = SymTab.from_viv(f.vw.parsedbin)
|
||||
except Exception:
|
||||
fh.ctx["cache"]["symtab"] = None
|
||||
|
||||
symtab = fh.ctx["cache"]["symtab"]
|
||||
if symtab:
|
||||
for symbol in symtab.get_symbols():
|
||||
sym_name = symtab.get_name(symbol)
|
||||
sym_value = symbol.value
|
||||
sym_info = symbol.info
|
||||
|
||||
STT_FUNC = 0x2
|
||||
if sym_value == target and sym_info & STT_FUNC != 0:
|
||||
yield API(sym_name), ih.address
|
||||
|
||||
if viv_utils.flirt.is_library_function(f.vw, target):
|
||||
name = viv_utils.get_function_name(f.vw, target)
|
||||
yield API(name), ih.address
|
||||
@@ -267,16 +288,16 @@ def extract_insn_bytes_features(fh: FunctionHandle, bb, ih: InsnHandle) -> Itera
        else:
            continue

        for v in derefs(f.vw, v):
        for vv in derefs(f.vw, v):
            try:
                buf = read_bytes(f.vw, v)
                buf = read_bytes(f.vw, vv)
            except envi.exc.SegmentationViolation:
                continue

            if capa.features.extractors.helpers.all_zeros(buf):
                continue

            if f.vw.isProbablyString(v) or f.vw.isProbablyUnicode(v):
            if f.vw.isProbablyString(vv) or f.vw.isProbablyUnicode(vv):
                # don't extract byte features for obvious strings
                continue

@@ -330,7 +351,6 @@ def is_security_cookie(f, bb, insn) -> bool:
    if oper.isReg() and oper.reg not in [
        envi.archs.i386.regs.REG_ESP,
        envi.archs.i386.regs.REG_EBP,
        # TODO: do x64 support for real.
        envi.archs.amd64.regs.REG_RBP,
        envi.archs.amd64.regs.REG_RSP,
    ]:
@@ -390,9 +410,7 @@ def extract_insn_obfs_call_plus_5_characteristic_features(f, bb, ih: InsnHandle)
        if insn.va + 5 == insn.opers[0].getOperValue(insn):
            yield Characteristic("call $+5"), ih.address

    if isinstance(insn.opers[0], envi.archs.i386.disasm.i386ImmMemOper) or isinstance(
        insn.opers[0], envi.archs.amd64.disasm.Amd64RipRelOper
    ):
    if isinstance(insn.opers[0], (envi.archs.i386.disasm.i386ImmMemOper, envi.archs.amd64.disasm.Amd64RipRelOper)):
        if insn.va + 5 == insn.opers[0].getOperAddr(insn):
            yield Characteristic("call $+5"), ih.address

@@ -401,7 +419,6 @@ def extract_insn_peb_access_characteristic_features(f, bb, ih: InsnHandle) -> It
    """
    parse peb access from the given function. fs:[0x30] on x86, gs:[0x60] on x64
    """
    # TODO handle where fs/gs are loaded into a register or onto the stack and used later
    insn: envi.Opcode = ih.inner

    if insn.mnem not in ["push", "mov"]:
@@ -625,7 +642,6 @@ def extract_op_offset_features(
        if oper.reg == envi.archs.i386.regs.REG_EBP:
            return

        # TODO: do x64 support for real.
        if oper.reg == envi.archs.amd64.regs.REG_RBP:
            return

@@ -679,9 +695,9 @@ def extract_op_string_features(
    else:
        return

    for v in derefs(f.vw, v):
    for vv in derefs(f.vw, v):
        try:
            s = read_string(f.vw, v).rstrip("\x00")
            s = read_string(f.vw, vv).rstrip("\x00")
        except ValueError:
            continue
        else:

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,7 +1,7 @@
"""
capa freeze file format: `| capa0000 | + zlib(utf-8(json(...)))`

Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -12,9 +12,9 @@ See the License for the specific language governing permissions and limitations
import zlib
import logging
from enum import Enum
from typing import Any, List, Tuple, Union
from typing import List, Tuple, Union

from pydantic import Field, BaseModel
from pydantic import Field, BaseModel, ConfigDict

import capa.helpers
import capa.version
@@ -31,8 +31,7 @@ logger = logging.getLogger(__name__)


class HashableModel(BaseModel):
    class Config:
        frozen = True
    model_config = ConfigDict(frozen=True)
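
These `Config`-to-`ConfigDict` changes follow the pydantic v2 migration pattern; a minimal sketch of the equivalence (assuming pydantic >= 2 is installed):

```python
from pydantic import BaseModel, ConfigDict

class V1Style(BaseModel):
    # pydantic v1 spelling, deprecated in v2
    class Config:
        frozen = True

class V2Style(BaseModel):
    # pydantic v2 spelling, as used throughout this diff
    model_config = ConfigDict(frozen=True)
```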


class AddressType(str, Enum):
@@ -46,7 +45,7 @@ class AddressType(str, Enum):

class Address(HashableModel):
    type: AddressType
    value: Union[int, Tuple[int, int], None]
    value: Union[int, Tuple[int, int], None] = None  # None default value to support deserialization of NO_ADDRESS

    @classmethod
    def from_capa(cls, a: capa.features.address.Address) -> "Address":
@@ -159,9 +158,7 @@ class BasicBlockFeature(HashableModel):
    basic_block: Address = Field(alias="basic block")
    address: Address
    feature: Feature

    class Config:
        allow_population_by_field_name = True
    model_config = ConfigDict(populate_by_name=True)


class InstructionFeature(HashableModel):
@@ -194,26 +191,20 @@ class FunctionFeatures(BaseModel):
    address: Address
    features: Tuple[FunctionFeature, ...]
    basic_blocks: Tuple[BasicBlockFeatures, ...] = Field(alias="basic blocks")

    class Config:
        allow_population_by_field_name = True
    model_config = ConfigDict(populate_by_name=True)


class Features(BaseModel):
    global_: Tuple[GlobalFeature, ...] = Field(alias="global")
    file: Tuple[FileFeature, ...]
    functions: Tuple[FunctionFeatures, ...]

    class Config:
        allow_population_by_field_name = True
    model_config = ConfigDict(populate_by_name=True)


class Extractor(BaseModel):
    name: str
    version: str = capa.version.__version__

    class Config:
        allow_population_by_field_name = True
    model_config = ConfigDict(populate_by_name=True)


class Freeze(BaseModel):
@@ -221,9 +212,7 @@ class Freeze(BaseModel):
    base_address: Address = Field(alias="base address")
    extractor: Extractor
    features: Features

    class Config:
        allow_population_by_field_name = True
    model_config = ConfigDict(populate_by_name=True)


def dumps(extractor: capa.features.extractors.base_extractor.FeatureExtractor) -> str:
@@ -324,14 +313,14 @@ def dumps(extractor: capa.features.extractors.base_extractor.FeatureExtractor) -
    )  # type: ignore
    # Mypy is unable to recognise `base_address` as an argument due to alias

    return freeze.json()
    return freeze.model_dump_json()


def loads(s: str) -> capa.features.extractors.base_extractor.FeatureExtractor:
    """deserialize a set of features (as a NullFeatureExtractor) from a string."""
    import capa.features.extractors.null as null

    freeze = Freeze.parse_raw(s)
    freeze = Freeze.model_validate_json(s)
    if freeze.version != 2:
        raise ValueError(f"unsupported freeze format version: {freeze.version}")
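
Given the freeze format noted in the module docstring (`capa0000` magic followed by zlib-compressed UTF-8 JSON), a minimal decode sketch (an illustration, not the project's API):

```python
import zlib

def read_freeze_json(buf: bytes) -> str:
    # strip the 8-byte "capa0000" magic, then decompress the UTF-8 JSON payload
    assert buf[:8] == b"capa0000"
    return zlib.decompress(buf[8:]).decode("utf-8")
```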

@@ -382,6 +371,7 @@ def load(buf: bytes) -> capa.features.extractors.base_extractor.FeatureExtractor
def main(argv=None):
    import sys
    import argparse
    from pathlib import Path

    import capa.main

@@ -398,8 +388,7 @@ def main(argv=None):

    extractor = capa.main.get_extractor(args.sample, args.format, args.os, args.backend, sigpaths, False)

    with open(args.output, "wb") as f:
        f.write(dump(extractor))
    Path(args.output).write_bytes(dump(extractor))

    return 0


@@ -1,7 +1,14 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import binascii
from typing import Union, Optional

from pydantic import Field, BaseModel
from pydantic import Field, BaseModel, ConfigDict

import capa.features.file
import capa.features.insn
@@ -10,9 +17,7 @@ import capa.features.basicblock


class FeatureModel(BaseModel):
    class Config:
        frozen = True
        allow_population_by_field_name = True
    model_config = ConfigDict(frozen=True, populate_by_name=True)

    def to_capa(self) -> capa.features.common.Feature:
        if isinstance(self, OSFeature):
@@ -206,141 +211,141 @@ def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
class OSFeature(FeatureModel):
    type: str = "os"
    os: str
    description: Optional[str]
    description: Optional[str] = None


class ArchFeature(FeatureModel):
    type: str = "arch"
    arch: str
    description: Optional[str]
    description: Optional[str] = None


class FormatFeature(FeatureModel):
    type: str = "format"
    format: str
    description: Optional[str]
    description: Optional[str] = None


class MatchFeature(FeatureModel):
    type: str = "match"
    match: str
    description: Optional[str]
    description: Optional[str] = None


class CharacteristicFeature(FeatureModel):
    type: str = "characteristic"
    characteristic: str
    description: Optional[str]
    description: Optional[str] = None


class ExportFeature(FeatureModel):
    type: str = "export"
    export: str
    description: Optional[str]
    description: Optional[str] = None


class ImportFeature(FeatureModel):
    type: str = "import"
    import_: str = Field(alias="import")
    description: Optional[str]
    description: Optional[str] = None


class SectionFeature(FeatureModel):
    type: str = "section"
    section: str
    description: Optional[str]
    description: Optional[str] = None


class FunctionNameFeature(FeatureModel):
    type: str = "function name"
    function_name: str = Field(alias="function name")
    description: Optional[str]
    description: Optional[str] = None


class SubstringFeature(FeatureModel):
    type: str = "substring"
    substring: str
    description: Optional[str]
    description: Optional[str] = None


class RegexFeature(FeatureModel):
    type: str = "regex"
    regex: str
    description: Optional[str]
    description: Optional[str] = None


class StringFeature(FeatureModel):
    type: str = "string"
    string: str
    description: Optional[str]
    description: Optional[str] = None


class ClassFeature(FeatureModel):
    type: str = "class"
    class_: str = Field(alias="class")
    description: Optional[str]
    description: Optional[str] = None


class NamespaceFeature(FeatureModel):
    type: str = "namespace"
    namespace: str
    description: Optional[str]
    description: Optional[str] = None


class BasicBlockFeature(FeatureModel):
    type: str = "basic block"
    description: Optional[str]
    description: Optional[str] = None


class APIFeature(FeatureModel):
    type: str = "api"
    api: str
    description: Optional[str]
    description: Optional[str] = None


class PropertyFeature(FeatureModel):
    type: str = "property"
    access: Optional[str]
    access: Optional[str] = None
    property: str
    description: Optional[str]
    description: Optional[str] = None


class NumberFeature(FeatureModel):
    type: str = "number"
    number: Union[int, float]
    description: Optional[str]
    description: Optional[str] = None


class BytesFeature(FeatureModel):
    type: str = "bytes"
    bytes: str
    description: Optional[str]
    description: Optional[str] = None


class OffsetFeature(FeatureModel):
    type: str = "offset"
    offset: int
    description: Optional[str]
    description: Optional[str] = None


class MnemonicFeature(FeatureModel):
    type: str = "mnemonic"
    mnemonic: str
    description: Optional[str]
    description: Optional[str] = None


class OperandNumberFeature(FeatureModel):
    type: str = "operand number"
    index: int
    operand_number: int = Field(alias="operand number")
    description: Optional[str]
    description: Optional[str] = None


class OperandOffsetFeature(FeatureModel):
    type: str = "operand offset"
    index: int
    operand_offset: int = Field(alias="operand offset")
    description: Optional[str]
    description: Optional[str] = None


Feature = Union[

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

capa/ghidra/README.md (new file, 172 lines)
@@ -0,0 +1,172 @@
<div align="center">
|
||||
<img src="/doc/img/ghidra_backend_logo.png" width=300 height=175>
|
||||
</div>
|
||||
|
||||
The Ghidra feature extractor is an application of the FLARE team's open-source project, Ghidrathon, to integrate capa with Ghidra using Python 3. capa is a framework that uses a well-defined collection of rules to identify capabilities in a program. You can run capa against a PE file, ELF file, or shellcode and it tells you what it thinks the program can do. For example, it might suggest that the program is a backdoor, can install services, or relies on HTTP to communicate. The Ghidra feature extractor can be used to run capa analysis on your Ghidra databases without needing access to the original binary file.
|
||||
|
||||
<img src="/doc/img/ghidra_script_mngr_output.png">
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Installation
|
||||
|
||||
Please ensure that you have the following dependencies installed before continuing:
|
||||
|
||||
| Dependency | Version | Source |
|
||||
|------------|---------|--------|
|
||||
| Ghidrathon | `>= 3.0.0` | https://github.com/mandiant/Ghidrathon |
|
||||
| Python | `>= 3.8` | https://www.python.org/downloads |
|
||||
| Ghidra | `>= 10.2` | https://ghidra-sre.org |
|
||||
|
||||
In order to run capa using Ghidra, you must install capa as a library, obtain the official capa rules that match the capa version you have installed, and configure the Python 3 script [capa_ghidra.py](/capa/ghidra/capa_ghidra.py). You can do this by completing the following steps using the Python 3 interpreter that you have configured for your Ghidrathon installation:

1. Install capa and its dependencies from PyPI using the following command:
```bash
$ pip install flare-capa
```

2. Download and extract the [official capa rules](https://github.com/mandiant/capa-rules/releases) that match the capa version you have installed. Use the following command to view the version of capa you have installed:
```bash
$ pip show flare-capa
OR
$ capa --version
```

3. Copy [capa_ghidra.py](/capa/ghidra/capa_ghidra.py) to your `$USER_HOME/ghidra_scripts` directory or manually add `</path/to/ghidra_capa.py/>` to the Ghidra Script Manager.

## Usage

After completing the installation steps you can execute `capa_ghidra.py` using the Ghidra Script Manager or Headless Analyzer.

### Ghidra Script Manager

To execute `capa_ghidra.py` using the Ghidra Script Manager, first open the Ghidra Script Manager by navigating to `Window > Script Manager` in the Ghidra Code Browser. Next, locate `capa_ghidra.py` by selecting the `Python 3 > capa` category or using the Ghidra Script Manager search functionality. Finally, double-click `capa_ghidra.py` to execute the script. If you don't see `capa_ghidra.py`, make sure you have copied the script to your `$USER_HOME/ghidra_scripts` directory or manually added `</path/to/ghidra_capa.py/>` to the Ghidra Script Manager.

When executed, `capa_ghidra.py` asks you to provide your capa rules directory and preferred output format. `capa_ghidra.py` supports `default`, `verbose`, and `vverbose` output formats when executed from the Ghidra Script Manager. `capa_ghidra.py` writes output to the Ghidra Console Window.
|
||||
|
||||
#### Example
|
||||
|
||||
The following is an example of running `capa_ghidra.py` using the Ghidra Script Manager:
|
||||
|
||||
Selecting capa rules:
|
||||
<img src="/doc/img/ghidra_script_mngr_rules.png">
|
||||
|
||||
Choosing output format:
|
||||
<img src="/doc/img/ghidra_script_mngr_verbosity.png">
|
||||
|
||||
Viewing results in Ghidra Console Window:
|
||||
<img src="/doc/img/ghidra_script_mngr_output.png">
|
||||
|
||||
### Ghidra Headless Analyzer
|
||||
|
||||
To execute `capa_ghidra.py` using the Ghidra Headless Analyzer, you can use the Ghidra `analyzeHeadless` script located in your `$GHIDRA_HOME/support` directory. You will need to provide the following arguments to the Ghidra `analyzeHeadless` script:
|
||||
|
||||
1. `</path/to/ghidra/project/>`: path to Ghidra project
|
||||
2. `<ghidra_project_name>`: name of Ghidra Project
|
||||
3. `-process <sample_name>`: name of sample `<sample_name>`
|
||||
4. `-ScriptPath </path/to/capa_ghidra/>`: OPTIONAL argument specifying path `</path/to/capa_ghidra/>` to `capa_ghidra.py`
|
||||
5. `-PostScript capa_ghidra.py`: executes `capa_ghidra.py` as post-analysis script
|
||||
6. `"<capa_args>"`: single, quoted string containing capa arguments that must specify capa rules directory and output format, e.g. `"<path/to/capa/rules> --verbose"`. `capa_ghidra.py` supports `default`, `verbose`, `vverbose` and `json` formats when executed using the Ghidra Headless Analyzer. `capa_ghidra.py` writes output to the console window used to execute the Ghidra `analyzeHeadless` script.
|
||||
7. `-processor <languageID>`: required ONLY if sample `<sample_name>` is shellcode. More information on specifying the `<languageID>` can be found in the `$GHIDRA_HOME/support/analyzeHeadlessREADME.html` documentation.
|
||||
|
||||
The following is an example of combining these arguments into a single `analyzeHeadless` script command:
|
||||
|
||||
```
|
||||
$GHIDRA_HOME/support/analyzeHeadless </path/to/ghidra/project/> <ghidra_project_name> -process <sample_name> -PostScript capa_ghidra.py "/path/to/capa/rules/ --verbose"
|
||||
```
|
||||
|
||||
You may also want to run capa against a sample that you have not yet imported into your Ghidra project. The following is an example of importing a sample and running `capa_ghidra.py` using a single `analyzeHeadless` script command:
|
||||
|
||||
```
|
||||
$GHIDRA_HOME/support/analyzeHeadless </path/to/ghidra/project/> <ghidra_project_name> -Import </path/to/sample> -PostScript capa_ghidra.py "/path/to/capa/rules/ --verbose"
|
||||
```
|
||||
|
||||
You can also provide `capa_ghidra.py` the single argument `"help"` to view supported arguments when running the script using the Ghidra Headless Analyzer:
|
||||
```
|
||||
$GHIDRA_HOME/support/analyzeHeadless </path/to/ghidra/project/> <ghidra_project_name> -process <sample_name> -PostScript capa_ghidra.py "help"
|
||||
```
|
||||
|
||||
#### Example

The following is an example of running `capa_ghidra.py` against a shellcode sample using the Ghidra `analyzeHeadless` script:

```
$ analyzeHeadless /home/wumbo/Desktop/ghidra_projects/ capa_test -process 499c2a85f6e8142c3f48d4251c9c7cd6.raw32 -processor x86:LE:32:default -PostScript capa_ghidra.py "/home/wumbo/capa/rules -vv"
[...]

INFO  REPORT: Analysis succeeded for file: /499c2a85f6e8142c3f48d4251c9c7cd6.raw32 (HeadlessAnalyzer)
INFO  SCRIPT: /home/wumbo/ghidra_scripts/capa_ghidra.py (HeadlessAnalyzer)
md5                     499c2a85f6e8142c3f48d4251c9c7cd6
sha1
sha256                  e8e02191c1b38c808d27a899ac164b3675eb5cadd3a8907b0ffa863714000e72
path                    /home/wumbo/capa/tests/data/499c2a85f6e8142c3f48d4251c9c7cd6.raw32
timestamp               2023-08-29 17:57:00.946588
capa version            6.1.0
os                      unknown os
format                  Raw Binary
arch                    x86
extractor               ghidra
base address            global
rules                   /home/wumbo/capa/rules
function count          42
library function count  0
total feature count     1970

contain loop (24 matches, only showing first match of library rule)
author  moritz.raabe@mandiant.com
scope   function
function @ 0x0
  or:
    characteristic: loop @ 0x0
    characteristic: tight loop @ 0x278

contain obfuscated stackstrings
namespace  anti-analysis/obfuscation/string/stackstring
author     moritz.raabe@mandiant.com
scope      basic block
att&ck     Defense Evasion::Obfuscated Files or Information::Indicator Removal from Tools [T1027.005]
mbc        Anti-Static Analysis::Executable Code Obfuscation::Argument Obfuscation [B0032.020], Anti-Static Analysis::Executable Code Obfuscation::Stack Strings [B0032.017]
basic block @ 0x0 in function 0x0
  characteristic: stack string @ 0x0

encode data using XOR
namespace  data-manipulation/encoding/xor
author     moritz.raabe@mandiant.com
scope      basic block
att&ck     Defense Evasion::Obfuscated Files or Information [T1027]
mbc        Defense Evasion::Obfuscated Files or Information::Encoding-Standard Algorithm [E1027.m02], Data::Encode Data::XOR [C0026.002]
basic block @ 0x8AF in function 0x8A1
  and:
    characteristic: tight loop @ 0x8AF
    characteristic: nzxor @ 0x8C0
    not: = filter for potential false positives
      or:
        or: = unsigned bitwise negation operation (~i)
          number: 0xFFFFFFFF = bitwise negation for unsigned 32 bits
          number: 0xFFFFFFFFFFFFFFFF = bitwise negation for unsigned 64 bits
        or: = signed bitwise negation operation (~i)
          number: 0xFFFFFFF = bitwise negation for signed 32 bits
          number: 0xFFFFFFFFFFFFFFF = bitwise negation for signed 64 bits
        or: = Magic constants used in the implementation of strings functions.
          number: 0x7EFEFEFF = optimized string constant for 32 bits
          number: 0x81010101 = -0x81010101 = 0x7EFEFEFF
          number: 0x81010100 = 0x81010100 = ~0x7EFEFEFF
          number: 0x7EFEFEFEFEFEFEFF = optimized string constant for 64 bits
          number: 0x8101010101010101 = -0x8101010101010101 = 0x7EFEFEFEFEFEFEFF
          number: 0x8101010101010100 = 0x8101010101010100 = ~0x7EFEFEFEFEFEFEFF

get OS information via KUSER_SHARED_DATA
namespace   host-interaction/os/version
author      @mr-tz
scope       function
att&ck      Discovery::System Information Discovery [T1082]
references  https://www.geoffchappell.com/studies/windows/km/ntoskrnl/inc/api/ntexapi_x/kuser_shared_data/index.htm
function @ 0x1CA6
  or:
    number: 0x7FFE026C = NtMajorVersion @ 0x1D18

Script /home/wumbo/ghidra_scripts/capa_ghidra.py called exit with code 0

[...]
```
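
When using the `json` format instead, the emitted document can be post-processed with capa's result document model. A minimal sketch, assuming capa is installed as a Python package, that the JSON output was saved to a hypothetical `capa_output.json`, and a capa version whose result document exposes the pydantic v2 API (`model_validate_json`, as used elsewhere in this changeset):

```
# a minimal sketch: load headless capa JSON output and list matched rules
from pathlib import Path

from capa.render.result_document import ResultDocument

# hypothetical output file captured from a json-format headless run
doc = ResultDocument.model_validate_json(Path("capa_output.json").read_text())
print(doc.meta.sample.sha256)
for rule_name in doc.rules:
    print(rule_name)  # e.g. "encode data using XOR"
```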
0
capa/ghidra/__init__.py
Normal file

166
capa/ghidra/capa_ghidra.py
Normal file

@@ -0,0 +1,166 @@
# Run capa against loaded Ghidra database
# @author Mike Hunhoff (mehunhoff@google.com)
# @category Python 3.capa

# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import sys
import logging
import pathlib
import argparse

import capa
import capa.main
import capa.rules
import capa.ghidra.helpers
import capa.render.default
import capa.features.extractors.ghidra.extractor

logger = logging.getLogger("capa_ghidra")


def run_headless():
    parser = argparse.ArgumentParser(description="The FLARE team's open-source tool to integrate capa with Ghidra.")

    parser.add_argument(
        "rules",
        type=str,
        help="path to rule file or directory",
    )
    parser.add_argument(
        "-v", "--verbose", action="store_true", help="enable verbose result document (no effect with --json)"
    )
    parser.add_argument(
        "-vv", "--vverbose", action="store_true", help="enable very verbose result document (no effect with --json)"
    )
    parser.add_argument("-d", "--debug", action="store_true", help="enable debugging output on STDERR")
    parser.add_argument("-q", "--quiet", action="store_true", help="disable all output but errors")
    parser.add_argument("-j", "--json", action="store_true", help="emit JSON instead of text")

    script_args = list(getScriptArgs())  # type: ignore [name-defined] # noqa: F821
    if not script_args or len(script_args) > 1:
        script_args = []
    else:
        script_args = script_args[0].split()
        for idx, arg in enumerate(script_args):
            if arg.lower() == "help":
                script_args[idx] = "--help"

    args = parser.parse_args(args=script_args)

    if args.quiet:
        logging.basicConfig(level=logging.WARNING)
        logging.getLogger().setLevel(logging.WARNING)
    elif args.debug:
        logging.basicConfig(level=logging.DEBUG)
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
        logging.getLogger().setLevel(logging.INFO)

    logger.debug("running in Ghidra headless mode")

    rules_path = pathlib.Path(args.rules)

    logger.debug("rule path: %s", rules_path)
    rules = capa.main.get_rules([rules_path])

    meta = capa.ghidra.helpers.collect_metadata([rules_path])
    extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor()

    capabilities, counts = capa.main.find_capabilities(rules, extractor, False)

    meta.analysis.feature_counts = counts["feature_counts"]
    meta.analysis.library_functions = counts["library_functions"]
    meta.analysis.layout = capa.main.compute_layout(rules, extractor, capabilities)

    if capa.main.has_file_limitation(rules, capabilities, is_standalone=True):
        logger.info("capa encountered warnings during analysis")

    if args.json:
        print(capa.render.json.render(meta, rules, capabilities))  # noqa: T201
    elif args.vverbose:
        print(capa.render.vverbose.render(meta, rules, capabilities))  # noqa: T201
    elif args.verbose:
        print(capa.render.verbose.render(meta, rules, capabilities))  # noqa: T201
    else:
        print(capa.render.default.render(meta, rules, capabilities))  # noqa: T201

    return 0


def run_ui():
    logging.basicConfig(level=logging.INFO)
    logging.getLogger().setLevel(logging.INFO)

    rules_dir: str = ""
    try:
        selected_dir = askDirectory("Choose capa rules directory", "Ok")  # type: ignore [name-defined] # noqa: F821
        if selected_dir:
            rules_dir = selected_dir.getPath()
    except RuntimeError:
        # RuntimeError thrown when user selects "Cancel"
        pass

    if not rules_dir:
        logger.info("You must choose a capa rules directory before running capa.")
        return capa.main.E_MISSING_RULES

    verbose = askChoice(  # type: ignore [name-defined] # noqa: F821
        "capa output verbosity", "Choose capa output verbosity", ["default", "verbose", "vverbose"], "default"
    )

    rules_path: pathlib.Path = pathlib.Path(rules_dir)
    logger.info("running capa using rules from %s", str(rules_path))

    rules = capa.main.get_rules([rules_path])

    meta = capa.ghidra.helpers.collect_metadata([rules_path])
    extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor()

    capabilities, counts = capa.main.find_capabilities(rules, extractor, True)

    meta.analysis.feature_counts = counts["feature_counts"]
    meta.analysis.library_functions = counts["library_functions"]
    meta.analysis.layout = capa.main.compute_layout(rules, extractor, capabilities)

    if capa.main.has_file_limitation(rules, capabilities, is_standalone=False):
        logger.info("capa encountered warnings during analysis")

    if verbose == "vverbose":
        print(capa.render.vverbose.render(meta, rules, capabilities))  # noqa: T201
    elif verbose == "verbose":
        print(capa.render.verbose.render(meta, rules, capabilities))  # noqa: T201
    else:
        print(capa.render.default.render(meta, rules, capabilities))  # noqa: T201

    return 0


def main():
    if not capa.ghidra.helpers.is_supported_ghidra_version():
        return capa.main.E_UNSUPPORTED_GHIDRA_VERSION

    if not capa.ghidra.helpers.is_supported_file_type():
        return capa.main.E_INVALID_FILE_TYPE

    if not capa.ghidra.helpers.is_supported_arch_type():
        return capa.main.E_INVALID_FILE_ARCH

    if isRunningHeadless():  # type: ignore [name-defined] # noqa: F821
        return run_headless()
    else:
        return run_ui()


if __name__ == "__main__":
    if sys.version_info < (3, 8):
        from capa.exceptions import UnsupportedRuntimeError

        raise UnsupportedRuntimeError("This version of capa can only be used with Python 3.8+")
    sys.exit(main())
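
Note how `run_headless()` above receives its arguments: the Ghidra Headless Analyzer hands the script a single quoted string, which the script splits into argv-style tokens before passing them to `argparse`. A quick illustration with hypothetical values:

```
# what getScriptArgs() returns for:
#   -PostScript capa_ghidra.py "/home/user/capa/rules -vv"   (hypothetical paths)
script_args = ["/home/user/capa/rules -vv"]

# capa_ghidra.py splits the single quoted string into argparse-style tokens
tokens = script_args[0].split()
print(tokens)  # ['/home/user/capa/rules', '-vv']
```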
159
capa/ghidra/helpers.py
Normal file

@@ -0,0 +1,159 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
import datetime
import contextlib
from typing import List
from pathlib import Path

import capa
import capa.version
import capa.features.common
import capa.features.freeze
import capa.render.result_document as rdoc
import capa.features.extractors.ghidra.helpers

logger = logging.getLogger("capa")

# file type as returned by Ghidra
SUPPORTED_FILE_TYPES = ("Executable and Linking Format (ELF)", "Portable Executable (PE)", "Raw Binary")


class GHIDRAIO:
    """
    An object that acts as a file-like object,
    using bytes from the current Ghidra listing.
    """

    def __init__(self):
        super().__init__()

        self.offset = 0
        self.bytes_ = self.get_bytes()

    def seek(self, offset, whence=0):
        assert whence == 0
        self.offset = offset

    def read(self, size):
        logger.debug("reading 0x%x bytes at 0x%x (ea: 0x%x)", size, self.offset, currentProgram().getImageBase().add(self.offset).getOffset())  # type: ignore [name-defined] # noqa: F821

        if size > len(self.bytes_) - self.offset:
            logger.debug("cannot read 0x%x bytes at 0x%x (ea: BADADDR)", size, self.offset)
            return b""
        else:
            return self.bytes_[self.offset : self.offset + size]

    def close(self):
        return

    def get_bytes(self):
        file_bytes = currentProgram().getMemory().getAllFileBytes()[0]  # type: ignore [name-defined] # noqa: F821

        # getOriginalByte() allows for raw file parsing on the Ghidra side
        # other functions will fail as Ghidra will think that it's reading uninitialized memory
        bytes_ = [file_bytes.getOriginalByte(i) for i in range(file_bytes.getSize())]

        return capa.features.extractors.ghidra.helpers.ints_to_bytes(bytes_)


def is_supported_ghidra_version():
    version = float(getGhidraVersion()[:4])  # type: ignore [name-defined] # noqa: F821
    if version < 10.2:
        warning_msg = "capa does not support this Ghidra version"
        logger.warning(warning_msg)
        logger.warning("Your Ghidra version is: %s. Supported versions are: Ghidra >= 10.2", version)
        return False
    return True


def is_running_headless():
    return isRunningHeadless()  # type: ignore [name-defined] # noqa: F821


def is_supported_file_type():
    file_info = currentProgram().getExecutableFormat()  # type: ignore [name-defined] # noqa: F821
    if file_info not in SUPPORTED_FILE_TYPES:
        logger.error("-" * 80)
        logger.error(" Input file does not appear to be a supported file type.")
        logger.error(" ")
        logger.error(
            " capa currently only supports analyzing PE, ELF, or binary files containing x86 (32- and 64-bit) shellcode."
        )
        logger.error(" If you don't know the input file type, you can try using the `file` utility to guess it.")
        logger.error("-" * 80)
        return False
    return True


def is_supported_arch_type():
    lang_id = str(currentProgram().getLanguageID()).lower()  # type: ignore [name-defined] # noqa: F821

    if not all((lang_id.startswith("x86"), any(arch in lang_id for arch in ("32", "64")))):
        logger.error("-" * 80)
        logger.error(" Input file does not appear to target a supported architecture.")
        logger.error(" ")
        logger.error(" capa currently only supports analyzing x86 (32- and 64-bit).")
        logger.error("-" * 80)
        return False
    return True


def get_file_md5():
    return currentProgram().getExecutableMD5()  # type: ignore [name-defined] # noqa: F821


def get_file_sha256():
    return currentProgram().getExecutableSHA256()  # type: ignore [name-defined] # noqa: F821


def collect_metadata(rules: List[Path]):
    md5 = get_file_md5()
    sha256 = get_file_sha256()

    info = currentProgram().getLanguageID().toString()  # type: ignore [name-defined] # noqa: F821
    if "x86" in info and "64" in info:
        arch = "x86_64"
    elif "x86" in info and "32" in info:
        arch = "x86"
    else:
        arch = "unknown arch"

    format_name: str = currentProgram().getExecutableFormat()  # type: ignore [name-defined] # noqa: F821
    if "PE" in format_name:
        os = "windows"
    elif "ELF" in format_name:
        with contextlib.closing(capa.ghidra.helpers.GHIDRAIO()) as f:
            os = capa.features.extractors.elf.detect_elf_os(f)
    else:
        os = "unknown os"

    return rdoc.Metadata(
        timestamp=datetime.datetime.now(),
        version=capa.version.__version__,
        argv=(),
        sample=rdoc.Sample(
            md5=md5,
            sha1="",
            sha256=sha256,
            path=currentProgram().getExecutablePath(),  # type: ignore [name-defined] # noqa: F821
        ),
        analysis=rdoc.Analysis(
            format=currentProgram().getExecutableFormat(),  # type: ignore [name-defined] # noqa: F821
            arch=arch,
            os=os,
            extractor="ghidra",
            rules=tuple(r.resolve().absolute().as_posix() for r in rules),
            base_address=capa.features.freeze.Address.from_capa(currentProgram().getImageBase().getOffset()),  # type: ignore [name-defined] # noqa: F821
            layout=rdoc.Layout(
                functions=(),
            ),
            feature_counts=rdoc.FeatureCounts(file=0, functions=()),
            library_functions=(),
        ),
    )
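
`GHIDRAIO` above exists so that capa's ELF OS detection, which expects a seekable file-like object, can read the original file bytes backing the current program. A minimal usage sketch, only meaningful inside a Ghidra Python script where `currentProgram()` is defined and capa is importable:

```
import contextlib

import capa.ghidra.helpers
import capa.features.extractors.elf

# a minimal sketch mirroring collect_metadata() above
with contextlib.closing(capa.ghidra.helpers.GHIDRAIO()) as f:
    f.seek(0)
    print(f.read(4))  # e.g. b"\x7fELF" for an ELF sample
    f.seek(0)
    print(capa.features.extractors.elf.detect_elf_os(f))  # e.g. "linux"
```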
@@ -1,13 +1,18 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import os
import inspect
import logging
import contextlib
import importlib.util
from typing import NoReturn
from pathlib import Path

import tqdm

from capa.exceptions import UnsupportedFormatError
from capa.features.common import FORMAT_PE, FORMAT_SC32, FORMAT_SC64, FORMAT_DOTNET, FORMAT_UNKNOWN, Format
@@ -27,36 +32,40 @@ def hex(n: int) -> str:
    return f"0x{(n):X}"


def get_file_taste(sample_path: str) -> bytes:
    if not os.path.exists(sample_path):
def get_file_taste(sample_path: Path) -> bytes:
    if not sample_path.exists():
        raise IOError(f"sample path {sample_path} does not exist or cannot be accessed")
    with open(sample_path, "rb") as f:
        taste = f.read(8)
    taste = sample_path.open("rb").read(8)
    return taste
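
This hunk belongs to a broader migration from `str` paths with `os.path` to `pathlib.Path`. The pattern in isolation, as a sketch with a hypothetical sample path:

```
from pathlib import Path

sample_path = Path("/tmp/sample.bin")  # hypothetical sample

# before: os.path.exists(sample_path) and open(sample_path, "rb")
# after: the same check and read hang off the Path object itself
if not sample_path.exists():
    raise IOError(f"sample path {sample_path} does not exist or cannot be accessed")
taste = sample_path.open("rb").read(8)  # first 8 bytes, as in get_file_taste()
```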


def is_runtime_ida():
    return importlib.util.find_spec("idc") is not None


def is_runtime_ghidra():
    try:
        import idc
    except ImportError:
        currentProgram  # type: ignore [name-defined] # noqa: F821
    except NameError:
        return False
    else:
        return True
    return True


def assert_never(value) -> NoReturn:
    assert False, f"Unhandled value: {value} ({type(value).__name__})"
    # careful: python -O will remove this assertion.
    # but this is only used for type checking, so it's ok.
    assert False, f"Unhandled value: {value} ({type(value).__name__})"  # noqa: B011


def get_format_from_extension(sample: str) -> str:
    if sample.endswith(EXTENSIONS_SHELLCODE_32):
def get_format_from_extension(sample: Path) -> str:
    if sample.name.endswith(EXTENSIONS_SHELLCODE_32):
        return FORMAT_SC32
    elif sample.endswith(EXTENSIONS_SHELLCODE_64):
    elif sample.name.endswith(EXTENSIONS_SHELLCODE_64):
        return FORMAT_SC64
    return FORMAT_UNKNOWN


def get_auto_format(path: str) -> str:
def get_auto_format(path: Path) -> str:
    format_ = get_format(path)
    if format_ == FORMAT_UNKNOWN:
        format_ = get_format_from_extension(path)
@@ -65,13 +74,12 @@ def get_auto_format(path: str) -> str:
    return format_


def get_format(sample: str) -> str:
def get_format(sample: Path) -> str:
    # imported locally to avoid import cycle
    from capa.features.extractors.common import extract_format
    from capa.features.extractors.dnfile_ import DnfileFeatureExtractor

    with open(sample, "rb") as f:
        buf = f.read()
    buf = sample.read_bytes()

    for feature, _ in extract_format(buf):
        if feature == Format(FORMAT_PE):
@@ -85,6 +93,39 @@ def get_format(sample: str) -> str:
    return FORMAT_UNKNOWN


@contextlib.contextmanager
def redirecting_print_to_tqdm(disable_progress):
    """
    tqdm (progress bar) expects to have fairly tight control over console output.
    so calls to `print()` will break the progress bar and make things look bad.
    so, this context manager temporarily replaces the `print` implementation
    with one that is compatible with tqdm.
    via: https://stackoverflow.com/a/42424890/87207
    """
    old_print = print  # noqa: T202 [reserved word print used]

    def new_print(*args, **kwargs):
        # If tqdm.tqdm.write raises error, use builtin print
        if disable_progress:
            old_print(*args, **kwargs)
        else:
            try:
                tqdm.tqdm.write(*args, **kwargs)
            except Exception:
                old_print(*args, **kwargs)

    try:
        # Globally replace print with new_print.
        # Verified this works manually on Python 3.11:
        # >>> import inspect
        # >>> inspect.builtins
        # <module 'builtins' (built-in)>
        inspect.builtins.print = new_print  # type: ignore
        yield
    finally:
        inspect.builtins.print = old_print  # type: ignore
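
A short usage sketch for this context manager (hypothetical loop; `tqdm` must be installed): prints issued inside the context are routed through `tqdm.tqdm.write()` so they render above an active progress bar instead of tearing it.

```
import tqdm

# a minimal sketch using redirecting_print_to_tqdm() defined above
with redirecting_print_to_tqdm(disable_progress=False):
    for _ in tqdm.tqdm(range(3)):
        print("working...")  # emitted via tqdm.tqdm.write(), not raw stdout
```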


def log_unsupported_format_error():
    logger.error("-" * 80)
    logger.error(" Input file does not appear to be a PE or ELF file.")
@@ -118,7 +159,7 @@ def log_unsupported_runtime_error():
    logger.error("-" * 80)
    logger.error(" Unsupported runtime or Python interpreter.")
    logger.error(" ")
    logger.error(" capa supports running under Python 3.7 and higher.")
    logger.error(" capa supports running under Python 3.8 and higher.")
    logger.error(" ")
    logger.error(
        " If you're seeing this message on the command line, please ensure you're running a supported Python version."

@@ -1,15 +1,15 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import json
import logging
import datetime
import contextlib
from typing import Optional
from typing import List, Optional
from pathlib import Path

import idc
import idaapi
@@ -22,7 +22,8 @@ import capa
import capa.version
import capa.render.utils as rutils
import capa.features.common
import capa.render.result_document
import capa.features.freeze
import capa.render.result_document as rdoc
from capa.features.address import AbsoluteVirtualAddress

logger = logging.getLogger("capa")
@@ -45,7 +46,8 @@ NETNODE_RULES_CACHE_ID = "rules-cache-id"


def inform_user_ida_ui(message):
    idaapi.info(f"{message}. Please refer to IDA Output window for more information.")
    # this isn't a logger, this is IDA's logging facility
    idaapi.info(f"{message}. Please refer to IDA Output window for more information.")  # noqa: G004


def is_supported_ida_version():
@@ -53,7 +55,7 @@ def is_supported_ida_version():
    if version < 7.4 or version >= 9:
        warning_msg = "This plugin does not support your IDA Pro version"
        logger.warning(warning_msg)
        logger.warning("Your IDA Pro version is: %s. Supported versions are: IDA >= 7.4 and IDA < 9.0." % version)
        logger.warning("Your IDA Pro version is: %s. Supported versions are: IDA >= 7.4 and IDA < 9.0.", version)
        return False
    return True
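
The change above swaps eager `%`-interpolation for logging's lazy formatting. The difference in brief, as a sketch:

```
import logging

logger = logging.getLogger(__name__)

# eager: the message string is built even if the WARNING level is filtered out
logger.warning("Your IDA Pro version is: %s." % "7.4")

# lazy: logging interpolates the arguments only when the record is actually emitted
logger.warning("Your IDA Pro version is: %s.", "7.4")
```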
@@ -118,7 +120,7 @@ def get_file_sha256():
    return sha256


def collect_metadata(rules):
def collect_metadata(rules: List[Path]):
    """ """
    md5 = get_file_md5()
    sha256 = get_file_sha256()
@@ -140,37 +142,35 @@ def collect_metadata(rules):
    else:
        os = "unknown os"

    return {
        "timestamp": datetime.datetime.now().isoformat(),
        "argv": [],
        "sample": {
            "md5": md5,
            "sha1": "",  # not easily accessible
            "sha256": sha256,
            "path": idaapi.get_input_file_path(),
        },
        "analysis": {
            "format": idaapi.get_file_type_name(),
            "arch": arch,
            "os": os,
            "extractor": "ida",
            "rules": rules,
            "base_address": idaapi.get_imagebase(),
            "layout": {
    return rdoc.Metadata(
        timestamp=datetime.datetime.now(),
        version=capa.version.__version__,
        argv=(),
        sample=rdoc.Sample(
            md5=md5,
            sha1="",  # not easily accessible
            sha256=sha256,
            path=idaapi.get_input_file_path(),
        ),
        analysis=rdoc.Analysis(
            format=idaapi.get_file_type_name(),
            arch=arch,
            os=os,
            extractor="ida",
            rules=tuple(r.resolve().absolute().as_posix() for r in rules),
            base_address=capa.features.freeze.Address.from_capa(idaapi.get_imagebase()),
            layout=rdoc.Layout(
                functions=(),
                # this is updated after capabilities have been collected.
                # will look like:
                #
                # "functions": { 0x401000: { "matched_basic_blocks": [ 0x401000, 0x401005, ... ] }, ... }
            },
            ),
            # ignore these for now - not used by IDA plugin.
            "feature_counts": {
                "file": {},
                "functions": {},
            },
            "library_functions": {},
        },
        "version": capa.version.__version__,
    }
            feature_counts=rdoc.FeatureCounts(file=0, functions=()),
            library_functions=(),
        ),
    )


class IDAIO:
@@ -213,16 +213,16 @@ def idb_contains_cached_results() -> bool:
        n = netnode.Netnode(CAPA_NETNODE)
        return bool(n.get(NETNODE_RESULTS))
    except netnode.NetnodeCorruptError as e:
        logger.error("%s", e, exc_info=True)
        logger.exception(str(e))
        return False


def load_and_verify_cached_results() -> Optional[capa.render.result_document.ResultDocument]:
def load_and_verify_cached_results() -> Optional[rdoc.ResultDocument]:
    """verifies that cached results have valid (mapped) addresses for the current database"""
    logger.debug("loading cached capa results from netnode '%s'", CAPA_NETNODE)

    n = netnode.Netnode(CAPA_NETNODE)
    doc = capa.render.result_document.ResultDocument.parse_obj(json.loads(n[NETNODE_RESULTS]))
    doc = rdoc.ResultDocument.model_validate_json(n[NETNODE_RESULTS])

    for rule in rutils.capability_rules(doc):
        for location_, _ in rule.matches:
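
This hunk reflects the pydantic v1 to v2 API migration: `parse_obj(json.loads(...))` collapses into a single `model_validate_json(...)` call. Schematically, with a hypothetical stand-in model:

```
from pydantic import BaseModel


class Sample(BaseModel):  # hypothetical stand-in for rdoc.ResultDocument
    md5: str


payload = '{"md5": "499c2a85f6e8142c3f48d4251c9c7cd6"}'

# pydantic v1 style, as removed above (requires `import json`):
# doc = Sample.parse_obj(json.loads(payload))

# pydantic v2 style, parsing the JSON string directly:
doc = Sample.model_validate_json(payload)
```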
@@ -95,7 +95,7 @@ can update using the `Settings` button.

### Requirements

capa explorer supports Python versions >= 3.7.x and IDA Pro versions >= 7.4. The following IDA Pro versions have been tested:
capa explorer supports Python versions >= 3.8.x and IDA Pro versions >= 7.4. The following IDA Pro versions have been tested:

* IDA 7.4
* IDA 7.5
@@ -105,7 +105,7 @@ capa explorer supports Python versions >= 3.7.x and IDA Pro versions >= 7.4. The
* IDA 8.1
* IDA 8.2

capa explorer is however limited to the Python versions supported by your IDA installation (which may not include all Python versions >= 3.7.x).
capa explorer is however limited to the Python versions supported by your IDA installation (which may not include all Python versions >= 3.8.x).

If you encounter issues with your specific setup, please open a new [Issue](https://github.com/mandiant/capa/issues).

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -10,7 +10,7 @@ from __future__ import annotations

import itertools
import collections
from typing import Set, Dict, List, Tuple, Union, Optional
from typing import Set, Dict, Tuple, Union, Optional

import capa.engine
from capa.rules import Scope, RuleSet
@@ -37,18 +37,21 @@ class CapaRuleGenFeatureCacheNode:
        self.children: Set[CapaRuleGenFeatureCacheNode] = set()

    def __hash__(self):
        # TODO: unique enough?
        # TODO(mike-hunhoff): confirm this is unique enough
        # https://github.com/mandiant/capa/issues/1604
        return hash((self.address,))

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return NotImplemented
        # TODO: unique enough?
        # TODO(mike-hunhoff): confirm this is unique enough
        # https://github.com/mandiant/capa/issues/1604
        return self.address == other.address


class CapaRuleGenFeatureCache:
    def __init__(self, fh_list: List[FunctionHandle], extractor: CapaExplorerFeatureExtractor):
    def __init__(self, extractor: CapaExplorerFeatureExtractor):
        self.extractor = extractor
        self.global_features: FeatureSet = collections.defaultdict(set)

        self.file_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(None, None)
@@ -56,12 +59,11 @@ class CapaRuleGenFeatureCache:
        self.bb_nodes: Dict[Address, CapaRuleGenFeatureCacheNode] = {}
        self.insn_nodes: Dict[Address, CapaRuleGenFeatureCacheNode] = {}

        self._find_global_features(extractor)
        self._find_file_features(extractor)
        self._find_function_and_below_features(fh_list, extractor)
        self._find_global_features()
        self._find_file_features()

    def _find_global_features(self, extractor: CapaExplorerFeatureExtractor):
        for feature, addr in extractor.extract_global_features():
    def _find_global_features(self):
        for feature, addr in self.extractor.extract_global_features():
            # not all global features may have virtual addresses.
            # if not, then at least ensure the feature shows up in the index.
            # the set of addresses will still be empty.
@@ -71,46 +73,45 @@ class CapaRuleGenFeatureCache:
            if feature not in self.global_features:
                self.global_features[feature] = set()

    def _find_file_features(self, extractor: CapaExplorerFeatureExtractor):
    def _find_file_features(self):
        # not all file features may have virtual addresses.
        # if not, then at least ensure the feature shows up in the index.
        # the set of addresses will still be empty.
        for feature, addr in extractor.extract_file_features():
        for feature, addr in self.extractor.extract_file_features():
            if addr is not None:
                self.file_node.features[feature].add(addr)
            else:
                if feature not in self.file_node.features:
                    self.file_node.features[feature] = set()

    def _find_function_and_below_features(self, fh_list: List[FunctionHandle], extractor: CapaExplorerFeatureExtractor):
        for fh in fh_list:
            f_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(fh, self.file_node)
    def _find_function_and_below_features(self, fh: FunctionHandle):
        f_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(fh, self.file_node)

            # extract basic block and below features
            for bbh in extractor.get_basic_blocks(fh):
                bb_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(bbh, f_node)
        # extract basic block and below features
        for bbh in self.extractor.get_basic_blocks(fh):
            bb_node: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(bbh, f_node)

                # extract instruction features
                for ih in extractor.get_instructions(fh, bbh):
                    inode: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(ih, bb_node)
            # extract instruction features
            for ih in self.extractor.get_instructions(fh, bbh):
                inode: CapaRuleGenFeatureCacheNode = CapaRuleGenFeatureCacheNode(ih, bb_node)

                    for feature, addr in extractor.extract_insn_features(fh, bbh, ih):
                        inode.features[feature].add(addr)
                for feature, addr in self.extractor.extract_insn_features(fh, bbh, ih):
                    inode.features[feature].add(addr)

                    self.insn_nodes[inode.address] = inode
                self.insn_nodes[inode.address] = inode

                # extract basic block features
                for feature, addr in extractor.extract_basic_block_features(fh, bbh):
                    bb_node.features[feature].add(addr)
            # extract basic block features
            for feature, addr in self.extractor.extract_basic_block_features(fh, bbh):
                bb_node.features[feature].add(addr)

                # store basic block features in cache and function parent
                self.bb_nodes[bb_node.address] = bb_node
            # store basic block features in cache and function parent
            self.bb_nodes[bb_node.address] = bb_node

            # extract function features
            for feature, addr in extractor.extract_function_features(fh):
                f_node.features[feature].add(addr)
        # extract function features
        for feature, addr in self.extractor.extract_function_features(fh):
            f_node.features[feature].add(addr)

        self.func_nodes[f_node.address] = f_node
        self.func_nodes[f_node.address] = f_node

    def _find_instruction_capabilities(
        self, ruleset: RuleSet, insn: CapaRuleGenFeatureCacheNode
@@ -155,7 +156,7 @@ class CapaRuleGenFeatureCache:
    def find_code_capabilities(
        self, ruleset: RuleSet, fh: FunctionHandle
    ) -> Tuple[FeatureSet, MatchResults, MatchResults, MatchResults]:
        f_node: Optional[CapaRuleGenFeatureCacheNode] = self.func_nodes.get(fh.address, None)
        f_node: Optional[CapaRuleGenFeatureCacheNode] = self._get_cached_func_node(fh)
        if f_node is None:
            return {}, {}, {}, {}

@@ -195,8 +196,16 @@ class CapaRuleGenFeatureCache:
        _, matches = ruleset.match(Scope.FILE, features, NO_ADDRESS)
        return features, matches

    def _get_cached_func_node(self, fh: FunctionHandle) -> Optional[CapaRuleGenFeatureCacheNode]:
        f_node: Optional[CapaRuleGenFeatureCacheNode] = self.func_nodes.get(fh.address)
        if f_node is None:
            # function is not in our cache, do extraction now
            self._find_function_and_below_features(fh)
            f_node = self.func_nodes.get(fh.address)
        return f_node

    def get_all_function_features(self, fh: FunctionHandle) -> FeatureSet:
        f_node: Optional[CapaRuleGenFeatureCacheNode] = self.func_nodes.get(fh.address, None)
        f_node: Optional[CapaRuleGenFeatureCacheNode] = self._get_cached_func_node(fh)
        if f_node is None:
            return {}

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,16 +1,17 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import os
import copy
import logging
import itertools
import collections
from enum import IntFlag
from typing import Any, List, Optional
from pathlib import Path

import idaapi
import ida_kernwin
@@ -57,9 +58,6 @@ CAPA_OFFICIAL_RULESET_URL = f"https://github.com/mandiant/capa-rules/releases/ta
CAPA_RULESET_DOC_URL = "https://github.com/mandiant/capa/blob/master/doc/rules.md"


from enum import IntFlag


class Options(IntFlag):
    NO_ANALYSIS = 0  # No auto analysis
    ANALYZE_AUTO = 1  # Runs the analysis when starting the explorer, see details below
@@ -73,10 +71,9 @@ AnalyzeOptionsText = {
}


def write_file(path, data):
def write_file(path: Path, data):
    """ """
    with open(path, "wb") as save_file:
        save_file.write(data)
    path.write_bytes(data)


def trim_function_name(f, max_length=25):
@@ -192,8 +189,10 @@ class CapaExplorerForm(idaapi.PluginForm):
        # caches used to speed up capa explorer analysis - these must be init to None
        self.resdoc_cache: Optional[capa.render.result_document.ResultDocument] = None
        self.program_analysis_ruleset_cache: Optional[capa.rules.RuleSet] = None
        self.rulegen_ruleset_cache: Optional[capa.rules.RuleSet] = None
        self.feature_extractor: Optional[CapaExplorerFeatureExtractor] = None
        self.rulegen_feature_extractor: Optional[CapaExplorerFeatureExtractor] = None
        self.rulegen_feature_cache: Optional[CapaRuleGenFeatureCache] = None
        self.rulegen_ruleset_cache: Optional[capa.rules.RuleSet] = None
        self.rulegen_current_function: Optional[FunctionHandle] = None

        # models
@@ -536,7 +535,7 @@ class CapaExplorerForm(idaapi.PluginForm):
        @param new_ea: destination ea
        @param old_ea: source ea
        """
        if not self.view_tabs.currentIndex() in (0, 1):
        if self.view_tabs.currentIndex() not in (0, 1):
            return

        if idaapi.get_widget_type(widget) != idaapi.BWN_DISASM:
@@ -577,7 +576,8 @@ class CapaExplorerForm(idaapi.PluginForm):
        path: str = settings.user.get(CAPA_SETTINGS_RULE_PATH, "")

        # resolve rules directory - check self and settings first, then ask user
        if not os.path.exists(path):
        # pathlib.Path considers "" equivalent to "." so we first check if rule path is an empty string
        if not path or not Path(path).exists():
            # configure rules selection messagebox
            rules_message = QtWidgets.QMessageBox()
            rules_message.setIcon(QtWidgets.QMessageBox.Information)
@@ -585,7 +585,7 @@ class CapaExplorerForm(idaapi.PluginForm):
            rules_message.setText("You must specify a directory containing capa rules before running analysis.")
            rules_message.setInformativeText(
                "Click 'Ok' to specify a local directory of rules or you can download and extract the official "
                f"rules from the URL listed in the details."
                + "rules from the URL listed in the details."
            )
            rules_message.setDetailedText(f"{CAPA_OFFICIAL_RULESET_URL}")
            rules_message.setStandardButtons(QtWidgets.QMessageBox.Ok | QtWidgets.QMessageBox.Cancel)
@@ -599,20 +599,21 @@ class CapaExplorerForm(idaapi.PluginForm):
            if not path:
                raise UserCancelledError()

            if not os.path.exists(path):
                logger.error("rule path %s does not exist or cannot be accessed" % path)
            if not Path(path).exists():
                logger.error("rule path %s does not exist or cannot be accessed", path)
                return False

            settings.user[CAPA_SETTINGS_RULE_PATH] = path
        except UserCancelledError as e:
        except UserCancelledError:
            capa.ida.helpers.inform_user_ida_ui("Analysis requires capa rules")
            logger.warning(
                f"You must specify a directory containing capa rules before running analysis. Download and extract the official rules from {CAPA_OFFICIAL_RULESET_URL} (recommended)."
                "You must specify a directory containing capa rules before running analysis.%s",
                f"Download and extract the official rules from {CAPA_OFFICIAL_RULESET_URL} (recommended).",
            )
            return False
        except Exception as e:
            capa.ida.helpers.inform_user_ida_ui("Failed to load capa rules")
            logger.error("Failed to load capa rules (error: %s).", e, exc_info=True)
            logger.exception("Failed to load capa rules (error: %s).", e)
            return False

        if ida_kernwin.user_cancelled():
@@ -626,7 +627,7 @@ class CapaExplorerForm(idaapi.PluginForm):
        if not self.ensure_capa_settings_rule_path():
            return False

        rule_path: str = settings.user.get(CAPA_SETTINGS_RULE_PATH, "")
        rule_path: Path = Path(settings.user.get(CAPA_SETTINGS_RULE_PATH, ""))
        try:

            def on_load_rule(_, i, total):
@@ -645,9 +646,9 @@ class CapaExplorerForm(idaapi.PluginForm):

            logger.error("Failed to load capa rules from %s (error: %s).", settings.user[CAPA_SETTINGS_RULE_PATH], e)
            logger.error(
                "Make sure your file directory contains properly "
                "formatted capa rules. You can download and extract the official rules from %s. "
                "Or, for more details, see the rules documentation here: %s",
                "Make sure your file directory contains properly "  # noqa: G003 [logging statement uses +]
                + "formatted capa rules. You can download and extract the official rules from %s. "
                + "Or, for more details, see the rules documentation here: %s",
                CAPA_OFFICIAL_RULESET_URL,
                CAPA_RULESET_DOC_URL,
            )
@@ -705,14 +706,15 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
|
||||
capa.ida.helpers.inform_user_ida_ui("Cached results were generated using different capas rules")
|
||||
logger.warning(
|
||||
"capa is showing you cached results from a previous analysis run. Your rules have changed since and you should reanalyze the program to see new results."
|
||||
"capa is showing you cached results from a previous analysis run.%s ",
|
||||
"Your rules have changed since and you should reanalyze the program to see new results.",
|
||||
)
|
||||
view_status_rules = "no rules matched for cache"
|
||||
|
||||
cached_results_time = self.resdoc_cache.meta.timestamp.strftime("%Y-%m-%d %H:%M:%S")
|
||||
new_view_status = f"capa rules: {view_status_rules}, cached results (created {cached_results_time})"
|
||||
except Exception as e:
|
||||
logger.error("Failed to load cached capa results (error: %s).", e, exc_info=True)
|
||||
logger.exception("Failed to load cached capa results (error: %s).", e)
|
||||
return False
|
||||
else:
|
||||
# load results from fresh anlaysis
|
||||
@@ -725,13 +727,11 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
update_wait_box(f"{text} ({self.process_count} of {self.process_total})")
|
||||
self.process_count += 1
|
||||
|
||||
update_wait_box("initializing feature extractor")
|
||||
|
||||
try:
|
||||
extractor = CapaExplorerFeatureExtractor()
|
||||
extractor.indicator.progress.connect(slot_progress_feature_extraction)
|
||||
self.feature_extractor = CapaExplorerFeatureExtractor()
|
||||
self.feature_extractor.indicator.progress.connect(slot_progress_feature_extraction)
|
||||
except Exception as e:
|
||||
logger.error("Failed to initialize feature extractor (error: %s).", e, exc_info=True)
|
||||
logger.exception("Failed to initialize feature extractor (error: %s)", e)
|
||||
return False
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
@@ -741,9 +741,9 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
update_wait_box("calculating analysis")
|
||||
|
||||
try:
|
||||
self.process_total += len(tuple(extractor.get_functions()))
|
||||
self.process_total += len(tuple(self.feature_extractor.get_functions()))
|
||||
except Exception as e:
|
||||
logger.error("Failed to calculate analysis (error: %s).", e, exc_info=True)
|
||||
logger.exception("Failed to calculate analysis (error: %s).", e)
|
||||
return False
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
@@ -767,15 +767,19 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
update_wait_box("extracting features")
|
||||
|
||||
try:
|
||||
meta = capa.ida.helpers.collect_metadata([settings.user[CAPA_SETTINGS_RULE_PATH]])
|
||||
capabilities, counts = capa.main.find_capabilities(ruleset, extractor, disable_progress=True)
|
||||
meta["analysis"].update(counts)
|
||||
meta["analysis"]["layout"] = capa.main.compute_layout(ruleset, extractor, capabilities)
|
||||
meta = capa.ida.helpers.collect_metadata([Path(settings.user[CAPA_SETTINGS_RULE_PATH])])
|
||||
capabilities, counts = capa.main.find_capabilities(
|
||||
ruleset, self.feature_extractor, disable_progress=True
|
||||
)
|
||||
|
||||
meta.analysis.feature_counts = counts["feature_counts"]
|
||||
meta.analysis.library_functions = counts["library_functions"]
|
||||
meta.analysis.layout = capa.main.compute_layout(ruleset, self.feature_extractor, capabilities)
|
||||
except UserCancelledError:
|
||||
logger.info("User cancelled analysis.")
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error("Failed to extract capabilities from database (error: %s)", e, exc_info=True)
|
||||
logger.exception("Failed to extract capabilities from database (error: %s)", e)
|
||||
return False
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
@@ -787,7 +791,8 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
try:
|
||||
# support binary files specifically for x86/AMD64 shellcode
|
||||
# warn user binary file is loaded but still allow capa to process it
|
||||
# TODO: check specific architecture of binary files based on how user configured IDA processors
|
||||
# TODO(mike-hunhoff): check specific architecture of binary files based on how user configured IDA processors
|
||||
# https://github.com/mandiant/capa/issues/1603
|
||||
if idaapi.get_file_type_name() == "Binary file":
|
||||
logger.warning("-" * 80)
|
||||
logger.warning(" Input file appears to be a binary file.")
|
||||
@@ -808,7 +813,7 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
if capa.main.has_file_limitation(ruleset, capabilities, is_standalone=False):
|
||||
capa.ida.helpers.inform_user_ida_ui("capa encountered file limitation warnings during analysis")
|
||||
except Exception as e:
|
||||
logger.error("Failed to check for file limitations (error: %s)", e, exc_info=True)
|
||||
logger.exception("Failed to check for file limitations (error: %s)", e)
|
||||
return False
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
@@ -822,7 +827,7 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
meta, ruleset, capabilities
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Failed to collect results (error: %s)", e, exc_info=True)
|
||||
logger.exception("Failed to collect results (error: %s)", e)
|
||||
return False
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
@@ -838,7 +843,7 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
capa.ida.helpers.save_rules_cache_id(ruleset_id)
|
||||
logger.info("Saved cached results to database")
|
||||
except Exception as e:
|
||||
logger.error("Failed to save results to database (error: %s)", e, exc_info=True)
|
||||
logger.exception("Failed to save results to database (error: %s)", e)
|
||||
return False
|
||||
user_settings = settings.user[CAPA_SETTINGS_RULE_PATH]
|
||||
count_source_rules = self.program_analysis_ruleset_cache.source_rule_count
|
||||
@@ -859,7 +864,7 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
|
||||
self.model_data.render_capa_doc(self.resdoc_cache, self.view_show_results_by_function.isChecked())
|
||||
except Exception as e:
|
||||
logger.error("Failed to render results (error: %s)", e, exc_info=True)
|
||||
logger.exception("Failed to render results (error: %s)", e)
|
||||
return False
|
||||
|
||||
self.set_view_status_label(new_view_status)
|
||||
@@ -911,7 +916,7 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
has_cache: bool = capa.ida.helpers.idb_contains_cached_results()
|
||||
except Exception as e:
|
||||
capa.ida.helpers.inform_user_ida_ui("Failed to check for cached results, reanalyzing program")
|
||||
logger.error("Failed to check for cached results (error: %s)", e, exc_info=True)
|
||||
logger.exception("Failed to check for cached results (error: %s)", e)
|
||||
return False
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
@@ -931,7 +936,7 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
] = capa.ida.helpers.load_and_verify_cached_results()
|
||||
except Exception as e:
|
||||
capa.ida.helpers.inform_user_ida_ui("Failed to verify cached results, reanalyzing program")
|
||||
logger.error("Failed to verify cached results (error: %s)", e, exc_info=True)
|
||||
logger.exception("Failed to verify cached results (error: %s)", e)
|
||||
return False
|
||||
|
||||
if results is None:
|
||||
@@ -944,9 +949,9 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
"Reanalyze program",
|
||||
"",
|
||||
ida_kernwin.ASKBTN_YES,
|
||||
f"This database contains capa results generated on "
|
||||
f"{results.meta.timestamp.strftime('%Y-%m-%d at %H:%M:%S')}.\n"
|
||||
f"Load existing data or analyze program again?",
|
||||
"This database contains capa results generated on "
|
||||
+ results.meta.timestamp.strftime("%Y-%m-%d at %H:%M:%S")
|
||||
+ ".\nLoad existing data or analyze program again?",
|
||||
)
|
||||
|
||||
if btn_id == ida_kernwin.ASKBTN_CANCEL:
|
||||
@@ -973,26 +978,21 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
# so we'll work with a local copy of the ruleset.
|
||||
ruleset = copy.deepcopy(self.rulegen_ruleset_cache)
|
||||
|
||||
# clear feature cache
|
||||
if self.rulegen_feature_cache is not None:
|
||||
self.rulegen_feature_cache = None
|
||||
|
||||
# clear cached function
|
||||
if self.rulegen_current_function is not None:
|
||||
self.rulegen_current_function = None
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
logger.info("User cancelled analysis.")
|
||||
return False
|
||||
|
||||
update_wait_box("Initializing feature extractor")
|
||||
|
||||
try:
|
||||
# must use extractor to get function, as capa analysis requires casted object
|
||||
extractor = CapaExplorerFeatureExtractor()
|
||||
except Exception as e:
|
||||
logger.error("Failed to initialize feature extractor (error: %s)", e, exc_info=True)
|
||||
return False
|
||||
# these are init once objects, create on tab change
|
||||
if self.rulegen_feature_cache is None or self.rulegen_feature_extractor is None:
|
||||
try:
|
||||
update_wait_box("performing one-time file analysis")
|
||||
self.rulegen_feature_extractor = CapaExplorerFeatureExtractor()
|
||||
self.rulegen_feature_cache = CapaRuleGenFeatureCache(self.rulegen_feature_extractor)
|
||||
except Exception as e:
|
||||
logger.exception("Failed to initialize feature extractor (error: %s)", e)
|
||||
return False
|
||||
else:
|
||||
logger.info("Reusing prior rulegen cache")
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
logger.info("User cancelled analysis.")
|
||||
@@ -1004,24 +1004,9 @@ class CapaExplorerForm(idaapi.PluginForm):
|
||||
try:
|
||||
f = idaapi.get_func(idaapi.get_screen_ea())
|
||||
if f is not None:
|
||||
self.rulegen_current_function = extractor.get_function(f.start_ea)
|
||||
self.rulegen_current_function = self.rulegen_feature_extractor.get_function(f.start_ea)
|
||||
except Exception as e:
|
||||
logger.error("Failed to resolve function at address 0x%X (error: %s)", f.start_ea, e, exc_info=True)
|
||||
return False
|
||||
|
||||
if ida_kernwin.user_cancelled():
|
||||
logger.info("User cancelled analysis.")
|
||||
return False
|
||||
|
||||
# extract features
|
||||
try:
|
||||
fh_list: List[FunctionHandle] = []
|
||||
if self.rulegen_current_function is not None:
|
||||
fh_list.append(self.rulegen_current_function)
|
||||
|
||||
self.rulegen_feature_cache = CapaRuleGenFeatureCache(fh_list, extractor)
except Exception as e:
- logger.error("Failed to extract features (error: %s)", e, exc_info=True)
+ logger.exception("Failed to resolve function at address 0x%X (error: %s)", f.start_ea, e)
return False

if ida_kernwin.user_cancelled():
@@ -1047,7 +1032,7 @@ class CapaExplorerForm(idaapi.PluginForm):
for addr, _ in result:
all_function_features[capa.features.common.MatchedRule(name)].add(addr)
except Exception as e:
- logger.error("Failed to generate rule matches (error: %s)", e, exc_info=True)
+ logger.exception("Failed to generate rule matches (error: %s)", e)
return False

if ida_kernwin.user_cancelled():
@@ -1068,7 +1053,7 @@ class CapaExplorerForm(idaapi.PluginForm):
for addr, _ in result:
all_file_features[capa.features.common.MatchedRule(name)].add(addr)
except Exception as e:
- logger.error("Failed to generate file rule matches (error: %s)", e, exc_info=True)
+ logger.exception("Failed to generate file rule matches (error: %s)", e)
return False

if ida_kernwin.user_cancelled():
@@ -1091,7 +1076,7 @@ class CapaExplorerForm(idaapi.PluginForm):
f"capa rules: {settings.user[CAPA_SETTINGS_RULE_PATH]} ({settings.user[CAPA_SETTINGS_RULE_PATH]} rules)"
)
except Exception as e:
- logger.error("Failed to render views (error: %s)", e, exc_info=True)
+ logger.exception("Failed to render views (error: %s)", e)
return False

return True
@@ -1176,7 +1161,7 @@ class CapaExplorerForm(idaapi.PluginForm):
assert self.rulegen_ruleset_cache is not None
assert self.rulegen_feature_cache is not None
except Exception as e:
- logger.error("Failed to access cache (error: %s)", e, exc_info=True)
+ logger.exception("Failed to access cache (error: %s)", e)
self.set_rulegen_status("Error: see console output for more details")
return

@@ -1220,11 +1205,11 @@ class CapaExplorerForm(idaapi.PluginForm):
self.set_rulegen_status(f"Failed to create function rule matches from rule set ({e})")
return

- if rule.scope == capa.rules.Scope.FUNCTION and rule.name in func_matches.keys():
+ if rule.scope == capa.rules.Scope.FUNCTION and rule.name in func_matches:
is_match = True
- elif rule.scope == capa.rules.Scope.BASIC_BLOCK and rule.name in bb_matches.keys():
+ elif rule.scope == capa.rules.Scope.BASIC_BLOCK and rule.name in bb_matches:
is_match = True
- elif rule.scope == capa.rules.Scope.INSTRUCTION and rule.name in insn_matches.keys():
+ elif rule.scope == capa.rules.Scope.INSTRUCTION and rule.name in insn_matches:
is_match = True
elif rule.scope == capa.rules.Scope.FILE:
try:
@@ -1232,7 +1217,7 @@ class CapaExplorerForm(idaapi.PluginForm):
except Exception as e:
self.set_rulegen_status(f"Failed to create file rule matches from rule set ({e})")
return
- if rule.name in file_matches.keys():
+ if rule.name in file_matches:
is_match = True
else:
is_match = False
@@ -1259,7 +1244,6 @@ class CapaExplorerForm(idaapi.PluginForm):
elif index == 1:
self.set_view_status_label(self.view_status_label_rulegen_cache)
- self.view_status_label_analysis_cache = status_prev

self.view_reset_button.setText("Clear")

def slot_rulegen_editor_update(self):
@@ -1321,10 +1305,10 @@ class CapaExplorerForm(idaapi.PluginForm):
idaapi.info("No program analysis to save.")
return

- s = self.resdoc_cache.json().encode("utf-8")
+ s = self.resdoc_cache.model_dump_json().encode("utf-8")

- path = self.ask_user_capa_json_file()
- if not path:
+ path = Path(self.ask_user_capa_json_file())
+ if not path.exists():
return

write_file(path, s)
@@ -1336,8 +1320,8 @@ class CapaExplorerForm(idaapi.PluginForm):
idaapi.info("No rule to save.")
return

- path = self.ask_user_capa_rule_file()
- if not path:
+ path = Path(self.ask_user_capa_rule_file())
+ if not path.exists():
return

write_file(path, s)

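Note on the `.json()` to `.model_dump_json()` hunk above: this is the pydantic v2 rename for model serialization. A minimal sketch of the same call pattern against a stand-in model (the fields here are illustrative, not capa's real result-document schema):

    from pydantic import BaseModel

    class ResultSketch(BaseModel):
        # hypothetical stand-in for the cached result document
        version: str
        match_count: int

    doc = ResultSketch(version="6.0.0", match_count=3)

    # pydantic v1 spelling (removed above):  doc.json().encode("utf-8")
    # pydantic v2 spelling (added above):
    s = doc.model_dump_json().encode("utf-8")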
@@ -1,4 +1,4 @@
- # Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+ # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -30,7 +30,7 @@ class CapaExplorerIdaHooks(idaapi.UI_Hooks):

@retval must be 0
"""
- self.process_action_handle = self.process_action_hooks.get(name, None)
+ self.process_action_handle = self.process_action_hooks.get(name)

if self.process_action_handle:
self.process_action_handle(self.process_action_meta)

@@ -1,3 +1,10 @@
+ # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at: [package root]/LICENSE.txt
+ # Unless required by applicable law or agreed to in writing, software distributed under the License
+ # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and limitations under the License.
import base64

# this is just `capa/.github/icon.png`.

@@ -1,4 +1,4 @@
- # Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+ # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -130,8 +130,7 @@ class CapaExplorerDataItem:

def children(self) -> Iterator["CapaExplorerDataItem"]:
"""yield children"""
- for child in self._children:
- yield child
+ yield from self._children

def removeChildren(self):
"""remove children"""

@@ -1,4 +1,4 @@
- # Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+ # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -372,7 +372,8 @@ class CapaExplorerDataModel(QtCore.QAbstractItemModel):
display += f" ({statement.description})"
return CapaExplorerDefaultItem(parent, display)
elif isinstance(statement, rd.CompoundStatement) and statement.type == rd.CompoundStatementType.NOT:
- # TODO: do we display 'not'
+ # TODO(mike-hunhoff): verify that we can display NOT statements
+ # https://github.com/mandiant/capa/issues/1602
pass
elif isinstance(statement, rd.SomeStatement):
display = f"{statement.count} or more"
@@ -421,12 +422,13 @@ class CapaExplorerDataModel(QtCore.QAbstractItemModel):
@param doc: result doc
"""
if not match.success:
- # TODO: display failed branches at some point? Help with debugging rules?
+ # TODO(mike-hunhoff): display failed branches at some point? Help with debugging rules?
+ # https://github.com/mandiant/capa/issues/1601
return

# optional statement with no successful children is empty
if isinstance(match.node, rd.StatementNode) and match.node.statement.type == rd.CompoundStatementType.OPTIONAL:
- if not any(map(lambda m: m.success, match.children)):
+ if not any(m.success for m in match.children):
return

if isinstance(match.node, rd.StatementNode):
@@ -626,7 +628,7 @@ class CapaExplorerDataModel(QtCore.QAbstractItemModel):
matched_rule_source = ""

# check if match is a matched rule
- matched_rule = doc.rules.get(feature.match, None)
+ matched_rule = doc.rules.get(feature.match)
if matched_rule is not None:
matched_rule_source = matched_rule.source


@@ -1,4 +1,4 @@
- # Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+ # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

@@ -1,4 +1,4 @@
- # Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+ # Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,6 +6,7 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import re
+ from typing import Dict, Optional
from collections import Counter

import idc
@@ -63,7 +64,7 @@ def parse_yaml_line(feature):
feature, _, comment = feature.partition("#")
feature, _, description = feature.partition("=")

- return map(lambda o: o.strip(), (feature, description, comment))
+ return (o.strip() for o in (feature, description, comment))


def parse_node_for_feature(feature, description, comment, depth):
@@ -498,12 +499,13 @@ class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):
rule_text += "\n features:\n"

for o in iterate_tree(self):
- feature, description, comment = map(lambda o: o.strip(), tuple(o.text(i) for i in range(3)))
+ feature, description, comment = (o.strip() for o in tuple(o.text(i) for i in range(3)))
rule_text += parse_node_for_feature(feature, description, comment, calc_item_depth(o))

- # FIXME we avoid circular update by disabling signals when updating
+ # TODO(mike-hunhoff): we avoid circular update by disabling signals when updating
# the preview. Preferably we would refactor the code to avoid this
- # in the first place
+ # in the first place.
+ # https://github.com/mandiant/capa/issues/1600
self.preview.blockSignals(True)
self.preview.setPlainText(rule_text)
self.preview.blockSignals(False)
@@ -646,7 +648,7 @@ class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):
counted = list(zip(Counter(features).keys(), Counter(features).values()))

# single features
- for k, v in filter(lambda t: t[1] == 1, counted):
+ for k, _ in filter(lambda t: t[1] == 1, counted):
if isinstance(k, (capa.features.common.String,)):
value = f'"{capa.features.common.escape_string(k.get_value_str())}"'
else:
@@ -682,10 +684,12 @@ class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):

# we don't add a new node for description; either set description column of parent's last child
# or the parent itself
- if parent.childCount():
- parent.child(parent.childCount() - 1).setText(1, feature.lstrip("description:").lstrip())
- else:
- parent.setText(1, feature.lstrip("description:").lstrip())
+ if feature.startswith("description:"):
+ description = feature[len("description:") :].lstrip()
+ if parent.childCount():
+ parent.child(parent.childCount() - 1).setText(1, description)
+ else:
+ parent.setText(1, description)
return None
elif feature.startswith("- description:"):
if not parent:
@@ -693,7 +697,8 @@ class CapaExplorerRulegenEditor(QtWidgets.QTreeWidget):
return None

# we don't add a new node for description; set the description column of the parent instead
- parent.setText(1, feature.lstrip("- description:").lstrip())
+ description = feature[len("- description:") :].lstrip()
+ parent.setText(1, description)
return None

node = QtWidgets.QTreeWidgetItem(parent)
@@ -1010,7 +1015,7 @@ class CapaExplorerRulegenFeatures(QtWidgets.QTreeWidget):

return o

- def load_features(self, file_features, func_features={}):
+ def load_features(self, file_features, func_features: Optional[Dict] = None):
""" """
self.parse_features_for_tree(self.new_parent_node(self, ("File Scope",)), file_features)
if func_features:
@@ -1219,8 +1224,7 @@ class CapaExplorerQtreeView(QtWidgets.QTreeView):
yield self.new_action(*action)

# add default actions
- for action in self.load_default_context_menu_actions(data):
- yield action
+ yield from self.load_default_context_menu_actions(data)

def load_default_context_menu(self, pos, item, model_index):
"""create default custom context menu

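The `load_features` hunk above also fixes a classic Python pitfall: a mutable default argument (`func_features={}`) is created once at definition time and shared by every call. A small self-contained sketch of the failure mode and the `Optional[...] = None` idiom that replaces it:

    from typing import Dict, Optional

    def load_bad(features: Dict = {}):
        # the same dict object is reused across calls
        features["calls"] = features.get("calls", 0) + 1
        return features

    def load_good(features: Optional[Dict] = None):
        if features is None:
            features = {}  # a fresh dict for each call
        features["calls"] = features.get("calls", 0) + 1
        return features

    assert load_bad() == {"calls": 1}
    assert load_bad() == {"calls": 2}   # state leaked between calls
    assert load_good() == {"calls": 1}
    assert load_good() == {"calls": 1}  # no leak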
capa/main.py (486 changed lines)
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
"""
- Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
+ Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,38 +8,43 @@ Unless required by applicable law or agreed to in writing, software distributed
is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
"""
+ import io
import os
import sys
+ import time
import hashlib
import logging
import os.path
import argparse
import datetime
import textwrap
import itertools
import contextlib
import collections
- from typing import Any, Dict, List, Tuple, Callable
+ from typing import Any, Dict, List, Tuple, Callable, Optional
+ from pathlib import Path

import halo
import tqdm
import colorama
+ import tqdm.contrib.logging
from pefile import PEFormatError
from elftools.common.exceptions import ELFError

import capa.perf
import capa.rules
import capa.engine
import capa.helpers
import capa.version
import capa.render.json
import capa.rules.cache
import capa.render.default
import capa.render.verbose
import capa.features.common
- import capa.features.freeze
+ import capa.features.freeze as frz
import capa.render.vverbose
import capa.features.extractors
- import capa.render.result_document
+ import capa.render.result_document as rdoc
import capa.features.extractors.common
import capa.features.extractors.pefile
import capa.features.extractors.dnfile_
@@ -53,6 +58,7 @@ from capa.helpers import (
get_file_taste,
get_auto_format,
log_unsupported_os_error,
+ redirecting_print_to_tqdm,
log_unsupported_arch_error,
log_unsupported_format_error,
)
@@ -79,6 +85,7 @@ SIGNATURES_PATH_DEFAULT_STRING = "(embedded signatures)"
BACKEND_VIV = "vivisect"
BACKEND_DOTNET = "dotnet"
BACKEND_BINJA = "binja"
+ BACKEND_PEFILE = "pefile"

E_MISSING_RULES = 10
E_MISSING_FILE = 11
@@ -90,6 +97,7 @@ E_INVALID_FILE_TYPE = 16
E_INVALID_FILE_ARCH = 17
E_INVALID_FILE_OS = 18
E_UNSUPPORTED_IDA_VERSION = 19
+ E_UNSUPPORTED_GHIDRA_VERSION = 20

logger = logging.getLogger("capa")

@@ -243,45 +251,66 @@ def find_capabilities(ruleset: RuleSet, extractor: FeatureExtractor, disable_pro
all_bb_matches = collections.defaultdict(list) # type: MatchResults
all_insn_matches = collections.defaultdict(list) # type: MatchResults

- meta = {
- "feature_counts": {
- "file": 0,
- "functions": {},
- },
- "library_functions": {},
- } # type: Dict[str, Any]
+ feature_counts = rdoc.FeatureCounts(file=0, functions=())
+ library_functions: Tuple[rdoc.LibraryFunction, ...] = ()

- pbar = tqdm.tqdm
- if disable_progress:
- # do not use tqdm to avoid unnecessary side effects when caller intends
- # to disable progress completely
- pbar = lambda s, *args, **kwargs: s
+ with redirecting_print_to_tqdm(disable_progress):
+ with tqdm.contrib.logging.logging_redirect_tqdm():
+ pbar = tqdm.tqdm
+ if capa.helpers.is_runtime_ghidra():
+ # Ghidrathon interpreter cannot properly handle
+ # the TMonitor thread that is created via a monitor_interval
+ # > 0
+ pbar.monitor_interval = 0
+ if disable_progress:
+ # do not use tqdm to avoid unnecessary side effects when caller intends
+ # to disable progress completely
+ def pbar(s, *args, **kwargs):
+ return s

- functions = list(extractor.get_functions())
- n_funcs = len(functions)
+ functions = list(extractor.get_functions())
+ n_funcs = len(functions)

- pb = pbar(functions, desc="matching", unit=" functions", postfix="skipped 0 library functions")
- for f in pb:
- if extractor.is_library_function(f.address):
- function_name = extractor.get_function_name(f.address)
- logger.debug("skipping library function 0x%x (%s)", f.address, function_name)
- meta["library_functions"][f.address] = function_name
- n_libs = len(meta["library_functions"])
- percentage = round(100 * (n_libs / n_funcs))
- if isinstance(pb, tqdm.tqdm):
- pb.set_postfix_str(f"skipped {n_libs} library functions ({percentage}%)")
- continue
+ pb = pbar(functions, desc="matching", unit=" functions", postfix="skipped 0 library functions", leave=False)
+ for f in pb:
+ t0 = time.time()
+ if extractor.is_library_function(f.address):
+ function_name = extractor.get_function_name(f.address)
+ logger.debug("skipping library function 0x%x (%s)", f.address, function_name)
+ library_functions += (
+ rdoc.LibraryFunction(address=frz.Address.from_capa(f.address), name=function_name),
+ )
+ n_libs = len(library_functions)
+ percentage = round(100 * (n_libs / n_funcs))
+ if isinstance(pb, tqdm.tqdm):
+ pb.set_postfix_str(f"skipped {n_libs} library functions ({percentage}%)")
+ continue

- function_matches, bb_matches, insn_matches, feature_count = find_code_capabilities(ruleset, extractor, f)
- meta["feature_counts"]["functions"][f.address] = feature_count
- logger.debug("analyzed function 0x%x and extracted %d features", f.address, feature_count)
+ function_matches, bb_matches, insn_matches, feature_count = find_code_capabilities(
+ ruleset, extractor, f
+ )
+ feature_counts.functions += (
+ rdoc.FunctionFeatureCount(address=frz.Address.from_capa(f.address), count=feature_count),
+ )
+ t1 = time.time()

- for rule_name, res in function_matches.items():
- all_function_matches[rule_name].extend(res)
- for rule_name, res in bb_matches.items():
- all_bb_matches[rule_name].extend(res)
- for rule_name, res in insn_matches.items():
- all_insn_matches[rule_name].extend(res)
+ match_count = sum(len(res) for res in function_matches.values())
+ match_count += sum(len(res) for res in bb_matches.values())
+ match_count += sum(len(res) for res in insn_matches.values())
+ logger.debug(
+ "analyzed function 0x%x and extracted %d features, %d matches in %0.02fs",
+ f.address,
+ feature_count,
+ match_count,
+ t1 - t0,
+ )

+ for rule_name, res in function_matches.items():
+ all_function_matches[rule_name].extend(res)
+ for rule_name, res in bb_matches.items():
+ all_bb_matches[rule_name].extend(res)
+ for rule_name, res in insn_matches.items():
+ all_insn_matches[rule_name].extend(res)

# collection of features that captures the rule matches within function, BB, and instruction scopes.
# mapping from feature (matched rule) to set of addresses at which it matched.
@@ -289,16 +318,15 @@ def find_capabilities(ruleset: RuleSet, extractor: FeatureExtractor, disable_pro
for rule_name, results in itertools.chain(
all_function_matches.items(), all_bb_matches.items(), all_insn_matches.items()
):
- locations = set(map(lambda p: p[0], results))
+ locations = {p[0] for p in results}
rule = ruleset[rule_name]
capa.engine.index_rule_matches(function_and_lower_features, rule, locations)

all_file_matches, feature_count = find_file_capabilities(ruleset, extractor, function_and_lower_features)
- meta["feature_counts"]["file"] = feature_count
+ feature_counts.file = feature_count

- matches = {
- rule_name: results
- for rule_name, results in itertools.chain(
+ matches = dict(
+ itertools.chain(
# each rule exists in exactly one scope,
# so there won't be any overlap among these following MatchResults,
# and we can merge the dictionaries naively.
@@ -307,17 +335,20 @@ def find_capabilities(ruleset: RuleSet, extractor: FeatureExtractor, disable_pro
all_function_matches.items(),
all_file_matches.items(),
)
)

+ meta = {
+ "feature_counts": feature_counts,
+ "library_functions": library_functions,
+ }

return matches, meta


- # TODO move all to helpers?
- def has_rule_with_namespace(rules, capabilities, rule_cat):
- for rule_name in capabilities.keys():
- if rules.rules[rule_name].meta.get("namespace", "").startswith(rule_cat):
- return True
- return False
+ def has_rule_with_namespace(rules: RuleSet, capabilities: MatchResults, namespace: str) -> bool:
+ return any(
+ rules.rules[rule_name].meta.get("namespace", "").startswith(namespace) for rule_name in capabilities.keys()
+ )


def is_internal_rule(rule: Rule) -> bool:
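The `find_capabilities` hunks above trade the ad-hoc `meta` dict for typed result-document models and accumulate library functions and feature counts into immutable tuples. A minimal sketch of that accumulation pattern, using a stand-in dataclass rather than capa's real `rdoc` models:

    from dataclasses import dataclass
    from typing import Tuple

    @dataclass
    class FunctionFeatureCount:  # stand-in for rdoc.FunctionFeatureCount
        address: int
        count: int

    functions: Tuple[FunctionFeatureCount, ...] = ()

    # tuples are immutable, so each "append" re-binds the name to a new,
    # longer tuple, mirroring `feature_counts.functions += (...,)` above:
    for addr, n in [(0x401000, 12), (0x401080, 3)]:
        functions += (FunctionFeatureCount(address=addr, count=n),)

    assert len(functions) == 2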
@@ -350,26 +381,23 @@ def has_file_limitation(rules: RuleSet, capabilities: MatchResults, is_standalon
return False


- def is_supported_format(sample: str) -> bool:
+ def is_supported_format(sample: Path) -> bool:
"""
Return if this is a supported file based on magic header values
"""
- with open(sample, "rb") as f:
- taste = f.read(0x100)
+ taste = sample.open("rb").read(0x100)

return len(list(capa.features.extractors.common.extract_format(taste))) == 1


- def is_supported_arch(sample: str) -> bool:
- with open(sample, "rb") as f:
- buf = f.read()
+ def is_supported_arch(sample: Path) -> bool:
+ buf = sample.read_bytes()

return len(list(capa.features.extractors.common.extract_arch(buf))) == 1


- def get_arch(sample: str) -> str:
- with open(sample, "rb") as f:
- buf = f.read()
+ def get_arch(sample: Path) -> str:
+ buf = sample.read_bytes()

for feature, _ in capa.features.extractors.common.extract_arch(buf):
assert isinstance(feature.value, str)
@@ -378,16 +406,14 @@ def get_arch(sample: str) -> str:
return "unknown"


- def is_supported_os(sample: str) -> bool:
- with open(sample, "rb") as f:
- buf = f.read()
+ def is_supported_os(sample: Path) -> bool:
+ buf = sample.read_bytes()

return len(list(capa.features.extractors.common.extract_os(buf))) == 1


- def get_os(sample: str) -> str:
- with open(sample, "rb") as f:
- buf = f.read()
+ def get_os(sample: Path) -> str:
+ buf = sample.read_bytes()

for feature, _ in capa.features.extractors.common.extract_os(buf):
assert isinstance(feature.value, str)
@@ -415,7 +441,7 @@ def is_running_standalone() -> bool:
return hasattr(sys, "frozen") and hasattr(sys, "_MEIPASS")


- def get_default_root() -> str:
+ def get_default_root() -> Path:
"""
get the file system path to the default resources directory.
under PyInstaller, this comes from _MEIPASS.
@@ -425,30 +451,28 @@ def get_default_root() -> str:
# pylance/mypy don't like `sys._MEIPASS` because this isn't standard.
# its injected by pyinstaller.
# so we'll fetch this attribute dynamically.
- return getattr(sys, "_MEIPASS")
+ assert hasattr(sys, "_MEIPASS")
+ return Path(sys._MEIPASS)
else:
- return os.path.join(os.path.dirname(__file__), "..")
+ return Path(__file__).resolve().parent.parent


- def get_default_signatures() -> List[str]:
+ def get_default_signatures() -> List[Path]:
"""
compute a list of file system paths to the default FLIRT signatures.
"""
- sigs_path = os.path.join(get_default_root(), "sigs")
+ sigs_path = get_default_root() / "sigs"
logger.debug("signatures path: %s", sigs_path)

ret = []
- for root, _, files in os.walk(sigs_path):
- for file in files:
- if not (file.endswith(".pat") or file.endswith(".pat.gz") or file.endswith(".sig")):
- continue
-
- ret.append(os.path.join(root, file))
+ for file in sigs_path.rglob("*"):
+ if file.is_file() and file.suffix.lower() in (".pat", ".pat.gz", ".sig"):
+ ret.append(file)

return ret


- def get_workspace(path, format_, sigpaths):
+ def get_workspace(path: Path, format_: str, sigpaths: List[Path]):
"""
load the program at the given path into a vivisect workspace using the given format.
also apply the given FLIRT signatures.
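One subtlety worth noting in the `get_default_signatures` hunk above: `pathlib.Path.suffix` holds only the final extension, so the `.pat.gz` entry in the new suffix check can never match it, while the removed `str.endswith` test did handle the double extension. A short illustration of the difference (standard-library behavior, independent of capa):

    from pathlib import Path

    p = Path("sigs/foo.pat.gz")
    assert p.suffix == ".gz"                              # final component only
    assert "".join(p.suffixes) == ".pat.gz"               # all components
    assert p.name.endswith((".pat", ".pat.gz", ".sig"))   # string test matches double extensions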
@@ -469,24 +493,23 @@ def get_workspace(path, format_, sigpaths):
import viv_utils.flirt

logger.debug("generating vivisect workspace for: %s", path)
- # TODO should not be auto at this point, anymore
if format_ == FORMAT_AUTO:
if not is_supported_format(path):
raise UnsupportedFormatError()

# don't analyze, so that we can add our Flirt function analyzer first.
- vw = viv_utils.getWorkspace(path, analyze=False, should_save=False)
+ vw = viv_utils.getWorkspace(str(path), analyze=False, should_save=False)
elif format_ in {FORMAT_PE, FORMAT_ELF}:
- vw = viv_utils.getWorkspace(path, analyze=False, should_save=False)
+ vw = viv_utils.getWorkspace(str(path), analyze=False, should_save=False)
elif format_ == FORMAT_SC32:
# these are not analyzed nor saved.
- vw = viv_utils.getShellcodeWorkspaceFromFile(path, arch="i386", analyze=False)
+ vw = viv_utils.getShellcodeWorkspaceFromFile(str(path), arch="i386", analyze=False)
elif format_ == FORMAT_SC64:
- vw = viv_utils.getShellcodeWorkspaceFromFile(path, arch="amd64", analyze=False)
+ vw = viv_utils.getShellcodeWorkspaceFromFile(str(path), arch="amd64", analyze=False)
else:
raise ValueError("unexpected format: " + format_)

- viv_utils.flirt.register_flirt_signature_analyzers(vw, sigpaths)
+ viv_utils.flirt.register_flirt_signature_analyzers(vw, [str(s) for s in sigpaths])

vw.analyze()

@@ -494,13 +517,12 @@ def get_workspace(path, format_, sigpaths):
return vw


- # TODO get_extractors -> List[FeatureExtractor]?
def get_extractor(
- path: str,
+ path: Path,
format_: str,
os_: str,
backend: str,
- sigpaths: List[str],
+ sigpaths: List[Path],
should_save_workspace=False,
disable_progress=False,
) -> FeatureExtractor:
@@ -532,28 +554,33 @@ def get_extractor(
# We need to fist find the binja API installation path and add it into sys.path
if is_running_standalone():
bn_api = find_binja_path()
- if os.path.exists(bn_api):
- sys.path.append(bn_api)
+ if bn_api.exists():
+ sys.path.append(str(bn_api))

try:
- from binaryninja import BinaryView, BinaryViewType
+ import binaryninja
+ from binaryninja import BinaryView
except ImportError:
raise RuntimeError(
"Cannot import binaryninja module. Please install the Binary Ninja Python API first: "
- "https://docs.binary.ninja/dev/batch.html#install-the-api)."
+ + "https://docs.binary.ninja/dev/batch.html#install-the-api)."
)

import capa.features.extractors.binja.extractor

with halo.Halo(text="analyzing program", spinner="simpleDots", stream=sys.stderr, enabled=not disable_progress):
- bv: BinaryView = BinaryViewType.get_view_of_file(path)
+ bv: BinaryView = binaryninja.load(str(path))
if bv is None:
raise RuntimeError(f"Binary Ninja cannot open file {path}")

return capa.features.extractors.binja.extractor.BinjaFeatureExtractor(bv)

- # default to use vivisect backend
- else:
+ elif backend == BACKEND_PEFILE:
+ import capa.features.extractors.pefile
+
+ return capa.features.extractors.pefile.PefileFeatureExtractor(path)
+
+ elif backend == BACKEND_VIV:
import capa.features.extractors.viv.extractor

with halo.Halo(text="analyzing program", spinner="simpleDots", stream=sys.stderr, enabled=not disable_progress):
@@ -571,9 +598,12 @@ def get_extractor(

return capa.features.extractors.viv.extractor.VivisectFeatureExtractor(vw, path, os_)

+ else:
+ raise ValueError("unexpected backend: " + backend)

- def get_file_extractors(sample: str, format_: str) -> List[FeatureExtractor]:
- file_extractors: List[FeatureExtractor] = list()

+ def get_file_extractors(sample: Path, format_: str) -> List[FeatureExtractor]:
+ file_extractors: List[FeatureExtractor] = []

if format_ == FORMAT_PE:
file_extractors.append(capa.features.extractors.pefile.PefileFeatureExtractor(sample))
@@ -588,7 +618,7 @@ def get_file_extractors(sample: str, format_: str) -> List[FeatureExtractor]:
return file_extractors


- def is_nursery_rule_path(path: str) -> bool:
+ def is_nursery_rule_path(path: Path) -> bool:
"""
The nursery is a spot for rules that have not yet been fully polished.
For example, they may not have references to public example of a technique.
@@ -598,21 +628,21 @@ def is_nursery_rule_path(path: str) -> bool:
When nursery rules are loaded, their metadata section should be updated with:
`nursery=True`.
"""
- return "nursery" in path
+ return "nursery" in path.parts


- def collect_rule_file_paths(rule_paths: List[str]) -> List[str]:
+ def collect_rule_file_paths(rule_paths: List[Path]) -> List[Path]:
"""
collect all rule file paths, including those in subdirectories.
"""
rule_file_paths = []
for rule_path in rule_paths:
- if not os.path.exists(rule_path):
+ if not rule_path.exists():
raise IOError(f"rule path {rule_path} does not exist or cannot be accessed")

- if os.path.isfile(rule_path):
+ if rule_path.is_file():
rule_file_paths.append(rule_path)
- elif os.path.isdir(rule_path):
+ elif rule_path.is_dir():
logger.debug("reading rules from directory %s", rule_path)
for root, _, files in os.walk(rule_path):
if ".git" in root:
@@ -629,14 +659,12 @@ def collect_rule_file_paths(rule_paths: List[str]) -> List[str]:
# other things maybe are rules, but are mis-named.
logger.warning("skipping non-.yml file: %s", file)
continue
- rule_path = os.path.join(root, file)
- rule_file_paths.append(rule_path)
-
+ rule_file_paths.append(Path(root) / file)
return rule_file_paths


# TypeAlias. note: using `foo: TypeAlias = bar` is Python 3.10+
- RulePath = str
+ RulePath = Path


def on_load_rule_default(_path: RulePath, i: int, _total: int) -> None:
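The hunks in this file apply the same pathlib migration throughout. For reference, the `os.path` to `pathlib.Path` correspondences used above (standard-library equivalences, shown on a hypothetical rules path):

    from pathlib import Path

    p = Path("rules") / "nursery" / "foo.yml"     # os.path.join("rules", "nursery", "foo.yml")
    p.exists()                                    # os.path.exists(p)
    p.is_file() or p.is_dir()                     # os.path.isfile(p) / os.path.isdir(p)
    data = p.read_bytes() if p.exists() else b""  # open(p, "rb").read()
    resolved = p.resolve()                        # os.path.abspath(os.path.normpath(p))
    assert "nursery" in p.parts                   # per-component test, stricter than a substring match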
@@ -656,17 +684,13 @@ def get_rules(
"""
if cache_dir is None:
cache_dir = capa.rules.cache.get_default_cache_directory()

# rule_paths may contain directory paths,
# so search for file paths recursively.
rule_file_paths = collect_rule_file_paths(rule_paths)

# this list is parallel to `rule_file_paths`:
# rule_file_paths[i] corresponds to rule_contents[i].
- rule_contents = []
- for file_path in rule_file_paths:
- with open(file_path, "rb") as f:
- rule_contents.append(f.read())
+ rule_contents = [file_path.read_bytes() for file_path in rule_file_paths]

ruleset = capa.rules.cache.load_cached_ruleset(cache_dir, rule_contents)
if ruleset is not None:
@@ -683,9 +707,8 @@ def get_rules(
except capa.rules.InvalidRule:
raise
else:
- rule.meta["capa/path"] = path
- if is_nursery_rule_path(path):
- rule.meta["capa/nursery"] = True
+ rule.meta["capa/path"] = path.as_posix()
+ rule.meta["capa/nursery"] = is_nursery_rule_path(path)

rules.append(rule)
logger.debug("loaded rule: '%s' with scope: %s", rule.name, rule.scope)
@@ -697,27 +720,25 @@ def get_rules(
return ruleset


- def get_signatures(sigs_path):
- if not os.path.exists(sigs_path):
+ def get_signatures(sigs_path: Path) -> List[Path]:
+ if not sigs_path.exists():
raise IOError(f"signatures path {sigs_path} does not exist or cannot be accessed")

- paths = []
- if os.path.isfile(sigs_path):
+ paths: List[Path] = []
+ if sigs_path.is_file():
paths.append(sigs_path)
- elif os.path.isdir(sigs_path):
- logger.debug("reading signatures from directory %s", os.path.abspath(os.path.normpath(sigs_path)))
- for root, _, files in os.walk(sigs_path):
- for file in files:
- if file.endswith((".pat", ".pat.gz", ".sig")):
- sig_path = os.path.join(root, file)
- paths.append(sig_path)
+ elif sigs_path.is_dir():
+ logger.debug("reading signatures from directory %s", sigs_path.resolve())
+ for file in sigs_path.rglob("*"):
+ if file.is_file() and file.suffix.lower() in (".pat", ".pat.gz", ".sig"):
+ paths.append(file)

- # nicely normalize and format path so that debugging messages are clearer
- paths = [os.path.abspath(os.path.normpath(path)) for path in paths]
+ # Convert paths to their absolute and normalized forms
+ paths = [path.resolve().absolute() for path in paths]

# load signatures in deterministic order: the alphabetic sorting of filename.
# this means that `0_sigs.pat` loads before `1_sigs.pat`.
- paths = sorted(paths, key=os.path.basename)
+ paths = sorted(paths, key=lambda path: path.name)

for path in paths:
logger.debug("found signature file: %s", path)
@@ -727,58 +748,58 @@ def get_signatures(sigs_path):

def collect_metadata(
argv: List[str],
- sample_path: str,
+ sample_path: Path,
format_: str,
os_: str,
- rules_path: List[str],
+ rules_path: List[Path],
extractor: capa.features.extractors.base_extractor.FeatureExtractor,
- ):
+ ) -> rdoc.Metadata:
md5 = hashlib.md5()
sha1 = hashlib.sha1()
sha256 = hashlib.sha256()

- with open(sample_path, "rb") as f:
- buf = f.read()
+ buf = sample_path.read_bytes()

md5.update(buf)
sha1.update(buf)
sha256.update(buf)

- if rules_path != [RULES_PATH_DEFAULT_STRING]:
- rules_path = [os.path.abspath(os.path.normpath(r)) for r in rules_path]

+ rules = tuple(r.resolve().absolute().as_posix() for r in rules_path)
format_ = get_format(sample_path) if format_ == FORMAT_AUTO else format_
arch = get_arch(sample_path)
os_ = get_os(sample_path) if os_ == OS_AUTO else os_

- return {
- "timestamp": datetime.datetime.now().isoformat(),
- "version": capa.version.__version__,
- "argv": argv,
- "sample": {
- "md5": md5.hexdigest(),
- "sha1": sha1.hexdigest(),
- "sha256": sha256.hexdigest(),
- "path": os.path.normpath(sample_path),
- },
- "analysis": {
- "format": format_,
- "arch": arch,
- "os": os_,
- "extractor": extractor.__class__.__name__,
- "rules": rules_path,
- "base_address": extractor.get_base_address(),
- "layout": {
+ return rdoc.Metadata(
+ timestamp=datetime.datetime.now(),
+ version=capa.version.__version__,
+ argv=tuple(argv) if argv else None,
+ sample=rdoc.Sample(
+ md5=md5.hexdigest(),
+ sha1=sha1.hexdigest(),
+ sha256=sha256.hexdigest(),
+ path=sample_path.resolve().absolute().as_posix(),
+ ),
+ analysis=rdoc.Analysis(
+ format=format_,
+ arch=arch,
+ os=os_,
+ extractor=extractor.__class__.__name__,
+ rules=rules,
+ base_address=frz.Address.from_capa(extractor.get_base_address()),
+ layout=rdoc.Layout(
+ functions=(),
# this is updated after capabilities have been collected.
# will look like:
#
# "functions": { 0x401000: { "matched_basic_blocks": [ 0x401000, 0x401005, ... ] }, ... }
- },
- },
- }
+ ),
+ feature_counts=rdoc.FeatureCounts(file=0, functions=()),
+ library_functions=(),
+ ),
+ )


- def compute_layout(rules, extractor, capabilities):
+ def compute_layout(rules, extractor, capabilities) -> rdoc.Layout:
"""
compute a metadata structure that links basic blocks
to the functions in which they're found.
@@ -803,16 +824,19 @@ def compute_layout(rules, extractor, capabilities):
assert addr in functions_by_bb
matched_bbs.add(addr)

- layout = {
- "functions": {
- f: {
- "matched_basic_blocks": [bb for bb in bbs if bb in matched_bbs]
- # this object is open to extension in the future,
+ layout = rdoc.Layout(
+ functions=tuple(
+ rdoc.FunctionLayout(
+ address=frz.Address.from_capa(f),
+ matched_basic_blocks=tuple(
+ rdoc.BasicBlockLayout(address=frz.Address.from_capa(bb)) for bb in bbs if bb in matched_bbs
+ ) # this object is open to extension in the future,
# such as with the function name, etc.
- }
+ )
for f, bbs in bbs_by_function.items()
- }
- }
+ if len([bb for bb in bbs if bb in matched_bbs]) > 0
+ )
+ )

return layout

@@ -902,7 +926,7 @@ def install_common_args(parser, wanted=None):
"--backend",
type=str,
help="select the backend to use",
- choices=(BACKEND_VIV, BACKEND_BINJA),
+ choices=(BACKEND_VIV, BACKEND_BINJA, BACKEND_PEFILE),
default=BACKEND_VIV,
)

@@ -971,12 +995,20 @@ def handle_common_args(args):
# disable vivisect-related logging, it's verbose and not relevant for capa users
set_vivisect_log_level(logging.CRITICAL)

- # Since Python 3.8 cp65001 is an alias to utf_8, but not for Python < 3.8
- # TODO: remove this code when only supporting Python 3.8+
- # https://stackoverflow.com/a/3259271/87207
- import codecs
-
- codecs.register(lambda name: codecs.lookup("utf-8") if name == "cp65001" else None)
+ if isinstance(sys.stdout, io.TextIOWrapper) or hasattr(sys.stdout, "reconfigure"):
+ # from sys.stdout type hint:
+ #
+ # TextIO is used instead of more specific types for the standard streams,
+ # since they are often monkeypatched at runtime. At startup, the objects
+ # are initialized to instances of TextIOWrapper.
+ #
+ # To use methods from TextIOWrapper, use an isinstance check to ensure that
+ # the streams have not been overridden:
+ #
+ # if isinstance(sys.stdout, io.TextIOWrapper):
+ # sys.stdout.reconfigure(...)
+ sys.stdout.reconfigure(encoding="utf-8")
+ colorama.just_fix_windows_console()

if args.color == "always":
colorama.init(strip=False)
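The hunk above drops the cp65001 codec alias shim (only needed on pre-3.8 Pythons) in favor of re-encoding stdout directly. A minimal sketch of the guarded `reconfigure` call, assuming nothing has replaced the stream:

    import io
    import sys

    # at startup, sys.stdout is a TextIOWrapper; the isinstance check guards
    # against hosts or test harnesses that monkeypatch it with a plainer object.
    if isinstance(sys.stdout, io.TextIOWrapper):
        sys.stdout.reconfigure(encoding="utf-8")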
@@ -991,8 +1023,11 @@ def handle_common_args(args):
else:
raise RuntimeError("unexpected --color value: " + args.color)

+ if hasattr(args, "sample"):
+ args.sample = Path(args.sample)
+
if hasattr(args, "rules"):
- rules_paths: List[str] = []
+ rules_paths: List[Path] = []

if args.rules == [RULES_PATH_DEFAULT_STRING]:
logger.debug("-" * 80)
@@ -1002,9 +1037,9 @@ def handle_common_args(args):
logger.debug(" https://github.com/mandiant/capa-rules")
logger.debug("-" * 80)

- default_rule_path = os.path.join(get_default_root(), "rules")
+ default_rule_path = get_default_root() / "rules"

- if not os.path.exists(default_rule_path):
+ if not default_rule_path.exists():
# when a users installs capa via pip,
# this pulls down just the source code - not the default rules.
# i'm not sure the default rules should even be written to the library directory,
@@ -1016,10 +1051,9 @@ def handle_common_args(args):
rules_paths.append(default_rule_path)
args.is_default_rules = True
else:
- rules_paths = args.rules
-
- if RULES_PATH_DEFAULT_STRING in rules_paths:
- rules_paths.remove(RULES_PATH_DEFAULT_STRING)
+ for rule in args.rules:
+ if RULES_PATH_DEFAULT_STRING != rule:
+ rules_paths.append(Path(rule))

for rule_path in rules_paths:
logger.debug("using rules path: %s", rule_path)
@@ -1037,24 +1071,25 @@ def handle_common_args(args):
)
logger.debug("-" * 80)

- sigs_path = os.path.join(get_default_root(), "sigs")
- if not os.path.exists(sigs_path):
+ sigs_path = get_default_root() / "sigs"
+
+ if not sigs_path.exists():
logger.error(
- "Using default signature path, but it doesn't exist. "
- "Please install the signatures first: "
- "https://github.com/mandiant/capa/blob/master/doc/installation.md#method-2-using-capa-as-a-python-library."
+ "Using default signature path, but it doesn't exist. " # noqa: G003 [logging statement uses +]
+ + "Please install the signatures first: "
+ + "https://github.com/mandiant/capa/blob/master/doc/installation.md#method-2-using-capa-as-a-python-library."
)
raise IOError(f"signatures path {sigs_path} does not exist or cannot be accessed")
else:
- sigs_path = args.signatures
+ sigs_path = Path(args.signatures)
logger.debug("using signatures path: %s", sigs_path)

args.signatures = sigs_path


- def main(argv=None):
- if sys.version_info < (3, 7):
- raise UnsupportedRuntimeError("This version of capa can only be used with Python 3.7+")
+ def main(argv: Optional[List[str]] = None):
+ if sys.version_info < (3, 8):
+ raise UnsupportedRuntimeError("This version of capa can only be used with Python 3.8+")

if argv is None:
argv = sys.argv[1:]
@@ -1119,7 +1154,7 @@ def main(argv=None):

try:
if is_running_standalone() and args.is_default_rules:
- cache_dir = os.path.join(get_default_root(), "cache")
+ cache_dir = get_default_root() / "cache"
else:
cache_dir = capa.rules.cache.get_default_cache_directory()

@@ -1136,13 +1171,13 @@ def main(argv=None):
rules = rules.filter_rules_by_meta(args.tag)
logger.debug("selected %d rules", len(rules))
for i, r in enumerate(rules.rules, 1):
# TODO don't display subscope rules?
logger.debug(" %d. %s", i, r)

except (IOError, capa.rules.InvalidRule, capa.rules.InvalidRuleSet) as e:
logger.error("%s", str(e))
logger.error(
- "Make sure your file directory contains properly formatted capa rules. You can download the standard "
- "collection of capa rules from https://github.com/mandiant/capa-rules/releases."
+ "Make sure your file directory contains properly formatted capa rules. You can download the standard " # noqa: G003 [logging statement uses +]
+ + "collection of capa rules from https://github.com/mandiant/capa-rules/releases."
)
logger.error(
"Please ensure you're using the rules that correspond to your major version of capa (%s)",
@@ -1189,14 +1224,13 @@ def main(argv=None):
logger.debug("file limitation short circuit, won't analyze fully.")
return E_FILE_LIMITATION

- # TODO: #1411 use a real type, not a dict here.
- meta: Dict[str, Any]
+ meta: rdoc.Metadata
capabilities: MatchResults
counts: Dict[str, Any]

if format_ == FORMAT_RESULT:
# result document directly parses into meta, capabilities
- result_doc = capa.render.result_document.ResultDocument.parse_file(args.sample)
+ result_doc = capa.render.result_document.ResultDocument.from_file(Path(args.sample))
meta, capabilities = result_doc.to_capa()

else:
@@ -1205,8 +1239,7 @@ def main(argv=None):

if format_ == FORMAT_FREEZE:
# freeze format deserializes directly into an extractor
- with open(args.sample, "rb") as f:
- extractor = capa.features.freeze.load(f.read())
+ extractor = frz.load(Path(args.sample).read_bytes())
else:
# all other formats we must create an extractor,
# such as viv, binary ninja, etc. workspaces
@@ -1232,7 +1265,7 @@ def main(argv=None):
args.backend,
sig_paths,
should_save_workspace,
- disable_progress=args.quiet,
+ disable_progress=args.quiet or args.debug,
)
except UnsupportedFormatError:
log_unsupported_format_error()
@@ -1247,15 +1280,16 @@ def main(argv=None):
meta = collect_metadata(argv, args.sample, args.format, args.os, args.rules, extractor)

capabilities, counts = find_capabilities(rules, extractor, disable_progress=args.quiet)
- meta["analysis"].update(counts)
- meta["analysis"]["layout"] = compute_layout(rules, extractor, capabilities)

+ meta.analysis.feature_counts = counts["feature_counts"]
+ meta.analysis.library_functions = counts["library_functions"]
+ meta.analysis.layout = compute_layout(rules, extractor, capabilities)

if has_file_limitation(rules, capabilities):
# bail if capa encountered file limitation e.g. a packed binary
# do show the output in verbose mode, though.
if not (args.verbose or args.vverbose or args.json):
return E_FILE_LIMITATION

if args.json:
print(capa.render.json.render(meta, rules, capabilities))
elif args.vverbose:
@@ -1293,14 +1327,16 @@ def ida_main():
logger.debug(" https://github.com/mandiant/capa-rules")
logger.debug("-" * 80)

- rules_path = os.path.join(get_default_root(), "rules")
+ rules_path = get_default_root() / "rules"
logger.debug("rule path: %s", rules_path)
rules = get_rules([rules_path])

meta = capa.ida.helpers.collect_metadata([rules_path])

capabilities, counts = find_capabilities(rules, capa.features.extractors.ida.extractor.IdaFeatureExtractor())
- meta["analysis"].update(counts)

+ meta.analysis.feature_counts = counts["feature_counts"]
+ meta.analysis.library_functions = counts["library_functions"]

if has_file_limitation(rules, capabilities, is_standalone=False):
capa.ida.helpers.inform_user_ida_ui("capa encountered warnings during analysis")
@@ -1309,17 +1345,47 @@ def ida_main():
print(capa.render.default.render(meta, rules, capabilities))


- def is_runtime_ida():
- try:
- import idc
- except ImportError:
- return False
- else:
- return True
+ def ghidra_main():
+ import capa.rules
+ import capa.ghidra.helpers
+ import capa.render.default
+ import capa.features.extractors.ghidra.extractor
+
+ logging.basicConfig(level=logging.INFO)
+ logging.getLogger().setLevel(logging.INFO)
+
+ logger.debug("-" * 80)
+ logger.debug(" Using default embedded rules.")
+ logger.debug(" ")
+ logger.debug(" You can see the current default rule set here:")
+ logger.debug(" https://github.com/mandiant/capa-rules")
+ logger.debug("-" * 80)
+
+ rules_path = get_default_root() / "rules"
+ logger.debug("rule path: %s", rules_path)
+ rules = get_rules([rules_path])
+
+ meta = capa.ghidra.helpers.collect_metadata([rules_path])
+
+ capabilities, counts = find_capabilities(
+ rules,
+ capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor(),
+ not capa.ghidra.helpers.is_running_headless(),
+ )
+
+ meta.analysis.feature_counts = counts["feature_counts"]
+ meta.analysis.library_functions = counts["library_functions"]
+
+ if has_file_limitation(rules, capabilities, is_standalone=False):
+ logger.info("capa encountered warnings during analysis")
+
+ print(capa.render.default.render(meta, rules, capabilities))


if __name__ == "__main__":
- if is_runtime_ida():
+ if capa.helpers.is_runtime_ida():
ida_main()
+ elif capa.helpers.is_runtime_ghidra():
+ ghidra_main()
else:
sys.exit(main())

@@ -1,3 +1,10 @@
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
# Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and limitations under the License.
|
||||
import logging
|
||||
|
||||
import capa.engine as ceng
|
||||
@@ -22,7 +29,7 @@ def get_node_cost(node):
|
||||
# substring and regex features require a full scan of each string
|
||||
# which we anticipate is more expensive then a hash lookup feature (e.g. mnemonic or count).
|
||||
#
|
||||
# TODO: compute the average cost of these feature relative to hash feature
|
||||
# fun research: compute the average cost of these feature relative to hash feature
|
||||
# and adjust the factor accordingly.
|
||||
return 2
|
||||
|
||||
|
||||
@@ -1,3 +1,10 @@
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
# Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and limitations under the License.
|
||||
import typing
|
||||
import collections
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -11,7 +11,6 @@ import collections
|
||||
import tabulate
|
||||
|
||||
import capa.render.utils as rutils
|
||||
import capa.features.freeze as frz
|
||||
import capa.render.result_document as rd
|
||||
import capa.features.freeze.features as frzf
|
||||
from capa.rules import RuleSet
|
||||
@@ -40,7 +39,7 @@ def render_meta(doc: rd.ResultDocument, ostream: StringIO):
|
||||
("path", doc.meta.sample.path),
|
||||
]
|
||||
|
||||
ostream.write(tabulate.tabulate(rows, tablefmt="psql"))
|
||||
ostream.write(tabulate.tabulate(rows, tablefmt="mixed_outline"))
|
||||
ostream.write("\n")
|
||||
|
||||
|
||||
@@ -49,7 +48,7 @@ def find_subrule_matches(doc: rd.ResultDocument):
|
||||
collect the rule names that have been matched as a subrule match.
|
||||
this way we can avoid displaying entries for things that are too specific.
|
||||
"""
|
||||
matches = set([])
|
||||
matches = set()
|
||||
|
||||
def rec(match: rd.Match):
|
||||
if not match.success:
|
||||
@@ -65,7 +64,7 @@ def find_subrule_matches(doc: rd.ResultDocument):
|
||||
matches.add(match.node.feature.match)
|
||||
|
||||
for rule in rutils.capability_rules(doc):
|
||||
for address, match in rule.matches:
|
||||
for _, match in rule.matches:
|
||||
rec(match)
|
||||
|
||||
return matches
|
||||
@@ -102,7 +101,7 @@ def render_capabilities(doc: rd.ResultDocument, ostream: StringIO):
|
||||
|
||||
if rows:
|
||||
ostream.write(
|
||||
tabulate.tabulate(rows, headers=[width("CAPABILITY", 50), width("NAMESPACE", 50)], tablefmt="psql")
|
||||
tabulate.tabulate(rows, headers=[width("Capability", 50), width("Namespace", 50)], tablefmt="mixed_outline")
|
||||
)
|
||||
ostream.write("\n")
|
||||
else:
|
||||
@@ -148,7 +147,7 @@ def render_attack(doc: rd.ResultDocument, ostream: StringIO):
|
||||
if rows:
|
||||
ostream.write(
|
||||
tabulate.tabulate(
|
||||
rows, headers=[width("ATT&CK Tactic", 20), width("ATT&CK Technique", 80)], tablefmt="psql"
|
||||
rows, headers=[width("ATT&CK Tactic", 20), width("ATT&CK Technique", 80)], tablefmt="mixed_grid"
|
||||
)
|
||||
)
|
||||
ostream.write("\n")
|
||||
@@ -190,7 +189,9 @@ def render_mbc(doc: rd.ResultDocument, ostream: StringIO):
|
||||
|
||||
if rows:
|
||||
ostream.write(
|
||||
tabulate.tabulate(rows, headers=[width("MBC Objective", 25), width("MBC Behavior", 75)], tablefmt="psql")
|
||||
tabulate.tabulate(
|
||||
rows, headers=[width("MBC Objective", 25), width("MBC Behavior", 75)], tablefmt="mixed_grid"
|
||||
)
|
||||
)
|
||||
ostream.write("\n")
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -11,4 +11,4 @@ from capa.engine import MatchResults
|
||||
|
||||
|
||||
def render(meta, rules: RuleSet, capabilities: MatchResults) -> str:
|
||||
return rd.ResultDocument.from_capa(meta, rules, capabilities).json(exclude_none=True)
|
||||
return rd.ResultDocument.from_capa(meta, rules, capabilities).model_dump_json(exclude_none=True)
|
||||
|
||||
@@ -24,14 +24,10 @@ $ protoc.exe --python_out=. --mypy_out=. <path_to_proto> (e.g. capa/render/proto
|
||||
|
||||
Alternatively, --pyi_out=. can be used to generate a Python Interface file that supports development
|
||||
"""
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
import datetime
|
||||
from typing import Any, Dict, Union
|
||||
|
||||
import google.protobuf.json_format
|
||||
from google.protobuf.json_format import MessageToJson
|
||||
|
||||
import capa.rules
|
||||
import capa.features.freeze as frz
|
||||
@@ -43,7 +39,7 @@ from capa.features.freeze import AddressType
|
||||
|
||||
|
||||
def dict_tuple_to_list_values(d: Dict) -> Dict:
|
||||
o = dict()
|
||||
o = {}
|
||||
for k, v in d.items():
|
||||
if isinstance(v, tuple):
|
||||
o[k] = list(v)
|
||||
@@ -130,13 +126,13 @@ def metadata_to_pb2(meta: rd.Metadata) -> capa_pb2.Metadata:
|
||||
timestamp=str(meta.timestamp),
|
||||
version=meta.version,
|
||||
argv=meta.argv,
|
||||
sample=google.protobuf.json_format.ParseDict(meta.sample.dict(), capa_pb2.Sample()),
|
||||
sample=google.protobuf.json_format.ParseDict(meta.sample.model_dump(), capa_pb2.Sample()),
|
||||
analysis=capa_pb2.Analysis(
|
||||
format=meta.analysis.format,
|
||||
arch=meta.analysis.arch,
|
||||
os=meta.analysis.os,
|
||||
extractor=meta.analysis.extractor,
|
||||
rules=meta.analysis.rules,
|
||||
rules=list(meta.analysis.rules),
|
||||
base_address=addr_to_pb2(meta.analysis.base_address),
|
||||
layout=capa_pb2.Layout(
|
||||
functions=[
|
||||
@@ -397,7 +393,7 @@ def match_to_pb2(match: rd.Match) -> capa_pb2.Match:
|
||||
def rule_metadata_to_pb2(rule_metadata: rd.RuleMetadata) -> capa_pb2.RuleMetadata:
|
||||
# after manual type conversions to the RuleMetadata, we can rely on the protobuf json parser
|
||||
# conversions include tuple -> list and rd.Enum -> proto.enum
|
||||
meta = dict_tuple_to_list_values(rule_metadata.dict())
|
||||
meta = dict_tuple_to_list_values(rule_metadata.model_dump())
|
||||
meta["scope"] = scope_to_pb2(meta["scope"])
|
||||
meta["attack"] = list(map(dict_tuple_to_list_values, meta.get("attack", [])))
|
||||
meta["mbc"] = list(map(dict_tuple_to_list_values, meta.get("mbc", [])))
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
|
||||
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at: [package root]/LICENSE.txt
|
||||
@@ -7,9 +7,10 @@
|
||||
# See the License for the specific language governing permissions and limitations under the License.
|
||||
import datetime
|
||||
import collections
|
||||
from typing import Any, Dict, List, Tuple, Union, Optional
|
||||
from typing import Dict, List, Tuple, Union, Literal, Optional
|
||||
from pathlib import Path
|
||||
|
||||
from pydantic import Field, BaseModel
|
||||
from pydantic import Field, BaseModel, ConfigDict
|
||||
|
||||
import capa.rules
|
||||
import capa.engine
|
||||
@@ -23,47 +24,49 @@ from capa.helpers import assert_never


 class FrozenModel(BaseModel):
-    class Config:
-        frozen = True
-        extra = "forbid"
+    model_config = ConfigDict(frozen=True, extra="forbid")


-class Sample(FrozenModel):
+class Model(BaseModel):
+    model_config = ConfigDict(extra="forbid")
+
+
+class Sample(Model):
     md5: str
     sha1: str
     sha256: str
     path: str


-class BasicBlockLayout(FrozenModel):
+class BasicBlockLayout(Model):
     address: frz.Address


-class FunctionLayout(FrozenModel):
+class FunctionLayout(Model):
     address: frz.Address
     matched_basic_blocks: Tuple[BasicBlockLayout, ...]


-class Layout(FrozenModel):
+class Layout(Model):
     functions: Tuple[FunctionLayout, ...]


-class LibraryFunction(FrozenModel):
+class LibraryFunction(Model):
     address: frz.Address
     name: str


-class FunctionFeatureCount(FrozenModel):
+class FunctionFeatureCount(Model):
     address: frz.Address
     count: int


-class FeatureCounts(FrozenModel):
+class FeatureCounts(Model):
     file: int
     functions: Tuple[FunctionFeatureCount, ...]


-class Analysis(FrozenModel):
+class Analysis(Model):
     format: str
     arch: str
     os: str
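This hunk is the core of the config migration: the nested `class Config` becomes a `model_config = ConfigDict(...)` attribute, and a second, non-frozen base (`Model`) is introduced for the document models. A sketch of what the two config keys buy, with a hypothetical Point model:

    from pydantic import BaseModel, ConfigDict, ValidationError

    class FrozenModel(BaseModel):
        # pydantic v1 spelled this as a nested class:
        #     class Config:
        #         frozen = True
        #         extra = "forbid"
        model_config = ConfigDict(frozen=True, extra="forbid")

    class Point(FrozenModel):  # hypothetical example model
        x: int
        y: int

    p = Point(x=1, y=2)
    try:
        p.x = 3  # frozen=True: instances are immutable (and hashable)
    except ValidationError:
        pass
    try:
        Point(x=1, y=2, z=3)  # extra="forbid": unknown fields are rejected
    except ValidationError:
        pass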
@@ -75,92 +78,13 @@ class Analysis(FrozenModel):
     library_functions: Tuple[LibraryFunction, ...]


-class Metadata(FrozenModel):
+class Metadata(Model):
     timestamp: datetime.datetime
     version: str
     argv: Optional[Tuple[str, ...]]
     sample: Sample
     analysis: Analysis
-
-    @classmethod
-    def from_capa(cls, meta: Any) -> "Metadata":
-        return cls(
-            timestamp=meta["timestamp"],
-            version=meta["version"],
-            argv=meta["argv"] if "argv" in meta else None,
-            sample=Sample(
-                md5=meta["sample"]["md5"],
-                sha1=meta["sample"]["sha1"],
-                sha256=meta["sample"]["sha256"],
-                path=meta["sample"]["path"],
-            ),
-            analysis=Analysis(
-                format=meta["analysis"]["format"],
-                arch=meta["analysis"]["arch"],
-                os=meta["analysis"]["os"],
-                extractor=meta["analysis"]["extractor"],
-                rules=meta["analysis"]["rules"],
-                base_address=frz.Address.from_capa(meta["analysis"]["base_address"]),
-                layout=Layout(
-                    functions=tuple(
-                        FunctionLayout(
-                            address=frz.Address.from_capa(address),
-                            matched_basic_blocks=tuple(
-                                BasicBlockLayout(address=frz.Address.from_capa(bb)) for bb in f["matched_basic_blocks"]
-                            ),
-                        )
-                        for address, f in meta["analysis"]["layout"]["functions"].items()
-                    )
-                ),
-                feature_counts=FeatureCounts(
-                    file=meta["analysis"]["feature_counts"]["file"],
-                    functions=tuple(
-                        FunctionFeatureCount(address=frz.Address.from_capa(address), count=count)
-                        for address, count in meta["analysis"]["feature_counts"]["functions"].items()
-                    ),
-                ),
-                library_functions=tuple(
-                    LibraryFunction(address=frz.Address.from_capa(address), name=name)
-                    for address, name in meta["analysis"]["library_functions"].items()
-                ),
-            ),
-        )
-
-    def to_capa(self) -> Dict[str, Any]:
-        capa_meta = {
-            "timestamp": self.timestamp.isoformat(),
-            "version": self.version,
-            "sample": {
-                "md5": self.sample.md5,
-                "sha1": self.sample.sha1,
-                "sha256": self.sample.sha256,
-                "path": self.sample.path,
-            },
-            "analysis": {
-                "format": self.analysis.format,
-                "arch": self.analysis.arch,
-                "os": self.analysis.os,
-                "extractor": self.analysis.extractor,
-                "rules": self.analysis.rules,
-                "base_address": self.analysis.base_address.to_capa(),
-                "layout": {
-                    "functions": {
-                        f.address.to_capa(): {
-                            "matched_basic_blocks": [bb.address.to_capa() for bb in f.matched_basic_blocks]
-                        }
-                        for f in self.analysis.layout.functions
-                    }
-                },
-                "feature_counts": {
-                    "file": self.analysis.feature_counts.file,
-                    "functions": {fc.address.to_capa(): fc.count for fc in self.analysis.feature_counts.functions},
-                },
-                "library_functions": {lf.address.to_capa(): lf.name for lf in self.analysis.library_functions},
-            },
-        }
-
-        return capa_meta


 class CompoundStatementType:
     AND = "and"
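With this hunk the hand-written from_capa()/to_capa() converters on Metadata are dropped; callers pass rd.Metadata around directly (see the from_capa signature change further below). For reference, pydantic v2's model_validate already performs the nested dict-to-model conversion those methods did by hand — a generic sketch with stand-in models, not capa's:

    from typing import Tuple
    from pydantic import BaseModel

    class Sample(BaseModel):  # stand-in model
        md5: str
        path: str

    class Metadata(BaseModel):  # stand-in model
        version: str
        argv: Tuple[str, ...]
        sample: Sample

    raw = {"version": "6.0.0", "argv": ["capa", "a.exe"], "sample": {"md5": "0" * 32, "path": "a.exe"}}
    meta = Metadata.model_validate(raw)  # v1 spelled this parse_obj()
    assert meta.sample.md5 == "0" * 32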
@@ -179,13 +103,13 @@ class CompoundStatement(StatementModel):


 class SomeStatement(StatementModel):
-    type = "some"
+    type: Literal["some"] = "some"
     description: Optional[str] = None
     count: int


 class RangeStatement(StatementModel):
-    type = "range"
+    type: Literal["range"] = "range"
     description: Optional[str] = None
     min: int
     max: int
@@ -193,7 +117,7 @@ class RangeStatement(StatementModel):


 class SubscopeStatement(StatementModel):
-    type = "subscope"
+    type: Literal["subscope"] = "subscope"
     description: Optional[str] = None
     scope: capa.rules.Scope

@@ -208,7 +132,7 @@ Statement = Union[


 class StatementNode(FrozenModel):
-    type = "statement"
+    type: Literal["statement"] = "statement"
     statement: Statement

@@ -245,7 +169,7 @@ def statement_from_capa(node: capa.engine.Statement) -> Statement:


 class FeatureNode(FrozenModel):
-    type = "feature"
+    type: Literal["feature"] = "feature"
     feature: frz.Feature

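pydantic v2 rejects non-annotated class attributes ("A non-annotated attribute was detected"), so the bare `type = "..."` defaults above had to become annotated fields; `Literal` both types the field and pins its value, which is what lets these statement classes coexist in a Union. A runnable sketch with simplified stand-in classes:

    from typing import Literal, Union
    from pydantic import BaseModel

    class SomeStatement(BaseModel):
        # the annotated Literal is a real field with a fixed value,
        # usable as a union tag; v1's bare `type = "some"` was not.
        type: Literal["some"] = "some"
        count: int

    class RangeStatement(BaseModel):
        type: Literal["range"] = "range"
        min: int
        max: int

    Statement = Union[SomeStatement, RangeStatement]

    s = SomeStatement(count=2)
    assert s.model_dump() == {"type": "some", "count": 2}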
@@ -376,7 +300,7 @@ class Match(FrozenModel):
             # pull matches from the referenced rule into our tree here.
             rule_name = name
             rule = rules[rule_name]
-            rule_matches = {address: result for (address, result) in capabilities[rule_name]}
+            rule_matches = dict(capabilities[rule_name])

             if rule.is_subscope_rule():
                 # for a subscope rule, fixup the node to be a scope node, rather than a match feature node.
@@ -421,7 +345,7 @@ class Match(FrozenModel):
             # we could introduce an intermediate node here.
             # this would be a breaking change and require updates to the renderers.
             # in the meantime, the above might be sufficient.
-            rule_matches = {address: result for (address, result) in capabilities[rule.name]}
+            rule_matches = dict(capabilities[rule.name])
             for location in result.locations:
                 # doc[locations] contains all matches for the given namespace.
                 # for example, the feature might be `match: anti-analysis/packer`
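Both replaced comprehensions rebuilt a dict from (address, result) pairs, which is exactly what the dict() constructor does:

    pairs = [("0x401000", "r1"), ("0x402000", "r2")]  # stand-in values
    assert {address: result for (address, result) in pairs} == dict(pairs)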
@@ -574,15 +498,12 @@ class MaecMetadata(FrozenModel):
     malware_family: Optional[str] = Field(None, alias="malware-family")
     malware_category: Optional[str] = Field(None, alias="malware-category")
     malware_category_ov: Optional[str] = Field(None, alias="malware-category-ov")

-    class Config:
-        frozen = True
-        allow_population_by_field_name = True
+    model_config = ConfigDict(frozen=True, populate_by_name=True)


 class RuleMetadata(FrozenModel):
     name: str
-    namespace: Optional[str]
+    namespace: Optional[str] = None
     authors: Tuple[str, ...]
     scope: capa.rules.Scope
     attack: Tuple[AttackSpec, ...] = Field(alias="att&ck")
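Two v2 renames land here: `allow_population_by_field_name` becomes `populate_by_name`, and Optional fields no longer default to None implicitly (hence the explicit `namespace: Optional[str] = None`). A sketch of the alias behavior with a hypothetical model:

    from typing import Optional
    from pydantic import BaseModel, ConfigDict, Field

    class RuleMeta(BaseModel):  # hypothetical model, not capa's
        model_config = ConfigDict(frozen=True, populate_by_name=True)
        attack: Optional[str] = Field(None, alias="att&ck")

    # with populate_by_name, both the alias and the python name work:
    a = RuleMeta.model_validate({"att&ck": "T1055"})
    b = RuleMeta(attack="T1055")
    assert a.attack == b.attack == "T1055"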
@@ -620,9 +541,7 @@ class RuleMetadata(FrozenModel):
         ) # type: ignore
         # Mypy is unable to recognise arguments due to alias

-    class Config:
-        frozen = True
-        allow_population_by_field_name = True
+    model_config = ConfigDict(frozen=True, populate_by_name=True)


 class RuleMatches(FrozenModel):
@@ -642,7 +561,7 @@ class ResultDocument(FrozenModel):
     rules: Dict[str, RuleMatches]

     @classmethod
-    def from_capa(cls, meta, rules: RuleSet, capabilities: MatchResults) -> "ResultDocument":
+    def from_capa(cls, meta: Metadata, rules: RuleSet, capabilities: MatchResults) -> "ResultDocument":
         rule_matches: Dict[str, RuleMatches] = {}
         for rule_name, matches in capabilities.items():
             rule = rules[rule_name]
@@ -659,10 +578,9 @@ class ResultDocument(FrozenModel):
                 ),
             )

-        return ResultDocument(meta=Metadata.from_capa(meta), rules=rule_matches)
+        return ResultDocument(meta=meta, rules=rule_matches)

-    def to_capa(self) -> Tuple[Dict, Dict]:
-        meta = self.meta.to_capa()
+    def to_capa(self) -> Tuple[Metadata, Dict]:
         capabilities: Dict[
             str, List[Tuple[capa.features.address.Address, capa.features.common.Result]]
         ] = collections.defaultdict(list)
@@ -678,4 +596,8 @@ class ResultDocument(FrozenModel):

         capabilities[rule_name].append((addr.to_capa(), result))

-        return meta, capabilities
+        return self.meta, capabilities
+
+    @classmethod
+    def from_file(cls, path: Path) -> "ResultDocument":
+        return cls.model_validate_json(path.read_text(encoding="utf-8"))
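The new from_file classmethod is a thin wrapper over pydantic v2's model_validate_json, which supersedes v1's parse_raw()/parse_file(). A usage sketch (the path is a placeholder):

    from pathlib import Path
    import capa.render.result_document as rd

    # load a previously saved capa JSON result document back into the
    # frozen model tree:
    doc = rd.ResultDocument.from_file(Path("results.json"))
    print(doc.meta.sample.sha256)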
Some files were not shown because too many files have changed in this diff.