Compare commits


5 Commits

Author | SHA1 | Message | Date
Mike Hunhoff | 30272d5df6 | Update capa/features/extractors/dnfile/extractor.py (Co-authored-by: Moritz <mr-tz@users.noreply.github.com>) | 2023-02-28 15:21:31 -07:00
Mike Hunhoff | 23d076e0dc | use function address when emitting instructions | 2023-02-27 12:01:59 -07:00
Mike Hunhoff | e99525a11e | PR changes | 2023-02-24 14:52:31 -07:00
Mike Hunhoff | c3778cf7b1 | update CHANGELOG | 2023-02-24 14:48:09 -07:00
Mike Hunhoff | 969403ae51 | dotnet: add support for basic blocks | 2023-02-24 14:42:38 -07:00
198 changed files with 4152 additions and 51133 deletions


@@ -41,7 +41,7 @@
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
"postCreateCommand": "git submodule update --init && pip3 install --user -e .[dev] && pre-commit install",
"postCreateCommand": "git submodule update --init && pip3 install --user -e .[dev]",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",


@@ -159,25 +159,12 @@ The process described here has several goals:
Please follow these steps to have your contribution considered by the maintainers:
0. Sign the [Contributor License Agreement](#contributor-license-agreement)
1. Follow the [styleguides](#styleguides)
2. Update the CHANGELOG and add tests and documentation. In case they are not needed, indicate it in [the PR template](pull_request_template.md).
3. After you submit your pull request, verify that all [status checks](https://help.github.com/articles/about-status-checks/) are passing <details><summary>What if the status checks are failing? </summary>If a status check is failing, and you believe that the failure is unrelated to your change, please leave a comment on the pull request explaining why you believe the failure is unrelated. A maintainer will re-run the status check for you. If we conclude that the failure was a false positive, then we will open an issue to track that problem with our status check suite.</details>
While the prerequisites above must be satisfied prior to having your pull request reviewed, the reviewer(s) may ask you to complete additional design work, tests, or other changes before your pull request can be ultimately accepted.
### Contributor License Agreement
Contributions to this project must be accompanied by a Contributor License
Agreement. You (or your employer) retain the copyright to your contribution;
this simply gives us permission to use and redistribute your contributions as
part of the project. Head over to <https://cla.developers.google.com/> to see
your current agreements on file or to sign a new one.
You generally only need to submit a CLA once, so if you've already submitted one
(even if it was for a different project), you probably don't need to do it
again.
## Styleguides
### Git Commit Messages

.github/flake8.ini vendored

@@ -1,43 +0,0 @@
[flake8]
max-line-length = 120
extend-ignore =
# E203: whitespace before ':' (black does this)
E203,
# F401: `foo` imported but unused (prefer ruff)
F401,
# F811 Redefinition of unused `foo` (prefer ruff)
F811,
# E501 line too long (prefer black)
E501,
# E701 multiple statements on one line (colon) (prefer black, see https://github.com/psf/black/issues/4173)
E701,
# B010 Do not call setattr with a constant attribute value
B010,
# G200 Logging statement uses exception in arguments
G200,
# SIM102 Use a single if-statement instead of nested if-statements
# doesn't provide a space for commenting or logical separation of conditions
SIM102,
# SIM114 Use logical or and a single body
# makes logic trees too complex
SIM114,
# SIM117 Use 'with Foo, Bar:' instead of multiple with statements
# makes lines too long
SIM117
per-file-ignores =
# T201 print found.
#
# scripts are meant to print output
scripts/*: T201
# capa.exe is meant to print output
capa/main.py: T201
# IDA tests emit results to output window so need to print
tests/test_ida_features.py: T201
# utility used to find the Binary Ninja API via invoking python.exe
capa/features/extractors/binja/find_binja_api.py: T201
copyright-check = True
copyright-min-file-size = 1
copyright-regexp = Copyright \(C\) 2023 Mandiant, Inc. All Rights Reserved.


@@ -42,9 +42,6 @@ ignore_missing_imports = True
[mypy-idautils.*]
ignore_missing_imports = True
[mypy-ida_auto.*]
ignore_missing_imports = True
[mypy-ida_bytes.*]
ignore_missing_imports = True
@@ -86,6 +83,3 @@ ignore_missing_imports = True
[mypy-netnode.*]
ignore_missing_imports = True
[mypy-ghidra.*]
ignore_missing_imports = True


@@ -38,36 +38,39 @@ hiddenimports = [
"vivisect",
"vivisect.analysis",
"vivisect.analysis.amd64",
"vivisect.analysis.amd64",
"vivisect.analysis.amd64.emulation",
"vivisect.analysis.amd64.golang",
"vivisect.analysis.crypto",
"vivisect.analysis.crypto",
"vivisect.analysis.crypto.constants",
"vivisect.analysis.elf",
"vivisect.analysis.elf.elfplt",
"vivisect.analysis.elf.elfplt_late",
"vivisect.analysis.elf.libc_start_main",
"vivisect.analysis.generic",
"vivisect.analysis.generic",
"vivisect.analysis.generic.codeblocks",
"vivisect.analysis.generic.emucode",
"vivisect.analysis.generic.entrypoints",
"vivisect.analysis.generic.funcentries",
"vivisect.analysis.generic.impapi",
"vivisect.analysis.generic.linker",
"vivisect.analysis.generic.mkpointers",
"vivisect.analysis.generic.noret",
"vivisect.analysis.generic.pointers",
"vivisect.analysis.generic.pointertables",
"vivisect.analysis.generic.relocations",
"vivisect.analysis.generic.strconst",
"vivisect.analysis.generic.switchcase",
"vivisect.analysis.generic.symswitchcase",
"vivisect.analysis.generic.thunks",
"vivisect.analysis.generic.noret",
"vivisect.analysis.i386",
"vivisect.analysis.i386",
"vivisect.analysis.i386.calling",
"vivisect.analysis.i386.golang",
"vivisect.analysis.i386.importcalls",
"vivisect.analysis.i386.instrhook",
"vivisect.analysis.i386.thunk_reg",
"vivisect.analysis.i386.thunk_bx",
"vivisect.analysis.ms",
"vivisect.analysis.ms",
"vivisect.analysis.ms.hotpatch",
"vivisect.analysis.ms.localhints",
@@ -78,40 +81,8 @@ hiddenimports = [
"vivisect.impapi.posix.amd64",
"vivisect.impapi.posix.i386",
"vivisect.impapi.windows",
"vivisect.impapi.windows.advapi_32",
"vivisect.impapi.windows.advapi_64",
"vivisect.impapi.windows.amd64",
"vivisect.impapi.windows.gdi_32",
"vivisect.impapi.windows.gdi_64",
"vivisect.impapi.windows.i386",
"vivisect.impapi.windows.kernel_32",
"vivisect.impapi.windows.kernel_64",
"vivisect.impapi.windows.msvcr100_32",
"vivisect.impapi.windows.msvcr100_64",
"vivisect.impapi.windows.msvcr110_32",
"vivisect.impapi.windows.msvcr110_64",
"vivisect.impapi.windows.msvcr120_32",
"vivisect.impapi.windows.msvcr120_64",
"vivisect.impapi.windows.msvcr71_32",
"vivisect.impapi.windows.msvcr80_32",
"vivisect.impapi.windows.msvcr80_64",
"vivisect.impapi.windows.msvcr90_32",
"vivisect.impapi.windows.msvcr90_64",
"vivisect.impapi.windows.msvcrt_32",
"vivisect.impapi.windows.msvcrt_64",
"vivisect.impapi.windows.ntdll_32",
"vivisect.impapi.windows.ntdll_64",
"vivisect.impapi.windows.ole_32",
"vivisect.impapi.windows.ole_64",
"vivisect.impapi.windows.rpcrt4_32",
"vivisect.impapi.windows.rpcrt4_64",
"vivisect.impapi.windows.shell_32",
"vivisect.impapi.windows.shell_64",
"vivisect.impapi.windows.user_32",
"vivisect.impapi.windows.user_64",
"vivisect.impapi.windows.ws2plus_32",
"vivisect.impapi.windows.ws2plus_64",
"vivisect.impapi.winkern",
"vivisect.impapi.winkern.i386",
"vivisect.impapi.winkern.amd64",
"vivisect.parsers.blob",


@@ -61,7 +61,6 @@ a = Analysis(
"qt5",
"pyqtwebengine",
"pyasn1",
"binaryninja",
],
)
@@ -79,7 +78,7 @@ exe = EXE(
name="capa",
icon="logo.ico",
debug=False,
strip=False,
strip=None,
upx=True,
console=True,
)

.github/ruff.toml vendored

@@ -1,43 +0,0 @@
# Enable the pycodestyle (`E`) and Pyflakes (`F`) rules by default.
# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
# McCabe complexity (`C901`) by default.
select = ["E", "F"]
# Allow autofix for all enabled rules (when `--fix`) is provided.
fixable = ["ALL"]
unfixable = []
# E402 module level import not at top of file
# E722 do not use bare 'except'
# E501 line too long
ignore = ["E402", "E722", "E501"]
line-length = 120
exclude = [
# Exclude a variety of commonly ignored directories.
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".mypy_cache",
".nox",
".pants.d",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"venv",
# protobuf generated files
"*_pb2.py",
"*_pb2.pyi"
]

.github/tox.ini vendored (new file)

@@ -0,0 +1,10 @@
[pycodestyle]
; E402: module level import not at top of file
; W503: line break before binary operator
; E231 missing whitespace after ',' (emitted by black)
; E203 whitespace before ':' (emitted by black)
ignore = E402,W503,E203,E231
max-line-length = 160
statistics = True
count = True
exclude = .*


@@ -6,47 +6,37 @@ on:
release:
types: [edited, published]
permissions:
contents: write
jobs:
build:
name: PyInstaller for ${{ matrix.os }} / Py ${{ matrix.python_version }}
name: PyInstaller for ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
# set to false for debugging
fail-fast: true
matrix:
# using Python 3.8 to support running across multiple operating systems including Windows 7
include:
- os: ubuntu-20.04
- os: ubuntu-18.04
# use old linux so that the shared library versioning is more portable
artifact_name: capa
asset_name: linux
python_version: 3.8
- os: ubuntu-20.04
artifact_name: capa
asset_name: linux-py311
python_version: 3.11
- os: windows-2019
artifact_name: capa.exe
asset_name: windows
python_version: 3.8
- os: macos-11
# use older macOS for assumed better portability
artifact_name: capa
asset_name: macos
python_version: 3.8
steps:
- name: Checkout capa
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
with:
submodules: true
- name: Set up Python ${{ matrix.python_version }}
# using Python 3.8 to support running across multiple operating systems including Windows 7
- name: Set up Python 3.8
uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
with:
python-version: ${{ matrix.python_version }}
- if: matrix.os == 'ubuntu-20.04'
python-version: 3.8
- if: matrix.os == 'ubuntu-18.04'
run: sudo apt-get install -y libyaml-dev
- name: Upgrade pip, setuptools
run: python -m pip install --upgrade pip setuptools
@@ -57,34 +47,30 @@ jobs:
- name: Build standalone executable
run: pyinstaller --log-level DEBUG .github/pyinstaller/pyinstaller.spec
- name: Does it run (PE)?
run: dist/capa -d "tests/data/Practical Malware Analysis Lab 01-01.dll_"
run: dist/capa "tests/data/Practical Malware Analysis Lab 01-01.dll_"
- name: Does it run (Shellcode)?
run: dist/capa -d "tests/data/499c2a85f6e8142c3f48d4251c9c7cd6.raw32"
run: dist/capa "tests/data/499c2a85f6e8142c3f48d4251c9c7cd6.raw32"
- name: Does it run (ELF)?
run: dist/capa -d "tests/data/7351f8a40c5450557b24622417fc478d.elf_"
- name: Does it run (CAPE)?
run: |
7z e "tests/data/dynamic/cape/v2.2/d46900384c78863420fb3e297d0a2f743cd2b6b3f7f82bf64059a168e07aceb7.json.gz"
dist/capa -d "d46900384c78863420fb3e297d0a2f743cd2b6b3f7f82bf64059a168e07aceb7.json"
run: dist/capa "tests/data/7351f8a40c5450557b24622417fc478d.elf_"
- uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
with:
name: ${{ matrix.asset_name }}
path: dist/${{ matrix.artifact_name }}
test_run:
name: Test run on ${{ matrix.os }} / ${{ matrix.asset_name }}
name: Test run on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
needs: [build]
strategy:
matrix:
include:
# OSs not already tested above
- os: ubuntu-22.04
- os: ubuntu-18.04
artifact_name: capa
asset_name: linux
- os: ubuntu-22.04
- os: ubuntu-20.04
artifact_name: capa
asset_name: linux-py311
asset_name: linux
- os: windows-2022
artifact_name: capa.exe
asset_name: windows
@@ -110,8 +96,6 @@ jobs:
include:
- asset_name: linux
artifact_name: capa
- asset_name: linux-py311
artifact_name: capa
- asset_name: windows
artifact_name: capa.exe
- asset_name: macos


@@ -7,8 +7,6 @@ on:
pull_request_target:
types: [opened, edited, synchronize]
permissions: read-all
jobs:
check_changelog:
# no need to check for dependency updates via dependabot


@@ -1,21 +0,0 @@
name: PIP audit
on:
schedule:
- cron: '0 8 * * 1'
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 20
strategy:
matrix:
python-version: ["3.11"]
steps:
- name: Check out repository code
uses: actions/checkout@v4
- uses: pypa/gh-action-pip-audit@v1.0.8
with:
inputs: .


@@ -1,41 +1,29 @@
# use PyPI trusted publishing, as described here:
# https://blog.trailofbits.com/2023/05/23/trusted-publishing-a-new-benchmark-for-packaging-security/
# This workflows will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
name: publish to pypi
on:
release:
types: [published]
permissions:
contents: write
jobs:
pypi-publish:
runs-on: ubuntu-latest
environment:
name: release
permissions:
id-token: write
deploy:
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
- name: Set up Python
uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
with:
python-version: '3.8'
python-version: '3.7'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -e .[build]
- name: build package
pip install setuptools wheel twine
- name: Build and publish
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
python -m build
- name: upload package artifacts
uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2
with:
path: dist/*
- name: publish package
uses: pypa/gh-action-pypi-publish@f5622bde02b04381239da3573277701ceca8f6a0 # release/v1
with:
skip-existing: true
verbose: true
print-hash: true
python setup.py sdist bdist_wheel
twine upload --skip-existing dist/*


@@ -4,8 +4,6 @@ on:
release:
types: [published]
permissions: read-all
jobs:
tag:
name: Tag capa rules


@@ -6,8 +6,6 @@ on:
pull_request:
branches: [ master ]
permissions: read-all
# save workspaces to speed up testing
env:
CAPA_SAVE_WORKSPACE: "True"
@@ -29,23 +27,20 @@ jobs:
steps:
- name: Checkout capa
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
# use latest available python to take advantage of best performance
- name: Set up Python 3.11
- name: Set up Python 3.8
uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
with:
python-version: "3.11"
python-version: "3.8"
- name: Install dependencies
run: pip install -e .[dev]
- name: Lint with ruff
run: pre-commit run ruff
- name: Lint with isort
run: pre-commit run isort --show-diff-on-failure
run: isort --profile black --length-sort --line-width 120 -c .
- name: Lint with black
run: pre-commit run black --show-diff-on-failure
- name: Lint with flake8
run: pre-commit run flake8 --hook-stage manual
run: black -l 120 --check .
- name: Lint with pycodestyle
run: pycodestyle --show-source capa/ scripts/ tests/
- name: Check types with mypy
run: pre-commit run mypy --hook-stage manual
run: mypy --config-file .github/mypy/mypy.ini --check-untyped-defs capa/ scripts/ tests/
rule_linter:
runs-on: ubuntu-20.04
@@ -54,12 +49,12 @@ jobs:
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
with:
submodules: recursive
- name: Set up Python 3.11
- name: Set up Python 3.8
uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
with:
python-version: "3.11"
python-version: "3.8"
- name: Install capa
run: pip install -e .[dev]
run: pip install -e .
- name: Run rule linter
run: python scripts/lint.py rules/
@@ -72,15 +67,13 @@ jobs:
matrix:
os: [ubuntu-20.04, windows-2019, macos-11]
# across all operating systems
python-version: ["3.8", "3.11"]
python-version: ["3.7", "3.11"]
include:
# on Ubuntu run these as well
- os: ubuntu-20.04
python-version: "3.8"
- os: ubuntu-20.04
python-version: "3.9"
- os: ubuntu-20.04
python-version: "3.10"
steps:
- name: Checkout capa with submodules
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
@@ -95,110 +88,5 @@ jobs:
run: sudo apt-get install -y libyaml-dev
- name: Install capa
run: pip install -e .[dev]
- name: Run tests (fast)
# this set of tests runs about 80% of the cases in 20% of the time,
# and should catch most errors quickly.
run: pre-commit run pytest-fast --all-files --hook-stage manual
- name: Run tests
run: pytest -v tests/
binja-tests:
name: Binary Ninja tests for ${{ matrix.python-version }}
env:
BN_SERIAL: ${{ secrets.BN_SERIAL }}
runs-on: ubuntu-20.04
needs: [tests]
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.11"]
steps:
- name: Checkout capa with submodules
# do only run if BN_SERIAL is available, have to do this in every step, see https://github.com/orgs/community/discussions/26726#discussioncomment-3253118
if: ${{ env.BN_SERIAL != 0 }}
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
with:
submodules: recursive
- name: Set up Python ${{ matrix.python-version }}
if: ${{ env.BN_SERIAL != 0 }}
uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
with:
python-version: ${{ matrix.python-version }}
- name: Install pyyaml
if: ${{ env.BN_SERIAL != 0 }}
run: sudo apt-get install -y libyaml-dev
- name: Install capa
if: ${{ env.BN_SERIAL != 0 }}
run: pip install -e .[dev]
- name: install Binary Ninja
if: ${{ env.BN_SERIAL != 0 }}
run: |
mkdir ./.github/binja
curl "https://raw.githubusercontent.com/Vector35/binaryninja-api/6812c97/scripts/download_headless.py" -o ./.github/binja/download_headless.py
python ./.github/binja/download_headless.py --serial ${{ env.BN_SERIAL }} --output .github/binja/BinaryNinja-headless.zip
unzip .github/binja/BinaryNinja-headless.zip -d .github/binja/
python .github/binja/binaryninja/scripts/install_api.py --install-on-root --silent
- name: Run tests
if: ${{ env.BN_SERIAL != 0 }}
env:
BN_LICENSE: ${{ secrets.BN_LICENSE }}
run: pytest -v tests/test_binja_features.py # explicitly refer to the binja tests for performance. other tests run above.
ghidra-tests:
name: Ghidra tests for ${{ matrix.python-version }}
runs-on: ubuntu-20.04
needs: [tests]
strategy:
fail-fast: false
matrix:
python-version: ["3.8", "3.11"]
java-version: ["17"]
gradle-version: ["7.3"]
ghidra-version: ["10.3"]
public-version: ["PUBLIC_20230510"] # for ghidra releases
jep-version: ["4.1.1"]
ghidrathon-version: ["3.0.0"]
steps:
- name: Checkout capa with submodules
uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0
with:
submodules: true
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0
with:
python-version: ${{ matrix.python-version }}
- name: Set up Java ${{ matrix.java-version }}
uses: actions/setup-java@5ffc13f4174014e2d4d4572b3d74c3fa61aeb2c2 # v3
with:
distribution: 'temurin'
java-version: ${{ matrix.java-version }}
- name: Set up Gradle ${{ matrix.gradle-version }}
uses: gradle/gradle-build-action@40b6781dcdec2762ad36556682ac74e31030cfe2 # v2.5.1
with:
gradle-version: ${{ matrix.gradle-version }}
- name: Install Jep ${{ matrix.jep-version }}
run : pip install jep==${{ matrix.jep-version }}
- name: Install Ghidra ${{ matrix.ghidra-version }}
run: |
mkdir ./.github/ghidra
wget "https://github.com/NationalSecurityAgency/ghidra/releases/download/Ghidra_${{ matrix.ghidra-version }}_build/ghidra_${{ matrix.ghidra-version }}_${{ matrix.public-version }}.zip" -O ./.github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC.zip
unzip .github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC.zip -d .github/ghidra/
- name: Install Ghidrathon
run : |
mkdir ./.github/ghidrathon
curl -o ./.github/ghidrathon/ghidrathon-${{ matrix.ghidrathon-version }}.zip "https://codeload.github.com/mandiant/Ghidrathon/zip/refs/tags/v${{ matrix.ghidrathon-version }}"
unzip .github/ghidrathon/ghidrathon-${{ matrix.ghidrathon-version }}.zip -d .github/ghidrathon/
gradle -p ./.github/ghidrathon/Ghidrathon-${{ matrix.ghidrathon-version }}/ -PGHIDRA_INSTALL_DIR=$(pwd)/.github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC
unzip .github/ghidrathon/Ghidrathon-${{ matrix.ghidrathon-version }}/dist/*.zip -d .github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC/Ghidra/Extensions
- name: Install pyyaml
run: sudo apt-get install -y libyaml-dev
- name: Install capa
run: pip install -e .[dev]
- name: Run tests
run: |
mkdir ./.github/ghidra/project
.github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC/support/analyzeHeadless .github/ghidra/project ghidra_test -Import ./tests/data/mimikatz.exe_ -ScriptPath ./tests/ -PostScript test_ghidra_features.py > ../output.log
cat ../output.log
exit_code=$(cat ../output.log | grep exit | awk '{print $NF}')
exit $exit_code

.gitignore vendored

@@ -108,21 +108,17 @@ venv.bak/
*.viv
*.idb
*.i64
.vscode
!rules/lib
# hooks/ci.sh output
isort-output.log
black-output.log
rule-linter-output.log
.vscode
scripts/perf/*.txt
scripts/perf/*.svg
scripts/perf/*.zip
.direnv
.envrc
.DS_Store
*/.DS_Store
Pipfile
Pipfile.lock
/cache/
.github/binja/binaryninja
.github/binja/download_headless.py
.github/binja/BinaryNinja-headless.zip

.gitmodules vendored

@@ -1,8 +1,6 @@
[submodule "rules"]
path = rules
url = ../capa-rules.git
branch = dynamic-syntax
[submodule "tests/data"]
path = tests/data
url = ../capa-testfiles.git
branch = dynamic-feature-extractor


@@ -1,129 +0,0 @@
# install the pre-commit hooks:
#
# pre-commit install --hook-type pre-commit
# pre-commit installed at .git/hooks/pre-commit
#
# pre-commit install --hook-type pre-push
# pre-commit installed at .git/hooks/pre-push
#
# run all linters like:
#
# pre-commit run --all-files
# isort....................................................................Passed
# black....................................................................Passed
# ruff.....................................................................Passed
# flake8...................................................................Passed
# mypy.....................................................................Passed
#
# run a single linter like:
#
# pre-commit run --all-files isort
# isort....................................................................Passed
repos:
- repo: local
hooks:
- id: isort
name: isort
stages: [commit, push, manual]
language: system
entry: isort
args:
- "--length-sort"
- "--profile"
- "black"
- "--line-length=120"
- "--skip-glob"
- "*_pb2.py"
- "capa/"
- "scripts/"
- "tests/"
always_run: true
pass_filenames: false
- repo: local
hooks:
- id: black
name: black
stages: [commit, push, manual]
language: system
entry: black
args:
- "--line-length=120"
- "--extend-exclude"
- ".*_pb2.py"
- "capa/"
- "scripts/"
- "tests/"
always_run: true
pass_filenames: false
- repo: local
hooks:
- id: ruff
name: ruff
stages: [commit, push, manual]
language: system
entry: ruff
args:
- "check"
- "--config"
- ".github/ruff.toml"
- "capa/"
- "scripts/"
- "tests/"
always_run: true
pass_filenames: false
- repo: local
hooks:
- id: flake8
name: flake8
stages: [push, manual]
language: system
entry: flake8
args:
- "--config"
- ".github/flake8.ini"
- "--extend-exclude"
- "capa/render/proto/capa_pb2.py"
- "capa/"
- "scripts/"
- "tests/"
always_run: true
pass_filenames: false
- repo: local
hooks:
- id: mypy
name: mypy
stages: [push, manual]
language: system
entry: mypy
args:
- "--check-untyped-defs"
- "--ignore-missing-imports"
- "--config-file=.github/mypy/mypy.ini"
- "capa/"
- "scripts/"
- "tests/"
always_run: true
pass_filenames: false
- repo: local
hooks:
- id: pytest-fast
name: pytest (fast)
stages: [manual]
language: system
entry: pytest
args:
- "tests/"
- "--ignore=tests/test_binja_features.py"
- "--ignore=tests/test_ghidra_features.py"
- "--ignore=tests/test_ida_features.py"
- "--ignore=tests/test_viv_features.py"
- "--ignore=tests/test_main.py"
- "--ignore=tests/test_scripts.py"
always_run: true
pass_filenames: false


@@ -3,276 +3,11 @@
## master (unreleased)
### New Features
- dotnet: add support for basic blocks #1326 @mike-hunhoff
### Breaking Changes
### New Rules (0)
-
### Bug Fixes
### capa explorer IDA Pro plugin
### Development
### Raw diffs
- [capa v7.0.0...master](https://github.com/mandiant/capa/compare/v7.0.0...master)
- [capa-rules v7.0.0...master](https://github.com/mandiant/capa-rules/compare/v7.0.0...master)
## v7.0.0
This is the v7.0.0 release of capa, which was mainly worked on during Google Summer of Code (GSoC) 2023. A huge
shoutout to our GSoC contributors @colton-gabertan and @yelhamer for their amazing work.
Also, a big thanks to the other contributors: @aaronatp, @Aayush-Goel-04, @bkojusner, @doomedraven, @ruppde, @larchchen, @JCoonradt, and @xusheng6.
### New Features
- add Ghidra backend #1770 #1767 @colton-gabertan @mike-hunhoff
- add Ghidra UI integration #1734 @colton-gabertan @mike-hunhoff
- add dynamic analysis via CAPE sandbox reports #48 #1535 @yelhamer
- add call scope #771 @yelhamer
- add thread scope #1517 @yelhamer
- add process scope #1517 @yelhamer
- rules: change `meta.scope` to `meta.scopes` @yelhamer
- protobuf: add `Metadata.flavor` @williballenthin
- binja: add support for forwarded exports #1646 @xusheng6
- binja: add support for symtab names #1504 @xusheng6
- add com class/interface features #322 @Aayush-goel-04
- dotnet: emit enclosing class information for nested classes #1780 #1913 @bkojusner @mike-hunhoff
### Breaking Changes
- remove the `SCOPE_*` constants in favor of the `Scope` enum #1764 @williballenthin
- protobuf: deprecate `RuleMetadata.scope` in favor of `RuleMetadata.scopes` @williballenthin
- protobuf: deprecate `Metadata.analysis` in favor of `Metadata.analysis2` that is dynamic analysis aware @williballenthin
- update freeze format to v3, adding support for dynamic analysis @williballenthin
- extractor: ignore DLL name for api features #1815 @mr-tz
- main: introduce wrapping routines within main for working with CLI args #1813 @williballenthin
- move functions from `capa.main` to new `capa.loader` namespace #1821 @williballenthin
- proto: add `package` declaration #1960 @larchchen
### New Rules (41)
- nursery/get-ntoskrnl-base-address @mr-tz
- host-interaction/network/connectivity/set-tcp-connection-state @johnk3r
- nursery/capture-process-snapshot-data @mr-tz
- collection/network/capture-packets-using-sharppcap jakub.jozwiak@mandiant.com
- nursery/communicate-with-kernel-module-via-netlink-socket-on-linux michael.hunhoff@mandiant.com
- nursery/get-current-pid-on-linux michael.hunhoff@mandiant.com
- nursery/get-file-system-information-on-linux michael.hunhoff@mandiant.com
- nursery/get-password-database-entry-on-linux michael.hunhoff@mandiant.com
- nursery/mark-thread-detached-on-linux michael.hunhoff@mandiant.com
- nursery/persist-via-gnome-autostart-on-linux michael.hunhoff@mandiant.com
- nursery/set-thread-name-on-linux michael.hunhoff@mandiant.com
- load-code/dotnet/load-windows-common-language-runtime michael.hunhoff@mandiant.com blas.kojusner@mandiant.com jakub.jozwiak@mandiant.com
- nursery/log-keystrokes-via-input-method-manager @mr-tz
- nursery/encrypt-data-using-rc4-via-systemfunction032 richard.weiss@mandiant.com
- nursery/add-value-to-global-atom-table @mr-tz
- nursery/enumerate-processes-that-use-resource @Ana06
- host-interaction/process/inject/allocate-or-change-rwx-memory @mr-tz
- lib/allocate-or-change-rw-memory 0x534a@mailbox.org @mr-tz
- lib/change-memory-protection @mr-tz
- anti-analysis/anti-av/patch-antimalware-scan-interface-function jakub.jozwiak@mandiant.com
- executable/dotnet-singlefile/bundled-with-dotnet-single-file-deployment sara.rincon@mandiant.com
- internal/limitation/file/internal-dotnet-single-file-deployment-limitation sara.rincon@mandiant.com
- data-manipulation/encoding/encode-data-using-add-xor-sub-operations jakub.jozwiak@mandiant.com
- nursery/access-camera-in-dotnet-on-android michael.hunhoff@mandiant.com
- nursery/capture-microphone-audio-in-dotnet-on-android michael.hunhoff@mandiant.com
- nursery/capture-screenshot-in-dotnet-on-android michael.hunhoff@mandiant.com
- nursery/check-for-incoming-call-in-dotnet-on-android michael.hunhoff@mandiant.com
- nursery/check-for-outgoing-call-in-dotnet-on-android michael.hunhoff@mandiant.com
- nursery/compiled-with-xamarin michael.hunhoff@mandiant.com
- nursery/get-os-version-in-dotnet-on-android michael.hunhoff@mandiant.com
- data-manipulation/compression/create-cabinet-on-windows michael.hunhoff@mandiant.com jakub.jozwiak@mandiant.com
- data-manipulation/compression/extract-cabinet-on-windows jakub.jozwiak@mandiant.com
- lib/create-file-decompression-interface-context-on-windows jakub.jozwiak@mandiant.com
- nursery/enumerate-files-in-dotnet moritz.raabe@mandiant.com anushka.virgaonkar@mandiant.com
- nursery/get-mac-address-in-dotnet moritz.raabe@mandiant.com michael.hunhoff@mandiant.com echernofsky@google.com
- nursery/get-current-process-command-line william.ballenthin@mandiant.com
- nursery/get-current-process-file-path william.ballenthin@mandiant.com
- nursery/hook-routines-via-dlsym-rtld_next william.ballenthin@mandiant.com
- nursery/linked-against-hp-socket still@teamt5.org
- host-interaction/process/inject/process-ghostly-hollowing sara.rincon@mandiant.com
### Bug Fixes
- ghidra: fix `ints_to_bytes` performance #1761 @mike-hunhoff
- binja: improve function call site detection @xusheng6
- binja: use `binaryninja.load` to open files @xusheng6
- binja: bump binja version to 3.5 #1789 @xusheng6
- elf: better detect ELF OS via GCC .ident directives #1928 @williballenthin
- elf: better detect ELF OS via Android dependencies #1947 @williballenthin
- fix setuptools package discovery #1886 @gmacon @mr-tz
- remove unnecessary scripts/vivisect-py2-vs-py3.sh file #1949 @JCoonradt
### capa explorer IDA Pro plugin
- various integration updates and minor bug fixes
### Development
- update ATT&CK/MBC data for linting #1932 @mr-tz
#### Developer Notes
With this new release, many classes and concepts have been split up into static (mostly identical to the
prior implementations) and dynamic ones. For example, the legacy FeatureExtractor class has been renamed to
StaticFeatureExtractor and the DynamicFeatureExtractor has been added.
Starting from version 7.0, we have moved the component responsible for feature extraction from main to a new
capabilities module. Now, users wishing to utilize capa's feature extraction abilities should use that module instead
of importing the relevant logic from the main file.
For sandbox-based feature extractors, we are using Pydantic models. Contributions of more models for other sandboxes
are very welcome!
With this release, we've reorganized the logic found in `main()` to localize related logic, improve readability, and ease
changes and integrations. The new "main routines" are expected to be used only within main functions, either capa's own
main or those of related scripts. These functions should not be invoked from library code.
Beyond copying code around, we've refined the handling of the input file/format/backend. The logic for picking the
format and backend is more consistent. We've also documented that the input file is not necessarily the sample itself:
CAPE reports, freeze files, etc. are inputs derived from the sample, not the sample itself.
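To make the new entry point concrete, here is a minimal sketch (assuming a rule set and feature extractor have already been constructed; the loader helpers for doing so vary by version) that matches via the capabilities module instead of `capa.main`:
```python
# minimal sketch: match rules via the capabilities module, not capa.main.
# `ruleset` and `extractor` are assumed to be built elsewhere, e.g. with the
# loader helpers appropriate for your capa version.
from capa.rules import RuleSet
from capa.capabilities.common import find_capabilities
from capa.features.extractors.base_extractor import FeatureExtractor

def match_capabilities(ruleset: RuleSet, extractor: FeatureExtractor):
    # find_capabilities dispatches to the static or dynamic implementation
    # depending on the concrete extractor type (see capa/capabilities/common.py
    # later in this diff).
    matches, counts = find_capabilities(ruleset, extractor, disable_progress=True)
    return matches
```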
### Raw diffs
- [capa v6.1.0...v7.0.0](https://github.com/mandiant/capa/compare/v6.1.0...v7.0.0)
- [capa-rules v6.1.0...v7.0.0](https://github.com/mandiant/capa-rules/compare/v6.1.0...v7.0.0)
## v6.1.0
capa v6.1.0 is a bug fix release, most notably fixing unhandled exceptions in the capa explorer IDA Pro plugin.
@Aayush-Goel-04 put a lot of effort into improving code quality and adding a script for rule authors.
The script shows which features are present in a sample but not referenced by any existing rule.
You could use this script to find opportunities for new rules.
Speaking of new rules, we have eight additions, coming from Ronnie, Jakub, Moritz, Ervin, and still@teamt5.org!
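The feature-coverage script described above boils down to a set difference; here is a hypothetical sketch of the idea (not the actual script, whose feature extraction and rule parsing are far more involved):
```python
# hypothetical sketch: report features seen in a sample that no loaded
# rule references, as candidates for new rules.
from collections import Counter
from typing import Dict, Hashable, Iterable, Set

def unreferenced_features(
    sample_features: Iterable[Hashable], rule_features: Set[Hashable]
) -> Dict[Hashable, int]:
    counts = Counter(sample_features)
    # keep only features that the sample exhibits but no rule mentions
    return {feature: n for feature, n in counts.items() if feature not in rule_features}

# unreferenced_features(["api: socket", "mnemonic: shr"], {"api: socket"})
# -> {"mnemonic: shr": 1}
```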
### New Features
- ELF: implement import and export name extractor #1607 #1608 @Aayush-Goel-04
- bump pydantic from 1.10.9 to 2.1.1 #1582 @Aayush-Goel-04
- develop script to highlight features not used during matching #331 @Aayush-Goel-04
### New Rules (8)
- executable/pe/export/forwarded-export ronnie.salomonsen@mandiant.com
- host-interaction/bootloader/get-uefi-variable jakub.jozwiak@mandiant.com
- host-interaction/bootloader/set-uefi-variable jakub.jozwiak@mandiant.com
- nursery/enumerate-device-drivers-on-linux @mr-tz
- anti-analysis/anti-vm/vm-detection/check-for-foreground-window-switch ervin.ocampo@mandiant.com
- linking/static/sqlite3/linked-against-cppsqlite3 still@teamt5.org
- linking/static/sqlite3/linked-against-sqlite3 still@teamt5.org
### Bug Fixes
- rules: fix forwarded export characteristic #1656 @RonnieSalomonsen
- Binary Ninja: Fix stack string detection #1473 @xusheng6
- linter: skip native API check for NtProtectVirtualMemory #1675 @williballenthin
- OS: detect Android ELF files #1705 @williballenthin
- ELF: fix parsing of symtab #1704 @williballenthin
- result document: don't use deprecated pydantic functions #1718 @williballenthin
- pytest: don't mark IDA tests as pytest tests #1719 @williballenthin
### capa explorer IDA Pro plugin
- fix unhandled exception when resolving rule path #1693 @mike-hunhoff
### Raw diffs
- [capa v6.0.0...v6.1.0](https://github.com/mandiant/capa/compare/v6.0.0...v6.1.0)
- [capa-rules v6.0.0...v6.1.0](https://github.com/mandiant/capa-rules/compare/v6.0.0...v6.1.0)
## v6.0.0
capa v6.0 brings many bug fixes and quality improvements, including 64 rule updates and 26 new rules. We're now publishing to PyPI via [Trusted Publishing](https://blog.pypi.org/posts/2023-04-20-introducing-trusted-publishers/) and have migrated to using a `pyproject.toml` file. @Aayush-Goel-04 contributed a lot of new code across many files, so please welcome them to the project, along with @anders-v @crowface28 @dkelly2e @RonnieSalomonsen and @ejfocampo as first-time rule contributors!
For those who use capa as a library, we've introduced some limited breaking changes that better represent data types (versus less-structured data like dictionaries and strings). Following its recent deprecation, we've also dropped support for Python 3.7.
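The flavor of these changes, as a hypothetical before/after (not capa's actual classes): loosely-structured dicts and strings become typed objects.
```python
# hypothetical illustration of the dict-to-class and str-to-Path shifts
from dataclasses import dataclass
from pathlib import Path

# before: metadata passed around as a loosely-structured dict of strings
legacy_meta = {"sample": {"path": "/tmp/sample.exe_", "md5": "0" * 32}}

# after: a typed object whose fields carry real types, e.g. pathlib.Path
@dataclass
class SampleMetadata:
    path: Path
    md5: str

meta = SampleMetadata(path=Path(legacy_meta["sample"]["path"]), md5=legacy_meta["sample"]["md5"])
```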
### New Features
- add script to detect feature overlap between new and existing capa rules [#1451](https://github.com/mandiant/capa/issues/1451) [@Aayush-Goel-04](https://github.com/aayush-goel-04)
- extract forwarded exports from PE files #1624 @williballenthin
- extract function and API names from ELF symtab entries @yelhamer https://github.com/mandiant/capa-rules/issues/736
- use fancy box drawing characters for default output #1586 @williballenthin
### Breaking Changes
- use a class to represent Metadata (not dict) #1411 @Aayush-Goel-04 @manasghandat
- use pathlib.Path to represent file paths #1534 @Aayush-Goel-04
- Python 3.8 is now the minimum supported Python version #1578 @williballenthin
- Require a Contributor License Agreement (CLA) for PRs going forward #1642 @williballenthin
### New Rules (26)
- load-code/shellcode/execute-shellcode-via-windows-callback-function ervin.ocampo@mandiant.com jakub.jozwiak@mandiant.com
- nursery/execute-shellcode-via-indirect-call ronnie.salomonsen@mandiant.com
- data-manipulation/encryption/aes/encrypt-data-using-aes-mixcolumns-step @mr-tz
- linking/static/aplib/linked-against-aplib still@teamt5.org
- communication/mailslot/read-from-mailslot nick.simonian@mandiant.com
- nursery/hash-data-using-sha512managed-in-dotnet jonathanlepore@google.com
- nursery/compiled-with-exescript jonathanlepore@google.com
- nursery/check-for-sandbox-via-mac-address-ouis-in-dotnet jonathanlepore@google.com
- host-interaction/hardware/enumerate-devices-by-category @mr-tz
- host-interaction/service/continue-service @mr-tz
- host-interaction/service/pause-service @mr-tz
- persistence/exchange/act-as-exchange-transport-agent jakub.jozwiak@mandiant.com
- host-interaction/file-system/create-virtual-file-system-in-dotnet jakub.jozwiak@mandiant.com
- compiler/cx_freeze/compiled-with-cx_freeze @mr-tz jakub.jozwiak@mandiant.com
- communication/socket/create-vmci-socket jakub.jozwiak@mandiant.com
- persistence/office/act-as-excel-xll-add-in jakub.jozwiak@mandiant.com
- persistence/office/act-as-office-com-add-in jakub.jozwiak@mandiant.com
- persistence/office/act-as-word-wll-add-in jakub.jozwiak@mandiant.com
- anti-analysis/anti-debugging/debugger-evasion/hide-thread-from-debugger michael.hunhoff@mandiant.com jakub.jozwiak@mandiant.com
- host-interaction/memory/create-new-application-domain-in-dotnet jakub.jozwiak@mandiant.com
- host-interaction/gui/switch-active-desktop jakub.jozwiak@mandiant.com
- host-interaction/service/query-service-configuration @mr-tz
- anti-analysis/anti-av/patch-event-tracing-for-windows-function jakub.jozwiak@mandiant.com
- data-manipulation/encoding/xor/covertly-decode-and-write-data-to-windows-directory-using-indirect-calls dan.kelly@mandiant.com
- linking/runtime-linking/resolve-function-by-brute-ratel-badger-hash jakub.jozwiak@mandiant.com
### Bug Fixes
- extractor: add a Binary Ninja test that asserts its version #1487 @xusheng6
- extractor: update Binary Ninja stack string detection after the new constant outlining feature #1473 @xusheng6
- extractor: update vivisect Arch extraction #1334 @mr-tz
- extractor: avoid Binary Ninja exception when analyzing certain files #1441 @xusheng6
- symtab: fix struct.unpack() format for 64-bit ELF files @yelhamer
- symtab: safeguard against ZeroDivisionError for files containing a symtab with a null entry size @yelhamer
- improve ELF strtab and needed parsing @mr-tz
- better handle exceptional cases when parsing ELF files #1458 @Aayush-Goel-04
- improved testing coverage for Binary Ninja backend #1446 @Aayush-Goel-04
- add logging and print redirect to tqdm for capa main #749 @Aayush-Goel-04
- extractor: fix binja installation path detection does not work with Python 3.11
- tests: refine the IDA test runner script #1513 @williballenthin
- output: don't leave behind traces of progress bar @williballenthin
- import-to-ida: fix bug introduced with JSON report changes in v5 #1584 @williballenthin
- main: don't show spinner when emitting debug messages #1636 @williballenthin
- rules: add forwarded export characteristics to rule syntax file scope #1653 @RonnieSalomonsen
### capa explorer IDA Pro plugin
### Development
- update ATT&CK/MBC data for linting #1568 @mr-tz
- log time taken to analyze each function #1290 @williballenthin
- tests: make fixture available via conftest.py #1592 @williballenthin
- publish via PyPI trusted publishing #1491 @williballenthin
- migrate to pyproject.toml #1301 @williballenthin
- use [pre-commit](https://pre-commit.com/) to invoke linters #1579 @williballenthin
### Raw diffs
- [capa v5.1.0...v6.0.0](https://github.com/mandiant/capa/compare/v5.1.0...v6.0.0)
- [capa-rules v5.1.0...v6.0.0](https://github.com/mandiant/capa-rules/compare/v5.1.0...v6.0.0)
## v5.1.0
capa version 5.1.0 adds a Protocol Buffers (protobuf) format for result documents. Additionally, the [Vector35](https://vector35.com/) team contributed a new feature extractor using Binary Ninja. Other new features are a new CLI flag to override the detected operating system, functionality to read and render existing result documents, and an output color format that's easier to read.
Over 25 capa rules have been added and improved.
Thanks for all the support, especially to @xusheng6, @captainGeech42, @ggold7046, @manasghandat, @ooprathamm, @linpeiyu164, @yelhamer, @HongThatCong, @naikordian, @stevemk14ebr, @emtuls, @raymondlleong, @bkojusner, @joren485, and everyone else who submitted bugs and provided feedback!
### New Features
- add protobuf format for result documents #1219 @williballenthin @mr-tz
- extractor: add Binary Ninja feature extractor @xusheng6
- new cli flag `--os` to override auto-detected operating system for a sample @captainGeech42
- change colour/highlight to "cyan" instead of "blue" for better readability #1384 @ggold7046
- add new format to parse output json back to capa #1396 @ooprathamm
- parse ELF symbols' names to guess OS #1403 @yelhamer
### New Rules (26)
### New Rules (9)
- persistence/scheduled-tasks/schedule-task-via-at joren485
- data-manipulation/prng/generate-random-numbers-via-rtlgenrandom william.ballenthin@mandiant.com
@@ -283,39 +18,18 @@ Thanks for all the support, especially to @xusheng6, @captainGeech42, @ggold7046
- communication/http/reference-http-user-agent-string @mr-tz
- communication/http/get-http-content-length william.ballenthin@mandiant.com
- nursery/move-directory michael.hunhoff@mandiant.com
- nursery/get-http-request-uri william.ballenthin@mandiant.com
- nursery/create-zip-archive-in-dotnet michael.hunhoff@mandiant.com
- nursery/extract-zip-archive-in-dotnet anushka.virgaonkar@mandiant.com michael.hunhoff@mandiant.com
- data-manipulation/encryption/tea/decrypt-data-using-tea william.ballenthin@mandiant.com raymond.leong@mandiant.com
- data-manipulation/encryption/tea/encrypt-data-using-tea william.ballenthin@mandiant.com raymond.leong@mandiant.com
- data-manipulation/encryption/xtea/encrypt-data-using-xtea raymond.leong@mandiant.com
- data-manipulation/encryption/xxtea/encrypt-data-using-xxtea raymond.leong@mandiant.com
- nursery/hash-data-using-ripemd128 raymond.leong@mandiant.com
- nursery/hash-data-using-ripemd256 raymond.leong@mandiant.com
- nursery/hash-data-using-ripemd320 raymond.leong@mandiant.com
- nursery/set-web-proxy-in-dotnet michael.hunhoff@mandiant.com
- nursery/check-for-windows-sandbox-via-subdirectory echernofsky@google.com
- nursery/enumerate-pe-sections-in-dotnet @mr-tz
- nursery/destroy-software-breakpoint-capability echernofsky@google.com
- nursery/send-data-to-internet michael.hunhoff@mandiant.com
- nursery/compiled-with-cx_freeze @mr-tz
- nursery/contain-a-thread-local-storage-tls-section-in-dotnet michael.hunhoff@mandiant.com
-
### Bug Fixes
- extractor: interface of cache modified to prevent extracting file and global features multiple times @stevemk14ebr
- extractor: removed '.dynsym' as the library name for ELF imports #1318 @stevemk14ebr
- extractor: fix vivisect loop detection corner case #1310 @mr-tz
- match: extend OS characteristic to match OS_ANY to all supported OSes #1324 @mike-hunhoff
- extractor: fix IDA and vivisect string and bytes features overlap and tests #1327 #1336 @xusheng6
### capa explorer IDA Pro plugin
- rule generator plugin now loads faster when jumping between functions @stevemk14ebr
- fix exception when plugin loaded in IDA hosted under idat #1341 @mike-hunhoff
- improve embedded PE detection performance and reduce FP potential #1344 @mike-hunhoff
### Development
### Raw diffs
- [capa v5.0.0...v5.1.0](https://github.com/mandiant/capa/compare/v5.0.0...v5.1.0)
- [capa-rules v5.0.0...v5.1.0](https://github.com/mandiant/capa-rules/compare/v5.0.0...v5.1.0)
- [capa v5.0.0...master](https://github.com/mandiant/capa/compare/v5.0.0...master)
- [capa-rules v5.0.0...master](https://github.com/mandiant/capa-rules/compare/v5.0.0...master)
## v5.0.0 (2023-02-08)


@@ -187,7 +187,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright (C) 2023 Mandiant, Inc.
Copyright (C) 2020 Mandiant, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.

README.md

@@ -2,13 +2,13 @@
[![PyPI - Python Version](https://img.shields.io/pypi/pyversions/flare-capa)](https://pypi.org/project/flare-capa)
[![Last release](https://img.shields.io/github/v/release/mandiant/capa)](https://github.com/mandiant/capa/releases)
[![Number of rules](https://img.shields.io/badge/rules-866-blue.svg)](https://github.com/mandiant/capa-rules)
[![Number of rules](https://img.shields.io/badge/rules-779-blue.svg)](https://github.com/mandiant/capa-rules)
[![CI status](https://github.com/mandiant/capa/workflows/CI/badge.svg)](https://github.com/mandiant/capa/actions?query=workflow%3ACI+event%3Apush+branch%3Amaster)
[![Downloads](https://img.shields.io/github/downloads/mandiant/capa/total)](https://github.com/mandiant/capa/releases)
[![License](https://img.shields.io/badge/license-Apache--2.0-green.svg)](LICENSE.txt)
capa detects capabilities in executable files.
You run it against a PE, ELF, .NET module, shellcode file, or a sandbox report and it tells you what it thinks the program can do.
You run it against a PE, ELF, .NET module, or shellcode file and it tells you what it thinks the program can do.
For example, it might suggest that the file is a backdoor, is capable of installing services, or relies on HTTP to communicate.
Check out:
@@ -125,96 +125,6 @@ function @ 0x4011C0
...
```
Additionally, capa supports analyzing [CAPE](https://github.com/kevoreilly/CAPEv2) sandbox reports for dynamic capability extraction.
In order to use this, you first submit your sample to CAPE for analysis, and then run capa against the generated report (JSON).
Here's an example of running capa against a packed binary, and then running capa against the CAPE report of that binary:
```yaml
$ capa 05be49819139a3fdcdbddbdefd298398779521f3d68daa25275cc77508e42310.exe
WARNING:capa.capabilities.common:--------------------------------------------------------------------------------
WARNING:capa.capabilities.common: This sample appears to be packed.
WARNING:capa.capabilities.common:
WARNING:capa.capabilities.common: Packed samples have often been obfuscated to hide their logic.
WARNING:capa.capabilities.common: capa cannot handle obfuscation well using static analysis. This means the results may be misleading or incomplete.
WARNING:capa.capabilities.common: If possible, you should try to unpack this input file before analyzing it with capa.
WARNING:capa.capabilities.common: Alternatively, run the sample in a supported sandbox and invoke capa against the report to obtain dynamic analysis results.
WARNING:capa.capabilities.common:
WARNING:capa.capabilities.common: Identified via rule: (internal) packer file limitation
WARNING:capa.capabilities.common:
WARNING:capa.capabilities.common: Use -v or -vv if you really want to see the capabilities identified by capa.
WARNING:capa.capabilities.common:--------------------------------------------------------------------------------
$ capa 05be49819139a3fdcdbddbdefd298398779521f3d68daa25275cc77508e42310.json
┍━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑
│ ATT&CK Tactic │ ATT&CK Technique │
┝━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥
│ CREDENTIAL ACCESS │ Credentials from Password Stores T1555 │
├────────────────────────┼────────────────────────────────────────────────────────────────────────────────────┤
│ DEFENSE EVASION │ File and Directory Permissions Modification T1222 │
│ │ Modify Registry T1112 │
│ │ Obfuscated Files or Information T1027 │
│ │ Virtualization/Sandbox Evasion::User Activity Based Checks T1497.002 │
├────────────────────────┼────────────────────────────────────────────────────────────────────────────────────┤
│ DISCOVERY │ Account Discovery T1087 │
│ │ Application Window Discovery T1010 │
│ │ File and Directory Discovery T1083 │
│ │ Query Registry T1012 │
│ │ System Information Discovery T1082 │
│ │ System Location Discovery::System Language Discovery T1614.001 │
│ │ System Owner/User Discovery T1033 │
├────────────────────────┼────────────────────────────────────────────────────────────────────────────────────┤
│ EXECUTION │ System Services::Service Execution T1569.002 │
├────────────────────────┼────────────────────────────────────────────────────────────────────────────────────┤
│ PERSISTENCE │ Boot or Logon Autostart Execution::Registry Run Keys / Startup Folder T1547.001 │
│ │ Boot or Logon Autostart Execution::Winlogon Helper DLL T1547.004 │
│ │ Create or Modify System Process::Windows Service T1543.003 │
┕━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙
┍━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑
│ Capability │ Namespace │
┝━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥
│ check for unmoving mouse cursor (3 matches) │ anti-analysis/anti-vm/vm-detection │
│ gather bitkinex information │ collection/file-managers │
│ gather classicftp information │ collection/file-managers │
│ gather filezilla information │ collection/file-managers │
│ gather total-commander information │ collection/file-managers │
│ gather ultrafxp information │ collection/file-managers │
│ resolve DNS (23 matches) │ communication/dns │
│ initialize Winsock library (7 matches) │ communication/socket │
│ act as TCP client (3 matches) │ communication/tcp/client │
│ create new key via CryptAcquireContext │ data-manipulation/encryption │
│ encrypt or decrypt via WinCrypt │ data-manipulation/encryption │
│ hash data via WinCrypt │ data-manipulation/hashing │
│ initialize hashing via WinCrypt │ data-manipulation/hashing │
│ hash data with MD5 │ data-manipulation/hashing/md5 │
│ generate random numbers via WinAPI │ data-manipulation/prng │
│ extract resource via kernel32 functions (2 matches) │ executable/resource │
│ interact with driver via control codes (2 matches) │ host-interaction/driver │
│ get Program Files directory (18 matches) │ host-interaction/file-system │
│ get common file path (575 matches) │ host-interaction/file-system │
│ create directory (2 matches) │ host-interaction/file-system/create │
│ delete file │ host-interaction/file-system/delete │
│ get file attributes (122 matches) │ host-interaction/file-system/meta │
│ set file attributes (8 matches) │ host-interaction/file-system/meta │
│ move file │ host-interaction/file-system/move │
│ find taskbar (3 matches) │ host-interaction/gui/taskbar/find │
│ get keyboard layout (12 matches) │ host-interaction/hardware/keyboard │
│ get disk size │ host-interaction/hardware/storage │
│ get hostname (4 matches) │ host-interaction/os/hostname │
│ allocate or change RWX memory (3 matches) │ host-interaction/process/inject │
│ query or enumerate registry key (3 matches) │ host-interaction/registry │
│ query or enumerate registry value (8 matches) │ host-interaction/registry │
│ delete registry key │ host-interaction/registry/delete │
│ start service │ host-interaction/service/start │
│ get session user name │ host-interaction/session │
│ persist via Run registry key │ persistence/registry/run │
│ persist via Winlogon Helper DLL registry key │ persistence/registry/winlogon-helper │
│ persist via Windows service (2 matches) │ persistence/service │
┕━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙
```
capa uses a collection of rules to identify capabilities within a program.
These rules are easy to write, even for those new to reverse engineering.
By authoring rules, you can extend the capabilities that capa recognizes.
@@ -225,30 +135,31 @@ Here's an example rule used by capa:
```yaml
rule:
meta:
name: create TCP socket
namespace: communication/socket/tcp
name: hash data with CRC32
namespace: data-manipulation/checksum/crc32
authors:
- william.ballenthin@mandiant.com
- joakim@intezer.com
- anushka.virgaonkar@mandiant.com
scopes:
static: basic block
dynamic: call
- moritz.raabe@mandiant.com
scope: function
mbc:
- Communication::Socket Communication::Create TCP Socket [C0001.011]
- Data::Checksum::CRC32 [C0032.001]
examples:
- Practical Malware Analysis Lab 01-01.dll_:0x10001010
- 2D3EDC218A90F03089CC01715A9F047F:0x403CBD
- 7D28CB106CB54876B2A5C111724A07CD:0x402350 # RtlComputeCrc32
- 7EFF498DE13CC734262F87E6B3EF38AB:0x100084A6
features:
- or:
- and:
- number: 6 = IPPROTO_TCP
- number: 1 = SOCK_STREAM
- number: 2 = AF_INET
- mnemonic: shr
- or:
- api: ws2_32.socket
- api: ws2_32.WSASocket
- api: socket
- property/read: System.Net.Sockets.TcpClient::Client
- number: 0xEDB88320
- bytes: 00 00 00 00 96 30 07 77 2C 61 0E EE BA 51 09 99 19 C4 6D 07 8F F4 6A 70 35 A5 63 E9 A3 95 64 9E = crc32_tab
- number: 8
- characteristic: nzxor
- and:
- number: 0x8320
- number: 0xEDB8
- characteristic: nzxor
- api: RtlComputeCrc32
```
The [github.com/mandiant/capa-rules](https://github.com/mandiant/capa-rules) repository contains hundreds of standard library rules that are distributed with capa.
@@ -259,8 +170,6 @@ capa explorer helps you identify interesting areas of a program and build new ca
![capa + IDA Pro integration](https://github.com/mandiant/capa/blob/master/doc/img/explorer_expanded.png)
If you use Ghidra, you can use the Python 3 [Ghidra feature extractor](/capa/ghidra/). This integration enables capa to extract features directly from your Ghidra database, which can help you identify capabilities in programs that you analyze using Ghidra.
# further information
## capa
- [Installation](https://github.com/mandiant/capa/blob/master/doc/installation.md)


@@ -1,79 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
import itertools
import collections
from typing import Any, Tuple
from capa.rules import Scope, RuleSet
from capa.engine import FeatureSet, MatchResults
from capa.features.address import NO_ADDRESS
from capa.features.extractors.base_extractor import FeatureExtractor, StaticFeatureExtractor, DynamicFeatureExtractor
logger = logging.getLogger(__name__)
def find_file_capabilities(ruleset: RuleSet, extractor: FeatureExtractor, function_features: FeatureSet):
file_features: FeatureSet = collections.defaultdict(set)
for feature, va in itertools.chain(extractor.extract_file_features(), extractor.extract_global_features()):
# not all file features may have virtual addresses.
# if not, then at least ensure the feature shows up in the index.
# the set of addresses will still be empty.
if va:
file_features[feature].add(va)
else:
if feature not in file_features:
file_features[feature] = set()
logger.debug("analyzed file and extracted %d features", len(file_features))
file_features.update(function_features)
_, matches = ruleset.match(Scope.FILE, file_features, NO_ADDRESS)
return matches, len(file_features)
def has_file_limitation(rules: RuleSet, capabilities: MatchResults, is_standalone=True) -> bool:
file_limitation_rules = list(filter(lambda r: r.is_file_limitation_rule(), rules.rules.values()))
for file_limitation_rule in file_limitation_rules:
if file_limitation_rule.name not in capabilities:
continue
logger.warning("-" * 80)
for line in file_limitation_rule.meta.get("description", "").split("\n"):
logger.warning(" %s", line)
logger.warning(" Identified via rule: %s", file_limitation_rule.name)
if is_standalone:
logger.warning(" ")
logger.warning(" Use -v or -vv if you really want to see the capabilities identified by capa.")
logger.warning("-" * 80)
# bail on first file limitation
return True
return False
def find_capabilities(
ruleset: RuleSet, extractor: FeatureExtractor, disable_progress=None, **kwargs
) -> Tuple[MatchResults, Any]:
from capa.capabilities.static import find_static_capabilities
from capa.capabilities.dynamic import find_dynamic_capabilities
if isinstance(extractor, StaticFeatureExtractor):
# for the time being, extractors are either static or dynamic.
# Remove this assertion once that has changed
assert not isinstance(extractor, DynamicFeatureExtractor)
return find_static_capabilities(ruleset, extractor, disable_progress=disable_progress, **kwargs)
if isinstance(extractor, DynamicFeatureExtractor):
return find_dynamic_capabilities(ruleset, extractor, disable_progress=disable_progress, **kwargs)
raise ValueError(f"unexpected extractor type: {extractor.__class__.__name__}")


@@ -1,198 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
import itertools
import collections
from typing import Any, Tuple
import tqdm
import capa.perf
import capa.features.freeze as frz
import capa.render.result_document as rdoc
from capa.rules import Scope, RuleSet
from capa.engine import FeatureSet, MatchResults
from capa.helpers import redirecting_print_to_tqdm
from capa.capabilities.common import find_file_capabilities
from capa.features.extractors.base_extractor import CallHandle, ThreadHandle, ProcessHandle, DynamicFeatureExtractor
logger = logging.getLogger(__name__)
def find_call_capabilities(
ruleset: RuleSet, extractor: DynamicFeatureExtractor, ph: ProcessHandle, th: ThreadHandle, ch: CallHandle
) -> Tuple[FeatureSet, MatchResults]:
"""
find matches for the given rules for the given call.
returns: tuple containing (features for call, match results for call)
"""
# all features found for the call.
features: FeatureSet = collections.defaultdict(set)
for feature, addr in itertools.chain(
extractor.extract_call_features(ph, th, ch), extractor.extract_global_features()
):
features[feature].add(addr)
# matches found at this call.
_, matches = ruleset.match(Scope.CALL, features, ch.address)
for rule_name, res in matches.items():
rule = ruleset[rule_name]
for addr, _ in res:
capa.engine.index_rule_matches(features, rule, [addr])
return features, matches
def find_thread_capabilities(
ruleset: RuleSet, extractor: DynamicFeatureExtractor, ph: ProcessHandle, th: ThreadHandle
) -> Tuple[FeatureSet, MatchResults, MatchResults]:
"""
find matches for the given rules within the given thread.
returns: tuple containing (features for thread, match results for thread, match results for calls)
"""
# all features found within this thread,
# includes features found within calls.
features: FeatureSet = collections.defaultdict(set)
# matches found at the call scope.
# might be found at different calls, that's ok.
call_matches: MatchResults = collections.defaultdict(list)
for ch in extractor.get_calls(ph, th):
ifeatures, imatches = find_call_capabilities(ruleset, extractor, ph, th, ch)
for feature, vas in ifeatures.items():
features[feature].update(vas)
for rule_name, res in imatches.items():
call_matches[rule_name].extend(res)
for feature, va in itertools.chain(extractor.extract_thread_features(ph, th), extractor.extract_global_features()):
features[feature].add(va)
# matches found within this thread.
_, matches = ruleset.match(Scope.THREAD, features, th.address)
for rule_name, res in matches.items():
rule = ruleset[rule_name]
for va, _ in res:
capa.engine.index_rule_matches(features, rule, [va])
return features, matches, call_matches
def find_process_capabilities(
ruleset: RuleSet, extractor: DynamicFeatureExtractor, ph: ProcessHandle
) -> Tuple[MatchResults, MatchResults, MatchResults, int]:
"""
find matches for the given rules within the given process.
returns: tuple containing (match results for process, match results for threads, match results for calls, number of features)
"""
# all features found within this process,
# includes features found within threads (and calls).
process_features: FeatureSet = collections.defaultdict(set)
# matches found at the thread scope.
# might be found at different threads, that's ok.
thread_matches: MatchResults = collections.defaultdict(list)
# matches found at the call scope.
# might be found at different calls, that's ok.
call_matches: MatchResults = collections.defaultdict(list)
for th in extractor.get_threads(ph):
features, tmatches, cmatches = find_thread_capabilities(ruleset, extractor, ph, th)
for feature, vas in features.items():
process_features[feature].update(vas)
for rule_name, res in tmatches.items():
thread_matches[rule_name].extend(res)
for rule_name, res in cmatches.items():
call_matches[rule_name].extend(res)
for feature, va in itertools.chain(extractor.extract_process_features(ph), extractor.extract_global_features()):
process_features[feature].add(va)
_, process_matches = ruleset.match(Scope.PROCESS, process_features, ph.address)
return process_matches, thread_matches, call_matches, len(process_features)
def find_dynamic_capabilities(
ruleset: RuleSet, extractor: DynamicFeatureExtractor, disable_progress=None
) -> Tuple[MatchResults, Any]:
all_process_matches: MatchResults = collections.defaultdict(list)
all_thread_matches: MatchResults = collections.defaultdict(list)
all_call_matches: MatchResults = collections.defaultdict(list)
feature_counts = rdoc.DynamicFeatureCounts(file=0, processes=())
assert isinstance(extractor, DynamicFeatureExtractor)
with redirecting_print_to_tqdm(disable_progress):
with tqdm.contrib.logging.logging_redirect_tqdm():
pbar = tqdm.tqdm
if disable_progress:
# do not use tqdm to avoid unnecessary side effects when the caller intends
# to disable progress completely
def pbar(s, *args, **kwargs):
return s
processes = list(extractor.get_processes())
pb = pbar(processes, desc="matching", unit=" processes", leave=False)
for p in pb:
process_matches, thread_matches, call_matches, feature_count = find_process_capabilities(
ruleset, extractor, p
)
feature_counts.processes += (
rdoc.ProcessFeatureCount(address=frz.Address.from_capa(p.address), count=feature_count),
)
logger.debug("analyzed %s and extracted %d features", p.address, feature_count)
for rule_name, res in process_matches.items():
all_process_matches[rule_name].extend(res)
for rule_name, res in thread_matches.items():
all_thread_matches[rule_name].extend(res)
for rule_name, res in call_matches.items():
all_call_matches[rule_name].extend(res)
# collection of features that captures the rule matches within process and thread scopes.
# mapping from feature (matched rule) to set of addresses at which it matched.
process_and_lower_features: FeatureSet = collections.defaultdict(set)
for rule_name, results in itertools.chain(
all_process_matches.items(), all_thread_matches.items(), all_call_matches.items()
):
locations = {p[0] for p in results}
rule = ruleset[rule_name]
capa.engine.index_rule_matches(process_and_lower_features, rule, locations)
all_file_matches, feature_count = find_file_capabilities(ruleset, extractor, process_and_lower_features)
feature_counts.file = feature_count
matches = dict(
itertools.chain(
# each rule exists in exactly one scope,
# so there won't be any overlap among these following MatchResults,
# and we can merge the dictionaries naively.
all_thread_matches.items(),
all_process_matches.items(),
all_call_matches.items(),
all_file_matches.items(),
)
)
meta = {
"feature_counts": feature_counts,
}
return matches, meta
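Because each rule belongs to exactly one scope, the per-scope MatchResults above are key-disjoint and the final dict(itertools.chain(...)) merge cannot drop or overwrite entries. The merge in miniature (rule names and locations are invented):

import itertools

call_matches = {"rule-a": [("call-1", "result")]}
thread_matches = {"rule-b": [("thread-3", "result")]}
process_matches = {"rule-c": [("pid-7", "result")]}

merged = dict(itertools.chain(call_matches.items(), thread_matches.items(), process_matches.items()))
assert len(merged) == 3  # disjoint keys: nothing lost in a naive merge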

View File

@@ -1,233 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import time
import logging
import itertools
import collections
from typing import Any, Tuple
import tqdm.contrib.logging
import capa.perf
import capa.features.freeze as frz
import capa.render.result_document as rdoc
from capa.rules import Scope, RuleSet
from capa.engine import FeatureSet, MatchResults
from capa.helpers import redirecting_print_to_tqdm
from capa.capabilities.common import find_file_capabilities
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle, StaticFeatureExtractor
logger = logging.getLogger(__name__)
def find_instruction_capabilities(
ruleset: RuleSet, extractor: StaticFeatureExtractor, f: FunctionHandle, bb: BBHandle, insn: InsnHandle
) -> Tuple[FeatureSet, MatchResults]:
"""
find matches for the given rules for the given instruction.
returns: tuple containing (features for instruction, match results for instruction)
"""
# all features found for the instruction.
features: FeatureSet = collections.defaultdict(set)
for feature, addr in itertools.chain(
extractor.extract_insn_features(f, bb, insn), extractor.extract_global_features()
):
features[feature].add(addr)
# matches found at this instruction.
_, matches = ruleset.match(Scope.INSTRUCTION, features, insn.address)
for rule_name, res in matches.items():
rule = ruleset[rule_name]
for addr, _ in res:
capa.engine.index_rule_matches(features, rule, [addr])
return features, matches
def find_basic_block_capabilities(
ruleset: RuleSet, extractor: StaticFeatureExtractor, f: FunctionHandle, bb: BBHandle
) -> Tuple[FeatureSet, MatchResults, MatchResults]:
"""
find matches for the given rules within the given basic block.
returns: tuple containing (features for basic block, match results for basic block, match results for instructions)
"""
# all features found within this basic block,
# includes features found within instructions.
features: FeatureSet = collections.defaultdict(set)
# matches found at the instruction scope.
# might be found at different instructions, that's ok.
insn_matches: MatchResults = collections.defaultdict(list)
for insn in extractor.get_instructions(f, bb):
ifeatures, imatches = find_instruction_capabilities(ruleset, extractor, f, bb, insn)
for feature, vas in ifeatures.items():
features[feature].update(vas)
for rule_name, res in imatches.items():
insn_matches[rule_name].extend(res)
for feature, va in itertools.chain(
extractor.extract_basic_block_features(f, bb), extractor.extract_global_features()
):
features[feature].add(va)
# matches found within this basic block.
_, matches = ruleset.match(Scope.BASIC_BLOCK, features, bb.address)
for rule_name, res in matches.items():
rule = ruleset[rule_name]
for va, _ in res:
capa.engine.index_rule_matches(features, rule, [va])
return features, matches, insn_matches
def find_code_capabilities(
ruleset: RuleSet, extractor: StaticFeatureExtractor, fh: FunctionHandle
) -> Tuple[MatchResults, MatchResults, MatchResults, int]:
"""
find matches for the given rules within the given function.
returns: tuple containing (match results for function, match results for basic blocks, match results for instructions, number of features)
"""
# all features found within this function,
# includes features found within basic blocks (and instructions).
function_features: FeatureSet = collections.defaultdict(set)
# matches found at the basic block scope.
# might be found at different basic blocks, that's ok.
bb_matches: MatchResults = collections.defaultdict(list)
# matches found at the instruction scope.
# might be found at different instructions, that's ok.
insn_matches: MatchResults = collections.defaultdict(list)
for bb in extractor.get_basic_blocks(fh):
features, bmatches, imatches = find_basic_block_capabilities(ruleset, extractor, fh, bb)
for feature, vas in features.items():
function_features[feature].update(vas)
for rule_name, res in bmatches.items():
bb_matches[rule_name].extend(res)
for rule_name, res in imatches.items():
insn_matches[rule_name].extend(res)
for feature, va in itertools.chain(extractor.extract_function_features(fh), extractor.extract_global_features()):
function_features[feature].add(va)
_, function_matches = ruleset.match(Scope.FUNCTION, function_features, fh.address)
return function_matches, bb_matches, insn_matches, len(function_features)
def find_static_capabilities(
ruleset: RuleSet, extractor: StaticFeatureExtractor, disable_progress=None
) -> Tuple[MatchResults, Any]:
all_function_matches: MatchResults = collections.defaultdict(list)
all_bb_matches: MatchResults = collections.defaultdict(list)
all_insn_matches: MatchResults = collections.defaultdict(list)
feature_counts = rdoc.StaticFeatureCounts(file=0, functions=())
library_functions: Tuple[rdoc.LibraryFunction, ...] = ()
assert isinstance(extractor, StaticFeatureExtractor)
with redirecting_print_to_tqdm(disable_progress):
with tqdm.contrib.logging.logging_redirect_tqdm():
pbar = tqdm.tqdm
if capa.helpers.is_runtime_ghidra():
# the Ghidrathon interpreter cannot properly handle the TMonitor thread
# that is created when monitor_interval > 0
pbar.monitor_interval = 0
if disable_progress:
# do not use tqdm to avoid unnecessary side effects when the caller intends
# to disable progress completely
def pbar(s, *args, **kwargs):
return s
functions = list(extractor.get_functions())
n_funcs = len(functions)
pb = pbar(functions, desc="matching", unit=" functions", postfix="skipped 0 library functions", leave=False)
for f in pb:
t0 = time.time()
if extractor.is_library_function(f.address):
function_name = extractor.get_function_name(f.address)
logger.debug("skipping library function 0x%x (%s)", f.address, function_name)
library_functions += (
rdoc.LibraryFunction(address=frz.Address.from_capa(f.address), name=function_name),
)
n_libs = len(library_functions)
percentage = round(100 * (n_libs / n_funcs))
if isinstance(pb, tqdm.tqdm):
pb.set_postfix_str(f"skipped {n_libs} library functions ({percentage}%)")
continue
function_matches, bb_matches, insn_matches, feature_count = find_code_capabilities(
ruleset, extractor, f
)
feature_counts.functions += (
rdoc.FunctionFeatureCount(address=frz.Address.from_capa(f.address), count=feature_count),
)
t1 = time.time()
match_count = sum(len(res) for res in function_matches.values())
match_count += sum(len(res) for res in bb_matches.values())
match_count += sum(len(res) for res in insn_matches.values())
logger.debug(
"analyzed function 0x%x and extracted %d features, %d matches in %0.02fs",
f.address,
feature_count,
match_count,
t1 - t0,
)
for rule_name, res in function_matches.items():
all_function_matches[rule_name].extend(res)
for rule_name, res in bb_matches.items():
all_bb_matches[rule_name].extend(res)
for rule_name, res in insn_matches.items():
all_insn_matches[rule_name].extend(res)
# collection of features that captures the rule matches within function, BB, and instruction scopes.
# mapping from feature (matched rule) to set of addresses at which it matched.
function_and_lower_features: FeatureSet = collections.defaultdict(set)
for rule_name, results in itertools.chain(
all_function_matches.items(), all_bb_matches.items(), all_insn_matches.items()
):
locations = {p[0] for p in results}
rule = ruleset[rule_name]
capa.engine.index_rule_matches(function_and_lower_features, rule, locations)
all_file_matches, feature_count = find_file_capabilities(ruleset, extractor, function_and_lower_features)
feature_counts.file = feature_count
matches = dict(
itertools.chain(
# each rule exists in exactly one scope,
# so there won't be any overlap among these following MatchResults,
# and we can merge the dictionaries naively.
all_insn_matches.items(),
all_bb_matches.items(),
all_function_matches.items(),
all_file_matches.items(),
)
)
meta = {
"feature_counts": feature_counts,
"library_functions": library_functions,
}
return matches, meta
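The pbar shim above is just an identity function swapped in for tqdm.tqdm, so the loop body stays identical whether or not progress is rendered. The pattern on its own (tqdm assumed available only when progress is enabled):

def run(items, disable_progress=False):
    def identity(s, *args, **kwargs):
        return s

    if disable_progress:
        pbar = identity
    else:
        import tqdm  # assumed installed when progress is wanted
        pbar = tqdm.tqdm

    for item in pbar(items, desc="matching", leave=False):
        pass  # per-item work goes here

run(range(3), disable_progress=True)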

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,7 +8,7 @@
import copy
import collections
from typing import TYPE_CHECKING, Set, Dict, List, Tuple, Union, Mapping, Iterable, Iterator
from typing import TYPE_CHECKING, Set, Dict, List, Tuple, Union, Mapping, Iterable, Iterator, cast
import capa.perf
import capa.features.common
@@ -43,12 +43,10 @@ class Statement:
self.description = description
def __str__(self):
name = self.name.lower()
children = ",".join(map(str, self.get_children()))
if self.description:
return f"{name}({children} = {self.description})"
return "%s(%s = %s)" % (self.name.lower(), ",".join(map(str, self.get_children())), self.description)
else:
return f"{name}({children})"
return "%s(%s)" % (self.name.lower(), ",".join(map(str, self.get_children())))
def __repr__(self):
return str(self)
@@ -71,7 +69,7 @@ class Statement:
yield child
if hasattr(self, "children"):
for child in self.children:
for child in getattr(self, "children"):
assert isinstance(child, (Statement, Feature))
yield child
@@ -83,7 +81,7 @@ class Statement:
self.child = new
if hasattr(self, "children"):
children = self.children
children = getattr(self, "children")
for i, child in enumerate(children):
if child is existing:
children[i] = new
@@ -234,9 +232,9 @@ class Range(Statement):
def __str__(self):
if self.max == (1 << 64 - 1):
return f"range({str(self.child)}, min={self.min}, max=infinity)"
return "range(%s, min=%d, max=infinity)" % (str(self.child), self.min)
else:
return f"range({str(self.child)}, min={self.min}, max={self.max})"
return "range(%s, min=%d, max=%d)" % (str(self.child), self.min, self.max)
class Subscope(Statement):
@@ -304,7 +302,7 @@ def match(rules: List["capa.rules.Rule"], features: FeatureSet, addr: Address) -
other strategies can be imagined that match differently; implement these elsewhere.
specifically, this routine does "top down" matching of the given rules against the feature set.
"""
results: MatchResults = collections.defaultdict(list)
results = collections.defaultdict(list) # type: MatchResults
# copy features so that we can modify it
# without affecting the caller (keep this function pure)

View File

@@ -1,10 +1,3 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
class UnsupportedRuntimeError(RuntimeError):
pass
@@ -19,7 +12,3 @@ class UnsupportedArchError(ValueError):
class UnsupportedOSError(ValueError):
pass
class EmptyReportError(ValueError):
pass

View File

@@ -1,16 +1,10 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import abc
class Address(abc.ABC):
@abc.abstractmethod
def __eq__(self, other): ...
def __eq__(self, other):
...
@abc.abstractmethod
def __lt__(self, other):
@@ -42,79 +36,6 @@ class AbsoluteVirtualAddress(int, Address):
return int.__hash__(self)
class ProcessAddress(Address):
"""an address of a process in a dynamic execution trace"""
def __init__(self, pid: int, ppid: int = 0):
assert ppid >= 0
assert pid > 0
self.ppid = ppid
self.pid = pid
def __repr__(self):
return "process(%s%s)" % (
f"ppid: {self.ppid}, " if self.ppid > 0 else "",
f"pid: {self.pid}",
)
def __hash__(self):
return hash((self.ppid, self.pid))
def __eq__(self, other):
assert isinstance(other, ProcessAddress)
return (self.ppid, self.pid) == (other.ppid, other.pid)
def __lt__(self, other):
assert isinstance(other, ProcessAddress)
return (self.ppid, self.pid) < (other.ppid, other.pid)
class ThreadAddress(Address):
"""addresses a thread in a dynamic execution trace"""
def __init__(self, process: ProcessAddress, tid: int):
assert tid >= 0
self.process = process
self.tid = tid
def __repr__(self):
return f"{self.process}, thread(tid: {self.tid})"
def __hash__(self):
return hash((self.process, self.tid))
def __eq__(self, other):
assert isinstance(other, ThreadAddress)
return (self.process, self.tid) == (other.process, other.tid)
def __lt__(self, other):
assert isinstance(other, ThreadAddress)
return (self.process, self.tid) < (other.process, other.tid)
class DynamicCallAddress(Address):
"""addesses a call in a dynamic execution trace"""
def __init__(self, thread: ThreadAddress, id: int):
assert id >= 0
self.thread = thread
self.id = id
def __repr__(self):
return f"{self.thread}, call(id: {self.id})"
def __hash__(self):
return hash((self.thread, self.id))
def __eq__(self, other):
assert isinstance(other, DynamicCallAddress)
return (self.thread, self.id) == (other.thread, other.id)
def __lt__(self, other):
assert isinstance(other, DynamicCallAddress)
return (self.thread, self.id) < (other.thread, other.id)
class RelativeVirtualAddress(int, Address):
"""a memory address relative to a base address"""

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

View File

@@ -1,36 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from enum import Enum
from typing import Dict, List
from capa.helpers import assert_never
class ComType(Enum):
CLASS = "class"
INTERFACE = "interface"
COM_PREFIXES = {
ComType.CLASS: "CLSID_",
ComType.INTERFACE: "IID_",
}
def load_com_database(com_type: ComType) -> Dict[str, List[str]]:
# lazy load these python files since they are so large.
# that is, don't load them unless a COM feature is being handled.
import capa.features.com.classes
import capa.features.com.interfaces
if com_type == ComType.CLASS:
return capa.features.com.classes.COM_CLASSES
elif com_type == ComType.INTERFACE:
return capa.features.com.interfaces.COM_INTERFACES
else:
assert_never(com_type)
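A usage sketch for the loader above, assuming the databases map a COM name to its known GUID strings (per the annotated return type); the class name and output below are purely illustrative:

db = load_com_database(ComType.CLASS)
for guid in db.get("WScript.Shell", []):  # hypothetical COM class name
    print(f"{COM_PREFIXES[ComType.CLASS]}WScript.Shell -> {guid}")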

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -100,10 +100,7 @@ class Result:
return self.success
class Feature(abc.ABC): # noqa: B024
# this is an abstract class, since we don't want anyone to instantiate it directly,
# but it doesn't have any abstract methods.
class Feature(abc.ABC):
def __init__(
self,
value: Union[str, int, float, bytes],
@@ -127,17 +124,12 @@ class Feature(abc.ABC): # noqa: B024
return self.name == other.name and self.value == other.value
def __lt__(self, other):
# implementing sorting by serializing to JSON is a huge hack.
# its slow, inelegant, and probably doesn't work intuitively;
# however, we only use it for deterministic output, so it's good enough for now.
# circular import
# we should fix if this wasn't already a huge hack.
# TODO: this is a huge hack!
import capa.features.freeze.features
return (
capa.features.freeze.features.feature_from_capa(self).model_dump_json()
< capa.features.freeze.features.feature_from_capa(other).model_dump_json()
capa.features.freeze.features.feature_from_capa(self).json()
< capa.features.freeze.features.feature_from_capa(other).json()
)
def get_name_str(self) -> str:
@@ -157,11 +149,11 @@ class Feature(abc.ABC): # noqa: B024
def __str__(self):
if self.value is not None:
if self.description:
return f"{self.get_name_str()}({self.get_value_str()} = {self.description})"
return "%s(%s = %s)" % (self.get_name_str(), self.get_value_str(), self.description)
else:
return f"{self.get_name_str()}({self.get_value_str()})"
return "%s(%s)" % (self.get_name_str(), self.get_value_str())
else:
return f"{self.get_name_str()}"
return "%s" % self.get_name_str()
def __repr__(self):
return str(self)
@@ -250,7 +242,7 @@ class Substring(String):
def __str__(self):
assert isinstance(self.value, str)
return f"substring({escape_string(self.value)})"
return "substring(%s)" % escape_string(self.value)
class _MatchedSubstring(Substring):
@@ -275,9 +267,11 @@ class _MatchedSubstring(Substring):
self.matches = matches
def __str__(self):
matches = ", ".join(f'"{s}"' for s in (self.matches or {}).keys())
assert isinstance(self.value, str)
return f'substring("{self.value}", matches = {matches})'
return 'substring("%s", matches = %s)' % (
self.value,
", ".join(map(lambda s: '"' + s + '"', (self.matches or {}).keys())),
)
class Regex(String):
@@ -296,7 +290,7 @@ class Regex(String):
if value.endswith("/i"):
value = value[: -len("i")]
raise ValueError(
f"invalid regular expression: {value} it should use Python syntax, try it at https://pythex.org"
"invalid regular expression: %s it should use Python syntax, try it at https://pythex.org" % value
) from exc
def evaluate(self, ctx, short_circuit=True):
@@ -342,7 +336,7 @@ class Regex(String):
def __str__(self):
assert isinstance(self.value, str)
return f"regex(string =~ {self.value})"
return "regex(string =~ %s)" % self.value
class _MatchedRegex(Regex):
@@ -367,9 +361,11 @@ class _MatchedRegex(Regex):
self.matches = matches
def __str__(self):
matches = ", ".join(f'"{s}"' for s in (self.matches or {}).keys())
assert isinstance(self.value, str)
return f"regex(string =~ {self.value}, matches = {matches})"
return "regex(string =~ %s, matches = %s)" % (
self.value,
", ".join(map(lambda s: '"' + s + '"', (self.matches or {}).keys())),
)
class StringFactory:
@@ -425,8 +421,6 @@ OS_MACOS = "macos"
OS_ANY = "any"
VALID_OS = {os.value for os in capa.features.extractors.elf.OS}
VALID_OS.update({OS_WINDOWS, OS_LINUX, OS_MACOS, OS_ANY})
# internal only, not to be used in rules
OS_AUTO = "auto"
class OS(Feature):
@@ -434,20 +428,6 @@ class OS(Feature):
super().__init__(value, description=description)
self.name = "os"
def evaluate(self, ctx, **kwargs):
capa.perf.counters["evaluate.feature"] += 1
capa.perf.counters["evaluate.feature." + self.name] += 1
for feature, locations in ctx.items():
if not isinstance(feature, (OS,)):
continue
assert isinstance(feature.value, str)
if OS_ANY in (self.value, feature.value) or self.value == feature.value:
return Result(True, self, [], locations=locations)
return Result(False, self, [])
FORMAT_PE = "pe"
FORMAT_ELF = "elf"
@@ -457,23 +437,7 @@ VALID_FORMAT = (FORMAT_PE, FORMAT_ELF, FORMAT_DOTNET)
FORMAT_AUTO = "auto"
FORMAT_SC32 = "sc32"
FORMAT_SC64 = "sc64"
FORMAT_CAPE = "cape"
FORMAT_FREEZE = "freeze"
FORMAT_RESULT = "result"
STATIC_FORMATS = {
FORMAT_SC32,
FORMAT_SC64,
FORMAT_PE,
FORMAT_ELF,
FORMAT_DOTNET,
FORMAT_FREEZE,
FORMAT_RESULT,
}
DYNAMIC_FORMATS = {
FORMAT_CAPE,
FORMAT_FREEZE,
FORMAT_RESULT,
}
FORMAT_UNKNOWN = "unknown"
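Note that FORMAT_FREEZE and FORMAT_RESULT appear in both sets above: those containers can hold either analysis flavor, so format alone cannot pick the matcher. Routing on set membership, sketched with local copies of the constants:

STATIC_FORMATS = {"sc32", "sc64", "pe", "elf", "dotnet", "freeze", "result"}
DYNAMIC_FORMATS = {"cape", "freeze", "result"}

def candidate_flavors(fmt: str) -> set:
    flavors = set()
    if fmt in STATIC_FORMATS:
        flavors.add("static")
    if fmt in DYNAMIC_FORMATS:
        flavors.add("dynamic")
    return flavors

assert candidate_flavors("cape") == {"dynamic"}
assert candidate_flavors("freeze") == {"static", "dynamic"}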

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -7,18 +7,13 @@
# See the License for the specific language governing permissions and limitations under the License.
import abc
import hashlib
import dataclasses
from typing import Any, Dict, Tuple, Union, Iterator
from dataclasses import dataclass
# TODO(williballenthin): use typing.TypeAlias directly when Python 3.9 is deprecated
# https://github.com/mandiant/capa/issues/1699
from typing_extensions import TypeAlias
import capa.features.address
from capa.features.common import Feature
from capa.features.address import Address, ThreadAddress, ProcessAddress, DynamicCallAddress, AbsoluteVirtualAddress
from capa.features.address import Address, AbsoluteVirtualAddress
# feature extractors may reference functions, BBs, insns by opaque handle values.
# you can use the `.address` property to get and render the address of the feature.
@@ -27,24 +22,6 @@ from capa.features.address import Address, ThreadAddress, ProcessAddress, Dynami
# the feature extractor from which they were created.
@dataclass
class SampleHashes:
md5: str
sha1: str
sha256: str
@classmethod
def from_bytes(cls, buf: bytes) -> "SampleHashes":
md5 = hashlib.md5()
sha1 = hashlib.sha1()
sha256 = hashlib.sha256()
md5.update(buf)
sha1.update(buf)
sha256.update(buf)
return cls(md5=md5.hexdigest(), sha1=sha1.hexdigest(), sha256=sha256.hexdigest())
@dataclass
class FunctionHandle:
"""reference to a function recognized by a feature extractor.
@@ -86,18 +63,16 @@ class InsnHandle:
inner: Any
class StaticFeatureExtractor:
class FeatureExtractor:
"""
StaticFeatureExtractor defines the interface for fetching features from a
sample without running it; extractors that rely on the execution trace of
a sample must implement the other sibling class, DynamicFeatureExtractor.
FeatureExtractor defines the interface for fetching features from a sample.
There may be multiple backends that support fetching features for capa.
For example, we use vivisect by default, but also want to support saving
and restoring features from a JSON file.
When we restore the features, we'd like to use exactly the same matching logic
to find matching rules.
Therefore, we can define a StaticFeatureExtractor that provides features from the
Therefore, we can define a FeatureExtractor that provides features from the
serialized JSON file and do matching without a binary analysis pass.
Also, this provides a way to hook in an IDA backend.
@@ -106,14 +81,13 @@ class StaticFeatureExtractor:
__metaclass__ = abc.ABCMeta
def __init__(self, hashes: SampleHashes):
def __init__(self):
#
# note: a subclass should define ctor parameters for its own use.
# for example, the Vivisect feature extract might require the vw and/or path.
# this base class doesn't know what to do with that info, though.
#
super().__init__()
self._sample_hashes = hashes
@abc.abstractmethod
def get_base_address(self) -> Union[AbsoluteVirtualAddress, capa.features.address._NoAddress]:
@@ -126,12 +100,6 @@ class StaticFeatureExtractor:
"""
raise NotImplementedError()
def get_sample_hashes(self) -> SampleHashes:
"""
fetch the hashes for the sample contained within the extractor.
"""
return self._sample_hashes
@abc.abstractmethod
def extract_global_features(self) -> Iterator[Tuple[Feature, Address]]:
"""
@@ -294,177 +262,3 @@ class StaticFeatureExtractor:
Tuple[Feature, Address]: feature and its location
"""
raise NotImplementedError()
@dataclass
class ProcessHandle:
"""
reference to a process extracted by the sandbox.
Attributes:
address: process's address (pid)
inner: sandbox-specific data
"""
address: ProcessAddress
inner: Any
@dataclass
class ThreadHandle:
"""
reference to a thread extracted by the sandbox.
Attributes:
address: thread's address (tid)
inner: sandbox-specific data
"""
address: ThreadAddress
inner: Any
@dataclass
class CallHandle:
"""
reference to an api call extracted by the sandbox.
Attributes:
address: call's address, such as event index or id
inner: sandbox-specific data
"""
address: DynamicCallAddress
inner: Any
class DynamicFeatureExtractor:
"""
DynamicFeatureExtractor defines the interface for fetching features from a
sandbox's analysis of a sample; extractors that rely on statically analyzing
a sample must implement the sibling extractor, StaticFeatureExtractor.
Features are grouped mainly into threads; threads, together with their meta-features,
are in turn grouped into processes (which also have their own features). Other scopes
(such as function and file) may also apply for a specific sandbox.
This class is not instantiated directly; it is the base class for other implementations.
"""
__metaclass__ = abc.ABCMeta
def __init__(self, hashes: SampleHashes):
#
# note: a subclass should define ctor parameters for its own use.
# for example, the Vivisect feature extract might require the vw and/or path.
# this base class doesn't know what to do with that info, though.
#
super().__init__()
self._sample_hashes = hashes
def get_sample_hashes(self) -> SampleHashes:
"""
fetch the hashes for the sample contained within the extractor.
"""
return self._sample_hashes
@abc.abstractmethod
def extract_global_features(self) -> Iterator[Tuple[Feature, Address]]:
"""
extract features found at every scope ("global").
example::
extractor = CapeFeatureExtractor.from_report(json.loads(buf))
for feature, addr in extractor.get_global_features():
print(addr, feature)
yields:
Tuple[Feature, Address]: feature and its location
"""
raise NotImplementedError()
@abc.abstractmethod
def extract_file_features(self) -> Iterator[Tuple[Feature, Address]]:
"""
extract file-scope features.
example::
extractor = CapeFeatureExtractor.from_report(json.loads(buf))
for feature, addr in extractor.get_file_features():
print(addr, feature)
yields:
Tuple[Feature, Address]: feature and its location
"""
raise NotImplementedError()
@abc.abstractmethod
def get_processes(self) -> Iterator[ProcessHandle]:
"""
Enumerate processes in the trace.
"""
raise NotImplementedError()
@abc.abstractmethod
def extract_process_features(self, ph: ProcessHandle) -> Iterator[Tuple[Feature, Address]]:
"""
Yields all the features of a process. These include:
- file features of the process' image
"""
raise NotImplementedError()
@abc.abstractmethod
def get_process_name(self, ph: ProcessHandle) -> str:
"""
Returns the human-readable name for the given process,
such as the filename.
"""
raise NotImplementedError()
@abc.abstractmethod
def get_threads(self, ph: ProcessHandle) -> Iterator[ThreadHandle]:
"""
Enumerate threads in the given process.
"""
raise NotImplementedError()
@abc.abstractmethod
def extract_thread_features(self, ph: ProcessHandle, th: ThreadHandle) -> Iterator[Tuple[Feature, Address]]:
"""
Yields all the features of a thread. These include:
- sequenced api traces
"""
raise NotImplementedError()
@abc.abstractmethod
def get_calls(self, ph: ProcessHandle, th: ThreadHandle) -> Iterator[CallHandle]:
"""
Enumerate calls in the given thread
"""
raise NotImplementedError()
@abc.abstractmethod
def extract_call_features(
self, ph: ProcessHandle, th: ThreadHandle, ch: CallHandle
) -> Iterator[Tuple[Feature, Address]]:
"""
Yields all features of a call. These include:
- api name
- bytes/strings/numbers extracted from arguments
"""
raise NotImplementedError()
@abc.abstractmethod
def get_call_name(self, ph: ProcessHandle, th: ThreadHandle, ch: CallHandle) -> str:
"""
Returns the human-readable name for the given call,
such as a rendered API log entry, like:
Foo(1, "two", b"\x00\x11") -> -1
"""
raise NotImplementedError()
FeatureExtractor: TypeAlias = Union[StaticFeatureExtractor, DynamicFeatureExtractor]
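The closing TypeAlias replaces the old shared base class: call sites accept the union and narrow by isinstance, which type checkers track. The pattern in miniature (stand-in classes, not capa's):

from typing import Union
from typing_extensions import TypeAlias

class StaticX: ...
class DynamicX: ...

Extractor: TypeAlias = Union[StaticX, DynamicX]

def flavor(extractor: Extractor) -> str:
    if isinstance(extractor, StaticX):
        return "static"
    return "dynamic"

assert flavor(DynamicX()) == "dynamic"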

View File

@@ -1,184 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import string
import struct
from typing import Tuple, Iterator
from binaryninja import Function, Settings
from binaryninja import BasicBlock as BinjaBasicBlock
from binaryninja import (
BinaryView,
SymbolType,
RegisterValueType,
VariableSourceType,
MediumLevelILSetVar,
MediumLevelILOperation,
MediumLevelILBasicBlock,
MediumLevelILInstruction,
)
from capa.features.common import Feature, Characteristic
from capa.features.address import Address
from capa.features.basicblock import BasicBlock
from capa.features.extractors.helpers import MIN_STACKSTRING_LEN
from capa.features.extractors.base_extractor import BBHandle, FunctionHandle
use_const_outline: bool = False
settings: Settings = Settings()
if settings.contains("analysis.outlining.builtins") and settings.get_bool("analysis.outlining.builtins"):
use_const_outline = True
def get_printable_len_ascii(s: bytes) -> int:
"""Return string length if all operand bytes are ascii or utf16-le printable"""
count = 0
for c in s:
if c == 0:
return count
if c < 127 and chr(c) in string.printable:
count += 1
return count
def get_printable_len_wide(s: bytes) -> int:
"""Return string length if all operand bytes are ascii or utf16-le printable"""
if all(c == 0x00 for c in s[1::2]):
return get_printable_len_ascii(s[::2])
return 0
def get_stack_string_len(f: Function, il: MediumLevelILInstruction) -> int:
bv: BinaryView = f.view
if il.operation != MediumLevelILOperation.MLIL_CALL:
return 0
target = il.dest
if target.operation not in [MediumLevelILOperation.MLIL_CONST, MediumLevelILOperation.MLIL_CONST_PTR]:
return 0
addr = target.value.value
sym = bv.get_symbol_at(addr)
if not sym or sym.type != SymbolType.LibraryFunctionSymbol:
return 0
if sym.name not in ["__builtin_strncpy", "__builtin_strcpy", "__builtin_wcscpy"]:
return 0
if len(il.params) < 2:
return 0
dest = il.params[0]
if dest.operation in [MediumLevelILOperation.MLIL_ADDRESS_OF, MediumLevelILOperation.MLIL_VAR]:
var = dest.src
else:
return 0
if var.source_type != VariableSourceType.StackVariableSourceType:
return 0
src = il.params[1]
if src.value.type != RegisterValueType.ConstantDataAggregateValue:
return 0
s = f.get_constant_data(RegisterValueType.ConstantDataAggregateValue, src.value.value)
return max(get_printable_len_ascii(bytes(s)), get_printable_len_wide(bytes(s)))
def get_printable_len(il: MediumLevelILSetVar) -> int:
"""Return string length if all operand bytes are ascii or utf16-le printable"""
width = il.dest.type.width
value = il.src.value.value
if width == 1:
chars = struct.pack("<B", value & 0xFF)
elif width == 2:
chars = struct.pack("<H", value & 0xFFFF)
elif width == 4:
chars = struct.pack("<I", value & 0xFFFFFFFF)
elif width == 8:
chars = struct.pack("<Q", value & 0xFFFFFFFFFFFFFFFF)
else:
return 0
def is_printable_ascii(chars_: bytes):
return all(c < 127 and chr(c) in string.printable for c in chars_)
def is_printable_utf16le(chars_: bytes):
if all(c == 0x00 for c in chars_[1::2]):
return is_printable_ascii(chars_[::2])
if is_printable_ascii(chars):
return width
if is_printable_utf16le(chars):
return width // 2
return 0
def is_mov_imm_to_stack(il: MediumLevelILInstruction) -> bool:
"""verify instruction moves immediate onto stack"""
if il.operation != MediumLevelILOperation.MLIL_SET_VAR:
return False
if il.src.operation != MediumLevelILOperation.MLIL_CONST:
return False
if il.dest.source_type != VariableSourceType.StackVariableSourceType:
return False
return True
def bb_contains_stackstring(f: Function, bb: MediumLevelILBasicBlock) -> bool:
"""check basic block for stackstring indicators
true if basic block contains enough moves of constant bytes to the stack
"""
count = 0
for il in bb:
if use_const_outline:
count += get_stack_string_len(f, il)
else:
if is_mov_imm_to_stack(il):
count += get_printable_len(il)
if count > MIN_STACKSTRING_LEN:
return True
return False
def extract_bb_stackstring(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract stackstring indicators from basic block"""
bb: Tuple[BinjaBasicBlock, MediumLevelILBasicBlock] = bbh.inner
if bb[1] is not None and bb_contains_stackstring(fh.inner, bb[1]):
yield Characteristic("stack string"), bbh.address
def extract_bb_tight_loop(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract tight loop indicators from a basic block"""
bb: Tuple[BinjaBasicBlock, MediumLevelILBasicBlock] = bbh.inner
for edge in bb[0].outgoing_edges:
if edge.target.start == bb[0].start:
yield Characteristic("tight loop"), bbh.address
def extract_features(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract basic block features"""
for bb_handler in BASIC_BLOCK_HANDLERS:
for feature, addr in bb_handler(fh, bbh):
yield feature, addr
yield BasicBlock(), bbh.address
BASIC_BLOCK_HANDLERS = (
extract_bb_tight_loop,
extract_bb_stackstring,
)
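The wide-string helpers above exploit the fact that UTF-16LE ASCII text has NUL high bytes: check that every odd byte is zero, then validate the even bytes as printable ASCII. The same idea as a standalone toy:

import string

def printable_wide_len(buf: bytes) -> int:
    """length of buf decoded as UTF-16LE, if it is printable ASCII as wide text; else 0"""
    if buf and all(b == 0x00 for b in buf[1::2]):  # high bytes must all be NUL
        low = buf[::2]
        if all(b < 127 and chr(b) in string.printable for b in low):
            return len(low)
    return 0

assert printable_wide_len("AB".encode("utf-16-le")) == 2
assert printable_wide_len(b"\xff\xfe") == 0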

View File

@@ -1,81 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import List, Tuple, Iterator
import binaryninja as binja
import capa.features.extractors.elf
import capa.features.extractors.binja.file
import capa.features.extractors.binja.insn
import capa.features.extractors.binja.global_
import capa.features.extractors.binja.function
import capa.features.extractors.binja.basicblock
from capa.features.common import Feature
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import (
BBHandle,
InsnHandle,
SampleHashes,
FunctionHandle,
StaticFeatureExtractor,
)
class BinjaFeatureExtractor(StaticFeatureExtractor):
def __init__(self, bv: binja.BinaryView):
super().__init__(hashes=SampleHashes.from_bytes(bv.file.raw.read(0, len(bv.file.raw))))
self.bv = bv
self.global_features: List[Tuple[Feature, Address]] = []
self.global_features.extend(capa.features.extractors.binja.file.extract_file_format(self.bv))
self.global_features.extend(capa.features.extractors.binja.global_.extract_os(self.bv))
self.global_features.extend(capa.features.extractors.binja.global_.extract_arch(self.bv))
def get_base_address(self):
return AbsoluteVirtualAddress(self.bv.start)
def extract_global_features(self):
yield from self.global_features
def extract_file_features(self):
yield from capa.features.extractors.binja.file.extract_features(self.bv)
def get_functions(self) -> Iterator[FunctionHandle]:
for f in self.bv.functions:
yield FunctionHandle(address=AbsoluteVirtualAddress(f.start), inner=f)
def extract_function_features(self, fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.binja.function.extract_features(fh)
def get_basic_blocks(self, fh: FunctionHandle) -> Iterator[BBHandle]:
f: binja.Function = fh.inner
# Set up an MLIL basic block lookup dict to associate each disassembly basic block with its MLIL basic block
mlil_lookup = {}
for mlil_bb in f.mlil.basic_blocks:
mlil_lookup[mlil_bb.source_block.start] = mlil_bb
for bb in f.basic_blocks:
mlil_bb = mlil_lookup.get(bb.start)
yield BBHandle(address=AbsoluteVirtualAddress(bb.start), inner=(bb, mlil_bb))
def extract_basic_block_features(self, fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.binja.basicblock.extract_features(fh, bbh)
def get_instructions(self, fh: FunctionHandle, bbh: BBHandle) -> Iterator[InsnHandle]:
import capa.features.extractors.binja.helpers as binja_helpers
bb: Tuple[binja.BasicBlock, binja.MediumLevelILBasicBlock] = bbh.inner
addr = bb[0].start
for text, length in bb[0]:
insn = binja_helpers.DisassemblyInstruction(addr, length, text)
yield InsnHandle(address=AbsoluteVirtualAddress(addr), inner=insn)
addr += length
def extract_insn_features(self, fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle):
yield from capa.features.extractors.binja.insn.extract_features(fh, bbh, ih)
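A driving sketch for the extractor above; it assumes an already-open binaryninja.BinaryView named bv and simply walks the function -> basic block -> instruction hierarchy the class exposes:

extractor = BinjaFeatureExtractor(bv)  # bv: an already-loaded binaryninja.BinaryView (assumption)
for fh in extractor.get_functions():
    for bbh in extractor.get_basic_blocks(fh):
        for ih in extractor.get_instructions(fh, bbh):
            for feature, addr in extractor.extract_insn_features(fh, bbh, ih):
                print(addr, feature)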

View File

@@ -1,187 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import struct
from typing import Tuple, Iterator
from binaryninja import Segment, BinaryView, SymbolType, SymbolBinding
import capa.features.extractors.common
import capa.features.extractors.helpers
import capa.features.extractors.strings
from capa.features.file import Export, Import, Section, FunctionName
from capa.features.common import FORMAT_PE, FORMAT_ELF, Format, String, Feature, Characteristic
from capa.features.address import NO_ADDRESS, Address, FileOffsetAddress, AbsoluteVirtualAddress
from capa.features.extractors.binja.helpers import read_c_string, unmangle_c_name
def check_segment_for_pe(bv: BinaryView, seg: Segment) -> Iterator[Tuple[int, int]]:
"""check segment for embedded PE
adapted for binja from:
https://github.com/vivisect/vivisect/blob/7be4037b1cecc4551b397f840405a1fc606f9b53/PE/carve.py#L19
"""
mz_xor = [
(
capa.features.extractors.helpers.xor_static(b"MZ", i),
capa.features.extractors.helpers.xor_static(b"PE", i),
i,
)
for i in range(256)
]
todo = []
# If this is the first segment of the binary, skip its first byte. Otherwise, there will always be a matched
# PE at the start of the binaryview.
start = seg.start
if bv.view_type == "PE" and start == bv.start:
start += 1
for mzx, pex, i in mz_xor:
for off, _ in bv.find_all_data(start, seg.end, mzx):
todo.append((off, mzx, pex, i))
while len(todo):
off, mzx, pex, i = todo.pop()
# The MZ header has one field we will check: e_lfanew, at offset 0x3C
e_lfanew = off + 0x3C
if seg.end < (e_lfanew + 4):
continue
newoff = struct.unpack("<I", capa.features.extractors.helpers.xor_static(bv.read(e_lfanew, 4), i))[0]
peoff = off + newoff
if seg.end < (peoff + 2):
continue
if bv.read(peoff, 2) == pex:
yield off, i
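The carving loop above probes for every single-byte-XOR encoding of "MZ", then confirms by decoding e_lfanew and checking for the matching "PE". The XOR-probe idea as a standalone toy, with a local stand-in for xor_static:

def xor_static(data: bytes, key: int) -> bytes:
    return bytes(b ^ key for b in data)

# bury an xor-encoded "MZ..." marker inside a blob, then rediscover it
payload = xor_static(b"MZ plus the rest of an embedded PE image", 0x42)
blob = b"\x00" * 16 + payload

for key in range(256):
    needle = xor_static(b"MZ", key)
    off = blob.find(needle)
    if off != -1:
        print(f"candidate MZ at offset {off:#x}, xor key {key:#x}")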
def extract_file_embedded_pe(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
"""extract embedded PE features"""
for seg in bv.segments:
for ea, _ in check_segment_for_pe(bv, seg):
yield Characteristic("embedded pe"), FileOffsetAddress(ea)
def extract_file_export_names(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
"""extract function exports"""
for sym in bv.get_symbols_of_type(SymbolType.FunctionSymbol):
if sym.binding in [SymbolBinding.GlobalBinding, SymbolBinding.WeakBinding]:
name = sym.short_name
yield Export(name), AbsoluteVirtualAddress(sym.address)
unmangled_name = unmangle_c_name(name)
if name != unmangled_name:
yield Export(unmangled_name), AbsoluteVirtualAddress(sym.address)
for sym in bv.get_symbols_of_type(SymbolType.DataSymbol):
if sym.binding not in [SymbolBinding.GlobalBinding]:
continue
name = sym.short_name
if not name.startswith("__forwarder_name"):
continue
# Due to https://github.com/Vector35/binaryninja-api/issues/4641, in binja version 3.5, the symbol's name
# does not contain the DLL name. As a workaround, we read the C string at the symbol's address, which contains
# both the DLL name and the function name.
# Once the above issue is closed in the next binja stable release, we can update the code here to use the
# symbol name directly.
name = read_c_string(bv, sym.address, 1024)
forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(name)
yield Export(forwarded_name), AbsoluteVirtualAddress(sym.address)
yield Characteristic("forwarded export"), AbsoluteVirtualAddress(sym.address)
def extract_file_import_names(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
"""extract function imports
1. imports by ordinal:
- modulename.#ordinal
2. imports by name, results in two features to support importname-only
matching:
- modulename.importname
- importname
"""
for sym in bv.get_symbols_of_type(SymbolType.ImportAddressSymbol):
lib_name = str(sym.namespace)
addr = AbsoluteVirtualAddress(sym.address)
for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym.short_name, include_dll=True):
yield Import(name), addr
ordinal = sym.ordinal
if ordinal != 0 and (lib_name != ""):
ordinal_name = f"#{ordinal}"
for name in capa.features.extractors.helpers.generate_symbols(lib_name, ordinal_name, include_dll=True):
yield Import(name), addr
def extract_file_section_names(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
"""extract section names"""
for name, section in bv.sections.items():
yield Section(name), AbsoluteVirtualAddress(section.start)
def extract_file_strings(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
"""extract ASCII and UTF-16 LE strings"""
for s in bv.strings:
yield String(s.value), FileOffsetAddress(s.start)
def extract_file_function_names(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
"""
extract the names of statically-linked library functions.
"""
for sym_name in bv.symbols:
for sym in bv.symbols[sym_name]:
if sym.type not in [SymbolType.LibraryFunctionSymbol, SymbolType.FunctionSymbol]:
continue
name = sym.short_name
yield FunctionName(name), sym.address
if name.startswith("_"):
# some linkers may prefix linked routines with a `_` to avoid name collisions.
# extract features for both the mangled and un-mangled representations.
# e.g. `_fwrite` -> `fwrite`
# see: https://stackoverflow.com/a/2628384/87207
yield FunctionName(name[1:]), sym.address
def extract_file_format(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
view_type = bv.view_type
if view_type in ["PE", "COFF"]:
yield Format(FORMAT_PE), NO_ADDRESS
elif view_type == "ELF":
yield Format(FORMAT_ELF), NO_ADDRESS
elif view_type == "Raw":
# no file type to return when processing a binary file, but we want to continue processing
return
else:
raise NotImplementedError(f"unexpected file format: {view_type}")
def extract_features(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
"""extract file features"""
for file_handler in FILE_HANDLERS:
for feature, addr in file_handler(bv):
yield feature, addr
FILE_HANDLERS = (
extract_file_export_names,
extract_file_import_names,
extract_file_strings,
extract_file_section_names,
extract_file_embedded_pe,
extract_file_function_names,
extract_file_format,
)

View File

@@ -1,35 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import subprocess
from pathlib import Path
# When the script gets executed as a standalone executable (via PyInstaller), `import binaryninja` does not work because
# we have excluded the binaryninja module in `pyinstaller.spec`. The trick here is to call the system Python and try
# to find out the path of the binaryninja module that has been installed.
# Note, including the binaryninja module in the `pyinstaller.spec` would not work, since the binaryninja module tries to
# find the binaryninja core e.g., `libbinaryninjacore.dylib`, using a relative path. And this does not work when the
# binaryninja module is extracted by the PyInstaller.
code = r"""
from pathlib import Path
from importlib import util
spec = util.find_spec('binaryninja')
if spec is not None:
if len(spec.submodule_search_locations) > 0:
path = Path(spec.submodule_search_locations[0])
# encode the path with utf8 then convert to hex, make sure it can be read and restored properly
print(str(path.parent).encode('utf8').hex())
"""
def find_binja_path() -> Path:
raw_output = subprocess.check_output(["python", "-c", code]).decode("ascii").strip()
return Path(bytes.fromhex(raw_output).decode("utf8"))
if __name__ == "__main__":
print(find_binja_path())
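The hex round-trip above keeps the child process's stdout ASCII-safe even when the install path contains non-ASCII characters; in isolation:

path = "/opt/binary ninja/日本語/python"  # hypothetical install path
wire = path.encode("utf8").hex()          # what the child process prints
assert bytes.fromhex(wire).decode("utf8") == path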

View File

@@ -1,104 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Tuple, Iterator
from binaryninja import Function, BinaryView, SymbolType, RegisterValueType, LowLevelILOperation
from capa.features.file import FunctionName
from capa.features.common import Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors import loops
from capa.features.extractors.base_extractor import FunctionHandle
def extract_function_calls_to(fh: FunctionHandle):
"""extract callers to a function"""
func: Function = fh.inner
for caller in func.caller_sites:
# Everything that is a code reference to the current function is considered a caller, which actually includes
# many other references that are NOT a caller. For example, an instruction `push function_start` will also be
# considered a caller to the function
llil = caller.llil
if (llil is None) or llil.operation not in [
LowLevelILOperation.LLIL_CALL,
LowLevelILOperation.LLIL_CALL_STACK_ADJUST,
LowLevelILOperation.LLIL_JUMP,
LowLevelILOperation.LLIL_TAILCALL,
]:
continue
if llil.dest.value.type not in [
RegisterValueType.ImportedAddressValue,
RegisterValueType.ConstantValue,
RegisterValueType.ConstantPointerValue,
]:
continue
address = llil.dest.value.value
if address != func.start:
continue
yield Characteristic("calls to"), AbsoluteVirtualAddress(caller.address)
def extract_function_loop(fh: FunctionHandle):
"""extract loop indicators from a function"""
func: Function = fh.inner
edges = []
# construct control flow graph
for bb in func.basic_blocks:
for edge in bb.outgoing_edges:
edges.append((bb.start, edge.target.start))
if loops.has_loop(edges):
yield Characteristic("loop"), fh.address
def extract_recursive_call(fh: FunctionHandle):
"""extract recursive function call"""
func: Function = fh.inner
bv: BinaryView = func.view
if bv is None:
return
for ref in bv.get_code_refs(func.start):
if ref.function == func:
yield Characteristic("recursive call"), fh.address
def extract_function_name(fh: FunctionHandle):
"""extract function names (e.g., symtab names)"""
func: Function = fh.inner
bv: BinaryView = func.view
if bv is None:
return
for sym in bv.get_symbols(func.start):
if sym.type not in [SymbolType.LibraryFunctionSymbol, SymbolType.FunctionSymbol]:
continue
name = sym.short_name
yield FunctionName(name), sym.address
if name.startswith("_"):
# some linkers may prefix linked routines with a `_` to avoid name collisions.
# extract features for both the mangled and un-mangled representations.
# e.g. `_fwrite` -> `fwrite`
# see: https://stackoverflow.com/a/2628384/87207
yield FunctionName(name[1:]), sym.address
def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
for func_handler in FUNCTION_HANDLERS:
for feature, addr in func_handler(fh):
yield feature, addr
FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call, extract_function_name)
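extract_function_loop only needs the edge list, so the loop check reduces to cycle detection over a directed graph. A stand-in for loops.has_loop (illustrative DFS, not capa's implementation):

def has_loop(edges) -> bool:
    graph = {}
    for src, dst in edges:
        graph.setdefault(src, set()).add(dst)

    visiting, done = set(), set()

    def dfs(node) -> bool:
        visiting.add(node)
        for nxt in graph.get(node, ()):
            if nxt in visiting:
                return True  # back edge: this path reaches itself again
            if nxt not in done and dfs(nxt):
                return True
        visiting.discard(node)
        done.add(node)
        return False

    return any(node not in done and dfs(node) for node in graph)

assert has_loop([(0x401000, 0x401010), (0x401010, 0x401000)])
assert not has_loop([(0x401000, 0x401010), (0x401010, 0x401020)])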

View File

@@ -1,60 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Tuple, Iterator
from binaryninja import BinaryView
from capa.features.common import OS, OS_MACOS, ARCH_I386, ARCH_AMD64, OS_WINDOWS, Arch, Feature
from capa.features.address import NO_ADDRESS, Address
logger = logging.getLogger(__name__)
def extract_os(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
name = bv.platform.name
if "-" in name:
name = name.split("-")[0]
if name == "windows":
yield OS(OS_WINDOWS), NO_ADDRESS
elif name == "macos":
yield OS(OS_MACOS), NO_ADDRESS
elif name in ["linux", "freebsd", "decree"]:
yield OS(name), NO_ADDRESS
else:
# we likely end up here:
# 1. handling shellcode, or
# 2. handling a new file format (e.g. macho)
#
# for (1) we can't do much - it's shellcode and all bets are off.
# we could maybe accept a further CLI argument to specify the OS,
# but I think this would be rarely used.
# rules that rely on OS conditions will fail to match on shellcode.
#
# for (2), this logic will need to be updated as the format is implemented.
logger.debug("unsupported file format: %s, will not guess OS", name)
return
def extract_arch(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]:
arch = bv.arch.name
if arch == "x86_64":
yield Arch(ARCH_AMD64), NO_ADDRESS
elif arch == "x86":
yield Arch(ARCH_I386), NO_ADDRESS
else:
# we likely end up here:
# 1. handling a new architecture (e.g. aarch64)
#
# for (1), this logic will need to be updated as the format is implemented.
logger.debug("unsupported architecture: %s", arch)
return

View File

@@ -1,69 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import re
from typing import List, Callable
from dataclasses import dataclass
from binaryninja import BinaryView, LowLevelILInstruction
from binaryninja.architecture import InstructionTextToken
@dataclass
class DisassemblyInstruction:
address: int
length: int
text: List[InstructionTextToken]
LLIL_VISITOR = Callable[[LowLevelILInstruction, LowLevelILInstruction, int], bool]
def visit_llil_exprs(il: LowLevelILInstruction, func: LLIL_VISITOR):
# BN does not really support operand indices at the disassembly level, so use the LLIL operand index as a substitute.
# Note, this is NOT always guaranteed to match the disassembly operand index.
for i, op in enumerate(il.operands):
if isinstance(op, LowLevelILInstruction) and func(op, il, i):
visit_llil_exprs(op, func)
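# illustrative sketch (not part of capa): a typical visitor for the helper
# above. the callback is invoked once per sub-expression and returns True to
# keep descending or False to prune. this example collects every constant
# value under an LLIL instruction.
def _example_collect_constants(il: LowLevelILInstruction) -> List[int]:
    from binaryninja import LowLevelILOperation

    consts: List[int] = []

    def visitor(op: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
        if op.operation in (LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR):
            consts.append(op.value.value)
            return False  # constants are leaves; nothing further to visit
        return True

    visit_llil_exprs(il, visitor)
    return consts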
def unmangle_c_name(name: str) -> str:
# https://learn.microsoft.com/en-us/cpp/build/reference/decorated-names?view=msvc-170#FormatC
# Possible variations for BaseThreadInitThunk:
# @BaseThreadInitThunk@12
# _BaseThreadInitThunk
# _BaseThreadInitThunk@12
# It is also possible for a function to have a `Stub` appended to its name:
# _lstrlenWStub@4
# A small optimization to avoid running the regex too many times;
# this still increases the unit test execution time from 170s to 200s, so there is room to speed it up further
#
# TODO(xusheng): performance optimizations to improve test execution time
# https://github.com/mandiant/capa/issues/1610
if name[0] in ["@", "_"]:
match = re.match(r"^[@|_](.*?)(Stub)?(@\d+)?$", name)
if match:
return match.group(1)
return name
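# for illustration (not part of capa): the variations listed above, as quick
# sanity checks; these mirror the comment and hold for the regex as written:
def _example_unmangle_checks() -> None:
    assert unmangle_c_name("@BaseThreadInitThunk@12") == "BaseThreadInitThunk"
    assert unmangle_c_name("_BaseThreadInitThunk") == "BaseThreadInitThunk"
    assert unmangle_c_name("_lstrlenWStub@4") == "lstrlenW"
    assert unmangle_c_name("CreateFileA") == "CreateFileA"  # not mangled: unchanged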
def read_c_string(bv: BinaryView, offset: int, max_len: int) -> str:
s: List[str] = []
while len(s) < max_len:
try:
c = bv.read(offset + len(s), 1)[0]
except Exception:
break
if c == 0:
break
s.append(chr(c))
return "".join(s)

View File

@@ -1,586 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import List, Tuple, Union, Iterator, Optional
from binaryninja import Function
from binaryninja import BasicBlock as BinjaBasicBlock
from binaryninja import (
BinaryView,
ILRegister,
SymbolType,
BinaryReader,
RegisterValueType,
LowLevelILOperation,
LowLevelILInstruction,
)
import capa.features.extractors.helpers
from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
from capa.features.common import MAX_BYTES_FEATURE_SIZE, Bytes, String, Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.binja.helpers import DisassemblyInstruction, visit_llil_exprs
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
# security cookie checks may perform non-zeroing XORs; these are expected within a certain
# byte range within the first and returning basic blocks, so this check helps reduce false-positive features
SECURITY_COOKIE_BYTES_DELTA = 0x40
# check if a function is a stub function to another function/symbol. The criteria are:
# 1. The function must have only one basic block
# 2. The function must make only one call/jump to another address
# If the function being checked is a stub function, return the target address. Otherwise, return None.
def is_stub_function(bv: BinaryView, addr: int) -> Optional[int]:
funcs = bv.get_functions_at(addr)
for func in funcs:
if len(func.basic_blocks) != 1:
continue
call_count = 0
call_target = None
for il in func.llil.instructions:
if il.operation in [
LowLevelILOperation.LLIL_CALL,
LowLevelILOperation.LLIL_CALL_STACK_ADJUST,
LowLevelILOperation.LLIL_JUMP,
LowLevelILOperation.LLIL_TAILCALL,
]:
call_count += 1
if il.dest.value.type in [
RegisterValueType.ImportedAddressValue,
RegisterValueType.ConstantValue,
RegisterValueType.ConstantPointerValue,
]:
call_target = il.dest.value.value
if call_count == 1 and call_target is not None:
return call_target
return None
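# for illustration (hypothetical addresses): a typical match for
# is_stub_function is a one-basic-block import thunk such as:
#     sub_401000:
#         jmp dword [0x403010]    ; e.g. -> kernel32!CreateFileA
# returning the jump target lets the caller emit API-name features for both
# the thunk address and the symbol it forwards to.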
def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
"""
parse instruction API features
example:
call dword [0x00473038]
"""
func: Function = fh.inner
bv: BinaryView = func.view
for llil in func.get_llils_at(ih.address):
if llil.operation in [
LowLevelILOperation.LLIL_CALL,
LowLevelILOperation.LLIL_CALL_STACK_ADJUST,
LowLevelILOperation.LLIL_JUMP,
LowLevelILOperation.LLIL_TAILCALL,
]:
if llil.dest.value.type not in [
RegisterValueType.ImportedAddressValue,
RegisterValueType.ConstantValue,
RegisterValueType.ConstantPointerValue,
]:
continue
address = llil.dest.value.value
candidate_addrs = [address]
stub_addr = is_stub_function(bv, address)
if stub_addr is not None:
candidate_addrs.append(stub_addr)
for address in candidate_addrs:
for sym in func.view.get_symbols(address):
if sym is None or sym.type not in [
SymbolType.ImportAddressSymbol,
SymbolType.ImportedFunctionSymbol,
SymbolType.FunctionSymbol,
]:
continue
sym_name = sym.short_name
lib_name = ""
import_lib = bv.lookup_imported_object_library(sym.address)
if import_lib is not None:
lib_name = import_lib[0].name
if lib_name.endswith(".dll"):
lib_name = lib_name[:-4]
elif lib_name.endswith(".so"):
lib_name = lib_name[:-3]
for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name):
yield API(name), ih.address
if sym_name.startswith("_"):
for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name[1:]):
yield API(name), ih.address
def extract_insn_number_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""
parse instruction number features
example:
push 3136B0h ; dwControlCode
"""
func: Function = fh.inner
results: List[Tuple[Union[Number, OperandNumber], Address]] = []
def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
if il.operation == LowLevelILOperation.LLIL_LOAD:
return False
if il.operation not in [LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR]:
return True
for op in parent.operands:
if isinstance(op, ILRegister) and op.name in ["esp", "ebp", "rsp", "rbp", "sp"]:
return False
elif isinstance(op, LowLevelILInstruction) and op.operation == LowLevelILOperation.LLIL_REG:
if op.src.name in ["esp", "ebp", "rsp", "rbp", "sp"]:
return False
raw_value = il.value.value
if parent.operation == LowLevelILOperation.LLIL_SUB:
raw_value = -raw_value
results.append((Number(raw_value), ih.address))
results.append((OperandNumber(index, raw_value), ih.address))
return False
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)
yield from results
def extract_insn_bytes_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
"""
parse referenced byte sequences
example:
push offset iid_004118d4_IShellLinkA ; riid
"""
func: Function = fh.inner
bv: BinaryView = func.view
candidate_addrs = set()
llil = func.get_llil_at(ih.address)
if llil is None or llil.operation in [LowLevelILOperation.LLIL_CALL, LowLevelILOperation.LLIL_CALL_STACK_ADJUST]:
return
for ref in bv.get_code_refs_from(ih.address):
if ref == ih.address:
continue
if len(bv.get_functions_containing(ref)) > 0:
continue
candidate_addrs.add(ref)
# collect candidate addresses by enumerating all integer constants, https://github.com/Vector35/binaryninja-api/issues/3966
def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
if il.operation in [LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR]:
value = il.value.value
if value > 0:
candidate_addrs.add(value)
return False
return True
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)
for addr in candidate_addrs:
extracted_bytes = bv.read(addr, MAX_BYTES_FEATURE_SIZE)
if extracted_bytes and not capa.features.extractors.helpers.all_zeros(extracted_bytes):
if bv.get_string_at(addr) is None:
# don't extract byte features for obvious strings
yield Bytes(extracted_bytes), ih.address
def extract_insn_string_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""
parse instruction string features
example:
push offset aAcr ; "ACR > "
"""
func: Function = fh.inner
bv: BinaryView = func.view
candidate_addrs = set()
# collect candidate addresses from code refs directly
for ref in bv.get_code_refs_from(ih.address):
if ref == ih.address:
continue
if len(bv.get_functions_containing(ref)) > 0:
continue
candidate_addrs.add(ref)
# collect candidate addresses by enumerating all integer constants, https://github.com/Vector35/binaryninja-api/issues/3966
def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
if il.operation in [LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR]:
value = il.value.value
if value > 0:
candidate_addrs.add(value)
return False
return True
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)
# Now that we have all the candidate addresses, check each for a string or a pointer to a string
br = BinaryReader(bv)
for addr in candidate_addrs:
found = bv.get_string_at(addr)
if found:
yield String(found.value), ih.address
br.seek(addr)
pointer = None
if bv.arch.address_size == 4:
pointer = br.read32()
elif bv.arch.address_size == 8:
pointer = br.read64()
if pointer is not None:
found = bv.get_string_at(pointer)
if found:
yield String(found.value), ih.address
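# for illustration (hypothetical addresses/labels): the pointer-chasing pass
# above (read32/read64 followed by a second get_string_at) handles one level
# of indirection, e.g.:
#     push offset off_403000        ; off_403000 dd offset aHello ("hello")
# where the operand references a pointer to the string rather than the
# string data itself.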
def extract_insn_offset_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""
parse instruction structure offset features
example:
.text:0040112F cmp [esi+4], ebx
"""
func: Function = fh.inner
results: List[Tuple[Union[Offset, OperandOffset], Address]] = []
address_size = func.view.arch.address_size * 8
def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
# The most common case, read/write dereference to something like `dword [eax+0x28]`
if il.operation in [LowLevelILOperation.LLIL_ADD, LowLevelILOperation.LLIL_SUB]:
left = il.left
right = il.right
# Exclude offsets based on stack/frame pointers
if left.operation == LowLevelILOperation.LLIL_REG and left.src.name in ["esp", "ebp", "rsp", "rbp", "sp"]:
return True
if right.operation != LowLevelILOperation.LLIL_CONST:
return True
raw_value = right.value.value
# If this is not a dereference, then this must be an add and the offset must be in the range
# (0, MAX_STRUCTURE_SIZE). For example:
# add eax, 0x10
# lea ebx, [eax + 1]
if parent.operation not in [LowLevelILOperation.LLIL_LOAD, LowLevelILOperation.LLIL_STORE]:
if il.operation != LowLevelILOperation.LLIL_ADD or (not 0 < raw_value < MAX_STRUCTURE_SIZE):
return False
if address_size > 0:
# BN also encodes the constant value as two's complement, we need to restore its original value
value = capa.features.extractors.helpers.twos_complement(raw_value, address_size)
else:
value = raw_value
results.append((Offset(value), ih.address))
results.append((OperandOffset(index, value), ih.address))
return False
# An edge case: for code like `push dword [esi]`, we need to generate a feature for offset 0x0
elif il.operation in [LowLevelILOperation.LLIL_LOAD, LowLevelILOperation.LLIL_STORE]:
if il.operands[0].operation == LowLevelILOperation.LLIL_REG:
results.append((Offset(0), ih.address))
results.append((OperandOffset(index, 0), ih.address))
return False
return True
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)
yield from results
def is_nzxor_stack_cookie(f: Function, bb: BinjaBasicBlock, llil: LowLevelILInstruction) -> bool:
"""check if nzxor exists within stack cookie delta"""
# TODO(xusheng): use LLIL SSA to do more accurate analysis
# https://github.com/mandiant/capa/issues/1609
reg_names = []
if llil.left.operation == LowLevelILOperation.LLIL_REG:
reg_names.append(llil.left.src.name)
if llil.right.operation == LowLevelILOperation.LLIL_REG:
reg_names.append(llil.right.src.name)
# stack cookie reg should be stack/frame pointer
if not any(reg in ["ebp", "esp", "rbp", "rsp", "sp"] for reg in reg_names):
return False
# expect security cookie init in first basic block within first bytes (instructions)
if len(bb.incoming_edges) == 0 and llil.address < (bb.start + SECURITY_COOKIE_BYTES_DELTA):
return True
# ... or within last bytes (instructions) before a return
if len(bb.outgoing_edges) == 0 and llil.address > (bb.end - SECURITY_COOKIE_BYTES_DELTA):
return True
return False
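# for illustration: the pattern filtered here is the MSVC /GS security cookie,
# roughly:
#     mov eax, ___security_cookie
#     xor eax, ebp                ; nzxor against the frame pointer
#     mov [ebp+var_4], eax
# in the first basic block, plus the mirrored xor before __security_check_cookie
# near the return; both would otherwise yield false "nzxor" features.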
def extract_insn_nzxor_characteristic_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""
parse instruction non-zeroing XOR instruction
ignore expected non-zeroing XORs, e.g. security cookies
"""
func: Function = fh.inner
results = []
def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
# If the two operands of the xor instruction are the same, the LLIL will be translated to other instructions,
# e.g., `eax = 0` (LLIL_SET_REG), so we do not need to check whether the two operands are the same.
if il.operation == LowLevelILOperation.LLIL_XOR:
# Exclude cases related to the stack cookie
if is_nzxor_stack_cookie(fh.inner, bbh.inner[0], il):
return False
results.append((Characteristic("nzxor"), ih.address))
return False
else:
return True
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)
yield from results
def extract_insn_mnemonic_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""parse instruction mnemonic features"""
insn: DisassemblyInstruction = ih.inner
yield Mnemonic(insn.text[0].text), ih.address
def extract_insn_obfs_call_plus_5_characteristic_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""
parse call $+5 instruction from the given instruction.
"""
insn: DisassemblyInstruction = ih.inner
if insn.text[0].text == "call" and insn.text[2].text == "$+5" and insn.length == 5:
yield Characteristic("call $+5"), ih.address
def extract_insn_peb_access_characteristic_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""parse instruction peb access
fs:[0x30] on x86, gs:[0x60] on x64
"""
func: Function = fh.inner
results = []
def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
if il.operation != LowLevelILOperation.LLIL_LOAD:
return True
src = il.src
if src.operation != LowLevelILOperation.LLIL_ADD:
return True
left = src.left
right = src.right
if left.operation != LowLevelILOperation.LLIL_REG:
return True
reg = left.src.name
if right.operation != LowLevelILOperation.LLIL_CONST:
return True
value = right.value.value
if (reg, value) not in (("fsbase", 0x30), ("gsbase", 0x60)):
return True
results.append((Characteristic("peb access"), ih.address))
return False
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)
yield from results
def extract_insn_segment_access_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""parse instruction fs or gs access"""
func: Function = fh.inner
results = []
def llil_checker(il: LowLevelILInstruction, parent: LowLevelILInstruction, index: int) -> bool:
if il.operation == LowLevelILOperation.LLIL_REG:
reg = il.src.name
if reg == "fsbase":
results.append((Characteristic("fs access"), ih.address))
return False
elif reg == "gsbase":
results.append((Characteristic("gs access"), ih.address))
return False
return False
return True
for llil in func.get_llils_at(ih.address):
visit_llil_exprs(llil, llil_checker)
yield from results
def extract_insn_cross_section_cflow(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""inspect the instruction for a CALL or JMP that crosses section boundaries"""
func: Function = fh.inner
bv: BinaryView = func.view
if bv is None:
return
seg1 = bv.get_segment_at(ih.address)
sections1 = bv.get_sections_at(ih.address)
for ref in bv.get_code_refs_from(ih.address):
if len(bv.get_functions_at(ref)) == 0:
continue
seg2 = bv.get_segment_at(ref)
sections2 = bv.get_sections_at(ref)
if seg1 != seg2 or sections1 != sections2:
yield Characteristic("cross section flow"), ih.address
def extract_function_calls_from(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract functions calls from features
most relevant at the function scope, however, its most efficient to extract at the instruction scope
"""
func: Function = fh.inner
bv: BinaryView = func.view
if bv is None:
return
for il in func.get_llils_at(ih.address):
if il.operation not in [
LowLevelILOperation.LLIL_CALL,
LowLevelILOperation.LLIL_CALL_STACK_ADJUST,
LowLevelILOperation.LLIL_TAILCALL,
]:
continue
dest = il.dest
if dest.operation == LowLevelILOperation.LLIL_CONST_PTR:
value = dest.value.value
yield Characteristic("calls from"), AbsoluteVirtualAddress(value)
elif dest.operation == LowLevelILOperation.LLIL_CONST:
yield Characteristic("calls from"), AbsoluteVirtualAddress(dest.value)
elif dest.operation == LowLevelILOperation.LLIL_LOAD:
indirect_src = dest.src
if indirect_src.operation == LowLevelILOperation.LLIL_CONST_PTR:
value = indirect_src.value.value
yield Characteristic("calls from"), AbsoluteVirtualAddress(value)
elif indirect_src.operation == LowLevelILOperation.LLIL_CONST:
yield Characteristic("calls from"), AbsoluteVirtualAddress(indirect_src.value)
elif dest.operation == LowLevelILOperation.LLIL_REG:
if dest.value.type in [
RegisterValueType.ImportedAddressValue,
RegisterValueType.ConstantValue,
RegisterValueType.ConstantPointerValue,
]:
yield Characteristic("calls from"), AbsoluteVirtualAddress(dest.value.value)
def extract_function_indirect_call_characteristic_features(
fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""extract indirect function calls (e.g., call eax or call dword ptr [edx+4])
does not include calls like => call ds:dword_ABD4974
most relevant at the function or basic block scope;
however, it's most efficient to extract at the instruction scope
"""
func: Function = fh.inner
llil = func.get_llil_at(ih.address)
if llil is None or llil.operation not in [
LowLevelILOperation.LLIL_CALL,
LowLevelILOperation.LLIL_CALL_STACK_ADJUST,
LowLevelILOperation.LLIL_TAILCALL,
]:
return
if llil.dest.operation in [LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR]:
return
if llil.dest.operation == LowLevelILOperation.LLIL_LOAD:
src = llil.dest.src
if src.operation in [LowLevelILOperation.LLIL_CONST, LowLevelILOperation.LLIL_CONST_PTR]:
return
yield Characteristic("indirect call"), ih.address
def extract_features(f: FunctionHandle, bbh: BBHandle, insn: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract instruction features"""
for inst_handler in INSTRUCTION_HANDLERS:
for feature, ea in inst_handler(f, bbh, insn):
yield feature, ea
INSTRUCTION_HANDLERS = (
extract_insn_api_features,
extract_insn_number_features,
extract_insn_bytes_features,
extract_insn_string_features,
extract_insn_offset_features,
extract_insn_nzxor_characteristic_features,
extract_insn_mnemonic_features,
extract_insn_obfs_call_plus_5_characteristic_features,
extract_insn_peb_access_characteristic_features,
extract_insn_cross_section_cflow,
extract_insn_segment_access_features,
extract_function_calls_from,
extract_function_indirect_call_characteristic_features,
)

View File

@@ -1,62 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Tuple, Iterator
from capa.helpers import assert_never
from capa.features.insn import API, Number
from capa.features.common import String, Feature
from capa.features.address import Address
from capa.features.extractors.cape.models import Call
from capa.features.extractors.base_extractor import CallHandle, ThreadHandle, ProcessHandle
logger = logging.getLogger(__name__)
def extract_call_features(ph: ProcessHandle, th: ThreadHandle, ch: CallHandle) -> Iterator[Tuple[Feature, Address]]:
"""
this method extracts the given call's features (such as API name and arguments),
and returns them as API, Number, and String features.
args:
ph: process handle (for defining the extraction scope)
th: thread handle (for defining the extraction scope)
ch: call handle (for defining the extraction scope)
yields:
Feature, address; where Feature is either: API, Number, or String.
"""
call: Call = ch.inner
# list similar to disassembly: arguments right-to-left, call
for arg in reversed(call.arguments):
value = arg.value
if isinstance(value, list) and len(value) == 0:
# unsure why CAPE sometimes captures arguments as empty lists
continue
elif isinstance(value, str):
yield String(value), ch.address
elif isinstance(value, int):
yield Number(value), ch.address
else:
assert_never(value)
yield API(call.api), ch.address
def extract_features(ph: ProcessHandle, th: ThreadHandle, ch: CallHandle) -> Iterator[Tuple[Feature, Address]]:
for handler in CALL_HANDLERS:
for feature, addr in handler(ph, th, ch):
yield feature, addr
CALL_HANDLERS = (extract_call_features,)

View File

@@ -1,153 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Dict, Tuple, Union, Iterator
import capa.helpers
import capa.features.extractors.cape.call
import capa.features.extractors.cape.file
import capa.features.extractors.cape.thread
import capa.features.extractors.cape.global_
import capa.features.extractors.cape.process
from capa.exceptions import EmptyReportError, UnsupportedFormatError
from capa.features.common import Feature, Characteristic
from capa.features.address import NO_ADDRESS, Address, AbsoluteVirtualAddress, _NoAddress
from capa.features.extractors.cape.models import Call, Static, Process, CapeReport
from capa.features.extractors.base_extractor import (
CallHandle,
SampleHashes,
ThreadHandle,
ProcessHandle,
DynamicFeatureExtractor,
)
logger = logging.getLogger(__name__)
TESTED_VERSIONS = {"2.2-CAPE", "2.4-CAPE"}
class CapeExtractor(DynamicFeatureExtractor):
def __init__(self, report: CapeReport):
super().__init__(
hashes=SampleHashes(
md5=report.target.file.md5.lower(),
sha1=report.target.file.sha1.lower(),
sha256=report.target.file.sha256.lower(),
)
)
self.report: CapeReport = report
# pre-compute these because we'll yield them at *every* scope.
self.global_features = list(capa.features.extractors.cape.global_.extract_features(self.report))
def get_base_address(self) -> Union[AbsoluteVirtualAddress, _NoAddress, None]:
# value according to the PE header; the actual trace may use a different imagebase
assert self.report.static is not None and self.report.static.pe is not None
return AbsoluteVirtualAddress(self.report.static.pe.imagebase)
def extract_global_features(self) -> Iterator[Tuple[Feature, Address]]:
yield from self.global_features
def extract_file_features(self) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.cape.file.extract_features(self.report)
def get_processes(self) -> Iterator[ProcessHandle]:
yield from capa.features.extractors.cape.file.get_processes(self.report)
def extract_process_features(self, ph: ProcessHandle) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.cape.process.extract_features(ph)
def get_process_name(self, ph) -> str:
process: Process = ph.inner
return process.process_name
def get_threads(self, ph: ProcessHandle) -> Iterator[ThreadHandle]:
yield from capa.features.extractors.cape.process.get_threads(ph)
def extract_thread_features(self, ph: ProcessHandle, th: ThreadHandle) -> Iterator[Tuple[Feature, Address]]:
if False:
# force this routine to be a generator,
# but we don't actually have any elements to generate.
yield Characteristic("never"), NO_ADDRESS
return
def get_calls(self, ph: ProcessHandle, th: ThreadHandle) -> Iterator[CallHandle]:
yield from capa.features.extractors.cape.thread.get_calls(ph, th)
def extract_call_features(
self, ph: ProcessHandle, th: ThreadHandle, ch: CallHandle
) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.cape.call.extract_features(ph, th, ch)
def get_call_name(self, ph, th, ch) -> str:
call: Call = ch.inner
parts = []
parts.append(call.api)
parts.append("(")
for argument in call.arguments:
parts.append(argument.name)
parts.append("=")
if argument.pretty_value:
parts.append(argument.pretty_value)
else:
if isinstance(argument.value, int):
parts.append(hex(argument.value))
elif isinstance(argument.value, str):
parts.append('"')
parts.append(argument.value)
parts.append('"')
elif isinstance(argument.value, list):
pass
else:
capa.helpers.assert_never(argument.value)
parts.append(", ")
if call.arguments:
# remove the trailing comma
parts.pop()
parts.append(")")
parts.append(" -> ")
if call.pretty_return:
parts.append(call.pretty_return)
else:
parts.append(hex(call.return_))
return "".join(parts)
@classmethod
def from_report(cls, report: Dict) -> "CapeExtractor":
cr = CapeReport.model_validate(report)
if cr.info.version not in TESTED_VERSIONS:
logger.warning("CAPE version '%s' not tested/supported yet", cr.info.version)
# TODO(mr-tz): support more file types
# https://github.com/mandiant/capa/issues/1933
if "PE" not in cr.target.file.type:
logger.error(
"capa currently only supports PE target files, this target file's type is: '%s'.\nPlease report this at: https://github.com/mandiant/capa/issues/1933",
cr.target.file.type,
)
# observed in 2.4-CAPE reports from capesandbox.com
if cr.static is None and cr.target.file.pe is not None:
cr.static = Static()
cr.static.pe = cr.target.file.pe
if cr.static is None:
raise UnsupportedFormatError("CAPE report missing static analysis")
if cr.static.pe is None:
raise UnsupportedFormatError("CAPE report missing PE analysis")
if len(cr.behavior.processes) == 0:
raise EmptyReportError("CAPE did not capture any processes")
return cls(cr)

View File

@@ -1,132 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Tuple, Iterator
from capa.features.file import Export, Import, Section
from capa.features.common import String, Feature
from capa.features.address import NO_ADDRESS, Address, ProcessAddress, AbsoluteVirtualAddress
from capa.features.extractors.helpers import generate_symbols
from capa.features.extractors.cape.models import CapeReport
from capa.features.extractors.base_extractor import ProcessHandle
logger = logging.getLogger(__name__)
def get_processes(report: CapeReport) -> Iterator[ProcessHandle]:
"""
get all the created processes for a sample
"""
seen_processes = {}
for process in report.behavior.processes:
addr = ProcessAddress(pid=process.process_id, ppid=process.parent_id)
yield ProcessHandle(address=addr, inner=process)
# check for pid and ppid reuse
if addr not in seen_processes:
seen_processes[addr] = [process]
else:
logger.warning(
"pid and ppid reuse detected between process %s and process%s: %s",
process,
"es" if len(seen_processes[addr]) > 1 else "",
seen_processes[addr],
)
seen_processes[addr].append(process)
def extract_import_names(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
"""
extract imported function names
"""
assert report.static is not None and report.static.pe is not None
imports = report.static.pe.imports
if isinstance(imports, dict):
imports = list(imports.values())
assert isinstance(imports, list)
for library in imports:
for function in library.imports:
if not function.name:
continue
for name in generate_symbols(library.dll, function.name, include_dll=True):
yield Import(name), AbsoluteVirtualAddress(function.address)
def extract_export_names(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
assert report.static is not None and report.static.pe is not None
for function in report.static.pe.exports:
yield Export(function.name), AbsoluteVirtualAddress(function.address)
def extract_section_names(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
assert report.static is not None and report.static.pe is not None
for section in report.static.pe.sections:
yield Section(section.name), AbsoluteVirtualAddress(section.virtual_address)
def extract_file_strings(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
if report.strings is not None:
for string in report.strings:
yield String(string), NO_ADDRESS
def extract_used_regkeys(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
for regkey in report.behavior.summary.keys:
yield String(regkey), NO_ADDRESS
def extract_used_files(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
for file in report.behavior.summary.files:
yield String(file), NO_ADDRESS
def extract_used_mutexes(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
for mutex in report.behavior.summary.mutexes:
yield String(mutex), NO_ADDRESS
def extract_used_commands(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
for cmd in report.behavior.summary.executed_commands:
yield String(cmd), NO_ADDRESS
def extract_used_apis(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
for symbol in report.behavior.summary.resolved_apis:
yield String(symbol), NO_ADDRESS
def extract_used_services(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
for svc in report.behavior.summary.created_services:
yield String(svc), NO_ADDRESS
for svc in report.behavior.summary.started_services:
yield String(svc), NO_ADDRESS
def extract_features(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
for handler in FILE_HANDLERS:
for feature, addr in handler(report):
yield feature, addr
FILE_HANDLERS = (
extract_import_names,
extract_export_names,
extract_section_names,
extract_file_strings,
extract_used_regkeys,
extract_used_files,
extract_used_mutexes,
extract_used_commands,
extract_used_apis,
extract_used_services,
)

View File

@@ -1,93 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Tuple, Iterator
from capa.features.common import (
OS,
OS_ANY,
OS_LINUX,
ARCH_I386,
FORMAT_PE,
ARCH_AMD64,
FORMAT_ELF,
OS_WINDOWS,
Arch,
Format,
Feature,
)
from capa.features.address import NO_ADDRESS, Address
from capa.features.extractors.cape.models import CapeReport
logger = logging.getLogger(__name__)
def extract_arch(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
if "Intel 80386" in report.target.file.type:
yield Arch(ARCH_I386), NO_ADDRESS
elif "x86-64" in report.target.file.type:
yield Arch(ARCH_AMD64), NO_ADDRESS
else:
logger.warning("unrecognized Architecture: %s", report.target.file.type)
raise ValueError(
f"unrecognized Architecture from the CAPE report; output of file command: {report.target.file.type}"
)
def extract_format(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
if "PE" in report.target.file.type:
yield Format(FORMAT_PE), NO_ADDRESS
elif "ELF" in report.target.file.type:
yield Format(FORMAT_ELF), NO_ADDRESS
else:
logger.warning("unknown file format, file command output: %s", report.target.file.type)
raise ValueError(
"unrecognized file format from the CAPE report; output of file command: {report.target.file.type}"
)
def extract_os(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
# this variable contains the output of the file command
file_output = report.target.file.type
if "windows" in file_output.lower():
yield OS(OS_WINDOWS), NO_ADDRESS
elif "elf" in file_output.lower():
# operating systems recognized by the file command: https://github.com/file/file/blob/master/src/readelf.c#L609
if "Linux" in file_output:
yield OS(OS_LINUX), NO_ADDRESS
elif "Hurd" in file_output:
yield OS("hurd"), NO_ADDRESS
elif "Solaris" in file_output:
yield OS("solaris"), NO_ADDRESS
elif "kFreeBSD" in file_output:
yield OS("freebsd"), NO_ADDRESS
elif "kNetBSD" in file_output:
yield OS("netbsd"), NO_ADDRESS
else:
# if the operating system information is missing from the cape report, it's likely a bug
logger.warning("unrecognized OS: %s", file_output)
raise ValueError("unrecognized OS from the CAPE report; output of file command: {file_output}")
else:
# the sample is shellcode
logger.debug("unsupported file format, file command output: %s", file_output)
yield OS(OS_ANY), NO_ADDRESS
def extract_features(report: CapeReport) -> Iterator[Tuple[Feature, Address]]:
for global_handler in GLOBAL_HANDLER:
for feature, addr in global_handler(report):
yield feature, addr
GLOBAL_HANDLER = (
extract_format,
extract_os,
extract_arch,
)

View File

@@ -1,29 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Any, Dict, List
from capa.features.extractors.base_extractor import ProcessHandle
def find_process(processes: List[Dict[str, Any]], ph: ProcessHandle) -> Dict[str, Any]:
"""
find a specific process identified by a process handler.
args:
processes: a list of processes extracted by CAPE
ph: handle of the sought process
return:
a CAPE-defined dictionary for the sought process' information
"""
for process in processes:
if ph.address.ppid == process["parent_id"] and ph.address.pid == process["process_id"]:
return process
return {}

View File

@@ -1,446 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import binascii
from typing import Any, Dict, List, Union, Literal, Optional
from pydantic import Field, BaseModel, ConfigDict
from typing_extensions import Annotated, TypeAlias
from pydantic.functional_validators import BeforeValidator
def validate_hex_int(value):
if isinstance(value, str):
return int(value, 16) if value.startswith("0x") else int(value, 10)
else:
return value
def validate_hex_bytes(value):
return binascii.unhexlify(value) if isinstance(value, str) else value
HexInt = Annotated[int, BeforeValidator(validate_hex_int)]
HexBytes = Annotated[bytes, BeforeValidator(validate_hex_bytes)]
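# illustrative sketch (not part of capa): how these annotated types behave
# inside a model. string values are normalized by the BeforeValidator before
# the usual int/bytes validation runs; non-string values pass through as-is.
class _HexExample(BaseModel):
    address: HexInt
    blob: HexBytes

# _HexExample(address="0x401000", blob="4d5a") -> address=0x401000, blob=b"MZ"
# _HexExample(address=4198400, blob=b"MZ")     -> values pass through unchanged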
# a model that *cannot* have extra fields
# if they do, pydantic raises an exception.
# use this for models we rely upon and cannot change.
#
# for things that may be extended and we don't care,
# use FlexibleModel.
class ExactModel(BaseModel):
model_config = ConfigDict(extra="forbid")
# a model that can have extra fields that we ignore.
# use this if we don't want to raise an exception for extra
# data fields that we didn't expect.
class FlexibleModel(BaseModel):
pass
# use this type to indicate that we won't model this data.
# because it's not relevant to our use in capa.
#
# while it's nice to have full coverage of the data shape,
# it can easily change and break our parsing.
# so we really only want to describe what we'll use.
Skip: TypeAlias = Optional[Any]
# mark fields that we haven't seen yet and need to model.
# pydantic should raise an error when encountering data
# in a field with this type.
# then we can update the model with the discovered shape.
TODO: TypeAlias = None
ListTODO: TypeAlias = List[None]
DictTODO: TypeAlias = ExactModel
EmptyDict: TypeAlias = BaseModel
EmptyList: TypeAlias = List[Any]
class Info(FlexibleModel):
version: str
class ImportedSymbol(ExactModel):
address: HexInt
name: Optional[str] = None
class ImportedDll(ExactModel):
dll: str
imports: List[ImportedSymbol]
class DirectoryEntry(ExactModel):
name: str
virtual_address: HexInt
size: HexInt
class Section(ExactModel):
name: str
raw_address: HexInt
virtual_address: HexInt
virtual_size: HexInt
size_of_data: HexInt
characteristics: str
characteristics_raw: HexInt
entropy: float
class Resource(ExactModel):
name: str
language: Optional[str] = None
sublanguage: str
filetype: Optional[str]
offset: HexInt
size: HexInt
entropy: float
class DigitalSigner(FlexibleModel):
md5_fingerprint: str
not_after: str
not_before: str
serial_number: str
sha1_fingerprint: str
sha256_fingerprint: str
issuer_commonName: Optional[str] = None
issuer_countryName: Optional[str] = None
issuer_localityName: Optional[str] = None
issuer_organizationName: Optional[str] = None
issuer_stateOrProvinceName: Optional[str] = None
subject_commonName: Optional[str] = None
subject_countryName: Optional[str] = None
subject_localityName: Optional[str] = None
subject_organizationName: Optional[str] = None
subject_stateOrProvinceName: Optional[str] = None
extensions_authorityInfoAccess_caIssuers: Optional[str] = None
extensions_authorityKeyIdentifier: Optional[str] = None
extensions_cRLDistributionPoints_0: Optional[str] = None
extensions_certificatePolicies_0: Optional[str] = None
extensions_subjectAltName_0: Optional[str] = None
extensions_subjectKeyIdentifier: Optional[str] = None
class AuxSigner(ExactModel):
name: str
issued_to: str = Field(alias="Issued to")
issued_by: str = Field(alias="Issued by")
expires: str = Field(alias="Expires")
sha1_hash: str = Field(alias="SHA1 hash")
class Signer(ExactModel):
aux_sha1: Optional[str] = None
aux_timestamp: Optional[str] = None
aux_valid: Optional[bool] = None
aux_error: Optional[bool] = None
aux_error_desc: Optional[str] = None
aux_signers: Optional[List[AuxSigner]] = None
class Overlay(ExactModel):
offset: HexInt
size: HexInt
class KV(ExactModel):
name: str
value: str
class ExportedSymbol(ExactModel):
address: HexInt
name: str
ordinal: int
class PE(ExactModel):
peid_signatures: TODO
imagebase: HexInt
entrypoint: HexInt
reported_checksum: HexInt
actual_checksum: HexInt
osversion: str
pdbpath: Optional[str] = None
timestamp: str
# List[ImportedDll], or Dict[basename(dll), ImportedDll]
imports: Union[List[ImportedDll], Dict[str, ImportedDll]]
imported_dll_count: Optional[int] = None
imphash: str
exported_dll_name: Optional[str] = None
exports: List[ExportedSymbol]
dirents: List[DirectoryEntry]
sections: List[Section]
ep_bytes: Optional[HexBytes] = None
overlay: Optional[Overlay] = None
resources: List[Resource]
versioninfo: List[KV]
# base64 encoded data
icon: Optional[str] = None
# MD5-like hash
icon_hash: Optional[str] = None
# MD5-like hash
icon_fuzzy: Optional[str] = None
# short hex string
icon_dhash: Optional[str] = None
digital_signers: List[DigitalSigner]
guest_signers: Signer
# TODO(mr-tz): target.file.dotnet, target.file.extracted_files, target.file.extracted_files_tool,
# target.file.extracted_files_time
# https://github.com/mandiant/capa/issues/1814
class File(FlexibleModel):
type: str
cape_type_code: Optional[int] = None
cape_type: Optional[str] = None
pid: Optional[Union[int, Literal[""]]] = None
name: Union[List[str], str]
path: str
guest_paths: Union[List[str], str, None]
timestamp: Optional[str] = None
#
# hashes
#
crc32: str
md5: str
sha1: str
sha256: str
sha512: str
sha3_384: str
ssdeep: str
# unsure why this would ever be "False"
tlsh: Optional[Union[str, bool]] = None
rh_hash: Optional[str] = None
#
# other metadata, static analysis
#
size: int
pe: Optional[PE] = None
ep_bytes: Optional[HexBytes] = None
entrypoint: Optional[int] = None
data: Optional[str] = None
strings: Optional[List[str]] = None
#
# detections (skip)
#
yara: Skip = None
cape_yara: Skip = None
clamav: Skip = None
virustotal: Skip = None
class ProcessFile(File):
#
# like a File, but also has dynamic analysis results
#
pid: Optional[int] = None
process_path: Optional[str] = None
process_name: Optional[str] = None
module_path: Optional[str] = None
virtual_address: Optional[HexInt] = None
target_pid: Optional[Union[int, str]] = None
target_path: Optional[str] = None
target_process: Optional[str] = None
class Argument(ExactModel):
name: str
# unsure why empty list is provided here
value: Union[HexInt, int, str, EmptyList]
pretty_value: Optional[str] = None
class Call(ExactModel):
timestamp: str
thread_id: int
category: str
api: str
arguments: List[Argument]
status: bool
return_: HexInt = Field(alias="return")
pretty_return: Optional[str] = None
repeated: int
# virtual addresses
caller: HexInt
parentcaller: HexInt
# index into calls array
id: int
class Process(ExactModel):
process_id: int
process_name: str
parent_id: int
module_path: str
first_seen: str
calls: List[Call]
threads: List[int]
environ: Dict[str, str]
class ProcessTree(ExactModel):
name: str
pid: int
parent_id: int
module_path: str
threads: List[int]
environ: Dict[str, str]
children: List["ProcessTree"]
class Summary(ExactModel):
files: List[str]
read_files: List[str]
write_files: List[str]
delete_files: List[str]
keys: List[str]
read_keys: List[str]
write_keys: List[str]
delete_keys: List[str]
executed_commands: List[str]
resolved_apis: List[str]
mutexes: List[str]
created_services: List[str]
started_services: List[str]
class EncryptedBuffer(ExactModel):
process_name: str
pid: int
api_call: str
buffer: str
buffer_size: Optional[int] = None
crypt_key: Optional[Union[HexInt, str]] = None
class Behavior(ExactModel):
summary: Summary
# list of processes, of threads, of calls
processes: List[Process]
# tree of processes
processtree: List[ProcessTree]
anomaly: List[str]
encryptedbuffers: List[EncryptedBuffer]
# these are small objects that describe atomic events,
# like file move, registry access.
# we'll detect the same with our API call analysis.
enhanced: Skip = None
class Target(ExactModel):
category: str
file: File
pe: Optional[PE] = None
class Static(ExactModel):
pe: Optional[PE] = None
flare_capa: Skip = None
class Cape(ExactModel):
payloads: List[ProcessFile]
configs: Skip = None
# flexible because there may be more sorts of analysis
# but we only care about the ones described here.
class CapeReport(FlexibleModel):
# the input file, I think
target: Target
# info about the processing job, like machine and distributed metadata.
info: Info
#
# static analysis results
#
static: Optional[Static] = None
strings: Optional[List[str]] = None
#
# dynamic analysis results
#
# post-processed results: process tree, anomalies, etc
behavior: Behavior
# post-processed results: payloads and extracted configs
CAPE: Optional[Cape] = None
dropped: Optional[List[File]] = None
procdump: Optional[List[ProcessFile]] = None
procmemory: ListTODO
# =========================================================================
# information we won't use in capa
#
#
# NBIs and HBIs
# these are super interesting, but they don't enable us to detect behaviors.
# they take a lot of code to model and details to maintain.
#
# if we come up with a future use for this, go ahead and re-enable!
#
network: Skip = None
suricata: Skip = None
curtain: Skip = None
sysmon: Skip = None
url_analysis: Skip = None
# screenshot hash values
deduplicated_shots: Skip = None
# k-v pairs describing the time it took to run each stage.
statistics: Skip = None
# k-v pairs of ATT&CK ID to signature name or similar.
ttps: Skip = None
# debug log messages
debug: Skip = None
# various signature matches
# we could potentially extend capa to use this info one day,
# though it would be quite sandbox-specific,
# and more detection-oriented than capability detection.
signatures: Skip = None
malfamily_tag: Optional[str] = None
malscore: float
detections: Skip = None
detections2pid: Optional[Dict[int, List[str]]] = None
# AV detections for the sample.
virustotal: Skip = None
@classmethod
def from_buf(cls, buf: bytes) -> "CapeReport":
return cls.model_validate_json(buf)
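# illustrative sketch (not part of capa): typical entry point for this model,
# assuming report.json holds a raw CAPE report:
#     report = CapeReport.from_buf(Path("report.json").read_bytes())
#     print(report.info.version, len(report.behavior.processes))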

View File

@@ -1,48 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import List, Tuple, Iterator
from capa.features.common import String, Feature
from capa.features.address import Address, ThreadAddress
from capa.features.extractors.cape.models import Process
from capa.features.extractors.base_extractor import ThreadHandle, ProcessHandle
logger = logging.getLogger(__name__)
def get_threads(ph: ProcessHandle) -> Iterator[ThreadHandle]:
"""
get the threads associated with a given process
"""
process: Process = ph.inner
threads: List[int] = process.threads
for thread in threads:
address: ThreadAddress = ThreadAddress(process=ph.address, tid=thread)
yield ThreadHandle(address=address, inner={})
def extract_environ_strings(ph: ProcessHandle) -> Iterator[Tuple[Feature, Address]]:
"""
extract strings from a process' provided environment variables.
"""
process: Process = ph.inner
for value in (value for value in process.environ.values() if value):
yield String(value), ph.address
def extract_features(ph: ProcessHandle) -> Iterator[Tuple[Feature, Address]]:
for handler in PROCESS_HANDLERS:
for feature, addr in handler(ph):
yield feature, addr
PROCESS_HANDLERS = (extract_environ_strings,)

View File

@@ -1,32 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Iterator
from capa.features.address import DynamicCallAddress
from capa.features.extractors.helpers import generate_symbols
from capa.features.extractors.cape.models import Process
from capa.features.extractors.base_extractor import CallHandle, ThreadHandle, ProcessHandle
logger = logging.getLogger(__name__)
def get_calls(ph: ProcessHandle, th: ThreadHandle) -> Iterator[CallHandle]:
process: Process = ph.inner
tid = th.address.tid
for call_index, call in enumerate(process.calls):
if call.thread_id != tid:
continue
for symbol in generate_symbols("", call.api):
call.api = symbol
addr = DynamicCallAddress(thread=th.address, id=call_index)
yield CallHandle(address=addr, inner=call)

View File

@@ -1,12 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import io
import re
import logging
import binascii
import contextlib
@@ -18,34 +10,14 @@ import capa.features
import capa.features.extractors.elf
import capa.features.extractors.pefile
import capa.features.extractors.strings
from capa.features.common import (
OS,
OS_ANY,
OS_AUTO,
ARCH_ANY,
FORMAT_PE,
FORMAT_ELF,
OS_WINDOWS,
FORMAT_FREEZE,
FORMAT_RESULT,
Arch,
Format,
String,
Feature,
)
from capa.features.common import OS, FORMAT_PE, FORMAT_ELF, OS_WINDOWS, FORMAT_FREEZE, Arch, Format, String, Feature
from capa.features.freeze import is_freeze
from capa.features.address import NO_ADDRESS, Address, FileOffsetAddress
logger = logging.getLogger(__name__)
# match strings for formats
MATCH_PE = b"MZ"
MATCH_ELF = b"\x7fELF"
MATCH_RESULT = b'{"meta":'
MATCH_JSON_OBJECT = b'{"'
def extract_file_strings(buf: bytes, **kwargs) -> Iterator[Tuple[String, Address]]:
def extract_file_strings(buf, **kwargs) -> Iterator[Tuple[String, Address]]:
"""
extract ASCII and UTF-16 LE strings from file
"""
@@ -56,20 +28,13 @@ def extract_file_strings(buf: bytes, **kwargs) -> Iterator[Tuple[String, Address
yield String(s.s), FileOffsetAddress(s.offset)
def extract_format(buf: bytes) -> Iterator[Tuple[Feature, Address]]:
if buf.startswith(MATCH_PE):
def extract_format(buf) -> Iterator[Tuple[Feature, Address]]:
if buf.startswith(b"MZ"):
yield Format(FORMAT_PE), NO_ADDRESS
elif buf.startswith(MATCH_ELF):
elif buf.startswith(b"\x7fELF"):
yield Format(FORMAT_ELF), NO_ADDRESS
elif is_freeze(buf):
yield Format(FORMAT_FREEZE), NO_ADDRESS
elif buf.startswith(MATCH_RESULT):
yield Format(FORMAT_RESULT), NO_ADDRESS
elif re.sub(rb"\s", b"", buf[:20]).startswith(MATCH_JSON_OBJECT):
# potential start of JSON object data without whitespace
# we don't know what it is exactly, but may support it (e.g. a dynamic CAPE sandbox report)
# skip verdict here and let subsequent code analyze this further
return
else:
# we likely end up here:
# 1. handling a file format (e.g. macho)
@@ -80,13 +45,10 @@ def extract_format(buf: bytes) -> Iterator[Tuple[Feature, Address]]:
def extract_arch(buf) -> Iterator[Tuple[Feature, Address]]:
if buf.startswith(MATCH_PE):
if buf.startswith(b"MZ"):
yield from capa.features.extractors.pefile.extract_file_arch(pe=pefile.PE(data=buf))
elif buf.startswith(MATCH_RESULT):
yield Arch(ARCH_ANY), NO_ADDRESS
elif buf.startswith(MATCH_ELF):
elif buf.startswith(b"\x7fELF"):
with contextlib.closing(io.BytesIO(buf)) as f:
arch = capa.features.extractors.elf.detect_elf_arch(f)
@@ -111,15 +73,10 @@ def extract_arch(buf) -> Iterator[Tuple[Feature, Address]]:
return
def extract_os(buf, os=OS_AUTO) -> Iterator[Tuple[Feature, Address]]:
if os != OS_AUTO:
yield OS(os), NO_ADDRESS
if buf.startswith(MATCH_PE):
def extract_os(buf) -> Iterator[Tuple[Feature, Address]]:
if buf.startswith(b"MZ"):
yield OS(OS_WINDOWS), NO_ADDRESS
elif buf.startswith(MATCH_RESULT):
yield OS(OS_ANY), NO_ADDRESS
elif buf.startswith(MATCH_ELF):
elif buf.startswith(b"\x7fELF"):
with contextlib.closing(io.BytesIO(buf)) as f:
os = capa.features.extractors.elf.detect_elf_os(f)
@@ -135,6 +92,8 @@ def extract_os(buf, os=OS_AUTO) -> Iterator[Tuple[Feature, Address]]:
# 2. handling a new file format (e.g. macho)
#
# for (1) we can't do much - it's shellcode and all bets are off.
# we could maybe accept a further CLI argument to specify the OS,
# but I think this would be rarely used.
# rules that rely on OS conditions will fail to match on shellcode.
#
# for (2), this logic will need to be updated as the format is implemented.

View File

@@ -0,0 +1,45 @@
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Tuple, Iterator
from dncil.cil.instruction import Instruction
from capa.features.common import Feature, Characteristic
from capa.features.address import Address
from capa.features.basicblock import BasicBlock
from capa.features.extractors.base_extractor import BBHandle, FunctionHandle
def extract_bb_stackstring(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract stackstring indicators from basic block"""
raise NotImplementedError
def extract_bb_tight_loop(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract tight loop indicators from a basic block"""
first: Instruction = bbh.inner.instructions[0]
last: Instruction = bbh.inner.instructions[-1]
if any((last.is_br(), last.is_cond_br(), last.is_leave())):
if last.operand == first.offset:
yield Characteristic("tight loop"), bbh.address
def extract_features(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract basic block features"""
for bb_handler in BASIC_BLOCK_HANDLERS:
for feature, addr in bb_handler(fh, bbh):
yield feature, addr
yield BasicBlock(), bbh.address
BASIC_BLOCK_HANDLERS = (
extract_bb_tight_loop,
# extract_bb_stackstring,
)
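To exercise the tight-loop rule above in isolation, a toy sketch with a hypothetical stand-in for dncil's Instruction (real instructions come from a decoded method body):

class FakeInsn:  # hypothetical stand-in for dncil.cil.instruction.Instruction
    def __init__(self, offset, operand=None, br=False):
        self.offset, self.operand, self._br = offset, operand, br

    def is_br(self):
        return self._br

    def is_cond_br(self):
        return False

    def is_leave(self):
        return False

# a block whose trailing unconditional branch targets the block's first offset
block = [FakeInsn(0x00), FakeInsn(0x02, operand=0x00, br=True)]
first, last = block[0], block[-1]
print(any((last.is_br(), last.is_cond_br(), last.is_leave())) and last.operand == first.offset)  # True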

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,27 +8,22 @@
from __future__ import annotations
from typing import Dict, List, Tuple, Union, Iterator, Optional
from pathlib import Path
from typing import Set, Dict, List, Tuple, Union, Iterator, Optional
import dnfile
from dncil.cil.opcode import OpCodes
from dncil.cil.instruction import Instruction
import capa.features.extractors
import capa.features.extractors.dotnetfile
import capa.features.extractors.dnfile.file
import capa.features.extractors.dnfile.insn
import capa.features.extractors.dnfile.function
import capa.features.extractors.dnfile.basicblock
from capa.features.common import Feature
from capa.features.address import NO_ADDRESS, Address, DNTokenAddress, DNTokenOffsetAddress
from capa.features.extractors.dnfile.types import DnType, DnUnmanagedMethod
from capa.features.extractors.base_extractor import (
BBHandle,
InsnHandle,
SampleHashes,
FunctionHandle,
StaticFeatureExtractor,
)
from capa.features.extractors.dnfile.types import DnType, DnBasicBlock, DnUnmanagedMethod
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle, FeatureExtractor
from capa.features.extractors.dnfile.helpers import (
get_dotnet_types,
get_dotnet_fields,
@@ -59,25 +54,25 @@ class DnFileFeatureExtractorCache:
self.types[type_.token] = type_
def get_import(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.imports.get(token)
return self.imports.get(token, None)
def get_native_import(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.native_imports.get(token)
return self.native_imports.get(token, None)
def get_method(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.methods.get(token)
return self.methods.get(token, None)
def get_field(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.fields.get(token)
return self.fields.get(token, None)
def get_type(self, token: int) -> Optional[Union[DnType, DnUnmanagedMethod]]:
return self.types.get(token)
return self.types.get(token, None)
class DnfileFeatureExtractor(StaticFeatureExtractor):
def __init__(self, path: Path):
self.pe: dnfile.dnPE = dnfile.dnPE(str(path))
super().__init__(hashes=SampleHashes.from_bytes(path.read_bytes()))
class DnfileFeatureExtractor(FeatureExtractor):
def __init__(self, path: str):
super().__init__()
self.pe: dnfile.dnPE = dnfile.dnPE(path)
# pre-compute .NET token lookup tables; each .NET method has access to this cache for feature extraction
# most relevant at instruction scope
@@ -105,7 +100,13 @@ class DnfileFeatureExtractor(StaticFeatureExtractor):
fh: FunctionHandle = FunctionHandle(
address=DNTokenAddress(token),
inner=method,
ctx={"pe": self.pe, "calls_from": set(), "calls_to": set(), "cache": self.token_cache},
ctx={
"pe": self.pe,
"calls_from": set(),
"calls_to": set(),
"blocks": list(),
"cache": self.token_cache,
},
)
# method tokens should be unique
@@ -126,7 +127,7 @@ class DnfileFeatureExtractor(StaticFeatureExtractor):
address: DNTokenAddress = DNTokenAddress(insn.operand.value)
# record call to destination method; note: we only consider MethodDef methods for destinations
dest: Optional[FunctionHandle] = methods.get(address)
dest: Optional[FunctionHandle] = methods.get(address, None)
if dest is not None:
dest.ctx["calls_to"].add(fh.address)
@@ -134,26 +135,99 @@ class DnfileFeatureExtractor(StaticFeatureExtractor):
# those calls to other MethodDef methods e.g. calls to imported MemberRef methods
fh.ctx["calls_from"].add(address)
# calculate basic blocks
for fh in methods.values():
# calculate basic block leaders where,
# 1. The first instruction of the intermediate code is a leader
# 2. Instructions that are targets of unconditional or conditional jump/goto statements are leaders
# 3. Instructions that immediately follow unconditional or conditional jump/goto statements are considered leaders
# https://www.geeksforgeeks.org/basic-blocks-in-compiler-design/
leaders: Set[int] = set()
for idx, insn in enumerate(fh.inner.instructions):
if idx == 0:
# add #1
leaders.add(insn.offset)
if any((insn.is_br(), insn.is_cond_br(), insn.is_leave())):
# add #2
leaders.add(insn.operand)
# add #3
try:
leaders.add(fh.inner.instructions[idx + 1].offset)
except IndexError:
# may encounter branch at end of method
continue
# build basic blocks using leaders
bb_curr: Optional[DnBasicBlock] = None
for idx, insn in enumerate(fh.inner.instructions):
if insn.offset in leaders:
# new leader, new basic block
bb_curr = DnBasicBlock(instructions=[insn])
fh.ctx["blocks"].append(bb_curr)
continue
assert bb_curr is not None
bb_curr.instructions.append(insn)
# create mapping of first instruction to basic block
bb_map: Dict[int, DnBasicBlock] = {}
for bb in fh.ctx["blocks"]:
if len(bb.instructions) == 0:
# TODO: consider error?
continue
bb_map[bb.instructions[0].offset] = bb
# connect basic blocks
for idx, bb in enumerate(fh.ctx["blocks"]):
if len(bb.instructions) == 0:
# TODO: consider error?
continue
last = bb.instructions[-1]
# connect branches to other basic blocks
if any((last.is_br(), last.is_cond_br(), last.is_leave())):
bb_branch: Optional[DnBasicBlock] = bb_map.get(last.operand, None)
if bb_branch is not None:
# TODO: consider None error?
bb.succs.append(bb_branch)
bb_branch.preds.append(bb)
if any((last.is_br(), last.is_leave())):
# no fallthrough
continue
# connect fallthrough
try:
bb_next: DnBasicBlock = fh.ctx["blocks"][idx + 1]
bb.succs.append(bb_next)
bb_next.preds.append(bb)
except IndexError:
continue
yield from methods.values()
def extract_function_features(self, fh) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.dnfile.function.extract_features(fh)
def get_basic_blocks(self, f) -> Iterator[BBHandle]:
# each dotnet method is considered 1 basic block
yield BBHandle(
address=f.address,
inner=f.inner,
)
def get_basic_blocks(self, fh) -> Iterator[BBHandle]:
for bb in fh.ctx["blocks"]:
yield BBHandle(
address=DNTokenOffsetAddress(
fh.address, bb.instructions[0].offset - (fh.inner.offset + fh.inner.header_size)
),
inner=bb,
)
def extract_basic_block_features(self, fh, bbh):
# we don't support basic block features
yield from []
yield from capa.features.extractors.dnfile.basicblock.extract_features(fh, bbh)
def get_instructions(self, fh, bbh):
for insn in bbh.inner.instructions:
yield InsnHandle(
address=DNTokenOffsetAddress(bbh.address, insn.offset - (fh.inner.offset + fh.inner.header_size)),
address=DNTokenOffsetAddress(fh.address, insn.offset - (fh.inner.offset + fh.inner.header_size)),
inner=insn,
)
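The three leader rules in the comment above are easy to check by hand on a toy stream of (offset, branch-target) pairs; a minimal sketch, independent of dncil:

# toy instruction stream: (offset, branch_target or None), one byte per instruction
insns = [(0, None), (1, 4), (2, None), (3, None), (4, None), (5, 1)]

leaders = set()
for idx, (offset, target) in enumerate(insns):
    if idx == 0:
        leaders.add(offset)  # rule 1: the first instruction is a leader
    if target is not None:
        leaders.add(target)  # rule 2: branch targets are leaders
        if idx + 1 < len(insns):
            leaders.add(insns[idx + 1][0])  # rule 3: the instruction after a branch is a leader

print(sorted(leaders))  # [0, 1, 2, 4] -> blocks [0], [1], [2, 3], [4, 5]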

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -13,6 +13,7 @@ from typing import Tuple, Iterator
from capa.features.common import Feature, Characteristic
from capa.features.address import Address
from capa.features.extractors import loops
from capa.features.extractors.base_extractor import FunctionHandle
logger = logging.getLogger(__name__)
@@ -38,7 +39,13 @@ def extract_recursive_call(fh: FunctionHandle) -> Iterator[Tuple[Characteristic,
def extract_function_loop(fh: FunctionHandle) -> Iterator[Tuple[Characteristic, Address]]:
"""extract loop indicators from a function"""
raise NotImplementedError()
edges = []
for bb in fh.ctx["blocks"]:
for succ in bb.succs:
edges.append((bb.instructions[0].offset, succ.instructions[0].offset))
if loops.has_loop(edges):
yield Characteristic("loop"), fh.address
def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
@@ -47,4 +54,9 @@ def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
yield feature, addr
FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_calls_from, extract_recursive_call)
FUNCTION_HANDLERS = (
extract_function_calls_to,
extract_function_calls_from,
extract_recursive_call,
extract_function_loop,
)
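For context, loops.has_loop reduces to a cycle check over that edge list; a minimal equivalent sketch using networkx, which capa already depends on (treat the threshold semantics as an assumption):

import networkx

def has_loop(edges, threshold=2):
    # a loop exists when some strongly connected component of the
    # control flow graph contains at least `threshold` basic blocks
    g = networkx.DiGraph()
    g.add_edges_from(edges)
    return any(len(c) >= threshold for c in networkx.strongly_connected_components(g))

# block at offset 0 branches to 4, which branches back to 0: a loop
print(has_loop([(0, 4), (4, 0), (4, 8)]))  # True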

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -52,7 +52,7 @@ def resolve_dotnet_token(pe: dnfile.dnPE, token: Token) -> Union[dnfile.base.MDT
return InvalidToken(token.value)
return user_string
table: Optional[dnfile.base.ClrMetaDataTable] = pe.net.mdtables.tables.get(token.table)
table: Optional[dnfile.base.ClrMetaDataTable] = pe.net.mdtables.tables.get(token.table, None)
if table is None:
# table index is not valid
return InvalidToken(token.value)
@@ -131,14 +131,10 @@ def get_dotnet_managed_imports(pe: dnfile.dnPE) -> Iterator[DnType]:
# remove get_/set_ from MemberRef name
member_ref_name = member_ref_name[4:]
typerefnamespace, typerefname = resolve_nested_typeref_name(
member_ref.Class.row_index, member_ref.Class.row, pe
)
yield DnType(
token,
typerefname,
namespace=typerefnamespace,
member_ref.Class.row.TypeName,
namespace=member_ref.Class.row.TypeNamespace,
member=member_ref_name,
access=access,
)
@@ -192,8 +188,6 @@ def get_dotnet_managed_methods(pe: dnfile.dnPE) -> Iterator[DnType]:
TypeNamespace (index into String heap)
MethodList (index into MethodDef table; it marks the first of a contiguous run of Methods owned by this Type)
"""
nested_class_table = get_dotnet_nested_class_table_index(pe)
accessor_map: Dict[int, str] = {}
for methoddef, methoddef_access in get_dotnet_methoddef_property_accessors(pe):
accessor_map[methoddef] = methoddef_access
@@ -210,16 +204,14 @@ def get_dotnet_managed_methods(pe: dnfile.dnPE) -> Iterator[DnType]:
continue
token: int = calculate_dotnet_token_value(method.table.number, method.row_index)
access: Optional[str] = accessor_map.get(token)
access: Optional[str] = accessor_map.get(token, None)
method_name: str = method.row.Name
if method_name.startswith(("get_", "set_")):
# remove get_/set_
method_name = method_name[4:]
typedefnamespace, typedefname = resolve_nested_typedef_name(nested_class_table, rid, typedef, pe)
yield DnType(token, typedefname, namespace=typedefnamespace, member=method_name, access=access)
yield DnType(token, typedef.TypeName, namespace=typedef.TypeNamespace, member=method_name, access=access)
def get_dotnet_fields(pe: dnfile.dnPE) -> Iterator[DnType]:
@@ -233,8 +225,6 @@ def get_dotnet_fields(pe: dnfile.dnPE) -> Iterator[DnType]:
TypeNamespace (index into String heap)
FieldList (index into Field table; it marks the first of a contiguous run of Fields owned by this Type)
"""
nested_class_table = get_dotnet_nested_class_table_index(pe)
for rid, typedef in iter_dotnet_table(pe, dnfile.mdtable.TypeDef.number):
assert isinstance(typedef, dnfile.mdtable.TypeDefRow)
@@ -245,11 +235,8 @@ def get_dotnet_fields(pe: dnfile.dnPE) -> Iterator[DnType]:
if field.row is None:
logger.debug("TypeDef[0x%X] FieldList[0x%X] row is None", rid, idx)
continue
typedefnamespace, typedefname = resolve_nested_typedef_name(nested_class_table, rid, typedef, pe)
token: int = calculate_dotnet_token_value(field.table.number, field.row_index)
yield DnType(token, typedefname, namespace=typedefnamespace, member=field.row.Name)
yield DnType(token, typedef.TypeName, namespace=typedef.TypeNamespace, member=field.row.Name)
def get_dotnet_managed_method_bodies(pe: dnfile.dnPE) -> Iterator[Tuple[int, CilMethodBody]]:
@@ -313,119 +300,19 @@ def get_dotnet_unmanaged_imports(pe: dnfile.dnPE) -> Iterator[DnUnmanagedMethod]
yield DnUnmanagedMethod(token, module, method)
def get_dotnet_table_row(pe: dnfile.dnPE, table_index: int, row_index: int) -> Optional[dnfile.base.MDTableRow]:
assert pe.net is not None
assert pe.net.mdtables is not None
if row_index - 1 <= 0:
return None
try:
table = pe.net.mdtables.tables.get(table_index, [])
return table[row_index - 1]
except IndexError:
return None
def resolve_nested_typedef_name(
nested_class_table: dict, index: int, typedef: dnfile.mdtable.TypeDefRow, pe: dnfile.dnPE
) -> Tuple[str, Tuple[str, ...]]:
"""Resolves all nested TypeDef class names. Returns the namespace as a str and the nested TypeRef name as a tuple"""
if index in nested_class_table:
typedef_name = []
name = typedef.TypeName
# Append the current typedef name
typedef_name.append(name)
while nested_class_table[index] in nested_class_table:
# Iterate through the typedef table to resolve the nested name
table_row = get_dotnet_table_row(pe, dnfile.mdtable.TypeDef.number, nested_class_table[index])
if table_row is None:
return typedef.TypeNamespace, tuple(typedef_name[::-1])
name = table_row.TypeName
typedef_name.append(name)
index = nested_class_table[index]
# Document the root enclosing details
table_row = get_dotnet_table_row(pe, dnfile.mdtable.TypeDef.number, nested_class_table[index])
if table_row is None:
return typedef.TypeNamespace, tuple(typedef_name[::-1])
enclosing_name = table_row.TypeName
typedef_name.append(enclosing_name)
return table_row.TypeNamespace, tuple(typedef_name[::-1])
else:
return typedef.TypeNamespace, (typedef.TypeName,)
def resolve_nested_typeref_name(
index: int, typeref: dnfile.mdtable.TypeRefRow, pe: dnfile.dnPE
) -> Tuple[str, Tuple[str, ...]]:
"""Resolves all nested TypeRef class names. Returns the namespace as a str and the nested TypeRef name as a tuple"""
# If the ResolutionScope decodes to a typeRef type then it is nested
if isinstance(typeref.ResolutionScope.table, dnfile.mdtable.TypeRef):
typeref_name = []
name = typeref.TypeName
# Not appending the current typeref name to avoid potential duplicate
# Validate index
table_row = get_dotnet_table_row(pe, dnfile.mdtable.TypeRef.number, index)
if table_row is None:
return typeref.TypeNamespace, (typeref.TypeName,)
while isinstance(table_row.ResolutionScope.table, dnfile.mdtable.TypeRef):
# Iterate through the typeref table to resolve the nested name
typeref_name.append(name)
name = table_row.TypeName
table_row = get_dotnet_table_row(pe, dnfile.mdtable.TypeRef.number, table_row.ResolutionScope.row_index)
if table_row is None:
return typeref.TypeNamespace, tuple(typeref_name[::-1])
# Document the root enclosing details
typeref_name.append(table_row.TypeName)
return table_row.TypeNamespace, tuple(typeref_name[::-1])
else:
return typeref.TypeNamespace, (typeref.TypeName,)
def get_dotnet_nested_class_table_index(pe: dnfile.dnPE) -> Dict[int, int]:
"""Build index for EnclosingClass based off the NestedClass row index in the nestedclass table"""
nested_class_table = {}
# Used to find nested classes in typedef
for _, nestedclass in iter_dotnet_table(pe, dnfile.mdtable.NestedClass.number):
assert isinstance(nestedclass, dnfile.mdtable.NestedClassRow)
nested_class_table[nestedclass.NestedClass.row_index] = nestedclass.EnclosingClass.row_index
return nested_class_table
def get_dotnet_types(pe: dnfile.dnPE) -> Iterator[DnType]:
"""get .NET types from TypeDef and TypeRef tables"""
nested_class_table = get_dotnet_nested_class_table_index(pe)
for rid, typedef in iter_dotnet_table(pe, dnfile.mdtable.TypeDef.number):
assert isinstance(typedef, dnfile.mdtable.TypeDefRow)
typedefnamespace, typedefname = resolve_nested_typedef_name(nested_class_table, rid, typedef, pe)
typedef_token: int = calculate_dotnet_token_value(dnfile.mdtable.TypeDef.number, rid)
yield DnType(typedef_token, typedefname, namespace=typedefnamespace)
yield DnType(typedef_token, typedef.TypeName, namespace=typedef.TypeNamespace)
for rid, typeref in iter_dotnet_table(pe, dnfile.mdtable.TypeRef.number):
assert isinstance(typeref, dnfile.mdtable.TypeRefRow)
typerefnamespace, typerefname = resolve_nested_typeref_name(typeref.ResolutionScope.row_index, typeref, pe)
typeref_token: int = calculate_dotnet_token_value(dnfile.mdtable.TypeRef.number, rid)
yield DnType(typeref_token, typerefname, namespace=typerefnamespace)
yield DnType(typeref_token, typeref.TypeName, namespace=typeref.TypeNamespace)
def calculate_dotnet_token_value(table: int, rid: int) -> int:
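A toy walk-through of the nested-name resolution removed in this hunk, with plain dicts standing in for the TypeDef and NestedClass tables (values chosen for illustration):

# row index -> (namespace, type name), standing in for the TypeDef table
typedefs = {1: ("MyApp", "Outer"), 2: ("", "Inner")}
# nested row index -> enclosing row index, standing in for the NestedClass table
nested = {2: 1}

def resolve(index):
    names = [typedefs[index][1]]
    while index in nested:
        index = nested[index]
        names.append(typedefs[index][1])
    # the namespace comes from the outermost enclosing type
    return typedefs[index][0], tuple(reversed(names))

print(resolve(2))  # ('MyApp', ('Outer', 'Inner')) -> rendered as MyApp.Outer/Inner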

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,7 +9,7 @@
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Tuple, Union, Iterator, Optional
from typing import TYPE_CHECKING, Any, Dict, Tuple, Union, Iterator, Optional
if TYPE_CHECKING:
from capa.features.extractors.dnfile.extractor import DnFileFeatureExtractorCache

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,17 +6,18 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Tuple, Optional
from typing import TYPE_CHECKING, Dict, List, Optional
if TYPE_CHECKING:
from dncil.cil.instruction import Instruction
class DnType:
def __init__(
self, token: int, class_: Tuple[str, ...], namespace: str = "", member: str = "", access: Optional[str] = None
):
class DnType(object):
def __init__(self, token: int, class_: str, namespace: str = "", member: str = "", access: Optional[str] = None):
self.token: int = token
self.access: Optional[str] = access
self.namespace: str = namespace
self.class_: Tuple[str, ...] = class_
self.class_: str = class_
if member == ".ctor":
member = "ctor"
@@ -44,13 +45,9 @@ class DnType:
return str(self)
@staticmethod
def format_name(class_: Tuple[str, ...], namespace: str = "", member: str = ""):
if len(class_) > 1:
class_str = "/".join(class_) # Concat items in tuple, separated by a "/"
else:
class_str = "".join(class_) # Convert tuple to str
def format_name(class_: str, namespace: str = "", member: str = ""):
# like File::OpenRead
name: str = f"{class_str}::{member}" if member else class_str
name: str = f"{class_}::{member}" if member else class_
if namespace:
# like System.IO.File::OpenRead
name = f"{namespace}.{name}"
@@ -78,3 +75,10 @@ class DnUnmanagedMethod:
@staticmethod
def format_name(module, method):
return f"{module}.{method}"
class DnBasicBlock:
def __init__(self, preds=None, succs=None, instructions=None):
self.succs: List[DnBasicBlock] = succs or []
self.preds: List[DnBasicBlock] = preds or []
self.instructions: List[Instruction] = instructions or []
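For reference, the two format_name variants in this hunk render names as follows; each call only works against its respective side of the diff, and the import path is taken from the newer side (values chosen for illustration):

from capa.features.extractors.dnfile.types import DnType

# tuple-based variant: nested classes are joined with "/"
print(DnType.format_name(("Outer", "Inner"), namespace="MyApp", member="ctor"))
# -> MyApp.Outer/Inner::ctor

# str-based variant, like File::OpenRead qualified by its namespace
print(DnType.format_name("File", namespace="System.IO", member="OpenRead"))
# -> System.IO.File::OpenRead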

View File

@@ -0,0 +1,150 @@
import logging
from typing import Tuple, Iterator
import dnfile
import pefile
from capa.features.common import (
OS,
OS_ANY,
ARCH_ANY,
ARCH_I386,
FORMAT_PE,
ARCH_AMD64,
FORMAT_DOTNET,
Arch,
Format,
Feature,
)
from capa.features.address import NO_ADDRESS, Address, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import FeatureExtractor
logger = logging.getLogger(__name__)
def extract_file_format(**kwargs) -> Iterator[Tuple[Feature, Address]]:
yield Format(FORMAT_PE), NO_ADDRESS
yield Format(FORMAT_DOTNET), NO_ADDRESS
def extract_file_os(**kwargs) -> Iterator[Tuple[Feature, Address]]:
yield OS(OS_ANY), NO_ADDRESS
def extract_file_arch(pe: dnfile.dnPE, **kwargs) -> Iterator[Tuple[Feature, Address]]:
# to distinguish in more detail, see https://stackoverflow.com/a/23614024/10548020
# .NET 4.5 added option: any CPU, 32-bit preferred
assert pe.net is not None
assert pe.net.Flags is not None
if pe.net.Flags.CLR_32BITREQUIRED and pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE:
yield Arch(ARCH_I386), NO_ADDRESS
elif not pe.net.Flags.CLR_32BITREQUIRED and pe.PE_TYPE == pefile.OPTIONAL_HEADER_MAGIC_PE_PLUS:
yield Arch(ARCH_AMD64), NO_ADDRESS
else:
yield Arch(ARCH_ANY), NO_ADDRESS
def extract_file_features(pe: dnfile.dnPE) -> Iterator[Tuple[Feature, Address]]:
for file_handler in FILE_HANDLERS:
for feature, address in file_handler(pe=pe): # type: ignore
yield feature, address
FILE_HANDLERS = (
# extract_file_export_names,
# extract_file_import_names,
# extract_file_section_names,
# extract_file_strings,
# extract_file_function_names,
extract_file_format,
)
def extract_global_features(pe: dnfile.dnPE) -> Iterator[Tuple[Feature, Address]]:
for handler in GLOBAL_HANDLERS:
for feature, addr in handler(pe=pe): # type: ignore
yield feature, addr
GLOBAL_HANDLERS = (
extract_file_os,
extract_file_arch,
)
class DnfileFeatureExtractor(FeatureExtractor):
def __init__(self, path: str):
super().__init__()
self.path: str = path
self.pe: dnfile.dnPE = dnfile.dnPE(path)
def get_base_address(self) -> AbsoluteVirtualAddress:
return AbsoluteVirtualAddress(0x0)
def get_entry_point(self) -> int:
# self.pe.net.Flags.CLR_NATIVE_ENTRYPOINT
# True: native EP: Token
# False: managed EP: RVA
assert self.pe.net is not None
assert self.pe.net.struct is not None
return self.pe.net.struct.EntryPointTokenOrRva
def extract_global_features(self):
yield from extract_global_features(self.pe)
def extract_file_features(self):
yield from extract_file_features(self.pe)
def is_dotnet_file(self) -> bool:
return bool(self.pe.net)
def is_mixed_mode(self) -> bool:
assert self.pe is not None
assert self.pe.net is not None
assert self.pe.net.Flags is not None
return not bool(self.pe.net.Flags.CLR_ILONLY)
def get_runtime_version(self) -> Tuple[int, int]:
assert self.pe is not None
assert self.pe.net is not None
assert self.pe.net.struct is not None
return self.pe.net.struct.MajorRuntimeVersion, self.pe.net.struct.MinorRuntimeVersion
def get_meta_version_string(self) -> str:
assert self.pe.net is not None
assert self.pe.net.metadata is not None
assert self.pe.net.metadata.struct is not None
assert self.pe.net.metadata.struct.Version is not None
vbuf = self.pe.net.metadata.struct.Version
assert isinstance(vbuf, bytes)
return vbuf.rstrip(b"\x00").decode("utf-8")
def get_functions(self):
raise NotImplementedError("DnfileFeatureExtractor can only be used to extract file features")
def extract_function_features(self, f):
raise NotImplementedError("DnfileFeatureExtractor can only be used to extract file features")
def get_basic_blocks(self, f):
raise NotImplementedError("DnfileFeatureExtractor can only be used to extract file features")
def extract_basic_block_features(self, f, bb):
raise NotImplementedError("DnfileFeatureExtractor can only be used to extract file features")
def get_instructions(self, f, bb):
raise NotImplementedError("DnfileFeatureExtractor can only be used to extract file features")
def extract_insn_features(self, f, bb, insn):
raise NotImplementedError("DnfileFeatureExtractor can only be used to extract file features")
def is_library_function(self, va):
raise NotImplementedError("DnfileFeatureExtractor can only be used to extract file features")
def get_function_name(self, va):
raise NotImplementedError("DnfileFeatureExtractor can only be used to extract file features")

View File

@@ -1,13 +1,5 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Tuple, Iterator
from pathlib import Path
from typing import Tuple, Iterator, cast
import dnfile
import pefile
@@ -31,18 +23,15 @@ from capa.features.common import (
Characteristic,
)
from capa.features.address import NO_ADDRESS, Address, DNTokenAddress
from capa.features.extractors.dnfile.types import DnType
from capa.features.extractors.base_extractor import SampleHashes, StaticFeatureExtractor
from capa.features.extractors.base_extractor import FeatureExtractor
from capa.features.extractors.dnfile.helpers import (
DnType,
iter_dotnet_table,
is_dotnet_mixed_mode,
get_dotnet_managed_imports,
get_dotnet_managed_methods,
resolve_nested_typedef_name,
resolve_nested_typeref_name,
calculate_dotnet_token_value,
get_dotnet_unmanaged_imports,
get_dotnet_nested_class_table_index,
)
logger = logging.getLogger(__name__)
@@ -60,7 +49,7 @@ def extract_file_import_names(pe: dnfile.dnPE, **kwargs) -> Iterator[Tuple[Impor
for imp in get_dotnet_unmanaged_imports(pe):
# like kernel32.CreateFileA
for name in capa.features.extractors.helpers.generate_symbols(imp.module, imp.method, include_dll=True):
for name in capa.features.extractors.helpers.generate_symbols(imp.module, imp.method):
yield Import(name), DNTokenAddress(imp.token)
@@ -95,25 +84,19 @@ def extract_file_namespace_features(pe: dnfile.dnPE, **kwargs) -> Iterator[Tuple
def extract_file_class_features(pe: dnfile.dnPE, **kwargs) -> Iterator[Tuple[Class, Address]]:
"""emit class features from TypeRef and TypeDef tables"""
nested_class_table = get_dotnet_nested_class_table_index(pe)
for rid, typedef in iter_dotnet_table(pe, dnfile.mdtable.TypeDef.number):
# emit internal .NET classes
assert isinstance(typedef, dnfile.mdtable.TypeDefRow)
typedefnamespace, typedefname = resolve_nested_typedef_name(nested_class_table, rid, typedef, pe)
token = calculate_dotnet_token_value(dnfile.mdtable.TypeDef.number, rid)
yield Class(DnType.format_name(typedefname, namespace=typedefnamespace)), DNTokenAddress(token)
yield Class(DnType.format_name(typedef.TypeName, namespace=typedef.TypeNamespace)), DNTokenAddress(token)
for rid, typeref in iter_dotnet_table(pe, dnfile.mdtable.TypeRef.number):
# emit external .NET classes
assert isinstance(typeref, dnfile.mdtable.TypeRefRow)
typerefnamespace, typerefname = resolve_nested_typeref_name(typeref.ResolutionScope.row_index, typeref, pe)
token = calculate_dotnet_token_value(dnfile.mdtable.TypeRef.number, rid)
yield Class(DnType.format_name(typerefname, namespace=typerefnamespace)), DNTokenAddress(token)
yield Class(DnType.format_name(typeref.TypeName, namespace=typeref.TypeNamespace)), DNTokenAddress(token)
def extract_file_os(**kwargs) -> Iterator[Tuple[OS, Address]]:
@@ -174,11 +157,11 @@ GLOBAL_HANDLERS = (
)
class DotnetFileFeatureExtractor(StaticFeatureExtractor):
def __init__(self, path: Path):
super().__init__(hashes=SampleHashes.from_bytes(path.read_bytes()))
self.path: Path = path
self.pe: dnfile.dnPE = dnfile.dnPE(str(path))
class DotnetFileFeatureExtractor(FeatureExtractor):
def __init__(self, path: str):
super().__init__()
self.path: str = path
self.pe: dnfile.dnPE = dnfile.dnPE(path)
def get_base_address(self):
return NO_ADDRESS

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -13,8 +13,6 @@ from enum import Enum
from typing import Set, Dict, List, Tuple, BinaryIO, Iterator, Optional
from dataclasses import dataclass
import Elf # from vivisect
logger = logging.getLogger(__name__)
@@ -26,7 +24,7 @@ def align(v, alignment):
return v + (alignment - remainder)
def read_cstr(buf, offset) -> str:
def read_cstr(buf, offset):
s = buf[offset:]
s, _, _ = s.partition(b"\x00")
return s.decode("utf-8")
@@ -56,7 +54,6 @@ class OS(str, Enum):
CLOUD = "cloud"
SYLLABLE = "syllable"
NACL = "nacl"
ANDROID = "android"
# via readelf: https://github.com/bminor/binutils-gdb/blob/c0e94211e1ac05049a4ce7c192c9d14d1764eb3e/binutils/readelf.c#L19635-L19658
@@ -91,26 +88,8 @@ class Shdr:
offset: int
size: int
link: int
entsize: int
buf: bytes
@classmethod
def from_viv(cls, section, buf: bytes) -> "Shdr":
return cls(
section.sh_name,
section.sh_type,
section.sh_flags,
section.sh_addr,
section.sh_offset,
section.sh_size,
section.sh_link,
section.sh_entsize,
buf,
)
def get_name(self, elf: "ELF") -> str:
return elf.shstrtab.buf[self.name :].partition(b"\x00")[0].decode("ascii")
class ELF:
def __init__(self, f: BinaryIO):
@@ -123,7 +102,6 @@ class ELF:
self.e_phnum: int
self.e_shentsize: int
self.e_shnum: int
self.e_shstrndx: int
self.phbuf: bytes
self.shbuf: bytes
@@ -143,27 +121,23 @@ class ELF:
elif ei_class == 2:
self.bitness = 64
else:
raise CorruptElfFile(f"invalid ei_class: 0x{ei_class:02x}")
raise CorruptElfFile("invalid ei_class: 0x%02x" % ei_class)
if ei_data == 1:
self.endian = "<"
elif ei_data == 2:
self.endian = ">"
else:
raise CorruptElfFile(f"not an ELF file: invalid ei_data: 0x{ei_data:02x}")
raise CorruptElfFile("not an ELF file: invalid ei_data: 0x%02x" % ei_data)
if self.bitness == 32:
e_phoff, e_shoff = struct.unpack_from(self.endian + "II", self.file_header, 0x1C)
self.e_phentsize, self.e_phnum = struct.unpack_from(self.endian + "HH", self.file_header, 0x2A)
self.e_shentsize, self.e_shnum, self.e_shstrndx = struct.unpack_from(
self.endian + "HHH", self.file_header, 0x2E
)
self.e_shentsize, self.e_shnum = struct.unpack_from(self.endian + "HH", self.file_header, 0x2E)
elif self.bitness == 64:
e_phoff, e_shoff = struct.unpack_from(self.endian + "QQ", self.file_header, 0x20)
self.e_phentsize, self.e_phnum = struct.unpack_from(self.endian + "HH", self.file_header, 0x36)
self.e_shentsize, self.e_shnum, self.e_shstrndx = struct.unpack_from(
self.endian + "HHH", self.file_header, 0x3A
)
self.e_shentsize, self.e_shnum = struct.unpack_from(self.endian + "HH", self.file_header, 0x3A)
else:
raise NotImplementedError()
@@ -346,12 +320,12 @@ class ELF:
shent = self.shbuf[shent_offset : shent_offset + self.e_shentsize]
if self.bitness == 32:
sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link, _, _, sh_entsize = struct.unpack_from(
self.endian + "IIIIIIIIII", shent, 0x0
sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link = struct.unpack_from(
self.endian + "IIIIIII", shent, 0x0
)
elif self.bitness == 64:
sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link, _, _, sh_entsize = struct.unpack_from(
self.endian + "IIQQQQIIQQ", shent, 0x0
sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link = struct.unpack_from(
self.endian + "IIQQQQI", shent, 0x0
)
else:
raise NotImplementedError()
@@ -363,7 +337,7 @@ class ELF:
if len(buf) != sh_size:
raise ValueError("failed to read section header content")
return Shdr(sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link, sh_entsize, buf)
return Shdr(sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, sh_link, buf)
@property
def section_headers(self):
@@ -373,10 +347,6 @@ class ELF:
except ValueError:
continue
@property
def shstrtab(self) -> Shdr:
return self.parse_section_header(self.e_shstrndx)
@property
def linker(self):
PT_INTERP = 0x3
@@ -426,7 +396,7 @@ class ELF:
# there should be vn_cnt of these.
# each entry describes an ABI name required by the shared object.
vna_offset = vn_offset + vn_aux
for _ in range(vn_cnt):
for i in range(vn_cnt):
# ElfXX_Vernaux layout is the same on 32 and 64 bit
_, _, _, vna_name, vna_next = struct.unpack_from(self.endian + "IHHII", shdr.buf, vna_offset)
@@ -487,12 +457,10 @@ class ELF:
for d_tag, d_val in self.dynamic_entries:
if d_tag == DT_STRTAB:
strtab_addr = d_val
break
for d_tag, d_val in self.dynamic_entries:
if d_tag == DT_STRSZ:
strtab_size = d_val
break
if strtab_addr is None:
return None
@@ -502,10 +470,8 @@ class ELF:
strtab_offset = None
for shdr in self.section_headers:
# the section header address should be defined
if shdr.addr and shdr.addr <= strtab_addr < shdr.addr + shdr.size:
if shdr.addr <= strtab_addr < shdr.addr + shdr.size:
strtab_offset = shdr.offset + (strtab_addr - shdr.addr)
break
if strtab_offset is None:
return None
@@ -534,27 +500,7 @@ class ELF:
if d_tag != DT_NEEDED:
continue
try:
yield read_cstr(strtab, d_val)
except UnicodeDecodeError as e:
logger.warning("failed to read DT_NEEDED entry: %s", str(e))
@property
def symtab(self) -> Optional[Tuple[Shdr, Shdr]]:
"""
fetch the Shdr for the symtab and the associated strtab.
"""
SHT_SYMTAB = 0x2
for shdr in self.section_headers:
if shdr.type != SHT_SYMTAB:
continue
# the linked section contains strings referenced by the symtab structures.
strtab_shdr = self.parse_section_header(shdr.link)
return shdr, strtab_shdr
return None
yield read_cstr(strtab, d_val)
@dataclass
@@ -657,101 +603,11 @@ class SHNote:
return ABITag(os, kmajor, kminor, kpatch)
@dataclass
class Symbol:
name_offset: int
value: int
size: int
info: int
other: int
shndx: int
class SymTab:
def __init__(
self,
endian: str,
bitness: int,
symtab: Shdr,
strtab: Shdr,
) -> None:
self.symbols: List[Symbol] = []
self.symtab = symtab
self.strtab = strtab
self._parse(endian, bitness, symtab.buf)
def _parse(self, endian: str, bitness: int, symtab_buf: bytes) -> None:
"""
parse each symbol's fields in
the order specified by sys/elf32.h
"""
if self.symtab.entsize == 0:
return
for i in range(int(len(self.symtab.buf) / self.symtab.entsize)):
if bitness == 32:
name_offset, value, size, info, other, shndx = struct.unpack_from(
endian + "IIIBBH", symtab_buf, i * self.symtab.entsize
)
elif bitness == 64:
name_offset, info, other, shndx, value, size = struct.unpack_from(
endian + "IBBHQQ", symtab_buf, i * self.symtab.entsize
)
self.symbols.append(Symbol(name_offset, value, size, info, other, shndx))
def get_name(self, symbol: Symbol) -> str:
"""
fetch a symbol's name from symtab's
associated strings' section (SHT_STRTAB)
"""
if not self.strtab:
raise ValueError("no strings found")
for i in range(symbol.name_offset, self.strtab.size):
if self.strtab.buf[i] == 0:
return self.strtab.buf[symbol.name_offset : i].decode("utf-8")
raise ValueError("symbol name not found")
def get_symbols(self) -> Iterator[Symbol]:
"""
yield each parsed Symbol (name offset, value, size, info, other, shndx)
contained in the symbol table
"""
yield from self.symbols
@classmethod
def from_viv(cls, elf: Elf.Elf) -> Optional["SymTab"]:
endian = "<" if elf.getEndian() == 0 else ">"
bitness = elf.bits
SHT_SYMTAB = 0x2
for section in elf.sections:
if section.sh_type == SHT_SYMTAB:
strtab_section = elf.sections[section.sh_link]
sh_symtab = Shdr.from_viv(section, elf.readAtOffset(section.sh_offset, section.sh_size))
sh_strtab = Shdr.from_viv(
strtab_section, elf.readAtOffset(strtab_section.sh_offset, strtab_section.sh_size)
)
try:
return cls(endian, bitness, sh_symtab, sh_strtab)
except NameError:
return None
except Exception:
# all exceptions that could be encountered by
# cls._parse() imply a faulty symbol's table.
raise CorruptElfFile("malformed symbol's table")
def guess_os_from_osabi(elf: ELF) -> Optional[OS]:
def guess_os_from_osabi(elf) -> Optional[OS]:
return elf.ei_osabi
def guess_os_from_ph_notes(elf: ELF) -> Optional[OS]:
def guess_os_from_ph_notes(elf) -> Optional[OS]:
# search for PT_NOTE sections that specify an OS
# for example, on Linux there is a GNU section with minimum kernel version
PT_NOTE = 0x4
@@ -779,11 +635,6 @@ def guess_os_from_ph_notes(elf: ELF) -> Optional[OS]:
elif note.name == "FreeBSD":
logger.debug("note owner: %s", "FREEBSD")
return OS.FREEBSD
elif note.name == "Android":
logger.debug("note owner: %s", "Android")
# see the following for parsing the structure:
# https://android.googlesource.com/platform/ndk/+/master/parse_elfnote.py
return OS.ANDROID
elif note.name == "GNU":
abi_tag = note.abi_tag
if abi_tag:
@@ -795,7 +646,7 @@ def guess_os_from_ph_notes(elf: ELF) -> Optional[OS]:
return None
def guess_os_from_sh_notes(elf: ELF) -> Optional[OS]:
def guess_os_from_sh_notes(elf) -> Optional[OS]:
# search for notes stored in sections that aren't visible in program headers.
# e.g. .note.Linux in Linux kernel modules.
SHT_NOTE = 0x7
@@ -828,51 +679,7 @@ def guess_os_from_sh_notes(elf: ELF) -> Optional[OS]:
return None
def guess_os_from_ident_directive(elf: ELF) -> Optional[OS]:
# GCC inserts the GNU version via an .ident directive
# that gets stored in a section named ".comment".
# look at the version and recognize common OSes.
#
# assume the GCC version matches the target OS version,
# which I guess could be wrong during cross-compilation?
# therefore, don't rely on this if possible.
#
# https://stackoverflow.com/q/6263425
# https://gcc.gnu.org/onlinedocs/cpp/Other-Directives.html
SHT_PROGBITS = 0x1
for shdr in elf.section_headers:
if shdr.type != SHT_PROGBITS:
continue
if shdr.get_name(elf) != ".comment":
continue
try:
comment = shdr.buf.decode("utf-8")
except ValueError:
continue
if "GCC:" not in comment:
continue
logger.debug(".ident: %s", comment)
# these values come from our testfiles, like:
# rg -a "GCC: " tests/data/
if "Debian" in comment:
return OS.LINUX
elif "Ubuntu" in comment:
return OS.LINUX
elif "Red Hat" in comment:
return OS.LINUX
elif "Android" in comment:
return OS.ANDROID
return None
def guess_os_from_linker(elf: ELF) -> Optional[OS]:
def guess_os_from_linker(elf) -> Optional[OS]:
# search for recognizable dynamic linkers (interpreters)
# for example, on linux, we see file paths like: /lib64/ld-linux-x86-64.so.2
linker = elf.linker
@@ -882,12 +689,12 @@ def guess_os_from_linker(elf: ELF) -> Optional[OS]:
return None
def guess_os_from_abi_versions_needed(elf: ELF) -> Optional[OS]:
def guess_os_from_abi_versions_needed(elf) -> Optional[OS]:
# then lets look for GLIBC symbol versioning requirements.
# this will let us guess about linux/hurd in some cases.
versions_needed = elf.versions_needed
if any(abi.startswith("GLIBC") for abi in itertools.chain(*versions_needed.values())):
if any(map(lambda abi: abi.startswith("GLIBC"), itertools.chain(*versions_needed.values()))):
# there are any GLIBC versions needed
if elf.e_machine != "i386":
@@ -907,120 +714,45 @@ def guess_os_from_abi_versions_needed(elf: ELF) -> Optional[OS]:
return OS.HURD
else:
# in practice, Hurd isn't a common/viable OS,
# so this is almost certain to be Linux,
# so lets just make that guess.
return OS.LINUX
# we don't have any good guesses based on versions needed
pass
return None
def guess_os_from_needed_dependencies(elf: ELF) -> Optional[OS]:
def guess_os_from_needed_dependencies(elf) -> Optional[OS]:
for needed in elf.needed:
if needed.startswith("libmachuser.so"):
return OS.HURD
if needed.startswith("libhurduser.so"):
return OS.HURD
if needed.startswith("libandroid.so"):
return OS.ANDROID
if needed.startswith("liblog.so"):
return OS.ANDROID
return None
def guess_os_from_symtab(elf: ELF) -> Optional[OS]:
shdrs = elf.symtab
if not shdrs:
# executable does not contain a symbol table
# or the symbol's names are stripped
return None
symtab_shdr, strtab_shdr = shdrs
symtab = SymTab(elf.endian, elf.bitness, symtab_shdr, strtab_shdr)
keywords = {
OS.LINUX: [
"linux",
"/linux/",
],
}
for symbol in symtab.get_symbols():
sym_name = symtab.get_name(symbol)
for os, hints in keywords.items():
if any(hint in sym_name for hint in hints):
return os
return None
def detect_elf_os(f) -> str:
"""
f: type Union[BinaryIO, IDAIO, GHIDRAIO]
f: type Union[BinaryIO, IDAIO]
"""
try:
elf = ELF(f)
except Exception as e:
logger.warning("Error parsing ELF file: %s", e)
return "unknown"
elf = ELF(f)
try:
osabi_guess = guess_os_from_osabi(elf)
logger.debug("guess: osabi: %s", osabi_guess)
except Exception as e:
logger.warning("Error guessing OS from OSABI: %s", e)
osabi_guess = None
osabi_guess = guess_os_from_osabi(elf)
logger.debug("guess: osabi: %s", osabi_guess)
try:
ph_notes_guess = guess_os_from_ph_notes(elf)
logger.debug("guess: ph notes: %s", ph_notes_guess)
except Exception as e:
logger.warning("Error guessing OS from program header notes: %s", e)
ph_notes_guess = None
ph_notes_guess = guess_os_from_ph_notes(elf)
logger.debug("guess: ph notes: %s", ph_notes_guess)
try:
sh_notes_guess = guess_os_from_sh_notes(elf)
logger.debug("guess: sh notes: %s", sh_notes_guess)
except Exception as e:
logger.warning("Error guessing OS from section header notes: %s", e)
sh_notes_guess = None
sh_notes_guess = guess_os_from_sh_notes(elf)
logger.debug("guess: sh notes: %s", sh_notes_guess)
try:
ident_guess = guess_os_from_ident_directive(elf)
logger.debug("guess: .ident: %s", ident_guess)
except Exception as e:
logger.warning("Error guessing OS from .ident directive: %s", e)
ident_guess = None
linker_guess = guess_os_from_linker(elf)
logger.debug("guess: linker: %s", linker_guess)
try:
linker_guess = guess_os_from_linker(elf)
logger.debug("guess: linker: %s", linker_guess)
except Exception as e:
logger.warning("Error guessing OS from linker: %s", e)
linker_guess = None
abi_versions_needed_guess = guess_os_from_abi_versions_needed(elf)
logger.debug("guess: ABI versions needed: %s", abi_versions_needed_guess)
try:
abi_versions_needed_guess = guess_os_from_abi_versions_needed(elf)
logger.debug("guess: ABI versions needed: %s", abi_versions_needed_guess)
except Exception as e:
logger.warning("Error guessing OS from ABI versions needed: %s", e)
abi_versions_needed_guess = None
try:
needed_dependencies_guess = guess_os_from_needed_dependencies(elf)
logger.debug("guess: needed dependencies: %s", needed_dependencies_guess)
except Exception as e:
logger.warning("Error guessing OS from needed dependencies: %s", e)
needed_dependencies_guess = None
try:
symtab_guess = guess_os_from_symtab(elf)
logger.debug("guess: pertinent symbol name: %s", symtab_guess)
except Exception as e:
logger.warning("Error guessing OS from symbol table: %s", e)
symtab_guess = None
needed_dependencies_guess = guess_os_from_needed_dependencies(elf)
logger.debug("guess: needed dependencies: %s", needed_dependencies_guess)
ret = None
@@ -1042,14 +774,6 @@ def detect_elf_os(f) -> str:
elif needed_dependencies_guess:
ret = needed_dependencies_guess
elif symtab_guess:
ret = symtab_guess
elif ident_guess:
# at the bottom because we don't trust this too much
# due to potential for bugs with cross-compilation.
ret = ident_guess
return ret.value if ret is not None else "unknown"
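A usage sketch for the detector above (the sample path is hypothetical):

from pathlib import Path

import capa.features.extractors.elf

with Path("sample.elf").open("rb") as f:  # hypothetical ELF sample
    print(capa.features.extractors.elf.detect_elf_os(f))  # e.g. "linux", or "unknown" if no heuristic fires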

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,22 +8,23 @@
import io
import logging
from typing import Tuple, Iterator
from pathlib import Path
from elftools.elf.elffile import ELFFile, SymbolTableSection
from elftools.elf.relocation import RelocationSection
import capa.features.extractors.common
from capa.features.file import Export, Import, Section
from capa.features.file import Import, Section
from capa.features.common import OS, FORMAT_ELF, Arch, Format, Feature
from capa.features.address import NO_ADDRESS, FileOffsetAddress, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import SampleHashes, StaticFeatureExtractor
from capa.features.extractors.base_extractor import FeatureExtractor
logger = logging.getLogger(__name__)
def extract_file_export_names(elf: ELFFile, **kwargs):
for section in elf.iter_sections():
def extract_file_import_names(elf, **kwargs):
# see https://github.com/eliben/pyelftools/blob/0664de05ed2db3d39041e2d51d19622a8ef4fb0f/scripts/readelf.py#L372
symbol_tables = [(idx, s) for idx, s in enumerate(elf.iter_sections()) if isinstance(s, SymbolTableSection)]
for _, section in symbol_tables:
if not isinstance(section, SymbolTableSection):
continue
@@ -33,64 +34,14 @@ def extract_file_export_names(elf: ELFFile, **kwargs):
logger.debug("Symbol table '%s' contains %s entries:", section.name, section.num_symbols())
for symbol in section.iter_symbols():
# The following conditions are based on this article:
# http://www.m4b.io/elf/export/binary/analysis/2015/05/25/what-is-an-elf-export.html
if not symbol.name:
continue
if symbol.entry.st_info.type not in ["STT_FUNC", "STT_OBJECT", "STT_IFUNC"]:
continue
if symbol.entry.st_value == 0:
continue
if symbol.entry.st_shndx == "SHN_UNDEF":
continue
yield Export(symbol.name), AbsoluteVirtualAddress(symbol.entry.st_value)
def extract_file_import_names(elf: ELFFile, **kwargs):
# Create a dictionary to store symbol names by their index
symbol_names = {}
# Extract symbol names and store them in the dictionary
for section in elf.iter_sections():
if not isinstance(section, SymbolTableSection):
continue
for _, symbol in enumerate(section.iter_symbols()):
# The following conditions are based on this article:
# http://www.m4b.io/elf/export/binary/analysis/2015/05/25/what-is-an-elf-export.html
if not symbol.name:
continue
if symbol.entry.st_info.type not in ["STT_FUNC", "STT_OBJECT", "STT_IFUNC"]:
continue
if symbol.entry.st_value != 0:
continue
if symbol.entry.st_shndx != "SHN_UNDEF":
continue
if symbol.entry.st_name == 0:
continue
symbol_names[_] = symbol.name
for section in elf.iter_sections():
if not isinstance(section, RelocationSection):
continue
if section["sh_entsize"] == 0:
logger.debug("Symbol table '%s' has a sh_entsize of zero!", section.name)
continue
logger.debug("Symbol table '%s' contains %s entries:", section.name, section.num_relocations())
for relocation in section.iter_relocations():
# Extract the symbol name from the symbol table using the symbol index in the relocation
if relocation["r_info_sym"] not in symbol_names:
continue
yield Import(symbol_names[relocation["r_info_sym"]]), FileOffsetAddress(relocation["r_offset"])
if symbol.name and symbol.entry.st_info.type == "STT_FUNC":
# TODO symbol address
# TODO symbol version info?
yield Import(symbol.name), FileOffsetAddress(0x0)
def extract_file_section_names(elf: ELFFile, **kwargs):
def extract_file_section_names(elf, **kwargs):
for section in elf.iter_sections():
if section.name:
yield Section(section.name), AbsoluteVirtualAddress(section.header.sh_addr)
@@ -102,7 +53,7 @@ def extract_file_strings(buf, **kwargs):
yield from capa.features.extractors.common.extract_file_strings(buf)
def extract_file_os(elf: ELFFile, buf, **kwargs):
def extract_file_os(elf, buf, **kwargs):
# our current approach does not always get an OS value, e.g. for packed samples
# for file limitation purposes, we're more lax here
try:
@@ -116,7 +67,8 @@ def extract_file_format(**kwargs):
yield Format(FORMAT_ELF), NO_ADDRESS
def extract_file_arch(elf: ELFFile, **kwargs):
def extract_file_arch(elf, **kwargs):
# TODO merge with capa.features.extractors.elf.detect_elf_arch()
arch = elf.get_machine_arch()
if arch == "x86":
yield Arch("i386"), NO_ADDRESS
@@ -133,7 +85,7 @@ def extract_file_features(elf: ELFFile, buf: bytes) -> Iterator[Tuple[Feature, i
FILE_HANDLERS = (
extract_file_export_names,
# TODO extract_file_export_names,
extract_file_import_names,
extract_file_section_names,
extract_file_strings,
@@ -154,11 +106,12 @@ GLOBAL_HANDLERS = (
)
class ElfFeatureExtractor(StaticFeatureExtractor):
def __init__(self, path: Path):
super().__init__(SampleHashes.from_bytes(path.read_bytes()))
self.path: Path = path
self.elf = ELFFile(io.BytesIO(path.read_bytes()))
class ElfFeatureExtractor(FeatureExtractor):
def __init__(self, path: str):
super().__init__()
self.path = path
with open(self.path, "rb") as f:
self.elf = ELFFile(io.BytesIO(f.read()))
def get_base_address(self):
# virtual address of the first segment with type LOAD
@@ -167,13 +120,15 @@ class ElfFeatureExtractor(StaticFeatureExtractor):
return AbsoluteVirtualAddress(segment.header.p_vaddr)
def extract_global_features(self):
buf = self.path.read_bytes()
with open(self.path, "rb") as f:
buf = f.read()
for feature, addr in extract_global_features(self.elf, buf):
yield feature, addr
def extract_file_features(self):
buf = self.path.read_bytes()
with open(self.path, "rb") as f:
buf = f.read()
for feature, addr in extract_file_features(self.elf, buf):
yield feature, addr
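And a usage sketch for the extractor itself (the newer side takes a pathlib.Path, the older a str; the sample path is hypothetical):

from pathlib import Path

from capa.features.extractors.elffile import ElfFeatureExtractor

extractor = ElfFeatureExtractor(Path("sample.elf"))  # hypothetical ELF sample
for feature, addr in extractor.extract_file_features():
    print(feature, addr)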

View File

@@ -1,152 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import string
import struct
from typing import Tuple, Iterator
import ghidra
from ghidra.program.model.lang import OperandType
import capa.features.extractors.ghidra.helpers
from capa.features.common import Feature, Characteristic
from capa.features.address import Address
from capa.features.basicblock import BasicBlock
from capa.features.extractors.helpers import MIN_STACKSTRING_LEN
from capa.features.extractors.base_extractor import BBHandle, FunctionHandle
def get_printable_len(op: ghidra.program.model.scalar.Scalar) -> int:
"""Return string length if all operand bytes are ascii or utf16-le printable"""
op_bit_len = op.bitLength()
op_byte_len = op_bit_len // 8
op_val = op.getValue()
if op_bit_len == 8:
chars = struct.pack("<B", op_val & 0xFF)
elif op_bit_len == 16:
chars = struct.pack("<H", op_val & 0xFFFF)
elif op_bit_len == 32:
chars = struct.pack("<I", op_val & 0xFFFFFFFF)
elif op_bit_len == 64:
chars = struct.pack("<Q", op_val & 0xFFFFFFFFFFFFFFFF)
else:
raise ValueError(f"Unhandled operand data type 0x{op_bit_len:x}.")
def is_printable_ascii(chars_: bytes):
return all(c < 127 and chr(c) in string.printable for c in chars_)
def is_printable_utf16le(chars_: bytes):
if all(c == 0x00 for c in chars_[1::2]):
return is_printable_ascii(chars_[::2])
if is_printable_ascii(chars):
return op_byte_len
if is_printable_utf16le(chars):
return op_byte_len
return 0
def is_mov_imm_to_stack(insn: ghidra.program.database.code.InstructionDB) -> bool:
"""verify instruction moves immediate onto stack"""
# Ghidra will Bitwise OR the OperandTypes to assign multiple
# i.e., the first operand is a stackvar (dynamically allocated),
# and the second is a scalar value (single int/char/float/etc.)
mov_its_ops = [(OperandType.ADDRESS | OperandType.DYNAMIC), OperandType.SCALAR]
found = False
# MOV dword ptr [EBP + local_*], 0x65
if insn.getMnemonicString().startswith("MOV"):
found = all(insn.getOperandType(i) == mov_its_ops[i] for i in range(2))
return found
def bb_contains_stackstring(bb: ghidra.program.model.block.CodeBlock) -> bool:
"""check basic block for stackstring indicators
true if basic block contains enough moves of constant bytes to the stack
"""
count = 0
for insn in currentProgram().getListing().getInstructions(bb, True): # type: ignore [name-defined] # noqa: F821
if is_mov_imm_to_stack(insn):
count += get_printable_len(insn.getScalar(1))
if count > MIN_STACKSTRING_LEN:
return True
return False
def _bb_has_tight_loop(bb: ghidra.program.model.block.CodeBlock):
"""
parse tight loops, true if last instruction in basic block branches to bb start
"""
# Reverse Ordered, first InstructionDB
last_insn = currentProgram().getListing().getInstructions(bb, False).next() # type: ignore [name-defined] # noqa: F821
if last_insn.getFlowType().isJump():
return last_insn.getAddress(0) == bb.getMinAddress()
return False
def extract_bb_stackstring(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""extract stackstring indicators from basic block"""
bb: ghidra.program.model.block.CodeBlock = bbh.inner
if bb_contains_stackstring(bb):
yield Characteristic("stack string"), bbh.address
def extract_bb_tight_loop(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""check basic block for tight loop indicators"""
bb: ghidra.program.model.block.CodeBlock = bbh.inner
if _bb_has_tight_loop(bb):
yield Characteristic("tight loop"), bbh.address
BASIC_BLOCK_HANDLERS = (
extract_bb_tight_loop,
extract_bb_stackstring,
)
def extract_features(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
"""
extract features from the given basic block.
args:
bb: the basic block to process.
yields:
Tuple[Feature, int]: the features and their location found in this basic block.
"""
yield BasicBlock(), bbh.address
for bb_handler in BASIC_BLOCK_HANDLERS:
for feature, addr in bb_handler(fh, bbh):
yield feature, addr
def main():
features = []
from capa.features.extractors.ghidra.extractor import GhidraFeatureExtractor
for fh in GhidraFeatureExtractor().get_functions():
for bbh in capa.features.extractors.ghidra.helpers.get_function_blocks(fh):
features.extend(list(extract_features(fh, bbh)))
import pprint
pprint.pprint(features) # noqa: T203
if __name__ == "__main__":
main()
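The printable-length heuristic above doesn't need Ghidra to exercise; a minimal standalone sketch:

import string
import struct

def printable_len(value: int, bit_len: int) -> int:
    # pack the immediate and measure whether it reads as a printable
    # ASCII or UTF-16LE fragment, mirroring get_printable_len above
    fmt = {8: "<B", 16: "<H", 32: "<I", 64: "<Q"}[bit_len]
    chars = struct.pack(fmt, value & ((1 << bit_len) - 1))

    def is_ascii(buf: bytes) -> bool:
        return all(c < 127 and chr(c) in string.printable for c in buf)

    if is_ascii(chars) or (all(c == 0 for c in chars[1::2]) and is_ascii(chars[::2])):
        return len(chars)
    return 0

print(printable_len(0x41424344, 32))  # 4: packs to b"DCBA"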

View File

@@ -1,93 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import List, Tuple, Iterator
import capa.features.extractors.ghidra.file
import capa.features.extractors.ghidra.insn
import capa.features.extractors.ghidra.global_
import capa.features.extractors.ghidra.function
import capa.features.extractors.ghidra.basicblock
from capa.features.common import Feature
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import (
BBHandle,
InsnHandle,
SampleHashes,
FunctionHandle,
StaticFeatureExtractor,
)
class GhidraFeatureExtractor(StaticFeatureExtractor):
def __init__(self):
import capa.features.extractors.ghidra.helpers as ghidra_helpers
super().__init__(
SampleHashes(
md5=capa.ghidra.helpers.get_file_md5(),
# ghidra doesn't expose this hash.
# https://ghidra.re/ghidra_docs/api/ghidra/program/model/listing/Program.html
#
# the hashes are stored in the database, not computed on the fly,
# so its probably not trivial to add SHA1.
sha1="",
sha256=capa.ghidra.helpers.get_file_sha256(),
)
)
self.global_features: List[Tuple[Feature, Address]] = []
self.global_features.extend(capa.features.extractors.ghidra.file.extract_file_format())
self.global_features.extend(capa.features.extractors.ghidra.global_.extract_os())
self.global_features.extend(capa.features.extractors.ghidra.global_.extract_arch())
self.imports = ghidra_helpers.get_file_imports()
self.externs = ghidra_helpers.get_file_externs()
self.fakes = ghidra_helpers.map_fake_import_addrs()
def get_base_address(self):
return AbsoluteVirtualAddress(currentProgram().getImageBase().getOffset()) # type: ignore [name-defined] # noqa: F821
def extract_global_features(self):
yield from self.global_features
def extract_file_features(self):
yield from capa.features.extractors.ghidra.file.extract_features()
def get_functions(self) -> Iterator[FunctionHandle]:
import capa.features.extractors.ghidra.helpers as ghidra_helpers
for fhandle in ghidra_helpers.get_function_symbols():
fh: FunctionHandle = FunctionHandle(
address=AbsoluteVirtualAddress(fhandle.getEntryPoint().getOffset()),
inner=fhandle,
ctx={"imports_cache": self.imports, "externs_cache": self.externs, "fakes_cache": self.fakes},
)
yield fh
@staticmethod
def get_function(addr: int) -> FunctionHandle:
func = getFunctionContaining(toAddr(addr)) # type: ignore [name-defined] # noqa: F821
return FunctionHandle(address=AbsoluteVirtualAddress(func.getEntryPoint().getOffset()), inner=func)
def extract_function_features(self, fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.ghidra.function.extract_features(fh)
def get_basic_blocks(self, fh: FunctionHandle) -> Iterator[BBHandle]:
import capa.features.extractors.ghidra.helpers as ghidra_helpers
yield from ghidra_helpers.get_function_blocks(fh)
def extract_basic_block_features(self, fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.ghidra.basicblock.extract_features(fh, bbh)
def get_instructions(self, fh: FunctionHandle, bbh: BBHandle) -> Iterator[InsnHandle]:
import capa.features.extractors.ghidra.helpers as ghidra_helpers
yield from ghidra_helpers.get_insn_in_range(bbh)
def extract_insn_features(self, fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle):
yield from capa.features.extractors.ghidra.insn.extract_features(fh, bbh, ih)

View File

@@ -1,204 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import re
import struct
from typing import List, Tuple, Iterator
from ghidra.program.model.symbol import SourceType, SymbolType
import capa.features.extractors.common
import capa.features.extractors.helpers
import capa.features.extractors.strings
import capa.features.extractors.ghidra.helpers
from capa.features.file import Export, Import, Section, FunctionName
from capa.features.common import FORMAT_PE, FORMAT_ELF, Format, String, Feature, Characteristic
from capa.features.address import NO_ADDRESS, Address, FileOffsetAddress, AbsoluteVirtualAddress
MAX_OFFSET_PE_AFTER_MZ = 0x200
def find_embedded_pe(block_bytez: bytes, mz_xor: List[Tuple[bytes, bytes, int]]) -> Iterator[Tuple[int, int]]:
"""check segment for embedded PE
adapted for Ghidra from:
https://github.com/vivisect/vivisect/blob/91e8419a861f49779f18316f155311967e696836/PE/carve.py#L25
"""
todo = []
for mzx, pex, i in mz_xor:
for match in re.finditer(re.escape(mzx), block_bytez):
todo.append((match.start(), mzx, pex, i))
seg_max = len(block_bytez) # noqa: F821
while len(todo):
off, mzx, pex, i = todo.pop()
# the MZ header has one field we will check: e_lfanew, at offset 0x3C
e_lfanew = off + 0x3C
if seg_max < e_lfanew + 4:
continue
e_lfanew_bytes = block_bytez[e_lfanew : e_lfanew + 4]
newoff = struct.unpack("<I", capa.features.extractors.helpers.xor_static(e_lfanew_bytes, i))[0]
# assume XOR'd "PE" bytes exist within threshold
if newoff > MAX_OFFSET_PE_AFTER_MZ:
continue
peoff = off + newoff
if seg_max < peoff + 2:
continue
pe_bytes = block_bytez[peoff : peoff + 2]
if pe_bytes == pex:
yield off, i
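# a minimal, pure-Python sketch of the e_lfanew walk above (no Ghidra needed;
# `struct` is imported at the top of this module): build an MZ stub whose
# e_lfanew field points at "PE" and confirm the carve math.
#
#   stub = b"MZ" + b"\x00" * 0x3A + struct.pack("<I", 0x40) + b"PE"
#   e_lfanew = struct.unpack("<I", stub[0x3C:0x40])[0]  # -> 0x40
#   assert stub[e_lfanew : e_lfanew + 2] == b"PE"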
def extract_file_embedded_pe() -> Iterator[Tuple[Feature, Address]]:
"""extract embedded PE features"""
# pre-compute XOR pairs
mz_xor: List[Tuple[bytes, bytes, int]] = [
(
capa.features.extractors.helpers.xor_static(b"MZ", i),
capa.features.extractors.helpers.xor_static(b"PE", i),
i,
)
for i in range(256)
]
for block in currentProgram().getMemory().getBlocks(): # type: ignore [name-defined] # noqa: F821
if not all((block.isLoaded(), block.isInitialized(), "Headers" not in block.getName())):
continue
for off, _ in find_embedded_pe(capa.features.extractors.ghidra.helpers.get_block_bytes(block), mz_xor):
# add offset back to block start
ea: int = block.getStart().add(off).getOffset()
yield Characteristic("embedded pe"), FileOffsetAddress(ea)
def extract_file_export_names() -> Iterator[Tuple[Feature, Address]]:
"""extract function exports"""
st = currentProgram().getSymbolTable() # type: ignore [name-defined] # noqa: F821
for addr in st.getExternalEntryPointIterator():
yield Export(st.getPrimarySymbol(addr).getName()), AbsoluteVirtualAddress(addr.getOffset())
def extract_file_import_names() -> Iterator[Tuple[Feature, Address]]:
"""extract function imports
1. imports by ordinal:
- modulename.#ordinal
2. imports by name, results in two features to support importname-only
matching:
- modulename.importname
- importname
"""
for f in currentProgram().getFunctionManager().getExternalFunctions(): # type: ignore [name-defined] # noqa: F821
for r in f.getSymbol().getReferences():
if r.getReferenceType().isData():
addr = r.getFromAddress().getOffset() # gets pointer to fake external addr
fstr = f.toString().split("::") # format: MODULE.dll::import / MODULE::Ordinal_*
if "Ordinal_" in fstr[1]:
fstr[1] = f"#{fstr[1].split('_')[1]}"
for name in capa.features.extractors.helpers.generate_symbols(fstr[0][:-4], fstr[1], include_dll=True):
yield Import(name), AbsoluteVirtualAddress(addr)
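# illustrative, with hypothetical symbols: "KERNEL32.dll::CreateFileA" yields
# Import("kernel32.CreateFileA") and Import("CreateFileA") (plus the
# "CreateFile" variants), while "WS2_32.dll::Ordinal_1" is rewritten to "#1"
# and yields Import("ws2_32.#1").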
def extract_file_section_names() -> Iterator[Tuple[Feature, Address]]:
"""extract section names"""
for block in currentProgram().getMemory().getBlocks(): # type: ignore [name-defined] # noqa: F821
yield Section(block.getName()), AbsoluteVirtualAddress(block.getStart().getOffset())
def extract_file_strings() -> Iterator[Tuple[Feature, Address]]:
"""extract ASCII and UTF-16 LE strings"""
for block in currentProgram().getMemory().getBlocks(): # type: ignore [name-defined] # noqa: F821
if not block.isInitialized():
continue
p_bytes = capa.features.extractors.ghidra.helpers.get_block_bytes(block)
for s in capa.features.extractors.strings.extract_ascii_strings(p_bytes):
offset = block.getStart().getOffset() + s.offset
yield String(s.s), FileOffsetAddress(offset)
for s in capa.features.extractors.strings.extract_unicode_strings(p_bytes):
offset = block.getStart().getOffset() + s.offset
yield String(s.s), FileOffsetAddress(offset)
def extract_file_function_names() -> Iterator[Tuple[Feature, Address]]:
"""
extract the names of statically-linked library functions.
"""
for sym in currentProgram().getSymbolTable().getAllSymbols(True): # type: ignore [name-defined] # noqa: F821
# .isExternal() alone misses function symbols that this combination of checks catches
if sym.getSymbolType() == SymbolType.FUNCTION and sym.getSource() == SourceType.ANALYSIS and sym.isGlobal():
name = sym.getName() # starts to resolve names based on Ghidra's FidDB
if name.startswith("FID_conflict:"): # format: FID_conflict:<function-name>
name = name[13:]
addr = AbsoluteVirtualAddress(sym.getAddress().getOffset())
yield FunctionName(name), addr
if name.startswith("_"):
# some linkers may prefix linked routines with a `_` to avoid name collisions.
# extract features for both the mangled and un-mangled representations.
# e.g. `_fwrite` -> `fwrite`
# see: https://stackoverflow.com/a/2628384/87207
yield FunctionName(name[1:]), addr
def extract_file_format() -> Iterator[Tuple[Feature, Address]]:
ef = currentProgram().getExecutableFormat() # type: ignore [name-defined] # noqa: F821
if "PE" in ef:
yield Format(FORMAT_PE), NO_ADDRESS
elif "ELF" in ef:
yield Format(FORMAT_ELF), NO_ADDRESS
elif "Raw" in ef:
# no file type to return when processing a binary file, but we want to continue processing
return
else:
raise NotImplementedError(f"unexpected file format: {ef}")
def extract_features() -> Iterator[Tuple[Feature, Address]]:
"""extract file features"""
for file_handler in FILE_HANDLERS:
for feature, addr in file_handler():
yield feature, addr
FILE_HANDLERS = (
extract_file_embedded_pe,
extract_file_export_names,
extract_file_import_names,
extract_file_section_names,
extract_file_strings,
extract_file_function_names,
extract_file_format,
)
def main():
""" """
import pprint
pprint.pprint(list(extract_features())) # noqa: T203
if __name__ == "__main__":
main()

View File

@@ -1,73 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Tuple, Iterator
import ghidra
from ghidra.program.model.block import BasicBlockModel, SimpleBlockIterator
import capa.features.extractors.ghidra.helpers
from capa.features.common import Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors import loops
from capa.features.extractors.base_extractor import FunctionHandle
def extract_function_calls_to(fh: FunctionHandle):
"""extract callers to a function"""
f: ghidra.program.database.function.FunctionDB = fh.inner
for ref in f.getSymbol().getReferences():
if ref.getReferenceType().isCall():
yield Characteristic("calls to"), AbsoluteVirtualAddress(ref.getFromAddress().getOffset())
def extract_function_loop(fh: FunctionHandle):
f: ghidra.program.database.function.FunctionDB = fh.inner
edges = []
for block in SimpleBlockIterator(BasicBlockModel(currentProgram()), f.getBody(), monitor()): # type: ignore [name-defined] # noqa: F821
dests = block.getDestinations(monitor()) # type: ignore [name-defined] # noqa: F821
s_addrs = block.getStartAddresses()
while dests.hasNext(): # For loop throws Python TypeError
for addr in s_addrs:
edges.append((addr.getOffset(), dests.next().getDestinationAddress().getOffset()))
if loops.has_loop(edges):
yield Characteristic("loop"), AbsoluteVirtualAddress(f.getEntryPoint().getOffset())
def extract_recursive_call(fh: FunctionHandle):
f: ghidra.program.database.function.FunctionDB = fh.inner
for func in f.getCalledFunctions(monitor()): # type: ignore [name-defined] # noqa: F821
if func.getEntryPoint().getOffset() == f.getEntryPoint().getOffset():
yield Characteristic("recursive call"), AbsoluteVirtualAddress(f.getEntryPoint().getOffset())
def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
for func_handler in FUNCTION_HANDLERS:
for feature, addr in func_handler(fh):
yield feature, addr
FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call)
def main():
""" """
features = []
for fhandle in capa.features.extractors.ghidra.helpers.get_function_symbols():
features.extend(list(extract_features(fhandle)))
import pprint
pprint.pprint(features) # noqa: T203
if __name__ == "__main__":
main()

View File

@@ -1,67 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
import contextlib
from typing import Tuple, Iterator
import capa.ghidra.helpers
import capa.features.extractors.elf
import capa.features.extractors.ghidra.helpers
from capa.features.common import OS, ARCH_I386, ARCH_AMD64, OS_WINDOWS, Arch, Feature
from capa.features.address import NO_ADDRESS, Address
logger = logging.getLogger(__name__)
def extract_os() -> Iterator[Tuple[Feature, Address]]:
format_name: str = currentProgram().getExecutableFormat() # type: ignore [name-defined] # noqa: F821
if "PE" in format_name:
yield OS(OS_WINDOWS), NO_ADDRESS
elif "ELF" in format_name:
with contextlib.closing(capa.ghidra.helpers.GHIDRAIO()) as f:
os = capa.features.extractors.elf.detect_elf_os(f)
yield OS(os), NO_ADDRESS
else:
# we likely end up here:
# 1. handling shellcode, or
# 2. handling a new file format (e.g. macho)
#
# for (1) we can't do much - it's shellcode and all bets are off.
# we could maybe accept a further CLI argument to specify the OS,
# but i think this would be rarely used.
# rules that rely on OS conditions will fail to match on shellcode.
#
# for (2), this logic will need to be updated as the format is implemented.
logger.debug("unsupported file format: %s, will not guess OS", format_name)
return
def extract_arch() -> Iterator[Tuple[Feature, Address]]:
lang_id = currentProgram().getMetadata().get("Language ID") # type: ignore [name-defined] # noqa: F821
if "x86" in lang_id and "64" in lang_id:
yield Arch(ARCH_AMD64), NO_ADDRESS
elif "x86" in lang_id and "32" in lang_id:
yield Arch(ARCH_I386), NO_ADDRESS
elif "x86" not in lang_id:
logger.debug("unsupported architecture: non-32-bit nor non-64-bit intel")
return
else:
# we likely end up here:
# 1. handling a new architecture (e.g. aarch64)
#
# for (1), this logic will need to be updated as the format is implemented.
logger.debug("unsupported architecture: %s", lang_id)
return

View File

@@ -1,301 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Dict, List, Iterator
import ghidra
import java.lang
from ghidra.program.model.lang import OperandType
from ghidra.program.model.block import BasicBlockModel, SimpleBlockIterator
from ghidra.program.model.symbol import SourceType, SymbolType
from ghidra.program.model.address import AddressSpace
import capa.features.extractors.helpers
from capa.features.common import THUNK_CHAIN_DEPTH_DELTA
from capa.features.address import AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
def ints_to_bytes(bytez: List[int]) -> bytes:
"""convert Java signed ints to Python bytes
args:
bytez: list of Java signed ints
"""
return bytes([b & 0xFF for b in bytez])
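# e.g., Java bytes are signed (-128..127), so -1 must mask back to 0xFF:
#
#   >>> ints_to_bytes([-1, 0, 127, -128])
#   b'\xff\x00\x7f\x80'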
def find_byte_sequence(addr: ghidra.program.model.address.Address, seq: bytes) -> Iterator[int]:
"""yield all ea of a given byte sequence
args:
addr: start address
seq: bytes to search e.g. b"\x01\x03"
"""
seqstr = "".join([f"\\x{b:02x}" for b in seq])
eas = findBytes(addr, seqstr, java.lang.Integer.MAX_VALUE, 1) # type: ignore [name-defined] # noqa: F821
yield from eas
def get_bytes(addr: ghidra.program.model.address.Address, length: int) -> bytes:
"""yield length bytes at addr
args:
addr: Address to begin pull from
length: length of bytes to pull
"""
try:
return ints_to_bytes(getBytes(addr, length)) # type: ignore [name-defined] # noqa: F821
except RuntimeError:
return b""
def get_block_bytes(block: ghidra.program.model.mem.MemoryBlock) -> bytes:
"""yield all bytes in a given block
args:
block: MemoryBlock to pull from
"""
return get_bytes(block.getStart(), block.getSize())
def get_function_symbols():
"""yield all non-external function symbols"""
yield from currentProgram().getFunctionManager().getFunctionsNoStubs(True) # type: ignore [name-defined] # noqa: F821
def get_function_blocks(fh: FunctionHandle) -> Iterator[BBHandle]:
"""yield BBHandle for each bb in a given function"""
func: ghidra.program.database.function.FunctionDB = fh.inner
for bb in SimpleBlockIterator(BasicBlockModel(currentProgram()), func.getBody(), monitor()): # type: ignore [name-defined] # noqa: F821
yield BBHandle(address=AbsoluteVirtualAddress(bb.getMinAddress().getOffset()), inner=bb)
def get_insn_in_range(bbh: BBHandle) -> Iterator[InsnHandle]:
"""yield InshHandle for each insn in a given basicblock"""
for insn in currentProgram().getListing().getInstructions(bbh.inner, True): # type: ignore [name-defined] # noqa: F821
yield InsnHandle(address=AbsoluteVirtualAddress(insn.getAddress().getOffset()), inner=insn)
def get_file_imports() -> Dict[int, List[str]]:
"""get all import names & addrs"""
import_dict: Dict[int, List[str]] = {}
for f in currentProgram().getFunctionManager().getExternalFunctions(): # type: ignore [name-defined] # noqa: F821
for r in f.getSymbol().getReferences():
if r.getReferenceType().isData():
addr = r.getFromAddress().getOffset() # gets pointer to fake external addr
ex_loc = f.getExternalLocation().getAddress() # map external locations as well (offset into module files)
fstr = f.toString().split("::") # format: MODULE.dll::import / MODULE::Ordinal_* / <EXTERNAL>::import
if "Ordinal_" in fstr[1]:
fstr[1] = f"#{fstr[1].split('_')[1]}"
# <EXTERNAL> mostly shows up in ELF files, otherwise, strip '.dll' w/ [:-4]
fstr[0] = "*" if "<EXTERNAL>" in fstr[0] else fstr[0][:-4]
for name in capa.features.extractors.helpers.generate_symbols(fstr[0], fstr[1]):
import_dict.setdefault(addr, []).append(name)
if ex_loc:
import_dict.setdefault(ex_loc.getOffset(), []).append(name)
return import_dict
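# illustrative shape of the returned cache (addresses hypothetical): both the
# fake external addr and, when present, the external location offset map to the
# generated name variants, e.g. {0x473090: ["CreateServiceW", "CreateService"], ...}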
def get_file_externs() -> Dict[int, List[str]]:
"""
Gets function names & addresses of statically-linked library functions
Ghidra's external namespace is mostly reserved for dynamically-linked
imports. Statically-linked functions are part of the global namespace.
Filtering on the type, source, and namespace of the symbols yields more
statically-linked library functions.
Example: (PMA Lab 16-01.exe_) 7faafc7e4a5c736ebfee6abbbc812d80:0x407490
- __aulldiv
- Note: See Symbol Table labels
"""
extern_dict: Dict[int, List[str]] = {}
for sym in currentProgram().getSymbolTable().getAllSymbols(True): # type: ignore [name-defined] # noqa: F821
# .isExternal() alone misses function symbols that this combination of checks catches
if sym.getSymbolType() == SymbolType.FUNCTION and sym.getSource() == SourceType.ANALYSIS and sym.isGlobal():
name = sym.getName() # starts to resolve names based on Ghidra's FidDB
if name.startswith("FID_conflict:"): # format: FID_conflict:<function-name>
name = name[13:]
extern_dict.setdefault(sym.getAddress().getOffset(), []).append(name)
if name.startswith("_"):
# some linkers may prefix linked routines with a `_` to avoid name collisions.
# extract features for both the mangled and un-mangled representations.
# e.g. `_fwrite` -> `fwrite`
# see: https://stackoverflow.com/a/2628384/87207
extern_dict.setdefault(sym.getAddress().getOffset(), []).append(name[1:])
return extern_dict
def map_fake_import_addrs() -> Dict[int, List[int]]:
"""
map Ghidra's fake import entrypoints to their
real addresses
This helps because many Ghidra Scripting API calls end up returning
these external (fake) addresses.
Undocumented but intended Ghidra behavior:
- Import entryPoint fields are stored in the 'EXTERNAL:' AddressSpace.
'getEntryPoint()' returns the entryPoint field, which is an offset
from the beginning of the assigned AddressSpace. In the case of externals,
they start from 1 and increment.
https://github.com/NationalSecurityAgency/ghidra/blob/26d4bd9104809747c21f2528cab8aba9aef9acd5/Ghidra/Features/Base/src/test.slow/java/ghidra/program/database/function/ExternalFunctionDBTest.java#L90
Example: (mimikatz.exe_) 5f66b82558ca92e54e77f216ef4c066c:0x473090
- 0x473090 -> PTR_CreateServiceW_00473090
- 'EXTERNAL:00000025' -> External Address (ghidra.program.model.address.SpecialAddress)
"""
fake_dict: Dict[int, List[int]] = {}
for f in currentProgram().getFunctionManager().getExternalFunctions(): # type: ignore [name-defined] # noqa: F821
for r in f.getSymbol().getReferences():
if r.getReferenceType().isData():
fake_dict.setdefault(f.getEntryPoint().getOffset(), []).append(r.getFromAddress().getOffset())
return fake_dict
def check_addr_for_api(
addr: ghidra.program.model.address.Address,
fakes: Dict[int, List[int]],
imports: Dict[int, List[str]],
externs: Dict[int, List[str]],
) -> bool:
offset = addr.getOffset()
fake = fakes.get(offset)
if fake:
return True
imp = imports.get(offset)
if imp:
return True
extern = externs.get(offset)
if extern:
return True
return False
def is_call_or_jmp(insn: ghidra.program.database.code.InstructionDB) -> bool:
return any(mnem in insn.getMnemonicString() for mnem in ["CALL", "J"]) # JMP, JNE, JNZ, etc
def is_sp_modified(insn: ghidra.program.database.code.InstructionDB) -> bool:
for i in range(insn.getNumOperands()):
if insn.getOperandType(i) == OperandType.REGISTER:
return "SP" in insn.getRegister(i).getName() and insn.getOperandRefType(i).isWrite()
return False
def is_stack_referenced(insn: ghidra.program.database.code.InstructionDB) -> bool:
"""generic catch-all for stack references"""
for i in range(insn.getNumOperands()):
if insn.getOperandType(i) == OperandType.REGISTER:
if "BP" in insn.getRegister(i).getName():
return True
else:
continue
return any(ref.isStackReference() for ref in insn.getReferencesFrom())
def is_zxor(insn: ghidra.program.database.code.InstructionDB) -> bool:
# assume XOR insn
# XORing an operand against itself zeroes it out
ops = []
operands = []
for i in range(insn.getNumOperands()):
ops.append(insn.getOpObjects(i))
# Operands stored in a 2D array
for j in range(len(ops)):
for k in range(len(ops[j])):
operands.append(ops[j][k])
return all(n == operands[0] for n in operands)
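# illustrative: for XOR EAX, EAX Ghidra reports OpObjects like [["EAX"], ["EAX"]];
# flattened, every entry equals the first, so the XOR is a zeroing idiom and is
# excluded from the nzxor characteristic.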
def handle_thunk(addr: ghidra.program.model.address.Address):
"""Follow thunk chains down to a reasonable depth"""
ref = addr
for _ in range(THUNK_CHAIN_DEPTH_DELTA):
thunk_jmp = getInstructionAt(ref) # type: ignore [name-defined] # noqa: F821
if thunk_jmp and is_call_or_jmp(thunk_jmp):
if OperandType.isAddress(thunk_jmp.getOperandType(0)):
ref = thunk_jmp.getAddress(0)
else:
thunk_dat = getDataContaining(ref) # type: ignore [name-defined] # noqa: F821
if thunk_dat and thunk_dat.isDefined() and thunk_dat.isPointer():
ref = thunk_dat.getValue()
break # end of thunk chain reached
return ref
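# illustrative chain (hypothetical addresses): CALL 0x401000 -> thunk JMP
# [0x404000] -> defined pointer to the real import; the walk above follows at
# most THUNK_CHAIN_DEPTH_DELTA hops before giving up.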
def dereference_ptr(insn: ghidra.program.database.code.InstructionDB):
addr_code = OperandType.ADDRESS | OperandType.CODE
to_deref = insn.getAddress(0)
dat = getDataContaining(to_deref) # type: ignore [name-defined] # noqa: F821
if insn.getOperandType(0) == addr_code:
thfunc = getFunctionContaining(to_deref) # type: ignore [name-defined] # noqa: F821
if thfunc and thfunc.isThunk():
return handle_thunk(to_deref)
else:
# if it doesn't point to a thunk, it's usually a jmp to a label
return to_deref
if not dat:
return to_deref
if dat.isDefined() and dat.isPointer():
addr = dat.getValue()
# now we need to check the addr space to see if it is truly resolvable
# ghidra sometimes likes to hand us direct RAM addrs, which typically point
# to api calls that we can't actually resolve as such
if addr.getAddressSpace().getType() == AddressSpace.TYPE_RAM:
return to_deref
else:
return addr
else:
return to_deref
def find_data_references_from_insn(insn, max_depth: int = 10):
"""yield data references from given instruction"""
for reference in insn.getReferencesFrom():
if not reference.getReferenceType().isData():
# only care about data references
continue
to_addr = reference.getToAddress()
for _ in range(max_depth - 1):
data = getDataAt(to_addr) # type: ignore [name-defined] # noqa: F821
if data and data.isPointer():
ptr_value = data.getValue()
if ptr_value is None:
break
to_addr = ptr_value
else:
break
yield to_addr
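# illustrative (hypothetical addresses): PUSH 0x404000, where 0x404000 holds a
# defined pointer to 0x418000, yields 0x418000; chains of defined pointers are
# followed up to max_depth hops.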

View File

@@ -1,503 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Any, Dict, Tuple, Iterator
import ghidra
from ghidra.program.model.lang import OperandType
from ghidra.program.model.block import SimpleBlockModel
import capa.features.extractors.helpers
import capa.features.extractors.ghidra.helpers
from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
from capa.features.common import MAX_BYTES_FEATURE_SIZE, Bytes, String, Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
# security cookie checks may perform non-zeroing XORs; these are expected within a certain
# byte range of the first and returning basic blocks, so we ignore them to reduce FP features
SECURITY_COOKIE_BYTES_DELTA = 0x40
OPERAND_TYPE_DYNAMIC_ADDRESS = OperandType.DYNAMIC | OperandType.ADDRESS
def get_imports(ctx: Dict[str, Any]) -> Dict[int, Any]:
"""Populate the import cache for this context"""
if "imports_cache" not in ctx:
ctx["imports_cache"] = capa.features.extractors.ghidra.helpers.get_file_imports()
return ctx["imports_cache"]
def get_externs(ctx: Dict[str, Any]) -> Dict[int, Any]:
"""Populate the externs cache for this context"""
if "externs_cache" not in ctx:
ctx["externs_cache"] = capa.features.extractors.ghidra.helpers.get_file_externs()
return ctx["externs_cache"]
def get_fakes(ctx: Dict[str, Any]) -> Dict[int, Any]:
"""Populate the fake import addrs cache for this context"""
if "fakes_cache" not in ctx:
ctx["fakes_cache"] = capa.features.extractors.ghidra.helpers.map_fake_import_addrs()
return ctx["fakes_cache"]
def check_for_api_call(
insn, externs: Dict[int, Any], fakes: Dict[int, Any], imports: Dict[int, Any], imp_or_ex: bool
) -> Iterator[Any]:
"""check instruction for API call
params:
externs - external library functions cache
fakes - mapped fake import addresses cache
imports - imported functions cache
imp_or_ex - flag to check imports or externs
yields:
matched api calls
"""
info = ()
funcs = imports if imp_or_ex else externs
# assume only CALLs or JMPs are passed
ref_type = insn.getOperandType(0)
addr_data = OperandType.ADDRESS | OperandType.DATA # needs dereferencing
addr_code = OperandType.ADDRESS | OperandType.CODE # needs dereferencing
if OperandType.isRegister(ref_type):
if OperandType.isAddress(ref_type):
# If it's an address in a register, check the mapped fake addrs
# since they're dereferenced to their fake addrs
op_ref = insn.getAddress(0).getOffset()
ref = fakes.get(op_ref) # obtain the real addr
if not ref:
return
else:
return
elif ref_type in (addr_data, addr_code) or (OperandType.isIndirect(ref_type) and OperandType.isAddress(ref_type)):
# we must dereference and check if the addr is a pointer to an api function
addr_ref = capa.features.extractors.ghidra.helpers.dereference_ptr(insn)
if not capa.features.extractors.ghidra.helpers.check_addr_for_api(addr_ref, fakes, imports, externs):
return
ref = addr_ref.getOffset()
elif ref_type == OPERAND_TYPE_DYNAMIC_ADDRESS or ref_type == OperandType.DYNAMIC:
return # cannot resolve dynamics statically
else:
# pure address does not need to get dereferenced/ handled
addr_ref = insn.getAddress(0)
if not addr_ref:
# If it returned null, it was an indirect
# that had no address reference.
# This check is faster than checking for (indirect and not address)
return
if not capa.features.extractors.ghidra.helpers.check_addr_for_api(addr_ref, fakes, imports, externs):
return
ref = addr_ref.getOffset()
if isinstance(ref, list): # ref from REG | ADDR
for r in ref:
info = funcs.get(r) # type: ignore
if info:
yield info
else:
info = funcs.get(ref) # type: ignore
if info:
yield info
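# illustrative dispatch over the operand types above (operands hypothetical):
#   CALL [0x473090] (ADDRESS|DATA)                  -> dereference_ptr(), then cache lookup
#   CALL EAX with a resolved addr (REGISTER|ADDRESS) -> fake-addr map lookup
#   CALL EAX alone (REGISTER)                        -> bail; nothing to resolve statically
#   CALL 0x401000 (plain ADDRESS)                    -> direct cache lookup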
def extract_insn_api_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
insn: ghidra.program.database.code.InstructionDB = ih.inner
if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
return
externs = get_externs(fh.ctx)
fakes = get_fakes(fh.ctx)
imports = get_imports(fh.ctx)
# check calls to imported functions
for api in check_for_api_call(insn, externs, fakes, imports, True):
for imp in api:
yield API(imp), ih.address
# check calls to extern functions
for api in check_for_api_call(insn, externs, fakes, imports, False):
for ext in api:
yield API(ext), ih.address
def extract_insn_number_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
"""
parse instruction number features
example:
push 3136B0h ; dwControlCode
"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
if insn.getMnemonicString().startswith("RET"):
# skip things like:
# .text:0042250E retn 8
return
if capa.features.extractors.ghidra.helpers.is_sp_modified(insn):
# skip things like:
# .text:00401145 add esp, 0Ch
return
for i in range(insn.getNumOperands()):
# Exceptions for LEA insn:
# invalid operand encoding, considered numbers instead of offsets
# see: mimikatz.exe_:0x4018C0
if insn.getOperandType(i) == OperandType.DYNAMIC and insn.getMnemonicString().startswith("LEA"):
# Additional check, avoid yielding "wide" values (ex. mimikatz.exe:0x471EE6 LEA EBX, [ECX + EAX*0x4])
op_objs = insn.getOpObjects(i)
if len(op_objs) == 3: # ECX, EAX, 0x4
continue
if isinstance(op_objs[-1], ghidra.program.model.scalar.Scalar):
const = op_objs[-1].getUnsignedValue()
addr = ih.address
yield Number(const), addr
yield OperandNumber(i, const), addr
elif not OperandType.isScalar(insn.getOperandType(i)):
# skip things like:
# references, void types
continue
else:
const = insn.getScalar(i).getUnsignedValue()
addr = ih.address
yield Number(const), addr
yield OperandNumber(i, const), addr
if insn.getMnemonicString().startswith("ADD") and 0 < const < MAX_STRUCTURE_SIZE:
# for pattern like:
#
# add eax, 0x10
#
# assume 0x10 is also an offset (imagine eax is a pointer).
yield Offset(const), addr
yield OperandOffset(i, const), addr
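# illustrative: ADD EAX, 0x10 yields Number(0x10) and OperandNumber(1, 0x10);
# because 0 < 0x10 < MAX_STRUCTURE_SIZE it also yields Offset(0x10) and
# OperandOffset(1, 0x10), treating EAX as a possible structure pointer.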
def extract_insn_offset_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
"""
parse instruction structure offset features
example:
.text:0040112F cmp [esi+4], ebx
"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
if insn.getMnemonicString().startswith("LEA"):
return
if capa.features.extractors.ghidra.helpers.is_stack_referenced(insn):
# ignore stack references
return
# Ghidra stores operands in 2D arrays if they contain offsets
for i in range(insn.getNumOperands()):
if insn.getOperandType(i) == OperandType.DYNAMIC: # e.g. [esi + 4]
# manual extraction, since the default api calls only work on the 1st dimension of the array
op_objs = insn.getOpObjects(i)
if not op_objs:
continue
if isinstance(op_objs[-1], ghidra.program.model.scalar.Scalar):
op_off = op_objs[-1].getValue()
else:
op_off = 0
yield Offset(op_off), ih.address
yield OperandOffset(i, op_off), ih.address
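# illustrative: for CMP [ESI + 0x4], EBX the first operand is DYNAMIC with
# OpObjects like ["ESI", 0x4]; the trailing Scalar produces Offset(0x4) and
# OperandOffset(0, 0x4).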
def extract_insn_bytes_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
"""
parse referenced byte sequences
example:
push offset iid_004118d4_IShellLinkA ; riid
"""
for addr in capa.features.extractors.ghidra.helpers.find_data_references_from_insn(ih.inner):
data = getDataAt(addr) # type: ignore [name-defined] # noqa: F821
if data and not data.hasStringValue():
extracted_bytes = capa.features.extractors.ghidra.helpers.get_bytes(addr, MAX_BYTES_FEATURE_SIZE)
if extracted_bytes and not capa.features.extractors.helpers.all_zeros(extracted_bytes):
yield Bytes(extracted_bytes), ih.address
def extract_insn_string_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
"""
parse instruction string features
example:
push offset aAcr ; "ACR > "
"""
for addr in capa.features.extractors.ghidra.helpers.find_data_references_from_insn(ih.inner):
data = getDataAt(addr) # type: ignore [name-defined] # noqa: F821
if data and data.hasStringValue():
yield String(data.getValue()), ih.address
def extract_insn_mnemonic_features(
fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""parse instruction mnemonic features"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
yield Mnemonic(insn.getMnemonicString().lower()), ih.address
def extract_insn_obfs_call_plus_5_characteristic_features(
fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""
parse call $+5 instruction from the given instruction.
"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
return
code_ref = OperandType.ADDRESS | OperandType.CODE
ref = insn.getAddress()
for i in range(insn.getNumOperands()):
if insn.getOperandType(i) == code_ref:
ref = insn.getAddress(i)
if insn.getAddress().add(5) == ref:
yield Characteristic("call $+5"), ih.address
def extract_insn_segment_access_features(
fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""parse instruction fs or gs access"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
insn_str = insn.toString()
if "FS:" in insn_str:
yield Characteristic("fs access"), ih.address
if "GS:" in insn_str:
yield Characteristic("gs access"), ih.address
def extract_insn_peb_access_characteristic_features(
fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""parse instruction peb access
fs:[0x30] on x86, gs:[0x60] on x64
"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
insn_str = insn.toString()
if insn_str.startswith(("PUSH", "MOV")):
if "FS:[0x30]" in insn_str or "GS:[0x60]" in insn_str:
yield Characteristic("peb access"), ih.address
def extract_insn_cross_section_cflow(
fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
) -> Iterator[Tuple[Feature, Address]]:
"""inspect the instruction for a CALL or JMP that crosses section boundaries"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
return
externs = get_externs(fh.ctx)
fakes = get_fakes(fh.ctx)
imports = get_imports(fh.ctx)
# OperandType to dereference
addr_data = OperandType.ADDRESS | OperandType.DATA
addr_code = OperandType.ADDRESS | OperandType.CODE
ref_type = insn.getOperandType(0)
# both OperandType flags must be present
# bail on REGISTER alone
if OperandType.isRegister(ref_type):
if OperandType.isAddress(ref_type):
ref = insn.getAddress(0) # Ghidra dereferences REG | ADDR
if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs):
return
else:
return
elif ref_type in (addr_data, addr_code) or (OperandType.isIndirect(ref_type) and OperandType.isAddress(ref_type)):
# we must dereference and check if the addr is a pointer to an api function
ref = capa.features.extractors.ghidra.helpers.dereference_ptr(insn)
if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs):
return
elif ref_type == OPERAND_TYPE_DYNAMIC_ADDRESS or ref_type == OperandType.DYNAMIC:
return # cannot resolve dynamics statically
else:
# pure address does not need to get dereferenced/ handled
ref = insn.getAddress(0)
if not ref:
# If it returned null, it was an indirect
# that had no address reference.
# This check is faster than checking for (indirect and not address)
return
if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs):
return
this_mem_block = getMemoryBlock(insn.getAddress()) # type: ignore [name-defined] # noqa: F821
ref_block = getMemoryBlock(ref) # type: ignore [name-defined] # noqa: F821
if ref_block != this_mem_block:
yield Characteristic("cross section flow"), ih.address
def extract_function_calls_from(
fh: FunctionHandle,
bb: BBHandle,
ih: InsnHandle,
) -> Iterator[Tuple[Feature, Address]]:
"""extract functions calls from features
most relevant at the function scope, however, its most efficient to extract at the instruction scope
"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
if insn.getMnemonicString().startswith("CALL"):
# This method of "dereferencing" addresses/ pointers
# is not as robust as methods in other functions,
# but works just fine for this one
reference = 0
for ref in insn.getReferencesFrom():
addr = ref.getToAddress()
# avoid returning fake addrs
if not addr.isExternalAddress():
reference = addr.getOffset()
# if a reference is < 0, then ghidra pulled an offset from a DYNAMIC | ADDR (usually a stackvar)
# these cannot be resolved to actual addrs
if reference > 0:
yield Characteristic("calls from"), AbsoluteVirtualAddress(reference)
def extract_function_indirect_call_characteristic_features(
fh: FunctionHandle,
bb: BBHandle,
ih: InsnHandle,
) -> Iterator[Tuple[Feature, Address]]:
"""extract indirect function calls (e.g., call eax or call dword ptr [edx+4])
does not include calls like => call ds:dword_ABD4974
most relevant at the function or basic block scope;
however, it's most efficient to extract at the instruction scope
"""
insn: ghidra.program.database.code.InstructionDB = ih.inner
if insn.getMnemonicString().startswith("CALL"):
if OperandType.isRegister(insn.getOperandType(0)):
yield Characteristic("indirect call"), ih.address
if OperandType.isIndirect(insn.getOperandType(0)):
yield Characteristic("indirect call"), ih.address
def check_nzxor_security_cookie_delta(
fh: ghidra.program.database.function.FunctionDB, insn: ghidra.program.database.code.InstructionDB
):
"""Get the function containing the insn
Get the last block of the function that contains the insn
Check the bb containing the insn
Check the last bb of the function containing the insn
"""
model = SimpleBlockModel(currentProgram()) # type: ignore [name-defined] # noqa: F821
insn_addr = insn.getAddress()
func_asv = fh.getBody()
first_addr = func_asv.getMinAddress()
last_addr = func_asv.getMaxAddress()
if model.getFirstCodeBlockContaining(
first_addr, monitor() # type: ignore [name-defined] # noqa: F821
) == model.getFirstCodeBlockContaining(
last_addr, monitor() # type: ignore [name-defined] # noqa: F821
):
if insn_addr < first_addr.add(SECURITY_COOKIE_BYTES_DELTA):
return True
else:
return insn_addr > last_addr.add(SECURITY_COOKIE_BYTES_DELTA * -1)
else:
return False
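# illustrative: with SECURITY_COOKIE_BYTES_DELTA = 0x40, an XOR within 0x40
# bytes of the start or end of a function whose entry and exit share one code
# block is treated as a stack-cookie check and suppressed by the caller below.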
def extract_insn_nzxor_characteristic_features(
fh: FunctionHandle,
bb: BBHandle,
ih: InsnHandle,
) -> Iterator[Tuple[Feature, Address]]:
f: ghidra.program.database.function.FunctionDB = fh.inner
insn: ghidra.program.database.code.InstructionDB = ih.inner
if "XOR" not in insn.getMnemonicString():
return
if capa.features.extractors.ghidra.helpers.is_stack_referenced(insn):
return
if capa.features.extractors.ghidra.helpers.is_zxor(insn):
return
if check_nzxor_security_cookie_delta(f, insn):
return
yield Characteristic("nzxor"), ih.address
def extract_features(
fh: FunctionHandle,
bb: BBHandle,
insn: InsnHandle,
) -> Iterator[Tuple[Feature, Address]]:
for insn_handler in INSTRUCTION_HANDLERS:
for feature, addr in insn_handler(fh, bb, insn):
yield feature, addr
INSTRUCTION_HANDLERS = (
extract_insn_api_features,
extract_insn_number_features,
extract_insn_bytes_features,
extract_insn_string_features,
extract_insn_offset_features,
extract_insn_nzxor_characteristic_features,
extract_insn_mnemonic_features,
extract_insn_obfs_call_plus_5_characteristic_features,
extract_insn_peb_access_characteristic_features,
extract_insn_cross_section_cflow,
extract_insn_segment_access_features,
extract_function_calls_from,
extract_function_indirect_call_characteristic_features,
)
def main():
""" """
features = []
from capa.features.extractors.ghidra.extractor import GhidraFeatureExtractor
for fh in GhidraFeatureExtractor().get_functions():
for bb in capa.features.extractors.ghidra.helpers.get_function_blocks(fh):
for insn in capa.features.extractors.ghidra.helpers.get_insn_in_range(bb):
features.extend(list(extract_features(fh, bb, insn)))
import pprint
pprint.pprint(features) # noqa: T203
if __name__ == "__main__":
main()

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -41,64 +41,35 @@ def is_ordinal(symbol: str) -> bool:
return False
def generate_symbols(dll: str, symbol: str, include_dll=False) -> Iterator[str]:
def generate_symbols(dll: str, symbol: str) -> Iterator[str]:
"""
for a given dll and symbol name, generate variants.
we over-generate features to make matching easier.
these include:
- CreateFileA
- CreateFile
- ws2_32.#1
note that since capa v7 only `import` features and APIs called via ordinal include DLL names:
- kernel32.CreateFileA
- kernel32.CreateFile
- ws2_32.#1
for `api` features dll names are good for documentation but not used during matching
- CreateFileA
- CreateFile
"""
# normalize dll name
dll = dll.lower()
# trim extensions observed in dynamic traces
dll = dll[0:-4] if dll.endswith(".dll") else dll
dll = dll[0:-4] if dll.endswith(".drv") else dll
if include_dll or is_ordinal(symbol):
# ws2_32.#1
# kernel32.CreateFileA
yield f"{dll}.{symbol}"
# kernel32.CreateFileA
yield "%s.%s" % (dll, symbol)
if not is_ordinal(symbol):
# CreateFileA
yield symbol
if is_aw_function(symbol):
if include_dll:
# kernel32.CreateFile
yield f"{dll}.{symbol[:-1]}"
if is_aw_function(symbol):
# kernel32.CreateFile
yield "%s.%s" % (dll, symbol[:-1])
if not is_ordinal(symbol):
# CreateFile
yield symbol[:-1]
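# illustrative usage, assuming the newer include_dll signature shown above:
#   >>> list(generate_symbols("Kernel32.dll", "CreateFileA", include_dll=True))
#   ['kernel32.CreateFileA', 'CreateFileA', 'kernel32.CreateFile', 'CreateFile']
#   >>> list(generate_symbols("ws2_32.dll", "#1"))
#   ['ws2_32.#1']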
def reformat_forwarded_export_name(forwarded_name: str) -> str:
"""
a forwarded export has a DLL name/path and symbol name.
we want the former to be lowercase, and the latter to be verbatim.
"""
# use rpartition so we can split on separator between dll and name.
# the dll name can be a full path, like in the case of
# ef64d6d7c34250af8e21a10feb931c9b
# which i assume means the path can have embedded periods.
# so we don't want the first period, we want the last.
forwarded_dll, _, forwarded_symbol = forwarded_name.rpartition(".")
forwarded_dll = forwarded_dll.lower()
return f"{forwarded_dll}.{forwarded_symbol}"
def all_zeros(bytez: bytes) -> bool:
return all(b == 0 for b in builtins.bytes(bytez))

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -34,7 +34,7 @@ def get_printable_len(op: idaapi.op_t) -> int:
elif op.dtype == idaapi.dt_qword:
chars = struct.pack("<Q", op_val)
else:
raise ValueError(f"Unhandled operand data type 0x{op.dtype:x}.")
raise ValueError("Unhandled operand data type 0x%x." % op.dtype)
def is_printable_ascii(chars_: bytes):
return all(c < 127 and chr(c) in string.printable for c in chars_)
@@ -104,3 +104,19 @@ BASIC_BLOCK_HANDLERS = (
extract_bb_tight_loop,
extract_bb_stackstring,
)
def main():
features = []
for fhandle in helpers.get_functions(skip_thunks=True, skip_libs=True):
f: idaapi.func_t = fhandle.inner
for bb in idaapi.FlowChart(f, flags=idaapi.FC_PREDS):
features.extend(list(extract_features(fhandle, bb)))
import pprint
pprint.pprint(features)
if __name__ == "__main__":
main()

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,7 +8,6 @@
from typing import List, Tuple, Iterator
import idaapi
import ida_nalt
import capa.ida.helpers
import capa.features.extractors.elf
@@ -19,22 +18,12 @@ import capa.features.extractors.ida.function
import capa.features.extractors.ida.basicblock
from capa.features.common import Feature
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import (
BBHandle,
InsnHandle,
SampleHashes,
FunctionHandle,
StaticFeatureExtractor,
)
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle, FeatureExtractor
class IdaFeatureExtractor(StaticFeatureExtractor):
class IdaFeatureExtractor(FeatureExtractor):
def __init__(self):
super().__init__(
hashes=SampleHashes(
md5=ida_nalt.retrieve_input_file_md5(), sha1="(unknown)", sha256=ida_nalt.retrieve_input_file_sha256()
)
)
super().__init__()
self.global_features: List[Tuple[Feature, Address]] = []
self.global_features.extend(capa.features.extractors.ida.file.extract_file_format())
self.global_features.extend(capa.features.extractors.ida.global_.extract_os())

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -12,7 +12,6 @@ from typing import Tuple, Iterator
import idc
import idaapi
import idautils
import ida_entry
import capa.features.extractors.common
import capa.features.extractors.helpers
@@ -22,14 +21,12 @@ from capa.features.file import Export, Import, Section, FunctionName
from capa.features.common import FORMAT_PE, FORMAT_ELF, Format, String, Feature, Characteristic
from capa.features.address import NO_ADDRESS, Address, FileOffsetAddress, AbsoluteVirtualAddress
MAX_OFFSET_PE_AFTER_MZ = 0x200
def check_segment_for_pe(seg: idaapi.segment_t) -> Iterator[Tuple[int, int]]:
"""check segment for embedded PE
adapted for IDA from:
https://github.com/vivisect/vivisect/blob/91e8419a861f49779f18316f155311967e696836/PE/carve.py#L25
https://github.com/vivisect/vivisect/blob/7be4037b1cecc4551b397f840405a1fc606f9b53/PE/carve.py#L19
"""
seg_max = seg.end_ea
mz_xor = [
@@ -43,14 +40,13 @@ def check_segment_for_pe(seg: idaapi.segment_t) -> Iterator[Tuple[int, int]]:
todo = []
for mzx, pex, i in mz_xor:
# find all segment offsets containing XOR'd "MZ" bytes
for off in capa.features.extractors.ida.helpers.find_byte_sequence(seg.start_ea, seg.end_ea, mzx):
todo.append((off, mzx, pex, i))
while len(todo):
off, mzx, pex, i = todo.pop()
# MZ header has one field we will check e_lfanew is at 0x3c
# The MZ header has one field we will check e_lfanew is at 0x3c
e_lfanew = off + 0x3C
if seg_max < (e_lfanew + 4):
@@ -58,10 +54,6 @@ def check_segment_for_pe(seg: idaapi.segment_t) -> Iterator[Tuple[int, int]]:
newoff = struct.unpack("<I", capa.features.extractors.helpers.xor_static(idc.get_bytes(e_lfanew, 4), i))[0]
# assume XOR'd "PE" bytes exist within threshold
if newoff > MAX_OFFSET_PE_AFTER_MZ:
continue
peoff = off + newoff
if seg_max < (peoff + 2):
continue
@@ -69,6 +61,9 @@ def check_segment_for_pe(seg: idaapi.segment_t) -> Iterator[Tuple[int, int]]:
if idc.get_bytes(peoff, 2) == pex:
yield off, i
for nextres in capa.features.extractors.ida.helpers.find_byte_sequence(off + 1, seg.end_ea, mzx):
todo.append((nextres, mzx, pex, i))
def extract_file_embedded_pe() -> Iterator[Tuple[Feature, Address]]:
"""extract embedded PE features
@@ -84,14 +79,8 @@ def extract_file_embedded_pe() -> Iterator[Tuple[Feature, Address]]:
def extract_file_export_names() -> Iterator[Tuple[Feature, Address]]:
"""extract function exports"""
for _, ordinal, ea, name in idautils.Entries():
forwarded_name = ida_entry.get_entry_forwarder(ordinal)
if forwarded_name is None:
yield Export(name), AbsoluteVirtualAddress(ea)
else:
forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
yield Export(forwarded_name), AbsoluteVirtualAddress(ea)
yield Characteristic("forwarded export"), AbsoluteVirtualAddress(ea)
for _, _, ea, name in idautils.Entries():
yield Export(name), AbsoluteVirtualAddress(ea)
def extract_file_import_names() -> Iterator[Tuple[Feature, Address]]:
@@ -110,20 +99,20 @@ def extract_file_import_names() -> Iterator[Tuple[Feature, Address]]:
if info[1] and info[2]:
# e.g. in mimikatz: ('cabinet', 'FCIAddFile', 11L)
# extract by name here and by ordinal below
for name in capa.features.extractors.helpers.generate_symbols(info[0], info[1], include_dll=True):
for name in capa.features.extractors.helpers.generate_symbols(info[0], info[1]):
yield Import(name), addr
dll = info[0]
symbol = f"#{info[2]}"
symbol = "#%d" % (info[2])
elif info[1]:
dll = info[0]
symbol = info[1]
elif info[2]:
dll = info[0]
symbol = f"#{info[2]}"
symbol = "#%d" % (info[2])
else:
continue
for name in capa.features.extractors.helpers.generate_symbols(dll, symbol, include_dll=True):
for name in capa.features.extractors.helpers.generate_symbols(dll, symbol):
yield Import(name), addr
for ea, info in capa.features.extractors.ida.helpers.get_file_externs().items():
@@ -187,7 +176,7 @@ def extract_file_format() -> Iterator[Tuple[Feature, Address]]:
# no file type to return when processing a binary file, but we want to continue processing
return
else:
raise NotImplementedError(f"unexpected file format: {file_info.filetype}")
raise NotImplementedError("unexpected file format: %d" % file_info.filetype)
def extract_features() -> Iterator[Tuple[Feature, Address]]:
@@ -206,3 +195,14 @@ FILE_HANDLERS = (
extract_file_function_names,
extract_file_format,
)
def main():
""" """
import pprint
pprint.pprint(list(extract_features()))
if __name__ == "__main__":
main()

View File

@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -50,3 +50,18 @@ def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call)
def main():
""" """
features = []
for fhandle in capa.features.extractors.ida.helpers.get_functions(skip_thunks=True, skip_libs=True):
features.extend(list(extract_features(fhandle)))
import pprint
pprint.pprint(features)
if __name__ == "__main__":
main()

View File

@@ -1,10 +1,3 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
import contextlib
from typing import Tuple, Iterator

View File

@@ -1,11 +1,10 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import functools
from typing import Any, Dict, Tuple, Iterator, Optional
import idc
@@ -26,10 +25,9 @@ def find_byte_sequence(start: int, end: int, seq: bytes) -> Iterator[int]:
end: max virtual address
seq: bytes to search e.g. b"\x01\x03"
"""
seqstr = " ".join([f"{b:02x}" for b in seq])
seqstr = " ".join(["%02x" % b for b in seq])
while True:
# TODO(mike-hunhoff): find_binary is deprecated. Please use ida_bytes.bin_search() instead.
# https://github.com/mandiant/capa/issues/1606
# TODO find_binary: Deprecated. Please use ida_bytes.bin_search() instead.
ea = idaapi.find_binary(start, end, seqstr, 0, idaapi.SEARCH_DOWN)
if ea == idaapi.BADADDR:
break
@@ -82,22 +80,9 @@ def get_segment_buffer(seg: idaapi.segment_t) -> bytes:
return buff if buff else b""
def inspect_import(imports, library, ea, function, ordinal):
if function and function.startswith("__imp_"):
# handle mangled PE imports
function = function[len("__imp_") :]
if function and "@@" in function:
# handle mangled ELF imports, like "fopen@@glibc_2.2.5"
function, _, _ = function.partition("@@")
imports[ea] = (library.lower(), function, ordinal)
return True
def get_file_imports() -> Dict[int, Tuple[str, str, int]]:
"""get file imports"""
imports: Dict[int, Tuple[str, str, int]] = {}
imports = {}
for idx in range(idaapi.get_import_module_qty()):
library = idaapi.get_import_module_name(idx)
@@ -105,15 +90,22 @@ def get_file_imports() -> Dict[int, Tuple[str, str, int]]:
if not library:
continue
# IDA uses section names for the library of ELF imports, like ".dynsym".
# These are not useful to us; we may need to expand this list over time
# TODO(williballenthin): find all section names used by IDA
# https://github.com/mandiant/capa/issues/1419
if library == ".dynsym":
library = ""
# IDA uses section names for the library of ELF imports, like ".dynsym"
library = library.lstrip(".")
cb = functools.partial(inspect_import, imports, library)
idaapi.enum_import_names(idx, cb)
def inspect_import(ea, function, ordinal):
if function and function.startswith("__imp_"):
# handle mangled PE imports
function = function[len("__imp_") :]
if function and "@@" in function:
# handle mangled ELF imports, like "fopen@@glibc_2.2.5"
function, _, _ = function.partition("@@")
imports[ea] = (library.lower(), function, ordinal)
return True
idaapi.enum_import_names(idx, inspect_import)
return imports
@@ -122,7 +114,7 @@ def get_file_externs() -> Dict[int, Tuple[str, str, int]]:
externs = {}
for seg in get_segments(skip_header_segments=True):
if seg.type != ida_segment.SEG_XTRN:
if not (seg.type == ida_segment.SEG_XTRN):
continue
for ea in idautils.Functions(seg.start_ea, seg.end_ea):
@@ -275,18 +267,20 @@ def is_op_offset(insn: idaapi.insn_t, op: idaapi.op_t) -> bool:
def is_sp_modified(insn: idaapi.insn_t) -> bool:
"""determine if instruction modifies SP, ESP, RSP"""
return any(
op.reg == idautils.procregs.sp.reg and is_op_write(insn, op)
for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,))
)
for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,)):
if op.reg == idautils.procregs.sp.reg and is_op_write(insn, op):
# register is stack and written
return True
return False
def is_bp_modified(insn: idaapi.insn_t) -> bool:
"""check if instruction modifies BP, EBP, RBP"""
return any(
op.reg == idautils.procregs.bp.reg and is_op_write(insn, op)
for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,))
)
for op in get_insn_ops(insn, target_ops=(idaapi.o_reg,)):
if op.reg == idautils.procregs.bp.reg and is_op_write(insn, op):
# register is base and written
return True
return False
def is_frame_register(reg: int) -> bool:
@@ -332,7 +326,10 @@ def mask_op_val(op: idaapi.op_t) -> int:
def is_function_recursive(f: idaapi.func_t) -> bool:
"""check if function is recursive"""
return any(f.contains(ref) for ref in idautils.CodeRefsTo(f.start_ea, True))
for ref in idautils.CodeRefsTo(f.start_ea, True):
if f.contains(ref):
return True
return False
def is_basic_block_tight_loop(bb: idaapi.BasicBlock) -> bool:
@@ -381,7 +378,8 @@ def find_data_reference_from_insn(insn: idaapi.insn_t, max_depth: int = 10) -> i
def get_function_blocks(f: idaapi.func_t) -> Iterator[idaapi.BasicBlock]:
"""yield basic blocks contained in specified function"""
# leverage idaapi.FC_NOEXT flag to ignore useless external blocks referenced by the function
yield from idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT))
for block in idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT)):
yield block
def is_basic_block_return(bb: idaapi.BasicBlock) -> bool:
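A sketch of how the flow-chart iteration above is typically driven from an interactive IDAPython session (this assumes IDA Pro is running; idc.here() is the address under the cursor):

import idc
import idaapi

f = idaapi.get_func(idc.here())  # function containing the cursor
# FC_NOEXT skips external blocks, matching get_function_blocks above
for bb in idaapi.FlowChart(f, flags=(idaapi.FC_PREDS | idaapi.FC_NOEXT)):
    print(hex(bb.start_ea), hex(bb.end_ea))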
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -73,7 +73,7 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle)
"""
insn: idaapi.insn_t = ih.inner
if insn.get_canon_mnem() not in ("call", "jmp"):
if not insn.get_canon_mnem() in ("call", "jmp"):
return
# check calls to imported functions
@@ -172,7 +172,7 @@ def extract_insn_bytes_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandl
if ref != insn.ea:
extracted_bytes = capa.features.extractors.ida.helpers.read_bytes_at(ref, MAX_BYTES_FEATURE_SIZE)
if extracted_bytes and not capa.features.extractors.helpers.all_zeros(extracted_bytes):
if not capa.features.extractors.ida.helpers.find_string_at(ref):
if not capa.features.extractors.ida.helpers.find_string_at(insn.ea):
# don't extract byte features for obvious strings
yield Bytes(extracted_bytes), ih.address
@@ -216,7 +216,7 @@ def extract_insn_offset_features(
p_info = capa.features.extractors.ida.helpers.get_op_phrase_info(op)
op_off = p_info.get("offset")
op_off = p_info.get("offset", None)
if op_off is None:
continue
@@ -398,16 +398,14 @@ def extract_insn_peb_access_characteristic_features(
if insn.itype not in (idaapi.NN_push, idaapi.NN_mov):
return
if all(op.type != idaapi.o_mem for op in insn.ops):
if all(map(lambda op: op.type != idaapi.o_mem, insn.ops)):
# try to optimize for only memory references
return
disasm = idc.GetDisasm(insn.ea)
if " fs:30h" in disasm or " gs:60h" in disasm:
# TODO(mike-hunhoff): use proper IDA API for fetching segment access
# scanning the disassembly text is a hack.
# https://github.com/mandiant/capa/issues/1605
# TODO: replace above with proper IDA
yield Characteristic("peb access"), ih.address
@@ -421,22 +419,18 @@ def extract_insn_segment_access_features(
"""
insn: idaapi.insn_t = ih.inner
if all(op.type != idaapi.o_mem for op in insn.ops):
if all(map(lambda op: op.type != idaapi.o_mem, insn.ops)):
# try to optimize for only memory references
return
disasm = idc.GetDisasm(insn.ea)
if " fs:" in disasm:
# TODO(mike-hunhoff): use proper IDA API for fetching segment access
# scanning the disassembly text is a hack.
# https://github.com/mandiant/capa/issues/1605
# TODO: replace above with proper IDA
yield Characteristic("fs access"), ih.address
if " gs:" in disasm:
# TODO(mike-hunhoff): use proper IDA API for fetching segment access
# scanning the disassembly text is a hack.
# https://github.com/mandiant/capa/issues/1605
# TODO: replace above with proper IDA
yield Characteristic("gs access"), ih.address
@@ -447,7 +441,7 @@ def extract_insn_cross_section_cflow(
insn: idaapi.insn_t = ih.inner
for ref in idautils.CodeRefsFrom(insn.ea, False):
if ref in get_imports(fh.ctx):
if ref in get_imports(fh.ctx).keys():
# ignore API calls
continue
if not idaapi.getseg(ref):
@@ -507,3 +501,20 @@ INSTRUCTION_HANDLERS = (
extract_function_calls_from,
extract_function_indirect_call_characteristic_features,
)
def main():
""" """
features = []
for f in capa.features.extractors.ida.helpers.get_functions(skip_thunks=True, skip_libs=True):
for bb in idaapi.FlowChart(f, flags=idaapi.FC_PREDS):
for insn in capa.features.extractors.ida.helpers.get_instructions_in_range(bb.start_ea, bb.end_ea):
features.extend(list(extract_features(f, bb, insn)))
import pprint
pprint.pprint(features)
if __name__ == "__main__":
main()
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
View File
@@ -1,28 +1,9 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
from typing import Dict, List, Tuple, Union
from typing import Dict, List, Tuple
from dataclasses import dataclass
from typing_extensions import TypeAlias
from capa.features.common import Feature
from capa.features.address import NO_ADDRESS, Address, ThreadAddress, ProcessAddress, DynamicCallAddress
from capa.features.extractors.base_extractor import (
BBHandle,
CallHandle,
InsnHandle,
SampleHashes,
ThreadHandle,
ProcessHandle,
FunctionHandle,
StaticFeatureExtractor,
DynamicFeatureExtractor,
)
from capa.features.address import NO_ADDRESS, Address
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle, FeatureExtractor
@dataclass
@@ -43,7 +24,7 @@ class FunctionFeatures:
@dataclass
class NullStaticFeatureExtractor(StaticFeatureExtractor):
class NullFeatureExtractor(FeatureExtractor):
"""
An extractor that extracts some user-provided features.
@@ -51,7 +32,6 @@ class NullStaticFeatureExtractor(StaticFeatureExtractor):
"""
base_address: Address
sample_hashes: SampleHashes
global_features: List[Feature]
file_features: List[Tuple[Address, Feature]]
functions: Dict[Address, FunctionFeatures]
@@ -59,9 +39,6 @@ class NullStaticFeatureExtractor(StaticFeatureExtractor):
def get_base_address(self):
return self.base_address
def get_sample_hashes(self) -> SampleHashes:
return self.sample_hashes
def extract_global_features(self):
for feature in self.global_features:
yield feature, NO_ADDRESS
@@ -93,78 +70,3 @@ class NullStaticFeatureExtractor(StaticFeatureExtractor):
def extract_insn_features(self, f, bb, insn):
for address, feature in self.functions[f.address].basic_blocks[bb.address].instructions[insn.address].features:
yield feature, address
@dataclass
class CallFeatures:
name: str
features: List[Tuple[Address, Feature]]
@dataclass
class ThreadFeatures:
features: List[Tuple[Address, Feature]]
calls: Dict[Address, CallFeatures]
@dataclass
class ProcessFeatures:
features: List[Tuple[Address, Feature]]
threads: Dict[Address, ThreadFeatures]
name: str
@dataclass
class NullDynamicFeatureExtractor(DynamicFeatureExtractor):
base_address: Address
sample_hashes: SampleHashes
global_features: List[Feature]
file_features: List[Tuple[Address, Feature]]
processes: Dict[Address, ProcessFeatures]
def extract_global_features(self):
for feature in self.global_features:
yield feature, NO_ADDRESS
def get_sample_hashes(self) -> SampleHashes:
return self.sample_hashes
def extract_file_features(self):
for address, feature in self.file_features:
yield feature, address
def get_processes(self):
for address in sorted(self.processes.keys()):
assert isinstance(address, ProcessAddress)
yield ProcessHandle(address=address, inner={})
def extract_process_features(self, ph):
for addr, feature in self.processes[ph.address].features:
yield feature, addr
def get_process_name(self, ph) -> str:
return self.processes[ph.address].name
def get_threads(self, ph):
for address in sorted(self.processes[ph.address].threads.keys()):
assert isinstance(address, ThreadAddress)
yield ThreadHandle(address=address, inner={})
def extract_thread_features(self, ph, th):
for addr, feature in self.processes[ph.address].threads[th.address].features:
yield feature, addr
def get_calls(self, ph, th):
for address in sorted(self.processes[ph.address].threads[th.address].calls.keys()):
assert isinstance(address, DynamicCallAddress)
yield CallHandle(address=address, inner={})
def extract_call_features(self, ph, th, ch):
for address, feature in self.processes[ph.address].threads[th.address].calls[ch.address].features:
yield feature, address
def get_call_name(self, ph, th, ch) -> str:
return self.processes[ph.address].threads[th.address].calls[ch.address].name
NullFeatureExtractor: TypeAlias = Union[NullStaticFeatureExtractor, NullDynamicFeatureExtractor]
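The dynamic side removed above uses the same nested-record idea one level deeper: processes hold threads, threads hold calls, and each level carries its own feature list. A toy sketch of that shape, with plain ints standing in for the address types (class names simplified for illustration):

from dataclasses import dataclass, field
from typing import Dict, List, Tuple

@dataclass
class Call:
    name: str
    features: List[Tuple[int, str]] = field(default_factory=list)

@dataclass
class Thread:
    calls: Dict[int, Call] = field(default_factory=dict)

@dataclass
class Process:
    name: str
    threads: Dict[int, Thread] = field(default_factory=dict)

p = Process(name="a.exe", threads={1: Thread(calls={0: Call("CreateFileA")})})
assert p.threads[1].calls[0].name == "CreateFileA"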
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -7,7 +7,6 @@
# See the License for the specific language governing permissions and limitations under the License.
import logging
from pathlib import Path
import pefile
@@ -19,7 +18,7 @@ import capa.features.extractors.strings
from capa.features.file import Export, Import, Section
from capa.features.common import OS, ARCH_I386, FORMAT_PE, ARCH_AMD64, OS_WINDOWS, Arch, Format, Characteristic
from capa.features.address import NO_ADDRESS, FileOffsetAddress, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import SampleHashes, StaticFeatureExtractor
from capa.features.extractors.base_extractor import FeatureExtractor
logger = logging.getLogger(__name__)
@@ -40,20 +39,8 @@ def extract_file_export_names(pe, **kwargs):
name = export.name.partition(b"\x00")[0].decode("ascii")
except UnicodeDecodeError:
continue
if export.forwarder is None:
va = base_address + export.address
yield Export(name), AbsoluteVirtualAddress(va)
else:
try:
forwarded_name = export.forwarder.partition(b"\x00")[0].decode("ascii")
except UnicodeDecodeError:
continue
forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
va = base_address + export.address
yield Export(forwarded_name), AbsoluteVirtualAddress(va)
yield Characteristic("forwarded export"), AbsoluteVirtualAddress(va)
va = base_address + export.address
yield Export(name), AbsoluteVirtualAddress(va)
def extract_file_import_names(pe, **kwargs):
@@ -77,14 +64,14 @@ def extract_file_import_names(pe, **kwargs):
for imp in dll.imports:
if imp.import_by_ordinal:
impname = f"#{imp.ordinal}"
impname = "#%s" % imp.ordinal
else:
try:
impname = imp.name.partition(b"\x00")[0].decode("ascii")
except UnicodeDecodeError:
continue
for name in capa.features.extractors.helpers.generate_symbols(modname, impname, include_dll=True):
for name in capa.features.extractors.helpers.generate_symbols(modname, impname):
yield Import(name), AbsoluteVirtualAddress(imp.address)
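Both the export and import paths above lean on one idiom: treat the raw name as NUL-terminated ASCII and skip entries that fail to decode. Isolated here with made-up bytes:

raw = b"CreateFileA\x00\xfe\xfe"  # junk after the terminator
try:
    name = raw.partition(b"\x00")[0].decode("ascii")
except UnicodeDecodeError:
    name = None  # the extractors `continue` past undecodable names
assert name == "CreateFileA"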
@@ -185,22 +172,24 @@ GLOBAL_HANDLERS = (
)
class PefileFeatureExtractor(StaticFeatureExtractor):
def __init__(self, path: Path):
super().__init__(hashes=SampleHashes.from_bytes(path.read_bytes()))
self.path: Path = path
self.pe = pefile.PE(str(path))
class PefileFeatureExtractor(FeatureExtractor):
def __init__(self, path: str):
super().__init__()
self.path = path
self.pe = pefile.PE(path)
def get_base_address(self):
return AbsoluteVirtualAddress(self.pe.OPTIONAL_HEADER.ImageBase)
def extract_global_features(self):
buf = Path(self.path).read_bytes()
with open(self.path, "rb") as f:
buf = f.read()
yield from extract_global_features(self.pe, buf)
def extract_file_features(self):
buf = Path(self.path).read_bytes()
with open(self.path, "rb") as f:
buf = f.read()
yield from extract_file_features(self.pe, buf)
View File
@@ -1,6 +1,6 @@
# strings code from FLOSS, https://github.com/mandiant/flare-floss
#
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,7 +9,6 @@
# See the License for the specific language governing permissions and limitations under the License.
import re
import contextlib
from collections import namedtuple
ASCII_BYTE = r" !\"#\$%&\'\(\)\*\+,-\./0123456789:;<=>\?@ABCDEFGHIJKLMNOPQRSTUVWXYZ\[\]\^_`abcdefghijklmnopqrstuvwxyz\{\|\}\\\~\t".encode(
@@ -82,5 +81,24 @@ def extract_unicode_strings(buf, n=4):
reg = b"((?:[%s]\x00){%d,})" % (ASCII_BYTE, n)
r = re.compile(reg)
for match in r.finditer(buf):
with contextlib.suppress(UnicodeDecodeError):
try:
yield String(match.group().decode("utf-16"), match.start())
except UnicodeDecodeError:
pass
def main():
import sys
with open(sys.argv[1], "rb") as f:
b = f.read()
for s in extract_ascii_strings(b):
print("0x{:x}: {:s}".format(s.offset, s.s))
for s in extract_unicode_strings(b):
print("0x{:x}: {:s}".format(s.offset, s.s))
if __name__ == "__main__":
main()
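The unicode regex above encodes "at least n printable ASCII characters, each followed by a NUL byte", which is exactly UTF-16LE text. A self-contained check of the idea, using a simplified character class (the module's ASCII_BYTE set is broader):

import re

def find_utf16le(buf: bytes, n: int = 4):
    for m in re.finditer(b"((?:[ -~]\x00){%d,})" % n, buf):
        yield m.start(), m.group().decode("utf-16-le")

buf = b"\x01\x02" + "capa".encode("utf-16-le") + b"\xff"
assert list(find_utf16le(buf)) == [(2, "capa")]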
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -92,6 +92,7 @@ def is_mov_imm_to_stack(instr: envi.archs.i386.disasm.i386Opcode) -> bool:
if not src.isImmed():
return False
# TODO what about 64-bit operands?
if not isinstance(dst, envi.archs.i386.disasm.i386SibOper) and not isinstance(
dst, envi.archs.i386.disasm.i386RegMemOper
):
@@ -120,7 +121,7 @@ def get_printable_len(oper: envi.archs.i386.disasm.i386ImmOper) -> int:
elif oper.tsize == 8:
chars = struct.pack("<Q", oper.imm)
else:
raise ValueError(f"unexpected oper.tsize: {oper.tsize}")
raise ValueError("unexpected oper.tsize: %d" % (oper.tsize))
if is_printable_ascii(chars):
return oper.tsize
@@ -140,7 +141,7 @@ def is_printable_ascii(chars: bytes) -> bool:
def is_printable_utf16le(chars: bytes) -> bool:
if all(c == 0x0 for c in chars[1::2]):
if all(c == b"\x00" for c in chars[1::2]):
return is_printable_ascii(chars[::2])
return False
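A side note on this hunk: iterating a bytes object yields int in Python 3, so the two comparisons above are not equivalent. A quick demonstration:

chars = "AB".encode("utf-16-le")  # b'A\x00B\x00'
assert all(c == 0x0 for c in chars[1::2])          # ints compare equal to 0x0
assert not any(c == b"\x00" for c in chars[1::2])  # an int never equals a bytes literal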
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -6,8 +6,7 @@
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Any, Dict, List, Tuple, Iterator
from pathlib import Path
from typing import List, Tuple, Iterator
import viv_utils
import viv_utils.flirt
@@ -20,28 +19,23 @@ import capa.features.extractors.viv.function
import capa.features.extractors.viv.basicblock
from capa.features.common import Feature
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.base_extractor import (
BBHandle,
InsnHandle,
SampleHashes,
FunctionHandle,
StaticFeatureExtractor,
)
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle, FeatureExtractor
logger = logging.getLogger(__name__)
class VivisectFeatureExtractor(StaticFeatureExtractor):
def __init__(self, vw, path: Path, os):
class VivisectFeatureExtractor(FeatureExtractor):
def __init__(self, vw, path):
super().__init__()
self.vw = vw
self.path = path
self.buf = path.read_bytes()
super().__init__(hashes=SampleHashes.from_bytes(self.buf))
with open(self.path, "rb") as f:
self.buf = f.read()
# pre-compute these because we'll yield them at *every* scope.
self.global_features: List[Tuple[Feature, Address]] = []
self.global_features.extend(capa.features.extractors.viv.file.extract_file_format(self.buf))
self.global_features.extend(capa.features.extractors.common.extract_os(self.buf, os))
self.global_features.extend(capa.features.extractors.common.extract_os(self.buf))
self.global_features.extend(capa.features.extractors.viv.global_.extract_arch(self.vw))
def get_base_address(self):
@@ -55,11 +49,8 @@ class VivisectFeatureExtractor(StaticFeatureExtractor):
yield from capa.features.extractors.viv.file.extract_features(self.vw, self.buf)
def get_functions(self) -> Iterator[FunctionHandle]:
cache: Dict[str, Any] = {}
for va in sorted(self.vw.getFunctions()):
yield FunctionHandle(
address=AbsoluteVirtualAddress(va), inner=viv_utils.Function(self.vw, va), ctx={"cache": cache}
)
yield FunctionHandle(address=AbsoluteVirtualAddress(va), inner=viv_utils.Function(self.vw, va))
def extract_function_features(self, fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
yield from capa.features.extractors.viv.function.extract_features(fh)
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -8,7 +8,6 @@
from typing import Tuple, Iterator
import PE.carve as pe_carve # vivisect PE
import vivisect
import viv_utils
import viv_utils.flirt
@@ -17,7 +16,7 @@ import capa.features.extractors.common
import capa.features.extractors.helpers
import capa.features.extractors.strings
from capa.features.file import Export, Import, Section, FunctionName
from capa.features.common import Feature, Characteristic
from capa.features.common import String, Feature, Characteristic
from capa.features.address import Address, FileOffsetAddress, AbsoluteVirtualAddress
@@ -26,35 +25,10 @@ def extract_file_embedded_pe(buf, **kwargs) -> Iterator[Tuple[Feature, Address]]
yield Characteristic("embedded pe"), FileOffsetAddress(offset)
def get_first_vw_filename(vw: vivisect.VivWorkspace):
# vivisect associates metadata with each file that its loaded into the workspace.
# capa only loads a single file into each workspace.
# so to access the metadata for the file in question, we can just take the first one.
# otherwise, we'd have to pass around the module name of the file we're analyzing,
# which is a pain.
#
# so this is a simplifying assumption.
return next(iter(vw.filemeta.keys()))
def extract_file_export_names(vw: vivisect.VivWorkspace, **kwargs) -> Iterator[Tuple[Feature, Address]]:
def extract_file_export_names(vw, **kwargs) -> Iterator[Tuple[Feature, Address]]:
for va, _, name, _ in vw.getExports():
yield Export(name), AbsoluteVirtualAddress(va)
if vw.getMeta("Format") == "pe":
pe = vw.parsedbin
baseaddr = pe.IMAGE_NT_HEADERS.OptionalHeader.ImageBase
for rva, _, forwarded_name in vw.getFileMeta(get_first_vw_filename(vw), "forwarders"):
try:
forwarded_name = forwarded_name.partition(b"\x00")[0].decode("ascii")
except UnicodeDecodeError:
continue
forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(forwarded_name)
va = baseaddr + rva
yield Export(forwarded_name), AbsoluteVirtualAddress(va)
yield Characteristic("forwarded export"), AbsoluteVirtualAddress(va)
def extract_file_import_names(vw, **kwargs) -> Iterator[Tuple[Feature, Address]]:
"""
@@ -70,10 +44,10 @@ def extract_file_import_names(vw, **kwargs) -> Iterator[Tuple[Feature, Address]]
modname, impname = tinfo.split(".", 1)
if is_viv_ord_impname(impname):
# replace ord prefix with #
impname = "#" + impname[len("ord") :]
impname = "#%s" % impname[len("ord") :]
addr = AbsoluteVirtualAddress(va)
for name in capa.features.extractors.helpers.generate_symbols(modname, impname, include_dll=True):
for name in capa.features.extractors.helpers.generate_symbols(modname, impname):
yield Import(name), addr
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -11,11 +11,9 @@ import envi
import viv_utils
import vivisect.const
from capa.features.file import FunctionName
from capa.features.common import Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors import loops
from capa.features.extractors.elf import SymTab
from capa.features.extractors.base_extractor import FunctionHandle
@@ -32,28 +30,6 @@ def interface_extract_function_XXX(fh: FunctionHandle) -> Iterator[Tuple[Feature
raise NotImplementedError
def extract_function_symtab_names(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
if fh.inner.vw.metadata["Format"] == "elf":
# the file's symbol table gets added to the metadata of the vivisect workspace.
# this is in order to eliminate the computational overhead of refetching symtab each time.
if "symtab" not in fh.ctx["cache"]:
try:
fh.ctx["cache"]["symtab"] = SymTab.from_viv(fh.inner.vw.parsedbin)
except Exception:
fh.ctx["cache"]["symtab"] = None
symtab = fh.ctx["cache"]["symtab"]
if symtab:
for symbol in symtab.get_symbols():
sym_name = symtab.get_name(symbol)
sym_value = symbol.value
sym_info = symbol.info
STT_FUNC = 0x2
if sym_value == fh.address and sym_info & STT_FUNC != 0:
yield FunctionName(sym_name), fh.address
def extract_function_calls_to(fhandle: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
f: viv_utils.Function = fhandle.inner
for src, _, _, _ in f.vw.getXrefsTo(f.va, rtype=vivisect.const.REF_CODE):
@@ -103,8 +79,4 @@ def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]:
yield feature, addr
FUNCTION_HANDLERS = (
extract_function_symtab_names,
extract_function_calls_to,
extract_function_loop,
)
FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop)
View File
@@ -1,13 +1,9 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
from typing import Tuple, Iterator
import envi.archs.i386
import envi.archs.amd64
from capa.features.common import ARCH_I386, ARCH_AMD64, Arch, Feature
from capa.features.address import NO_ADDRESS, Address
@@ -15,11 +11,10 @@ logger = logging.getLogger(__name__)
def extract_arch(vw) -> Iterator[Tuple[Feature, Address]]:
arch = vw.getMeta("Architecture")
if arch == "amd64":
if isinstance(vw.arch, envi.archs.amd64.Amd64Module):
yield Arch(ARCH_AMD64), NO_ADDRESS
elif arch == "i386":
elif isinstance(vw.arch, envi.archs.i386.i386Module):
yield Arch(ARCH_I386), NO_ADDRESS
else:
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -7,7 +7,7 @@
# See the License for the specific language governing permissions and limitations under the License.
import collections
from typing import Set, List, Deque, Tuple, Optional
from typing import Set, List, Deque, Tuple, Union, Optional
import envi
import vivisect.const
@@ -71,7 +71,7 @@ class NotFoundError(Exception):
pass
def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Optional[int]]:
def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Union[int, None]]:
"""
scan backwards from the given address looking for assignments to the given register.
if a constant, return that value.
@@ -87,8 +87,8 @@ def find_definition(vw: VivWorkspace, va: int, reg: int) -> Tuple[int, Optional[
raises:
NotFoundError: when the definition cannot be found.
"""
q: Deque[int] = collections.deque()
seen: Set[int] = set()
q = collections.deque() # type: Deque[int]
seen = set([]) # type: Set[int]
q.extend(get_previous_instructions(vw, va))
while q:
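Whatever the annotation style, find_definition is a backwards breadth-first walk over predecessor instructions with a visited set. The traversal skeleton over a toy predecessor map (data invented for the example):

import collections
from typing import Deque, Dict, List, Set

preds: Dict[int, List[int]] = {4: [3], 3: [1, 2], 2: [], 1: []}

def reaches_backwards(start: int, target: int) -> bool:
    q: Deque[int] = collections.deque(preds[start])
    seen: Set[int] = set()
    while q:
        va = q.popleft()
        if va in seen:
            continue
        seen.add(va)
        if va == target:
            return True
        q.extend(preds[va])
    return False

assert reaches_backwards(4, 1)
assert not reaches_backwards(3, 4)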
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -22,7 +22,6 @@ import capa.features.extractors.viv.helpers
from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
from capa.features.common import MAX_BYTES_FEATURE_SIZE, THUNK_CHAIN_DEPTH_DELTA, Bytes, String, Feature, Characteristic
from capa.features.address import Address, AbsoluteVirtualAddress
from capa.features.extractors.elf import SymTab
from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
from capa.features.extractors.viv.indirect_calls import NotFoundError, resolve_indirect_call
@@ -110,26 +109,6 @@ def extract_insn_api_features(fh: FunctionHandle, bb, ih: InsnHandle) -> Iterato
if not target:
return
if f.vw.metadata["Format"] == "elf":
if "symtab" not in fh.ctx["cache"]:
# the symbol table gets stored as a function's attribute in order to avoid running
# this code every time the call is made, thus preventing the computational overhead.
try:
fh.ctx["cache"]["symtab"] = SymTab.from_viv(f.vw.parsedbin)
except Exception:
fh.ctx["cache"]["symtab"] = None
symtab = fh.ctx["cache"]["symtab"]
if symtab:
for symbol in symtab.get_symbols():
sym_name = symtab.get_name(symbol)
sym_value = symbol.value
sym_info = symbol.info
STT_FUNC = 0x2
if sym_value == target and sym_info & STT_FUNC != 0:
yield API(sym_name), ih.address
if viv_utils.flirt.is_library_function(f.vw, target):
name = viv_utils.get_function_name(f.vw, target)
yield API(name), ih.address
@@ -196,13 +175,8 @@ def derefs(vw, p):
while True:
if not vw.isValidPointer(p):
return
yield p
if vw.isProbablyString(p) or vw.isProbablyUnicode(p):
# don't deref strings that coincidentally are pointers
return
try:
next = vw.readMemoryPtr(p)
except Exception:
@@ -288,16 +262,16 @@ def extract_insn_bytes_features(fh: FunctionHandle, bb, ih: InsnHandle) -> Itera
else:
continue
for vv in derefs(f.vw, v):
for v in derefs(f.vw, v):
try:
buf = read_bytes(f.vw, vv)
buf = read_bytes(f.vw, v)
except envi.exc.SegmentationViolation:
continue
if capa.features.extractors.helpers.all_zeros(buf):
continue
if f.vw.isProbablyString(vv) or f.vw.isProbablyUnicode(vv):
if f.vw.isProbablyString(v):
# don't extract byte features for obvious strings
continue
@@ -351,6 +325,7 @@ def is_security_cookie(f, bb, insn) -> bool:
if oper.isReg() and oper.reg not in [
envi.archs.i386.regs.REG_ESP,
envi.archs.i386.regs.REG_EBP,
# TODO: do x64 support for real.
envi.archs.amd64.regs.REG_RBP,
envi.archs.amd64.regs.REG_RSP,
]:
@@ -410,7 +385,9 @@ def extract_insn_obfs_call_plus_5_characteristic_features(f, bb, ih: InsnHandle)
if insn.va + 5 == insn.opers[0].getOperValue(insn):
yield Characteristic("call $+5"), ih.address
if isinstance(insn.opers[0], (envi.archs.i386.disasm.i386ImmMemOper, envi.archs.amd64.disasm.Amd64RipRelOper)):
if isinstance(insn.opers[0], envi.archs.i386.disasm.i386ImmMemOper) or isinstance(
insn.opers[0], envi.archs.amd64.disasm.Amd64RipRelOper
):
if insn.va + 5 == insn.opers[0].getOperAddr(insn):
yield Characteristic("call $+5"), ih.address
@@ -419,6 +396,7 @@ def extract_insn_peb_access_characteristic_features(f, bb, ih: InsnHandle) -> It
"""
parse peb access from the given function. fs:[0x30] on x86, gs:[0x60] on x64
"""
# TODO handle where fs/gs are loaded into a register or onto the stack and used later
insn: envi.Opcode = ih.inner
if insn.mnem not in ["push", "mov"]:
@@ -642,6 +620,7 @@ def extract_op_offset_features(
if oper.reg == envi.archs.i386.regs.REG_EBP:
return
# TODO: do x64 support for real.
if oper.reg == envi.archs.amd64.regs.REG_RBP:
return
@@ -695,9 +674,9 @@ def extract_op_string_features(
else:
return
for vv in derefs(f.vw, v):
for v in derefs(f.vw, v):
try:
s = read_string(f.vw, vv).rstrip("\x00")
s = read_string(f.vw, v).rstrip("\x00")
except ValueError:
continue
else:
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
View File
@@ -1,7 +1,7 @@
"""
capa freeze file format: `| capa0000 | + zlib(utf-8(json(...)))`
Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -9,20 +9,13 @@ Unless required by applicable law or agreed to in writing, software distributed
is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License.
"""
import json
import zlib
import logging
from enum import Enum
from typing import List, Tuple, Union, Literal
from typing import Any, List, Tuple, Union
from pydantic import Field, BaseModel, ConfigDict
from pydantic import Field, BaseModel
# TODO(williballenthin): use typing.TypeAlias directly in Python 3.10+
# https://github.com/mandiant/capa/issues/1699
from typing_extensions import TypeAlias
import capa.loader
import capa.helpers
import capa.version
import capa.features.file
@@ -30,23 +23,16 @@ import capa.features.insn
import capa.features.common
import capa.features.address
import capa.features.basicblock
import capa.features.extractors.null as null
import capa.features.extractors.base_extractor
from capa.helpers import assert_never
from capa.features.freeze.features import Feature, feature_from_capa
from capa.features.extractors.base_extractor import (
SampleHashes,
FeatureExtractor,
StaticFeatureExtractor,
DynamicFeatureExtractor,
)
logger = logging.getLogger(__name__)
CURRENT_VERSION = 3
class HashableModel(BaseModel):
model_config = ConfigDict(frozen=True)
class Config:
frozen = True
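The two spellings above configure the same behavior: model_config = ConfigDict(frozen=True) is the pydantic v2 form, while the inner class Config is the v1 form. A sketch of what frozen buys, assuming pydantic v2 is installed (model name invented):

from pydantic import BaseModel, ConfigDict

class Point(BaseModel):
    model_config = ConfigDict(frozen=True)
    x: int
    y: int

p = Point(x=1, y=2)
assert hash(p) == hash(Point(x=1, y=2))  # frozen models are hashable

raised = False
try:
    p.x = 3
except Exception:
    raised = True
assert raised  # mutation is rejected on frozen models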
class AddressType(str, Enum):
@@ -55,15 +41,12 @@ class AddressType(str, Enum):
FILE = "file"
DN_TOKEN = "dn token"
DN_TOKEN_OFFSET = "dn token offset"
PROCESS = "process"
THREAD = "thread"
CALL = "call"
NO_ADDRESS = "no address"
class Address(HashableModel):
type: AddressType
value: Union[int, Tuple[int, ...], None] = None # None default value to support deserialization of NO_ADDRESS
value: Union[int, Tuple[int, int], None]
@classmethod
def from_capa(cls, a: capa.features.address.Address) -> "Address":
@@ -82,15 +65,6 @@ class Address(HashableModel):
elif isinstance(a, capa.features.address.DNTokenOffsetAddress):
return cls(type=AddressType.DN_TOKEN_OFFSET, value=(a.token, a.offset))
elif isinstance(a, capa.features.address.ProcessAddress):
return cls(type=AddressType.PROCESS, value=(a.ppid, a.pid))
elif isinstance(a, capa.features.address.ThreadAddress):
return cls(type=AddressType.THREAD, value=(a.process.ppid, a.process.pid, a.tid))
elif isinstance(a, capa.features.address.DynamicCallAddress):
return cls(type=AddressType.CALL, value=(a.thread.process.ppid, a.thread.process.pid, a.thread.tid, a.id))
elif a == capa.features.address.NO_ADDRESS or isinstance(a, capa.features.address._NoAddress):
return cls(type=AddressType.NO_ADDRESS, value=None)
@@ -127,33 +101,6 @@ class Address(HashableModel):
assert isinstance(offset, int)
return capa.features.address.DNTokenOffsetAddress(token, offset)
elif self.type is AddressType.PROCESS:
assert isinstance(self.value, tuple)
ppid, pid = self.value
assert isinstance(ppid, int)
assert isinstance(pid, int)
return capa.features.address.ProcessAddress(ppid=ppid, pid=pid)
elif self.type is AddressType.THREAD:
assert isinstance(self.value, tuple)
ppid, pid, tid = self.value
assert isinstance(ppid, int)
assert isinstance(pid, int)
assert isinstance(tid, int)
return capa.features.address.ThreadAddress(
process=capa.features.address.ProcessAddress(ppid=ppid, pid=pid), tid=tid
)
elif self.type is AddressType.CALL:
assert isinstance(self.value, tuple)
ppid, pid, tid, id_ = self.value
return capa.features.address.DynamicCallAddress(
thread=capa.features.address.ThreadAddress(
process=capa.features.address.ProcessAddress(ppid=ppid, pid=pid), tid=tid
),
id=id_,
)
elif self.type is AddressType.NO_ADDRESS:
return capa.features.address.NO_ADDRESS
@@ -184,48 +131,6 @@ class FileFeature(HashableModel):
feature: Feature
class ProcessFeature(HashableModel):
"""
args:
process: the address of the process to which this feature belongs.
address: the address at which this feature is found.
process != address because, e.g., the feature may be found *within* the scope (process).
"""
process: Address
address: Address
feature: Feature
class ThreadFeature(HashableModel):
"""
args:
thread: the address of the thread to which this feature belongs.
address: the address at which this feature is found.
thread != address because, e.g., the feature may be found *within* the scope (thread).
"""
thread: Address
address: Address
feature: Feature
class CallFeature(HashableModel):
"""
args:
call: the address of the call to which this feature belongs.
address: the address at which this feature is found.
call != address for consistency with Process and Thread.
"""
call: Address
address: Address
feature: Feature
class FunctionFeature(HashableModel):
"""
args:
@@ -254,7 +159,9 @@ class BasicBlockFeature(HashableModel):
basic_block: Address = Field(alias="basic block")
address: Address
feature: Feature
model_config = ConfigDict(populate_by_name=True)
class Config:
allow_population_by_field_name = True
class InstructionFeature(HashableModel):
@@ -263,7 +170,8 @@ class InstructionFeature(HashableModel):
instruction: the address of the instruction to which this feature belongs.
address: the address at which this feature is found.
instruction != address, for consistency with Function and BasicBlock.
instruction != address because, e.g., the feature may be found *within* the scope (basic block),
versus right at its starting address.
"""
instruction: Address
@@ -286,65 +194,43 @@ class FunctionFeatures(BaseModel):
address: Address
features: Tuple[FunctionFeature, ...]
basic_blocks: Tuple[BasicBlockFeatures, ...] = Field(alias="basic blocks")
model_config = ConfigDict(populate_by_name=True)
class Config:
allow_population_by_field_name = True
class CallFeatures(BaseModel):
address: Address
name: str
features: Tuple[CallFeature, ...]
class ThreadFeatures(BaseModel):
address: Address
features: Tuple[ThreadFeature, ...]
calls: Tuple[CallFeatures, ...]
class ProcessFeatures(BaseModel):
address: Address
name: str
features: Tuple[ProcessFeature, ...]
threads: Tuple[ThreadFeatures, ...]
class StaticFeatures(BaseModel):
class Features(BaseModel):
global_: Tuple[GlobalFeature, ...] = Field(alias="global")
file: Tuple[FileFeature, ...]
functions: Tuple[FunctionFeatures, ...]
model_config = ConfigDict(populate_by_name=True)
class DynamicFeatures(BaseModel):
global_: Tuple[GlobalFeature, ...] = Field(alias="global")
file: Tuple[FileFeature, ...]
processes: Tuple[ProcessFeatures, ...]
model_config = ConfigDict(populate_by_name=True)
Features: TypeAlias = Union[StaticFeatures, DynamicFeatures]
class Config:
allow_population_by_field_name = True
class Extractor(BaseModel):
name: str
version: str = capa.version.__version__
model_config = ConfigDict(populate_by_name=True)
class Config:
allow_population_by_field_name = True
class Freeze(BaseModel):
version: int = CURRENT_VERSION
version: int = 2
base_address: Address = Field(alias="base address")
sample_hashes: SampleHashes
flavor: Literal["static", "dynamic"]
extractor: Extractor
features: Features
model_config = ConfigDict(populate_by_name=True)
class Config:
allow_population_by_field_name = True
def dumps_static(extractor: StaticFeatureExtractor) -> str:
def dumps(extractor: capa.features.extractors.base_extractor.FeatureExtractor) -> str:
"""
serialize the given extractor to a string
"""
global_features: List[GlobalFeature] = []
for feature, _ in extractor.extract_global_features():
global_features.append(
@@ -382,8 +268,7 @@ def dumps_static(extractor: StaticFeatureExtractor) -> str:
basic_block=bbaddr,
address=Address.from_capa(addr),
feature=feature_from_capa(feature),
) # type: ignore
# Mypy is unable to recognise `basic_block` as an argument due to alias
)
for feature, addr in extractor.extract_basic_block_features(f, bb)
]
@@ -402,168 +287,52 @@ def dumps_static(extractor: StaticFeatureExtractor) -> str:
instructions.append(
InstructionFeatures(
address=iaddr,
features=tuple(ifeatures),
features=ifeatures,
)
)
basic_blocks.append(
BasicBlockFeatures(
address=bbaddr,
features=tuple(bbfeatures),
instructions=tuple(instructions),
features=bbfeatures,
instructions=instructions,
)
)
function_features.append(
FunctionFeatures(
address=faddr,
features=tuple(ffeatures),
features=ffeatures,
basic_blocks=basic_blocks,
) # type: ignore
# Mypy is unable to recognise `basic_blocks` as an argument due to alias
)
)
features = StaticFeatures(
features = Features(
global_=global_features,
file=tuple(file_features),
functions=tuple(function_features),
) # type: ignore
# Mypy is unable to recognise `global_` as an argument due to alias
file=file_features,
functions=function_features,
)
freeze = Freeze(
version=CURRENT_VERSION,
version=2,
base_address=Address.from_capa(extractor.get_base_address()),
sample_hashes=extractor.get_sample_hashes(),
flavor="static",
extractor=Extractor(name=extractor.__class__.__name__),
features=features,
) # type: ignore
# Mypy is unable to recognise `base_address` as an argument due to alias
)
return freeze.model_dump_json()
return freeze.json()
def dumps_dynamic(extractor: DynamicFeatureExtractor) -> str:
"""
serialize the given extractor to a string
"""
global_features: List[GlobalFeature] = []
for feature, _ in extractor.extract_global_features():
global_features.append(
GlobalFeature(
feature=feature_from_capa(feature),
)
)
def loads(s: str) -> capa.features.extractors.base_extractor.FeatureExtractor:
"""deserialize a set of features (as a NullFeatureExtractor) from a string."""
import capa.features.extractors.null as null
file_features: List[FileFeature] = []
for feature, address in extractor.extract_file_features():
file_features.append(
FileFeature(
feature=feature_from_capa(feature),
address=Address.from_capa(address),
)
)
freeze = Freeze.parse_raw(s)
if freeze.version != 2:
raise ValueError("unsupported freeze format version: %d", freeze.version)
process_features: List[ProcessFeatures] = []
for p in extractor.get_processes():
paddr = Address.from_capa(p.address)
pname = extractor.get_process_name(p)
pfeatures = [
ProcessFeature(
process=paddr,
address=Address.from_capa(addr),
feature=feature_from_capa(feature),
)
for feature, addr in extractor.extract_process_features(p)
]
threads = []
for t in extractor.get_threads(p):
taddr = Address.from_capa(t.address)
tfeatures = [
ThreadFeature(
basic_block=taddr,
address=Address.from_capa(addr),
feature=feature_from_capa(feature),
) # type: ignore
# Mypy is unable to recognise `basic_block` as an argument due to alias
for feature, addr in extractor.extract_thread_features(p, t)
]
calls = []
for call in extractor.get_calls(p, t):
caddr = Address.from_capa(call.address)
cname = extractor.get_call_name(p, t, call)
cfeatures = [
CallFeature(
call=caddr,
address=Address.from_capa(addr),
feature=feature_from_capa(feature),
)
for feature, addr in extractor.extract_call_features(p, t, call)
]
calls.append(
CallFeatures(
address=caddr,
name=cname,
features=tuple(cfeatures),
)
)
threads.append(
ThreadFeatures(
address=taddr,
features=tuple(tfeatures),
calls=tuple(calls),
)
)
process_features.append(
ProcessFeatures(
address=paddr,
name=pname,
features=tuple(pfeatures),
threads=tuple(threads),
)
)
features = DynamicFeatures(
global_=global_features,
file=tuple(file_features),
processes=tuple(process_features),
) # type: ignore
# Mypy is unable to recognise `global_` as an argument due to alias
# workaround around mypy issue: https://github.com/python/mypy/issues/1424
get_base_addr = getattr(extractor, "get_base_addr", None)
base_addr = get_base_addr() if get_base_addr else capa.features.address.NO_ADDRESS
freeze = Freeze(
version=CURRENT_VERSION,
base_address=Address.from_capa(base_addr),
sample_hashes=extractor.get_sample_hashes(),
flavor="dynamic",
extractor=Extractor(name=extractor.__class__.__name__),
features=features,
) # type: ignore
# Mypy is unable to recognise `base_address` as an argument due to alias
return freeze.model_dump_json()
def loads_static(s: str) -> StaticFeatureExtractor:
"""deserialize a set of features (as a NullStaticFeatureExtractor) from a string."""
freeze = Freeze.model_validate_json(s)
if freeze.version != CURRENT_VERSION:
raise ValueError(f"unsupported freeze format version: {freeze.version}")
assert freeze.flavor == "static"
assert isinstance(freeze.features, StaticFeatures)
return null.NullStaticFeatureExtractor(
return null.NullFeatureExtractor(
base_address=freeze.base_address.to_capa(),
sample_hashes=freeze.sample_hashes,
global_features=[f.feature.to_capa() for f in freeze.features.global_],
file_features=[(f.address.to_capa(), f.feature.to_capa()) for f in freeze.features.file],
functions={
@@ -587,59 +356,10 @@ def loads_static(s: str) -> StaticFeatureExtractor:
)
def loads_dynamic(s: str) -> DynamicFeatureExtractor:
"""deserialize a set of features (as a NullDynamicFeatureExtractor) from a string."""
freeze = Freeze.model_validate_json(s)
if freeze.version != CURRENT_VERSION:
raise ValueError(f"unsupported freeze format version: {freeze.version}")
assert freeze.flavor == "dynamic"
assert isinstance(freeze.features, DynamicFeatures)
return null.NullDynamicFeatureExtractor(
base_address=freeze.base_address.to_capa(),
sample_hashes=freeze.sample_hashes,
global_features=[f.feature.to_capa() for f in freeze.features.global_],
file_features=[(f.address.to_capa(), f.feature.to_capa()) for f in freeze.features.file],
processes={
p.address.to_capa(): null.ProcessFeatures(
name=p.name,
features=[(fe.address.to_capa(), fe.feature.to_capa()) for fe in p.features],
threads={
t.address.to_capa(): null.ThreadFeatures(
features=[(fe.address.to_capa(), fe.feature.to_capa()) for fe in t.features],
calls={
c.address.to_capa(): null.CallFeatures(
name=c.name,
features=[(fe.address.to_capa(), fe.feature.to_capa()) for fe in c.features],
)
for c in t.calls
},
)
for t in p.threads
},
)
for p in freeze.features.processes
},
)
MAGIC = "capa0000".encode("ascii")
def dumps(extractor: FeatureExtractor) -> str:
"""serialize the given extractor to a string."""
if isinstance(extractor, StaticFeatureExtractor):
doc = dumps_static(extractor)
elif isinstance(extractor, DynamicFeatureExtractor):
doc = dumps_dynamic(extractor)
else:
raise ValueError("Invalid feature extractor")
return doc
def dump(extractor: FeatureExtractor) -> bytes:
def dump(extractor: capa.features.extractors.base_extractor.FeatureExtractor) -> bytes:
"""serialize the given extractor to a byte array."""
return MAGIC + zlib.compress(dumps(extractor).encode("utf-8"))
@@ -648,34 +368,16 @@ def is_freeze(buf: bytes) -> bool:
return buf[: len(MAGIC)] == MAGIC
def loads(s: str):
doc = json.loads(s)
if doc["version"] != CURRENT_VERSION:
raise ValueError(f"unsupported freeze format version: {doc['version']}")
if doc["flavor"] == "static":
return loads_static(s)
elif doc["flavor"] == "dynamic":
return loads_dynamic(s)
else:
raise ValueError(f"unsupported freeze format flavor: {doc['flavor']}")
def load(buf: bytes):
def load(buf: bytes) -> capa.features.extractors.base_extractor.FeatureExtractor:
"""deserialize a set of features (as a NullFeatureExtractor) from a byte array."""
if not is_freeze(buf):
raise ValueError("missing magic header")
s = zlib.decompress(buf[len(MAGIC) :]).decode("utf-8")
return loads(s)
return loads(zlib.decompress(buf[len(MAGIC) :]).decode("utf-8"))
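Stripped of the feature models, the container format named at the top of this file is simple: the ASCII magic capa0000 followed by zlib-compressed UTF-8 JSON. A round-trip sketch of just that envelope (function names invented):

import json
import zlib

MAGIC = "capa0000".encode("ascii")

def dump_envelope(doc: dict) -> bytes:
    return MAGIC + zlib.compress(json.dumps(doc).encode("utf-8"))

def load_envelope(buf: bytes) -> dict:
    if buf[: len(MAGIC)] != MAGIC:
        raise ValueError("missing magic header")
    return json.loads(zlib.decompress(buf[len(MAGIC) :]).decode("utf-8"))

assert load_envelope(dump_envelope({"version": 2})) == {"version": 2}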
def main(argv=None):
import sys
import argparse
from pathlib import Path
import capa.main
@@ -683,20 +385,17 @@ def main(argv=None):
argv = sys.argv[1:]
parser = argparse.ArgumentParser(description="save capa features to a file")
capa.main.install_common_args(parser, {"input_file", "format", "backend", "os", "signatures"})
capa.main.install_common_args(parser, {"sample", "format", "backend", "signatures"})
parser.add_argument("output", type=str, help="Path to output file")
args = parser.parse_args(args=argv)
capa.main.handle_common_args(args)
try:
capa.main.handle_common_args(args)
capa.main.ensure_input_exists_from_cli(args)
input_format = capa.main.get_input_format_from_cli(args)
backend = capa.main.get_backend_from_cli(args, input_format)
extractor = capa.main.get_extractor_from_cli(args, input_format, backend)
except capa.main.ShouldExitError as e:
return e.status_code
sigpaths = capa.main.get_signatures(args.signatures)
Path(args.output).write_bytes(dump(extractor))
extractor = capa.main.get_extractor(args.sample, args.format, args.backend, sigpaths, False)
with open(args.output, "wb") as f:
f.write(dump(extractor))
return 0
View File
@@ -1,14 +1,7 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import binascii
from typing import Union, Optional
from pydantic import Field, BaseModel, ConfigDict
from pydantic import Field, BaseModel
import capa.features.file
import capa.features.insn
@@ -17,7 +10,9 @@ import capa.features.basicblock
class FeatureModel(BaseModel):
model_config = ConfigDict(frozen=True, populate_by_name=True)
class Config:
frozen = True
allow_population_by_field_name = True
def to_capa(self) -> capa.features.common.Feature:
if isinstance(self, OSFeature):
@@ -106,79 +101,59 @@ class FeatureModel(BaseModel):
def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
if isinstance(f, capa.features.common.OS):
assert isinstance(f.value, str)
return OSFeature(os=f.value, description=f.description)
elif isinstance(f, capa.features.common.Arch):
assert isinstance(f.value, str)
return ArchFeature(arch=f.value, description=f.description)
elif isinstance(f, capa.features.common.Format):
assert isinstance(f.value, str)
return FormatFeature(format=f.value, description=f.description)
elif isinstance(f, capa.features.common.MatchedRule):
assert isinstance(f.value, str)
return MatchFeature(match=f.value, description=f.description)
elif isinstance(f, capa.features.common.Characteristic):
assert isinstance(f.value, str)
return CharacteristicFeature(characteristic=f.value, description=f.description)
elif isinstance(f, capa.features.file.Export):
assert isinstance(f.value, str)
return ExportFeature(export=f.value, description=f.description)
elif isinstance(f, capa.features.file.Import):
assert isinstance(f.value, str)
return ImportFeature(import_=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `import_` as an argument due to alias
return ImportFeature(import_=f.value, description=f.description)
elif isinstance(f, capa.features.file.Section):
assert isinstance(f.value, str)
return SectionFeature(section=f.value, description=f.description)
elif isinstance(f, capa.features.file.FunctionName):
assert isinstance(f.value, str)
return FunctionNameFeature(function_name=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `function_name` as an argument due to alias
return FunctionNameFeature(function_name=f.value, description=f.description)
# must come before check for String due to inheritance
elif isinstance(f, capa.features.common.Substring):
assert isinstance(f.value, str)
return SubstringFeature(substring=f.value, description=f.description)
# must come before check for String due to inheritance
elif isinstance(f, capa.features.common.Regex):
assert isinstance(f.value, str)
return RegexFeature(regex=f.value, description=f.description)
elif isinstance(f, capa.features.common.String):
assert isinstance(f.value, str)
return StringFeature(string=f.value, description=f.description)
elif isinstance(f, capa.features.common.Class):
assert isinstance(f.value, str)
return ClassFeature(class_=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `class_` as an argument due to alias
return ClassFeature(class_=f.value, description=f.description)
elif isinstance(f, capa.features.common.Namespace):
assert isinstance(f.value, str)
return NamespaceFeature(namespace=f.value, description=f.description)
elif isinstance(f, capa.features.basicblock.BasicBlock):
return BasicBlockFeature(description=f.description)
elif isinstance(f, capa.features.insn.API):
assert isinstance(f.value, str)
return APIFeature(api=f.value, description=f.description)
elif isinstance(f, capa.features.insn.Property):
assert isinstance(f.value, str)
return PropertyFeature(property=f.value, access=f.access, description=f.description)
elif isinstance(f, capa.features.insn.Number):
assert isinstance(f.value, (int, float))
return NumberFeature(number=f.value, description=f.description)
elif isinstance(f, capa.features.common.Bytes):
@@ -187,22 +162,16 @@ def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
return BytesFeature(bytes=binascii.hexlify(buf).decode("ascii"), description=f.description)
elif isinstance(f, capa.features.insn.Offset):
assert isinstance(f.value, int)
return OffsetFeature(offset=f.value, description=f.description)
elif isinstance(f, capa.features.insn.Mnemonic):
assert isinstance(f.value, str)
return MnemonicFeature(mnemonic=f.value, description=f.description)
elif isinstance(f, capa.features.insn.OperandNumber):
assert isinstance(f.value, int)
return OperandNumberFeature(index=f.index, operand_number=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `operand_number` as an argument due to alias
return OperandNumberFeature(index=f.index, operand_number=f.value, description=f.description)
elif isinstance(f, capa.features.insn.OperandOffset):
assert isinstance(f.value, int)
return OperandOffsetFeature(index=f.index, operand_offset=f.value, description=f.description) # type: ignore
# Mypy is unable to recognise `operand_offset` as an argument due to alias
return OperandOffsetFeature(index=f.index, operand_offset=f.value, description=f.description)
else:
raise NotImplementedError(f"feature_from_capa({type(f)}) not implemented")
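The "must come before check for String due to inheritance" comments above matter because an isinstance chain takes the first match, so subclasses have to be tested before their base class. The same pitfall shown with Python's built-in bool/int pair:

def describe(v) -> str:
    if isinstance(v, bool):  # bool subclasses int: test it first
        return "bool"
    elif isinstance(v, int):
        return "int"
    raise NotImplementedError(f"describe({type(v)}) not implemented")

assert describe(True) == "bool"  # would report "int" if the order flipped
assert describe(3) == "int"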
@@ -211,141 +180,141 @@ def feature_from_capa(f: capa.features.common.Feature) -> "Feature":
class OSFeature(FeatureModel):
type: str = "os"
os: str
description: Optional[str] = None
description: Optional[str]
class ArchFeature(FeatureModel):
type: str = "arch"
arch: str
description: Optional[str] = None
description: Optional[str]
class FormatFeature(FeatureModel):
type: str = "format"
format: str
description: Optional[str] = None
description: Optional[str]
class MatchFeature(FeatureModel):
type: str = "match"
match: str
description: Optional[str] = None
description: Optional[str]
class CharacteristicFeature(FeatureModel):
type: str = "characteristic"
characteristic: str
description: Optional[str] = None
description: Optional[str]
class ExportFeature(FeatureModel):
type: str = "export"
export: str
description: Optional[str] = None
description: Optional[str]
class ImportFeature(FeatureModel):
type: str = "import"
import_: str = Field(alias="import")
description: Optional[str] = None
description: Optional[str]
class SectionFeature(FeatureModel):
type: str = "section"
section: str
description: Optional[str] = None
description: Optional[str]
class FunctionNameFeature(FeatureModel):
type: str = "function name"
function_name: str = Field(alias="function name")
description: Optional[str] = None
description: Optional[str]
class SubstringFeature(FeatureModel):
type: str = "substring"
substring: str
description: Optional[str] = None
description: Optional[str]
class RegexFeature(FeatureModel):
type: str = "regex"
regex: str
description: Optional[str] = None
description: Optional[str]
class StringFeature(FeatureModel):
type: str = "string"
string: str
description: Optional[str] = None
description: Optional[str]
class ClassFeature(FeatureModel):
type: str = "class"
class_: str = Field(alias="class")
description: Optional[str] = None
description: Optional[str]
class NamespaceFeature(FeatureModel):
type: str = "namespace"
namespace: str
description: Optional[str] = None
description: Optional[str]
class BasicBlockFeature(FeatureModel):
type: str = "basic block"
description: Optional[str] = None
description: Optional[str]
class APIFeature(FeatureModel):
type: str = "api"
api: str
description: Optional[str] = None
description: Optional[str]
class PropertyFeature(FeatureModel):
type: str = "property"
access: Optional[str] = None
access: Optional[str]
property: str
description: Optional[str] = None
description: Optional[str]
class NumberFeature(FeatureModel):
type: str = "number"
number: Union[int, float]
description: Optional[str] = None
description: Optional[str]
class BytesFeature(FeatureModel):
type: str = "bytes"
bytes: str
description: Optional[str] = None
description: Optional[str]
class OffsetFeature(FeatureModel):
type: str = "offset"
offset: int
description: Optional[str] = None
description: Optional[str]
class MnemonicFeature(FeatureModel):
type: str = "mnemonic"
mnemonic: str
description: Optional[str] = None
description: Optional[str]
class OperandNumberFeature(FeatureModel):
type: str = "operand number"
index: int
operand_number: int = Field(alias="operand number")
description: Optional[str] = None
description: Optional[str]
class OperandOffsetFeature(FeatureModel):
type: str = "operand offset"
index: int
operand_offset: int = Field(alias="operand offset")
description: Optional[str] = None
description: Optional[str]
Feature = Union[
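The repeated description: Optional[str] = None versus description: Optional[str] pairs above track a pydantic version difference: v1 treated Optional[...] fields as implicitly defaulting to None, while v2 requires the default to be spelled out. A minimal check under pydantic v2 (model name invented):

from typing import Optional
from pydantic import BaseModel

class WithDefault(BaseModel):
    description: Optional[str] = None  # v2 needs the explicit default

assert WithDefault().description is None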
View File
@@ -1,4 +1,4 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Copyright (C) 2020 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
@@ -15,9 +15,9 @@ from capa.features.common import VALID_FEATURE_ACCESS, Feature
def hex(n: int) -> str:
"""render the given number using upper case hex, like: 0x123ABC"""
if n < 0:
return f"-0x{(-n):X}"
return "-0x%X" % (-n)
else:
return f"0x{(n):X}"
return "0x%X" % n
class API(Feature):
@@ -31,7 +31,7 @@ class _AccessFeature(Feature, abc.ABC):
super().__init__(value, description=description)
if access is not None:
if access not in VALID_FEATURE_ACCESS:
raise ValueError(f"{self.name} access type {access} not valid")
raise ValueError("%s access type %s not valid" % (self.name, access))
self.access = access
def __hash__(self):
@@ -53,15 +53,6 @@ class Property(_AccessFeature):
class Number(Feature):
def __init__(self, value: Union[int, float], description=None):
"""
args:
value (int or float): positive or negative integer, or floating point number.
the range of the value is:
- if positive, the range of u64
- if negative, the range of i64
- if floating, the range and precision of double
"""
super().__init__(value, description=description)
def get_value_str(self):
@@ -70,7 +61,7 @@ class Number(Feature):
elif isinstance(self.value, float):
return str(self.value)
else:
raise ValueError(f"invalid value type {type(self.value)}")
raise ValueError("invalid value type")
# max recognized structure size (and therefore, offset size)
@@ -79,14 +70,6 @@ MAX_STRUCTURE_SIZE = 0x10000
class Offset(Feature):
def __init__(self, value: int, description=None):
"""
args:
value (int): the offset, which can be positive or negative.
the range of the value is:
- if positive, the range of u64
- if negative, the range of i64
"""
super().__init__(value, description=description)
def get_value_str(self):
@@ -109,7 +92,7 @@ MAX_OPERAND_INDEX = MAX_OPERAND_COUNT - 1
class _Operand(Feature, abc.ABC):
# superclass: don't use directly
# subclasses should set self.name and provide the value string formatter
def __init__(self, index: int, value: Union[int, float], description=None):
def __init__(self, index: int, value: int, description=None):
super().__init__(value, description=description)
self.index = index
@@ -122,45 +105,24 @@ class _Operand(Feature, abc.ABC):
class OperandNumber(_Operand):
# cached names so we don't do extra string formatting every ctor
NAMES = [f"operand[{i}].number" for i in range(MAX_OPERAND_COUNT)]
NAMES = ["operand[%d].number" % i for i in range(MAX_OPERAND_COUNT)]
# operand[i].number: 0x12
def __init__(self, index: int, value: Union[int, float], description=None):
"""
args:
value (int or float): positive or negative integer, or floating point number.
the range of the value is:
- if positive, the range of u64
- if negative, the range of i64
- if floating, the range and precision of double
"""
super().__init__(index, value, description=description)
self.name = self.NAMES[index]
def get_value_str(self) -> str:
if isinstance(self.value, int):
return capa.helpers.hex(self.value)
elif isinstance(self.value, float):
return str(self.value)
else:
raise ValueError("invalid value type")
class OperandOffset(_Operand):
# cached names so we don't do extra string formatting every ctor
NAMES = [f"operand[{i}].offset" for i in range(MAX_OPERAND_COUNT)]
# operand[i].offset: 0x12
def __init__(self, index: int, value: int, description=None):
"""
args:
value (int): the offset, which can be positive or negative.
the range of the value is:
- if positive, the range of u64
- if negative, the range of i64
"""
super().__init__(index, value, description=description)
self.name = self.NAMES[index]
def get_value_str(self) -> str:
assert isinstance(self.value, int)
return hex(self.value)
class OperandOffset(_Operand):
# cached names so we don't do extra string formatting every ctor
NAMES = ["operand[%d].offset" % i for i in range(MAX_OPERAND_COUNT)]
# operand[i].offset: 0x12
def __init__(self, index: int, value: int, description=None):
super().__init__(index, value, description=description)
self.name = self.NAMES[index]
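For reference, the f-string side of the `hex()` hunk above renders signed values symmetrically around zero; a runnable sketch with illustrative values:

```python
def hex(n: int) -> str:
    """render the given number using upper case hex, like: 0x123ABC"""
    if n < 0:
        return f"-0x{(-n):X}"
    else:
        return f"0x{(n):X}"


assert hex(0x123ABC) == "0x123ABC"  # positive values render unsigned
assert hex(-0x10) == "-0x10"        # negative values keep an explicit sign
```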

View File

@@ -1,184 +0,0 @@
<div align="center">
<img src="/doc/img/ghidra_backend_logo.png" width=300 height=175>
</div>
The Ghidra feature extractor is an application of the FLARE team's open-source project, Ghidrathon, to integrate capa with Ghidra using Python 3. capa is a framework that uses a well-defined collection of rules to identify capabilities in a program. You can run capa against a PE file, ELF file, or shellcode and it tells you what it thinks the program can do. For example, it might suggest that the program is a backdoor, can install services, or relies on HTTP to communicate. The Ghidra feature extractor can be used to run capa analysis on your Ghidra databases without needing access to the original binary file. As a part of this integration, we've developed two scripts, [capa_explorer.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_explorer.py) and [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py), to display capa results directly in Ghidra.
### Using `capa_explorer.py`
`capa_explorer.py` integrates capa results directly into Ghidra's UI. In the Symbol Tree Window, under the Namespaces section, you can find the matched rules as well as the corresponding functions that contain the matched features:
![image](https://github.com/mandiant/capa/assets/66766340/eeae33f4-99d4-42dc-a5e8-4c1b8c661492)
Clicking a labeled function in the Symbol Tree Window navigates Ghidra's Disassembly Listing and Decompilation windows to that function's location. A comment listing each matched capa rule is inserted at the beginning of the function, and a comment for each matched capa feature is added at the matched address within the function. These comments can be viewed using Ghidra's Disassembly Listing and Decompilation windows:
![image](https://github.com/mandiant/capa/assets/66766340/bb2b4170-7fd4-45fc-8c7b-ff8f2e2f101b)
The script also adds bookmarks for capa matches that are categorized under MITRE ATT&CK and Malware Behavior Catalog. These may be found and navigated using Ghidra's Bookmarks Window:
![image](https://github.com/mandiant/capa/assets/66766340/7f9a66a9-7be7-4223-91c6-4b8fc4651336)
### Using `capa_ghidra.py`
`capa_ghidra.py` displays capa results in Ghidra's Console window and can be executed using Ghidra's Headless Analyzer. The following is an example of running `capa_ghidra.py` using the Ghidra Script Manager:
Selecting capa rules:
<img src="/doc/img/ghidra_script_mngr_rules.png">
Choosing output format:
<img src="/doc/img/ghidra_script_mngr_verbosity.png">
Viewing results in Ghidra Console Window:
<img src="/doc/img/ghidra_script_mngr_output.png">
## Installation
### Requirements
| Tool | Version | Source |
|------------|---------|--------|
| Ghidrathon | `>= 3.0.0` | https://github.com/mandiant/Ghidrathon/releases |
| Ghidra | `>= 10.3.2` | https://github.com/NationalSecurityAgency/ghidra/releases |
| Python | `>= 3.8.0` | https://www.python.org/downloads |
You can run capa in Ghidra by completing the following steps using the Python 3 interpreter that you have configured for your Ghidrathon installation:
1. Install capa and its dependencies from PyPI using the following command:
```bash
$ pip install flare-capa
```
2. Download and extract the [official capa rules](https://github.com/mandiant/capa-rules/releases) that match the capa version you have installed. Use either of the following commands to view your installed capa version:
```bash
$ pip show flare-capa
OR
$ capa --version
```
3. Copy [capa_explorer.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_explorer.py) and [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) to your `$USER_HOME/ghidra_scripts` directory or manually add the absolute path of each script to the Ghidra Script Manager.
## Usage
After completing the installation steps, you can execute [capa_explorer.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_explorer.py) and [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) using the Ghidra Script Manager. You can also execute [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) using Ghidra's Headless Analyzer.
### Ghidra Script Manager
Use the following steps to execute [capa_explorer.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_explorer.py) and [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) using Ghidra's Script Manager:
1. Open the Ghidra Script Manager by navigating to `Window > Script Manager`
2. Locate [capa_explorer.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_explorer.py) and [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) by selecting the `Python 3 > capa` category or using the Ghidra Script Manager search functionality
3. Double-click [capa_explorer.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_explorer.py) or [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) to execute the script
If you don't see [capa_explorer.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_explorer.py) and [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py), make sure you have copied these scripts to your `$USER_HOME/ghidra_scripts` directory or manually added the absolute path of each script to the Ghidra Script Manager.
Both scripts ask you to provide the path of your capa rules directory. [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) also asks you to select one of the `default`, `verbose`, or `vverbose` output formats used when writing output to the Ghidra Console Window.
### Ghidra Headless Analyzer
To execute [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) using the Ghidra Headless Analyzer, use the Ghidra `analyzeHeadless` script located in your `<ghidra_install_path>/support` directory and provide it the following arguments:
1. `<ghidra_project_path>`: path to the Ghidra project
2. `<ghidra_project_name>`: name of the Ghidra project
3. `-process <sample_name>`: name of the sample to analyze
4. `-ScriptPath <capa_ghidra_path>`: OPTIONAL argument specifying the absolute path of [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py)
5. `-PostScript capa_ghidra.py`: execute [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) as a post-analysis script
6. `"<capa_args>"`: single, quoted string containing the capa arguments, which must specify the capa rules directory and output format, e.g. `"<capa_rules_path> --verbose"`. [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) supports the `default`, `verbose`, `vverbose`, and `json` formats when executed using the Ghidra Headless Analyzer and writes its output to the console window used to execute the Ghidra `analyzeHeadless` script.
The following is an example of combining these arguments into a single `analyzeHeadless` script command:
```
<ghidra_install_path>/support/analyzeHeadless <ghidra_project_path> <ghidra_project_name> -process <sample_name> -PostScript capa_ghidra.py "<capa_rules_path> --verbose"
```
You may also want to run capa against a sample that you have not yet imported into your Ghidra project. The following is an example of importing a sample and running [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) using a single `analyzeHeadless` script command:
```
<ghidra_install_path>/support/analyzeHeadless <ghidra_project_path> <ghidra_project_name> -Import <sample_path> -PostScript capa_ghidra.py "<capa_rules_path> --verbose"
```
You can also pass [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) the single argument `"help"` to view supported arguments when running the script using the Ghidra Headless Analyzer:
```
<ghidra_install_path>/support/analyzeHeadless <ghidra_project_path> <ghidra_project_name> -process <sample_name> -PostScript capa_ghidra.py "help"
```
The following is an example of running [capa_ghidra.py](https://raw.githubusercontent.com/mandiant/capa/master/capa/ghidra/capa_ghidra.py) against a shellcode sample using the Ghidra `analyzeHeadless` script:
```
$ analyzeHeadless /home/wumbo/Desktop/ghidra_projects/ capa_test -process 499c2a85f6e8142c3f48d4251c9c7cd6.raw32 -processor x86:LE:32:default -PostScript capa_ghidra.py "/home/wumbo/capa/rules -vv"
[...]
INFO REPORT: Analysis succeeded for file: /499c2a85f6e8142c3f48d4251c9c7cd6.raw32 (HeadlessAnalyzer)
INFO SCRIPT: /home/wumbo/ghidra_scripts/capa_ghidra.py (HeadlessAnalyzer)
md5 499c2a85f6e8142c3f48d4251c9c7cd6
sha1
sha256 e8e02191c1b38c808d27a899ac164b3675eb5cadd3a8907b0ffa863714000e72
path /home/wumbo/capa/tests/data/499c2a85f6e8142c3f48d4251c9c7cd6.raw32
timestamp 2023-08-29 17:57:00.946588
capa version 6.1.0
os unknown os
format Raw Binary
arch x86
extractor ghidra
base address global
rules /home/wumbo/capa/rules
function count 42
library function count 0
total feature count 1970
contain loop (24 matches, only showing first match of library rule)
author moritz.raabe@mandiant.com
scope function
function @ 0x0
or:
characteristic: loop @ 0x0
characteristic: tight loop @ 0x278
contain obfuscated stackstrings
namespace anti-analysis/obfuscation/string/stackstring
author moritz.raabe@mandiant.com
scope basic block
att&ck Defense Evasion::Obfuscated Files or Information::Indicator Removal from Tools [T1027.005]
mbc Anti-Static Analysis::Executable Code Obfuscation::Argument Obfuscation [B0032.020], Anti-Static Analysis::Executable Code Obfuscation::Stack Strings [B0032.017]
basic block @ 0x0 in function 0x0
characteristic: stack string @ 0x0
encode data using XOR
namespace data-manipulation/encoding/xor
author moritz.raabe@mandiant.com
scope basic block
att&ck Defense Evasion::Obfuscated Files or Information [T1027]
mbc Defense Evasion::Obfuscated Files or Information::Encoding-Standard Algorithm [E1027.m02], Data::Encode Data::XOR [C0026.002]
basic block @ 0x8AF in function 0x8A1
and:
characteristic: tight loop @ 0x8AF
characteristic: nzxor @ 0x8C0
not: = filter for potential false positives
or:
or: = unsigned bitwise negation operation (~i)
number: 0xFFFFFFFF = bitwise negation for unsigned 32 bits
number: 0xFFFFFFFFFFFFFFFF = bitwise negation for unsigned 64 bits
or: = signed bitwise negation operation (~i)
number: 0xFFFFFFF = bitwise negation for signed 32 bits
number: 0xFFFFFFFFFFFFFFF = bitwise negation for signed 64 bits
or: = Magic constants used in the implementation of strings functions.
number: 0x7EFEFEFF = optimized string constant for 32 bits
number: 0x81010101 = -0x81010101 = 0x7EFEFEFF
number: 0x81010100 = 0x81010100 = ~0x7EFEFEFF
number: 0x7EFEFEFEFEFEFEFF = optimized string constant for 64 bits
number: 0x8101010101010101 = -0x8101010101010101 = 0x7EFEFEFEFEFEFEFF
number: 0x8101010101010100 = 0x8101010101010100 = ~0x7EFEFEFEFEFEFEFF
get OS information via KUSER_SHARED_DATA
namespace host-interaction/os/version
author @mr-tz
scope function
att&ck Discovery::System Information Discovery [T1082]
references https://www.geoffchappell.com/studies/windows/km/ntoskrnl/inc/api/ntexapi_x/kuser_shared_data/index.htm
function @ 0x1CA6
or:
number: 0x7FFE026C = NtMajorVersion @ 0x1D18
Script /home/wumbo/ghidra_scripts/capa_ghidra.py called exit with code 0
[...]
```

View File

@@ -1,378 +0,0 @@
# Integrate capa results with Ghidra UI
# @author Colton Gabertan (gabertan.colton@gmail.com)
# @category Python 3.capa
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import sys
import json
import logging
import pathlib
from typing import Any, Dict, List
from ghidra.app.cmd.label import AddLabelCmd, CreateNamespacesCmd
from ghidra.program.model.symbol import Namespace, SourceType, SymbolType
import capa
import capa.main
import capa.rules
import capa.render.json
import capa.ghidra.helpers
import capa.capabilities.common
import capa.features.extractors.ghidra.extractor
logger = logging.getLogger("capa_explorer")
def add_bookmark(addr, txt, category="CapaExplorer"):
"""create bookmark at addr"""
currentProgram().getBookmarkManager().setBookmark(addr, "Info", category, txt) # type: ignore [name-defined] # noqa: F821
def create_namespace(namespace_str):
"""create new Ghidra namespace for each capa namespace"""
cmd = CreateNamespacesCmd(namespace_str, SourceType.USER_DEFINED)
cmd.applyTo(currentProgram()) # type: ignore [name-defined] # noqa: F821
return cmd.getNamespace()
def create_label(ghidra_addr, name, capa_namespace):
"""custom label cmd to overlay symbols under capa-generated namespaces"""
# prevent duplicate labels under the same capa-generated namespace
symbol_table = currentProgram().getSymbolTable() # type: ignore [name-defined] # noqa: F821
for sym in symbol_table.getSymbols(ghidra_addr):
if sym.getName(True) == capa_namespace.getName(True) + Namespace.DELIMITER + name:
return
# create SymbolType.LABEL at addr
# prioritize capa-generated namespace (duplicate match @ new addr), else put under global Ghidra one (new match)
cmd = AddLabelCmd(ghidra_addr, name, True, SourceType.USER_DEFINED)
cmd.applyTo(currentProgram()) # type: ignore [name-defined] # noqa: F821
# assign new match overlay label to capa-generated namespace
cmd.getSymbol().setNamespace(capa_namespace)
return
class CapaMatchData:
def __init__(
self,
namespace,
scope,
capability,
matches,
attack: List[Dict[Any, Any]],
mbc: List[Dict[Any, Any]],
):
self.namespace = namespace
self.scope = scope
self.capability = capability
self.matches = matches
self.attack = attack
self.mbc = mbc
def bookmark_functions(self):
"""create bookmarks for MITRE ATT&CK & MBC mappings"""
if self.attack == [] and self.mbc == []:
return
for key in self.matches.keys():
addr = toAddr(hex(key)) # type: ignore [name-defined] # noqa: F821
func = getFunctionContaining(addr) # type: ignore [name-defined] # noqa: F821
# bookmark & tag MITRE ATT&CK tactics & MBC @ function scope
if func is not None:
func_addr = func.getEntryPoint()
if self.attack != []:
for item in self.attack:
attack_txt = ""
for part in item.get("parts", {}):
attack_txt = attack_txt + part + Namespace.DELIMITER
attack_txt = attack_txt + item.get("id", {})
add_bookmark(func_addr, attack_txt, "CapaExplorer::MITRE ATT&CK")
if self.mbc != []:
for item in self.mbc:
mbc_txt = ""
for part in item.get("parts", {}):
mbc_txt = mbc_txt + part + Namespace.DELIMITER
mbc_txt = mbc_txt + item.get("id", {})
add_bookmark(func_addr, mbc_txt, "CapaExplorer::MBC")
def set_plate_comment(self, ghidra_addr):
"""set plate comments at matched functions"""
comment = getPlateComment(ghidra_addr) # type: ignore [name-defined] # noqa: F821
rule_path = self.namespace.replace(Namespace.DELIMITER, "/")
# 2 calls to avoid duplicate comments via subsequent script runs
if comment is None:
# first comment @ function
comment = rule_path + "\n"
setPlateComment(ghidra_addr, comment) # type: ignore [name-defined] # noqa: F821
elif rule_path not in comment:
comment = comment + rule_path + "\n"
setPlateComment(ghidra_addr, comment) # type: ignore [name-defined] # noqa: F821
else:
return
def set_pre_comment(self, ghidra_addr, sub_type, description):
"""set pre comments at subscoped matches of main rules"""
comment = getPreComment(ghidra_addr) # type: ignore [name-defined] # noqa: F821
if comment is None:
comment = "capa: " + sub_type + "(" + description + ")" + ' matched in "' + self.capability + '"\n'
setPreComment(ghidra_addr, comment) # type: ignore [name-defined] # noqa: F821
elif self.capability not in comment:
comment = (
comment + "capa: " + sub_type + "(" + description + ")" + ' matched in "' + self.capability + '"\n'
)
setPreComment(ghidra_addr, comment) # type: ignore [name-defined] # noqa: F821
else:
return
def label_matches(self):
"""label findings at function scopes and comment on subscope matches"""
capa_namespace = create_namespace(self.namespace)
symbol_table = currentProgram().getSymbolTable() # type: ignore [name-defined] # noqa: F821
# handle function main scope of matched rule
# these will typically contain further matches within
if self.scope == "function":
for addr in self.matches.keys():
ghidra_addr = toAddr(hex(addr)) # type: ignore [name-defined] # noqa: F821
# classify new function label under capa-generated namespace
sym = symbol_table.getPrimarySymbol(ghidra_addr)
if sym is not None:
if sym.getSymbolType() == SymbolType.FUNCTION:
create_label(ghidra_addr, sym.getName(), capa_namespace)
self.set_plate_comment(ghidra_addr)
# parse the corresponding nodes, and pre-comment subscope matched features
# under the encompassing function(s)
for sub_match in self.matches.get(addr):
for loc, node in sub_match.items():
sub_ghidra_addr = toAddr(hex(loc)) # type: ignore [name-defined] # noqa: F821
if sub_ghidra_addr == ghidra_addr:
# skip duplicates
continue
# precomment subscope matches under the function
if node != {}:
for sub_type, description in parse_node(node):
self.set_pre_comment(sub_ghidra_addr, sub_type, description)
else:
# resolve the encompassing function for the capa namespace
# of non-function scoped main matches
for addr in self.matches.keys():
ghidra_addr = toAddr(hex(addr)) # type: ignore [name-defined] # noqa: F821
# basic block / insn scoped main matches
# Ex. See "Create Process on Windows" Rule
func = getFunctionContaining(ghidra_addr) # type: ignore [name-defined] # noqa: F821
if func is not None:
func_addr = func.getEntryPoint()
create_label(func_addr, func.getName(), capa_namespace)
self.set_plate_comment(func_addr)
# create subscope match precomments
for sub_match in self.matches.get(addr):
for loc, node in sub_match.items():
sub_ghidra_addr = toAddr(hex(loc)) # type: ignore [name-defined] # noqa: F821
if node != {}:
if func is not None:
# basic block/ insn scope under resolved function
for sub_type, description in parse_node(node):
self.set_pre_comment(sub_ghidra_addr, sub_type, description)
else:
# this would be a global/file scoped main match
# try to resolve the encompassing function via the subscope match, instead
# Ex. "run as service" rule
sub_func = getFunctionContaining(sub_ghidra_addr) # type: ignore [name-defined] # noqa: F821
if sub_func is not None:
sub_func_addr = sub_func.getEntryPoint()
# place function in capa namespace & create the subscope match label in Ghidra's global namespace
create_label(sub_func_addr, sub_func.getName(), capa_namespace)
self.set_plate_comment(sub_func_addr)
for sub_type, description in parse_node(node):
self.set_pre_comment(sub_ghidra_addr, sub_type, description)
else:
# addr is in some other file section like .data
# represent this location with a label symbol under the capa namespace
# Ex. See "Reference Base64 String" rule
for sub_type, description in parse_node(node):
# in many cases, these will be ghidra-labeled data, so just add the existing
# label symbol to the capa namespace
for sym in symbol_table.getSymbols(sub_ghidra_addr):
if sym.getSymbolType() == SymbolType.LABEL:
sym.setNamespace(capa_namespace)
self.set_pre_comment(sub_ghidra_addr, sub_type, description)
def get_capabilities():
rules_dir: str = ""
try:
selected_dir = askDirectory("Choose capa rules directory", "Ok") # type: ignore [name-defined] # noqa: F821
if selected_dir:
rules_dir = selected_dir.getPath()
except RuntimeError:
# RuntimeError thrown when user selects "Cancel"
pass
if not rules_dir:
logger.info("You must choose a capa rules directory before running capa.")
return "" # return empty str to avoid handling both int and str types
rules_path: pathlib.Path = pathlib.Path(rules_dir)
logger.info("running capa using rules from %s", str(rules_path))
rules = capa.rules.get_rules([rules_path])
meta = capa.ghidra.helpers.collect_metadata([rules_path])
extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor()
capabilities, counts = capa.capabilities.common.find_capabilities(rules, extractor, True)
if capa.capabilities.common.has_file_limitation(rules, capabilities, is_standalone=False):
popup("capa explorer encountered warnings during analysis. Please check the console output for more information.") # type: ignore [name-defined] # noqa: F821
logger.info("capa encountered warnings during analysis")
return capa.render.json.render(meta, rules, capabilities)
def get_locations(match_dict):
"""recursively collect match addresses and associated nodes"""
for loc in match_dict.get("locations", {}):
# either an rva (absolute)
# or an offset into a file (file)
if loc.get("type", "") in ("absolute", "file"):
yield loc.get("value"), match_dict.get("node")
for child in match_dict.get("children", {}):
yield from get_locations(child)
def parse_node(node_data):
"""pull match descriptions and sub features by parsing node dicts"""
node = node_data.get(node_data.get("type"))
if "description" in node:
yield "description", node.get("description")
data = node.get(node.get("type"))
if isinstance(data, (str, int)):
feat_type = node.get("type")
if isinstance(data, int):
data = hex(data)
yield feat_type, data
def parse_json(capa_data):
"""Parse json produced by capa"""
for rule, capability in capa_data.get("rules", {}).items():
# structure to contain rule match address & supporting feature data
# {rule match addr:[{feature addr:{node_data}}]}
rule_matches: Dict[Any, List[Any]] = {}
for i in range(len(capability.get("matches"))):
# grab rule match location
match_loc = capability.get("matches")[i][0].get("value")
if match_loc is None:
# Ex. See "Reference Base64 string"
# {'type':'no address'}
match_loc = i
rule_matches[match_loc] = []
# grab extracted feature locations & corresponding node data
# feature[0]: location
# feature[1]: node
features = capability.get("matches")[i][1]
feat_dict = {}
for feature in get_locations(features):
feat_dict[feature[0]] = feature[1]
rule_matches[match_loc].append(feat_dict)
# dict data of currently matched rule
meta = capability["meta"]
# get MITRE ATT&CK and MBC
attack = meta.get("attack")
if attack is None:
attack = []
mbc = meta.get("mbc")
if mbc is None:
mbc = []
# scope match for the rule
scope = meta["scopes"].get("static")
fmt_rule = Namespace.DELIMITER + rule.replace(" ", "-")
if "namespace" in meta:
# split into list to help define child namespaces
# this requires the correct delimiter used by Ghidra
# Ex. 'communication/named-pipe/create/create pipe' -> capa::communication::named-pipe::create::create-pipe
namespace_str = Namespace.DELIMITER.join(meta["namespace"].split("/"))
namespace = "capa" + Namespace.DELIMITER + namespace_str + fmt_rule
else:
# lib rules via the official rules repo will not contain data
# for the "namespaces" key, so format using rule itself
# Ex. 'contain loop' -> capa::lib::contain-loop
namespace = "capa" + Namespace.DELIMITER + "lib" + fmt_rule
yield CapaMatchData(namespace, scope, rule, rule_matches, attack, mbc)
def main():
logging.basicConfig(level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
if isRunningHeadless(): # type: ignore [name-defined] # noqa: F821
logger.error("unsupported Ghidra execution mode")
return capa.main.E_UNSUPPORTED_GHIDRA_EXECUTION_MODE
if not capa.ghidra.helpers.is_supported_ghidra_version():
logger.error("unsupported Ghidra version")
return capa.main.E_UNSUPPORTED_GHIDRA_VERSION
if not capa.ghidra.helpers.is_supported_file_type():
logger.error("unsupported file type")
return capa.main.E_INVALID_FILE_TYPE
if not capa.ghidra.helpers.is_supported_arch_type():
logger.error("unsupported file architecture")
return capa.main.E_INVALID_FILE_ARCH
# capa_data will always contain {'meta':..., 'rules':...}
# if the 'rules' key contains no values, then there were no matches
capa_data = json.loads(get_capabilities())
if capa_data.get("rules") is None:
logger.info("capa explorer found no matches")
popup("capa explorer found no matches.") # type: ignore [name-defined] # noqa: F821
return capa.main.E_EMPTY_REPORT
for item in parse_json(capa_data):
item.bookmark_functions()
item.label_matches()
logger.info("capa explorer analysis complete")
popup("capa explorer analysis complete.\nPlease see results in the Bookmarks Window and Namespaces section of the Symbol Tree Window.") # type: ignore [name-defined] # noqa: F821
return 0
if __name__ == "__main__":
if sys.version_info < (3, 8):
from capa.exceptions import UnsupportedRuntimeError
raise UnsupportedRuntimeError("This version of capa can only be used with Python 3.8+")
exit_code = main()
if exit_code != 0:
popup("capa explorer encountered errors during analysis. Please check the console output for more information.") # type: ignore [name-defined] # noqa: F821
sys.exit(exit_code)
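To illustrate how `get_locations()` and `parse_node()` cooperate above, here is a small walk over a hand-built match fragment shaped like capa's JSON output (the address and API name are illustrative):

```python
match = {
    "locations": [{"type": "absolute", "value": 0x401000}],
    "node": {"type": "feature", "feature": {"type": "api", "api": "CreateFileW"}},
    "children": [],
}

for addr, node in get_locations(match):
    for feat_type, value in parse_node(node):
        print(hex(addr), feat_type, value)  # 0x401000 api CreateFileW
```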

View File

@@ -1,167 +0,0 @@
# Run capa against loaded Ghidra database
# @author Mike Hunhoff (mehunhoff@google.com)
# @category Python 3.capa
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import sys
import logging
import pathlib
import argparse
import capa
import capa.main
import capa.rules
import capa.ghidra.helpers
import capa.loader
import capa.render.json
import capa.render.default
import capa.render.verbose
import capa.render.vverbose
import capa.capabilities.common
import capa.features.extractors.ghidra.extractor
logger = logging.getLogger("capa_ghidra")
def run_headless():
parser = argparse.ArgumentParser(description="The FLARE team's open-source tool to integrate capa with Ghidra.")
parser.add_argument(
"rules",
type=str,
help="path to rule file or directory",
)
parser.add_argument(
"-v", "--verbose", action="store_true", help="enable verbose result document (no effect with --json)"
)
parser.add_argument(
"-vv", "--vverbose", action="store_true", help="enable very verbose result document (no effect with --json)"
)
parser.add_argument("-d", "--debug", action="store_true", help="enable debugging output on STDERR")
parser.add_argument("-q", "--quiet", action="store_true", help="disable all output but errors")
parser.add_argument("-j", "--json", action="store_true", help="emit JSON instead of text")
script_args = list(getScriptArgs()) # type: ignore [name-defined] # noqa: F821
if not script_args or len(script_args) > 1:
script_args = []
else:
script_args = script_args[0].split()
for idx, arg in enumerate(script_args):
if arg.lower() == "help":
script_args[idx] = "--help"
args = parser.parse_args(args=script_args)
if args.quiet:
logging.basicConfig(level=logging.WARNING)
logging.getLogger().setLevel(logging.WARNING)
elif args.debug:
logging.basicConfig(level=logging.DEBUG)
logging.getLogger().setLevel(logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
logger.debug("running in Ghidra headless mode")
rules_path = pathlib.Path(args.rules)
logger.debug("rule path: %s", rules_path)
rules = capa.rules.get_rules([rules_path])
meta = capa.ghidra.helpers.collect_metadata([rules_path])
extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor()
capabilities, counts = capa.capabilities.common.find_capabilities(rules, extractor, False)
meta.analysis.feature_counts = counts["feature_counts"]
meta.analysis.library_functions = counts["library_functions"]
meta.analysis.layout = capa.loader.compute_layout(rules, extractor, capabilities)
if capa.capabilities.common.has_file_limitation(rules, capabilities, is_standalone=True):
logger.info("capa encountered warnings during analysis")
if args.json:
print(capa.render.json.render(meta, rules, capabilities)) # noqa: T201
elif args.vverbose:
print(capa.render.vverbose.render(meta, rules, capabilities)) # noqa: T201
elif args.verbose:
print(capa.render.verbose.render(meta, rules, capabilities)) # noqa: T201
else:
print(capa.render.default.render(meta, rules, capabilities)) # noqa: T201
return 0
def run_ui():
logging.basicConfig(level=logging.INFO)
logging.getLogger().setLevel(logging.INFO)
rules_dir: str = ""
try:
selected_dir = askDirectory("Choose capa rules directory", "Ok") # type: ignore [name-defined] # noqa: F821
if selected_dir:
rules_dir = selected_dir.getPath()
except RuntimeError:
# RuntimeError thrown when user selects "Cancel"
pass
if not rules_dir:
logger.info("You must choose a capa rules directory before running capa.")
return capa.main.E_MISSING_RULES
verbose = askChoice( # type: ignore [name-defined] # noqa: F821
"capa output verbosity", "Choose capa output verbosity", ["default", "verbose", "vverbose"], "default"
)
rules_path: pathlib.Path = pathlib.Path(rules_dir)
logger.info("running capa using rules from %s", str(rules_path))
rules = capa.rules.get_rules([rules_path])
meta = capa.ghidra.helpers.collect_metadata([rules_path])
extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor()
capabilities, counts = capa.capabilities.common.find_capabilities(rules, extractor, True)
meta.analysis.feature_counts = counts["feature_counts"]
meta.analysis.library_functions = counts["library_functions"]
meta.analysis.layout = capa.loader.compute_layout(rules, extractor, capabilities)
if capa.capabilities.common.has_file_limitation(rules, capabilities, is_standalone=False):
logger.info("capa encountered warnings during analysis")
if verbose == "vverbose":
print(capa.render.vverbose.render(meta, rules, capabilities)) # noqa: T201
elif verbose == "verbose":
print(capa.render.verbose.render(meta, rules, capabilities)) # noqa: T201
else:
print(capa.render.default.render(meta, rules, capabilities)) # noqa: T201
return 0
def main():
if not capa.ghidra.helpers.is_supported_ghidra_version():
return capa.main.E_UNSUPPORTED_GHIDRA_VERSION
if not capa.ghidra.helpers.is_supported_file_type():
return capa.main.E_INVALID_FILE_TYPE
if not capa.ghidra.helpers.is_supported_arch_type():
return capa.main.E_INVALID_FILE_ARCH
if isRunningHeadless(): # type: ignore [name-defined] # noqa: F821
return run_headless()
else:
return run_ui()
if __name__ == "__main__":
if sys.version_info < (3, 8):
from capa.exceptions import UnsupportedRuntimeError
raise UnsupportedRuntimeError("This version of capa can only be used with Python 3.8+")
sys.exit(main())
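One subtlety in `run_headless()` above: the Headless Analyzer delivers all script arguments as a single quoted string, which the script splits into an argv list before handing it to argparse. A self-contained sketch of that behavior (the rules path is illustrative):

```python
import argparse

parser = argparse.ArgumentParser(description="argument splitting sketch")
parser.add_argument("rules", type=str, help="path to rule file or directory")
parser.add_argument("-v", "--verbose", action="store_true")

# as returned by getScriptArgs(): one element holding the entire quoted string
script_args = ["/home/wumbo/capa/rules --verbose"]
args = parser.parse_args(args=script_args[0].split())

assert args.rules == "/home/wumbo/capa/rules"
assert args.verbose is True
```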

View File

@@ -1,160 +0,0 @@
# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at: [package root]/LICENSE.txt
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and limitations under the License.
import logging
import datetime
import contextlib
from typing import List
from pathlib import Path
import capa
import capa.version
import capa.features.common
import capa.features.freeze
import capa.render.result_document as rdoc
import capa.features.extractors.elf
import capa.features.extractors.ghidra.helpers
logger = logging.getLogger("capa")
# file type as returned by Ghidra
SUPPORTED_FILE_TYPES = ("Executable and Linking Format (ELF)", "Portable Executable (PE)", "Raw Binary")
class GHIDRAIO:
"""
An object that acts as a file-like object,
using bytes from the current Ghidra listing.
"""
def __init__(self):
super().__init__()
self.offset = 0
self.bytes_ = self.get_bytes()
def seek(self, offset, whence=0):
assert whence == 0
self.offset = offset
def read(self, size):
logger.debug("reading 0x%x bytes at 0x%x (ea: 0x%x)", size, self.offset, currentProgram().getImageBase().add(self.offset).getOffset()) # type: ignore [name-defined] # noqa: F821
if size > len(self.bytes_) - self.offset:
logger.debug("cannot read 0x%x bytes at 0x%x (ea: BADADDR)", size, self.offset)
return b""
else:
return self.bytes_[self.offset : self.offset + size]
def close(self):
return
def get_bytes(self):
file_bytes = currentProgram().getMemory().getAllFileBytes()[0] # type: ignore [name-defined] # noqa: F821
# getOriginalByte() allows for raw file parsing on the Ghidra side
# other functions will fail as Ghidra will think that it's reading uninitialized memory
bytes_ = [file_bytes.getOriginalByte(i) for i in range(file_bytes.getSize())]
return capa.features.extractors.ghidra.helpers.ints_to_bytes(bytes_)
def is_supported_ghidra_version():
version = float(getGhidraVersion()[:4]) # type: ignore [name-defined] # noqa: F821
if version < 10.2:
warning_msg = "capa does not support this Ghidra version"
logger.warning(warning_msg)
logger.warning("Your Ghidra version is: %s. Supported versions are: Ghidra >= 10.2", version)
return False
return True
def is_running_headless():
return isRunningHeadless() # type: ignore [name-defined] # noqa: F821
def is_supported_file_type():
file_info = currentProgram().getExecutableFormat() # type: ignore [name-defined] # noqa: F821
if file_info not in SUPPORTED_FILE_TYPES:
logger.error("-" * 80)
logger.error(" Input file does not appear to be a supported file type.")
logger.error(" ")
logger.error(
" capa currently only supports analyzing PE, ELF, or binary files containing x86 (32- and 64-bit) shellcode."
)
logger.error(" If you don't know the input file type, you can try using the `file` utility to guess it.")
logger.error("-" * 80)
return False
return True
def is_supported_arch_type():
lang_id = str(currentProgram().getLanguageID()).lower() # type: ignore [name-defined] # noqa: F821
if not all((lang_id.startswith("x86"), any(arch in lang_id for arch in ("32", "64")))):
logger.error("-" * 80)
logger.error(" Input file does not appear to target a supported architecture.")
logger.error(" ")
logger.error(" capa currently only supports analyzing x86 (32- and 64-bit).")
logger.error("-" * 80)
return False
return True
def get_file_md5():
return currentProgram().getExecutableMD5() # type: ignore [name-defined] # noqa: F821
def get_file_sha256():
return currentProgram().getExecutableSHA256() # type: ignore [name-defined] # noqa: F821
def collect_metadata(rules: List[Path]):
md5 = get_file_md5()
sha256 = get_file_sha256()
info = currentProgram().getLanguageID().toString() # type: ignore [name-defined] # noqa: F821
if "x86" in info and "64" in info:
arch = "x86_64"
elif "x86" in info and "32" in info:
arch = "x86"
else:
arch = "unknown arch"
format_name: str = currentProgram().getExecutableFormat() # type: ignore [name-defined] # noqa: F821
if "PE" in format_name:
os = "windows"
elif "ELF" in format_name:
with contextlib.closing(capa.ghidra.helpers.GHIDRAIO()) as f:
os = capa.features.extractors.elf.detect_elf_os(f)
else:
os = "unknown os"
return rdoc.Metadata(
timestamp=datetime.datetime.now(),
version=capa.version.__version__,
argv=(),
sample=rdoc.Sample(
md5=md5,
sha1="",
sha256=sha256,
path=currentProgram().getExecutablePath(), # type: ignore [name-defined] # noqa: F821
),
flavor=rdoc.Flavor.STATIC,
analysis=rdoc.StaticAnalysis(
format=currentProgram().getExecutableFormat(), # type: ignore [name-defined] # noqa: F821
arch=arch,
os=os,
extractor="ghidra",
rules=tuple(r.resolve().absolute().as_posix() for r in rules),
base_address=capa.features.freeze.Address.from_capa(currentProgram().getImageBase().getOffset()), # type: ignore [name-defined] # noqa: F821
layout=rdoc.StaticLayout(
functions=(),
),
feature_counts=rdoc.StaticFeatureCounts(file=0, functions=()),
library_functions=(),
),
)
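The `GHIDRAIO` class above exists so that file-format heuristics like `capa.features.extractors.elf.detect_elf_os()` can read the original input-file bytes through an ordinary file-like interface. A usage sketch; this only runs inside a Ghidrathon session, and the offsets are illustrative:

```python
import contextlib

with contextlib.closing(GHIDRAIO()) as f:
    f.seek(0)
    magic = f.read(2)  # e.g. b"MZ" for a PE; b"" if the read runs past the end
```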

Some files were not shown because too many files have changed in this diff.