diff --git a/.github/mypy/mypy.ini b/.github/mypy/mypy.ini index 603f2e42..a3356eea 100644 --- a/.github/mypy/mypy.ini +++ b/.github/mypy/mypy.ini @@ -86,3 +86,6 @@ ignore_missing_imports = True [mypy-netnode.*] ignore_missing_imports = True + +[mypy-ghidra.*] +ignore_missing_imports = True diff --git a/.github/pyinstaller/pyinstaller.spec b/.github/pyinstaller/pyinstaller.spec index 7d90e966..a7c379d2 100644 --- a/.github/pyinstaller/pyinstaller.spec +++ b/.github/pyinstaller/pyinstaller.spec @@ -17,6 +17,7 @@ a = Analysis( # when invoking pyinstaller from the project root, # this gets invoked from the directory of the spec file, # i.e. ./.github/pyinstaller + ("../../assets", "assets"), ("../../rules", "rules"), ("../../sigs", "sigs"), ("../../cache", "cache"), @@ -79,7 +80,7 @@ exe = EXE( name="capa", icon="logo.ico", debug=False, - strip=None, + strip=False, upx=True, console=True, ) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index c8cece81..c688e20b 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -139,3 +139,62 @@ jobs: env: BN_LICENSE: ${{ secrets.BN_LICENSE }} run: pytest -v tests/test_binja_features.py # explicitly refer to the binja tests for performance. other tests run above. 
+ + ghidra-tests: + name: Ghidra tests for ${{ matrix.python-version }} + runs-on: ubuntu-20.04 + needs: [code_style, rule_linter] + strategy: + fail-fast: false + matrix: + python-version: ["3.8", "3.11"] + java-version: ["17"] + gradle-version: ["7.3"] + ghidra-version: ["10.3"] + public-version: ["PUBLIC_20230510"] # for ghidra releases + jep-version: ["4.1.1"] + ghidrathon-version: ["3.0.0"] + steps: + - name: Checkout capa with submodules + uses: actions/checkout@ac593985615ec2ede58e132d2e21d2b1cbd6127c # v3.3.0 + with: + submodules: true + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@d27e3f3d7c64b4bbf8e4abfb9b63b83e846e0435 # v4.5.0 + with: + python-version: ${{ matrix.python-version }} + - name: Set up Java ${{ matrix.java-version }} + uses: actions/setup-java@5ffc13f4174014e2d4d4572b3d74c3fa61aeb2c2 # v3 + with: + distribution: 'temurin' + java-version: ${{ matrix.java-version }} + - name: Set up Gradle ${{ matrix.gradle-version }} + uses: gradle/gradle-build-action@40b6781dcdec2762ad36556682ac74e31030cfe2 # v2.5.1 + with: + gradle-version: ${{ matrix.gradle-version }} + - name: Install Jep ${{ matrix.jep-version }} + run : pip install jep==${{ matrix.jep-version }} + - name: Install Ghidra ${{ matrix.ghidra-version }} + run: | + mkdir ./.github/ghidra + wget "https://github.com/NationalSecurityAgency/ghidra/releases/download/Ghidra_${{ matrix.ghidra-version }}_build/ghidra_${{ matrix.ghidra-version }}_${{ matrix.public-version }}.zip" -O ./.github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC.zip + unzip .github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC.zip -d .github/ghidra/ + - name: Install Ghidrathon + run : | + mkdir ./.github/ghidrathon + curl -o ./.github/ghidrathon/ghidrathon-${{ matrix.ghidrathon-version }}.zip "https://codeload.github.com/mandiant/Ghidrathon/zip/refs/tags/v${{ matrix.ghidrathon-version }}" + unzip .github/ghidrathon/ghidrathon-${{ matrix.ghidrathon-version }}.zip -d 
.github/ghidrathon/ + gradle -p ./.github/ghidrathon/Ghidrathon-${{ matrix.ghidrathon-version }}/ -PGHIDRA_INSTALL_DIR=$(pwd)/.github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC + unzip .github/ghidrathon/Ghidrathon-${{ matrix.ghidrathon-version }}/dist/*.zip -d .github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC/Ghidra/Extensions + - name: Install pyyaml + run: sudo apt-get install -y libyaml-dev + - name: Install capa + run: pip install -e .[dev] + - name: Run tests + run: | + mkdir ./.github/ghidra/project + .github/ghidra/ghidra_${{ matrix.ghidra-version }}_PUBLIC/support/analyzeHeadless .github/ghidra/project ghidra_test -Import ./tests/data/mimikatz.exe_ -ScriptPath ./tests/ -PostScript test_ghidra_features.py > ../output.log + cat ../output.log + exit_code=$(cat ../output.log | grep exit | awk '{print $NF}') + exit $exit_code + diff --git a/CHANGELOG.md b/CHANGELOG.md index ad922855..b84a7672 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,11 @@ - add call scope #771 @yelhamer - add process scope for the dynamic analysis flavor #1517 @yelhamer - Add thread scope for the dynamic analysis flavor #1517 @yelhamer +- ghidra: add Ghidra feature extractor and supporting code #1770 @colton-gabertan +- ghidra: add entry script helping users run capa against a loaded Ghidra database #1767 @mike-hunhoff +- binja: add support for forwarded exports #1646 @xusheng6 +- binja: add support for symtab names #1504 @xusheng6 +- add com class/interface features #322 @Aayush-goel-04 - protobuf: add `Metadata.flavor` @williballenthin ### Breaking Changes @@ -14,11 +19,34 @@ - remove the `SCOPE_*` constants in favor of the `Scope` enum #1764 @williballenthin - protobuf: deprecate `RuleMetadata.scope` in favor of `RuleMetadata.scopes` @williballenthin -### New Rules (0) +### New Rules (19) +- nursery/get-ntoskrnl-base-address @mr-tz +- host-interaction/network/connectivity/set-tcp-connection-state @johnk3r +- nursery/capture-process-snapshot-data @mr-tz +- 
collection/network/capture-packets-using-sharppcap jakub.jozwiak@mandiant.com +- nursery/communicate-with-kernel-module-via-netlink-socket-on-linux michael.hunhoff@mandiant.com +- nursery/get-current-pid-on-linux michael.hunhoff@mandiant.com +- nursery/get-file-system-information-on-linux michael.hunhoff@mandiant.com +- nursery/get-password-database-entry-on-linux michael.hunhoff@mandiant.com +- nursery/mark-thread-detached-on-linux michael.hunhoff@mandiant.com +- nursery/persist-via-gnome-autostart-on-linux michael.hunhoff@mandiant.com +- nursery/set-thread-name-on-linux michael.hunhoff@mandiant.com +- load-code/dotnet/load-windows-common-language-runtime michael.hunhoff@mandiant.com blas.kojusner@mandiant.com jakub.jozwiak@mandiant.com +- nursery/log-keystrokes-via-input-method-manager @mr-tz +- nursery/encrypt-data-using-rc4-via-systemfunction032 richard.weiss@mandiant.com +- nursery/add-value-to-global-atom-table @mr-tz +- nursery/enumerate-processes-that-use-resource @Ana06 +- host-interaction/process/inject/allocate-or-change-rwx-memory @mr-tz +- lib/allocate-or-change-rw-memory 0x534a@mailbox.org @mr-tz +- lib/change-memory-protection @mr-tz - ### Bug Fixes +- ghidra: fix ints_to_bytes performance #1761 @mike-hunhoff +- binja: improve function call site detection @xusheng6 +- binja: use binaryninja.load to open files @xusheng6 +- binja: bump binja version to 3.5 #1789 @xusheng6 ### capa explorer IDA Pro plugin @@ -1581,4 +1609,4 @@ Download a standalone binary below and checkout the readme [here on GitHub](http ### Raw diffs - [capa v1.0.0...v1.1.0](https://github.com/mandiant/capa/compare/v1.0.0...v1.1.0) - - [capa-rules v1.0.0...v1.1.0](https://github.com/mandiant/capa-rules/compare/v1.0.0...v1.1.0) + - [capa-rules v1.0.0...v1.1.0](https://github.com/mandiant/capa-rules/compare/v1.0.0...v1.1.0) \ No newline at end of file diff --git a/README.md b/README.md index 9a42cdf1..eb5944b9 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![PyPI - Python 
Version](https://img.shields.io/pypi/pyversions/flare-capa)](https://pypi.org/project/flare-capa) [![Last release](https://img.shields.io/github/v/release/mandiant/capa)](https://github.com/mandiant/capa/releases) -[![Number of rules](https://img.shields.io/badge/rules-831-blue.svg)](https://github.com/mandiant/capa-rules) +[![Number of rules](https://img.shields.io/badge/rules-847-blue.svg)](https://github.com/mandiant/capa-rules) [![CI status](https://github.com/mandiant/capa/workflows/CI/badge.svg)](https://github.com/mandiant/capa/actions?query=workflow%3ACI+event%3Apush+branch%3Amaster) [![Downloads](https://img.shields.io/github/downloads/mandiant/capa/total)](https://github.com/mandiant/capa/releases) [![License](https://img.shields.io/badge/license-Apache--2.0-green.svg)](LICENSE.txt) @@ -170,6 +170,8 @@ capa explorer helps you identify interesting areas of a program and build new ca ![capa + IDA Pro integration](https://github.com/mandiant/capa/blob/master/doc/img/explorer_expanded.png) +If you use Ghidra, you can use the Python 3 [Ghidra feature extractor](/capa/ghidra/). This integration enables capa to extract features directly from your Ghidra database, which can help you identify capabilities in programs that you analyze using Ghidra. 
+ # further information ## capa - [Installation](https://github.com/mandiant/capa/blob/master/doc/installation.md) diff --git a/assets/classes.json.gz b/assets/classes.json.gz new file mode 100644 index 00000000..dbebcb22 Binary files /dev/null and b/assets/classes.json.gz differ diff --git a/assets/interfaces.json.gz b/assets/interfaces.json.gz new file mode 100644 index 00000000..ae68a33d Binary files /dev/null and b/assets/interfaces.json.gz differ diff --git a/capa/exceptions.py b/capa/exceptions.py index e080791a..58af3bef 100644 --- a/capa/exceptions.py +++ b/capa/exceptions.py @@ -19,3 +19,7 @@ class UnsupportedArchError(ValueError): class UnsupportedOSError(ValueError): pass + + +class EmptyReportError(ValueError): + pass diff --git a/capa/features/extractors/binja/file.py b/capa/features/extractors/binja/file.py index d46451e7..84b25348 100644 --- a/capa/features/extractors/binja/file.py +++ b/capa/features/extractors/binja/file.py @@ -17,7 +17,7 @@ import capa.features.extractors.strings from capa.features.file import Export, Import, Section, FunctionName from capa.features.common import FORMAT_PE, FORMAT_ELF, Format, String, Feature, Characteristic from capa.features.address import NO_ADDRESS, Address, FileOffsetAddress, AbsoluteVirtualAddress -from capa.features.extractors.binja.helpers import unmangle_c_name +from capa.features.extractors.binja.helpers import read_c_string, unmangle_c_name def check_segment_for_pe(bv: BinaryView, seg: Segment) -> Iterator[Tuple[int, int]]: @@ -82,6 +82,24 @@ def extract_file_export_names(bv: BinaryView) -> Iterator[Tuple[Feature, Address if name != unmangled_name: yield Export(unmangled_name), AbsoluteVirtualAddress(sym.address) + for sym in bv.get_symbols_of_type(SymbolType.DataSymbol): + if sym.binding not in [SymbolBinding.GlobalBinding]: + continue + + name = sym.short_name + if not name.startswith("__forwarder_name"): + continue + + # Due to https://github.com/Vector35/binaryninja-api/issues/4641, in binja version 
3.5, the symbol's name + # does not contain the DLL name. As a workaround, we read the C string at the symbol's address, which contains + # both the DLL name and the function name. + # Once the above issue is closed in the next binja stable release, we can update the code here to use the + # symbol name directly. + name = read_c_string(bv, sym.address, 1024) + forwarded_name = capa.features.extractors.helpers.reformat_forwarded_export_name(name) + yield Export(forwarded_name), AbsoluteVirtualAddress(sym.address) + yield Characteristic("forwarded export"), AbsoluteVirtualAddress(sym.address) + def extract_file_import_names(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]: """extract function imports @@ -125,15 +143,17 @@ def extract_file_function_names(bv: BinaryView) -> Iterator[Tuple[Feature, Addre """ for sym_name in bv.symbols: for sym in bv.symbols[sym_name]: - if sym.type == SymbolType.LibraryFunctionSymbol: - name = sym.short_name - yield FunctionName(name), sym.address - if name.startswith("_"): - # some linkers may prefix linked routines with a `_` to avoid name collisions. - # extract features for both the mangled and un-mangled representations. - # e.g. `_fwrite` -> `fwrite` - # see: https://stackoverflow.com/a/2628384/87207 - yield FunctionName(name[1:]), sym.address + if sym.type not in [SymbolType.LibraryFunctionSymbol, SymbolType.FunctionSymbol]: + continue + + name = sym.short_name + yield FunctionName(name), sym.address + if name.startswith("_"): + # some linkers may prefix linked routines with a `_` to avoid name collisions. + # extract features for both the mangled and un-mangled representations. + # e.g. 
`_fwrite` -> `fwrite` + # see: https://stackoverflow.com/a/2628384/87207 + yield FunctionName(name[1:]), sym.address def extract_file_format(bv: BinaryView) -> Iterator[Tuple[Feature, Address]]: diff --git a/capa/features/extractors/binja/function.py b/capa/features/extractors/binja/function.py index a502a5f4..520de0b3 100644 --- a/capa/features/extractors/binja/function.py +++ b/capa/features/extractors/binja/function.py @@ -7,8 +7,9 @@ # See the License for the specific language governing permissions and limitations under the License. from typing import Tuple, Iterator -from binaryninja import Function, BinaryView, LowLevelILOperation +from binaryninja import Function, BinaryView, SymbolType, RegisterValueType, LowLevelILOperation +from capa.features.file import FunctionName from capa.features.common import Feature, Characteristic from capa.features.address import Address, AbsoluteVirtualAddress from capa.features.extractors import loops @@ -23,13 +24,27 @@ def extract_function_calls_to(fh: FunctionHandle): # Everything that is a code reference to the current function is considered a caller, which actually includes # many other references that are NOT a caller. 
For example, an instruction `push function_start` will also be # considered a caller to the function - if caller.llil is not None and caller.llil.operation in [ + llil = caller.llil + if (llil is None) or llil.operation not in [ LowLevelILOperation.LLIL_CALL, LowLevelILOperation.LLIL_CALL_STACK_ADJUST, LowLevelILOperation.LLIL_JUMP, LowLevelILOperation.LLIL_TAILCALL, ]: - yield Characteristic("calls to"), AbsoluteVirtualAddress(caller.address) + continue + + if llil.dest.value.type not in [ + RegisterValueType.ImportedAddressValue, + RegisterValueType.ConstantValue, + RegisterValueType.ConstantPointerValue, + ]: + continue + + address = llil.dest.value.value + if address != func.start: + continue + + yield Characteristic("calls to"), AbsoluteVirtualAddress(caller.address) def extract_function_loop(fh: FunctionHandle): @@ -59,10 +74,31 @@ def extract_recursive_call(fh: FunctionHandle): yield Characteristic("recursive call"), fh.address +def extract_function_name(fh: FunctionHandle): + """extract function names (e.g., symtab names)""" + func: Function = fh.inner + bv: BinaryView = func.view + if bv is None: + return + + for sym in bv.get_symbols(func.start): + if sym.type not in [SymbolType.LibraryFunctionSymbol, SymbolType.FunctionSymbol]: + continue + + name = sym.short_name + yield FunctionName(name), sym.address + if name.startswith("_"): + # some linkers may prefix linked routines with a `_` to avoid name collisions. + # extract features for both the mangled and un-mangled representations. + # e.g. 
`_fwrite` -> `fwrite` + # see: https://stackoverflow.com/a/2628384/87207 + yield FunctionName(name[1:]), sym.address + + def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]: for func_handler in FUNCTION_HANDLERS: for feature, addr in func_handler(fh): yield feature, addr -FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call) +FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call, extract_function_name) diff --git a/capa/features/extractors/binja/helpers.py b/capa/features/extractors/binja/helpers.py index a96f64da..0ce0f073 100644 --- a/capa/features/extractors/binja/helpers.py +++ b/capa/features/extractors/binja/helpers.py @@ -9,7 +9,7 @@ import re from typing import List, Callable from dataclasses import dataclass -from binaryninja import LowLevelILInstruction +from binaryninja import BinaryView, LowLevelILInstruction from binaryninja.architecture import InstructionTextToken @@ -51,3 +51,19 @@ def unmangle_c_name(name: str) -> str: return match.group(1) return name + + +def read_c_string(bv: BinaryView, offset: int, max_len: int) -> str: + s: List[str] = [] + while len(s) < max_len: + try: + c = bv.read(offset + len(s), 1)[0] + except Exception: + break + + if c == 0: + break + + s.append(chr(c)) + + return "".join(s) diff --git a/capa/features/extractors/binja/insn.py b/capa/features/extractors/binja/insn.py index 3144fd15..f2b8fefc 100644 --- a/capa/features/extractors/binja/insn.py +++ b/capa/features/extractors/binja/insn.py @@ -94,28 +94,32 @@ def extract_insn_api_features(fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle) candidate_addrs.append(stub_addr) for address in candidate_addrs: - sym = func.view.get_symbol_at(address) - if sym is None or sym.type not in [SymbolType.ImportAddressSymbol, SymbolType.ImportedFunctionSymbol]: - continue + for sym in func.view.get_symbols(address): + if sym is None or sym.type not in [ + 
SymbolType.ImportAddressSymbol, + SymbolType.ImportedFunctionSymbol, + SymbolType.FunctionSymbol, + ]: + continue - sym_name = sym.short_name + sym_name = sym.short_name - lib_name = "" - import_lib = bv.lookup_imported_object_library(sym.address) - if import_lib is not None: - lib_name = import_lib[0].name - if lib_name.endswith(".dll"): - lib_name = lib_name[:-4] - elif lib_name.endswith(".so"): - lib_name = lib_name[:-3] + lib_name = "" + import_lib = bv.lookup_imported_object_library(sym.address) + if import_lib is not None: + lib_name = import_lib[0].name + if lib_name.endswith(".dll"): + lib_name = lib_name[:-4] + elif lib_name.endswith(".so"): + lib_name = lib_name[:-3] - for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name): - yield API(name), ih.address - - if sym_name.startswith("_"): - for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name[1:]): + for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name): yield API(name), ih.address + if sym_name.startswith("_"): + for name in capa.features.extractors.helpers.generate_symbols(lib_name, sym_name[1:]): + yield API(name), ih.address + def extract_insn_number_features( fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle diff --git a/capa/features/extractors/cape/call.py b/capa/features/extractors/cape/call.py index 5d274c5e..88680b3f 100644 --- a/capa/features/extractors/cape/call.py +++ b/capa/features/extractors/cape/call.py @@ -21,7 +21,7 @@ logger = logging.getLogger(__name__) def extract_call_features(ph: ProcessHandle, th: ThreadHandle, ch: CallHandle) -> Iterator[Tuple[Feature, Address]]: """ - this method extrcts the given call's features (such as API name and arguments), + this method extracts the given call's features (such as API name and arguments), and returns them as API, Number, and String features. 
args: diff --git a/capa/features/extractors/cape/extractor.py b/capa/features/extractors/cape/extractor.py index 2a070c91..1c8cfd2a 100644 --- a/capa/features/extractors/cape/extractor.py +++ b/capa/features/extractors/cape/extractor.py @@ -14,10 +14,10 @@ import capa.features.extractors.cape.file import capa.features.extractors.cape.thread import capa.features.extractors.cape.global_ import capa.features.extractors.cape.process -from capa.exceptions import UnsupportedFormatError +from capa.exceptions import EmptyReportError, UnsupportedFormatError from capa.features.common import Feature, Characteristic from capa.features.address import NO_ADDRESS, Address, AbsoluteVirtualAddress, _NoAddress -from capa.features.extractors.cape.models import CapeReport +from capa.features.extractors.cape.models import Static, CapeReport from capa.features.extractors.base_extractor import ( CallHandle, SampleHashes, @@ -85,10 +85,18 @@ class CapeExtractor(DynamicFeatureExtractor): if cr.info.version not in TESTED_VERSIONS: logger.warning("CAPE version '%s' not tested/supported yet", cr.info.version) + # observed in 2.4-CAPE reports from capesandbox.com + if cr.static is None and cr.target.file.pe is not None: + cr.static = Static() + cr.static.pe = cr.target.file.pe + if cr.static is None: raise UnsupportedFormatError("CAPE report missing static analysis") if cr.static.pe is None: raise UnsupportedFormatError("CAPE report missing PE analysis") + if len(cr.behavior.processes) == 0: + raise EmptyReportError("CAPE did not capture any processes") + return cls(cr) diff --git a/capa/features/extractors/cape/file.py b/capa/features/extractors/cape/file.py index 35757b3a..66ec8c4f 100644 --- a/capa/features/extractors/cape/file.py +++ b/capa/features/extractors/cape/file.py @@ -23,10 +23,23 @@ def get_processes(report: CapeReport) -> Iterator[ProcessHandle]: """ get all the created processes for a sample """ + seen_processes = {} for process in report.behavior.processes: addr = 
ProcessAddress(pid=process.process_id, ppid=process.parent_id) yield ProcessHandle(address=addr, inner=process) + # check for pid and ppid reuse + if addr not in seen_processes: + seen_processes[addr] = [process] + else: + logger.warning( + "pid and ppid reuse detected between process %s and process%s: %s", + process, + "es" if len(seen_processes[addr]) > 1 else "", + seen_processes[addr], + ) + seen_processes[addr].append(process) + def extract_import_names(report: CapeReport) -> Iterator[Tuple[Feature, Address]]: """ diff --git a/capa/features/extractors/cape/global_.py b/capa/features/extractors/cape/global_.py index 81ed601b..62eeff20 100644 --- a/capa/features/extractors/cape/global_.py +++ b/capa/features/extractors/cape/global_.py @@ -12,14 +12,12 @@ from typing import Tuple, Iterator from capa.features.common import ( OS, OS_ANY, - ARCH_ANY, OS_LINUX, ARCH_I386, FORMAT_PE, ARCH_AMD64, FORMAT_ELF, OS_WINDOWS, - FORMAT_UNKNOWN, Arch, Format, Feature, @@ -37,7 +35,9 @@ def extract_arch(report: CapeReport) -> Iterator[Tuple[Feature, Address]]: yield Arch(ARCH_AMD64), NO_ADDRESS else: logger.warning("unrecognized Architecture: %s", report.target.file.type) - yield Arch(ARCH_ANY), NO_ADDRESS + raise ValueError( + f"unrecognized Architecture from the CAPE report; output of file command: {report.target.file.type}" + ) def extract_format(report: CapeReport) -> Iterator[Tuple[Feature, Address]]: @@ -47,7 +47,9 @@ def extract_format(report: CapeReport) -> Iterator[Tuple[Feature, Address]]: yield Format(FORMAT_ELF), NO_ADDRESS else: logger.warning("unknown file format, file command output: %s", report.target.file.type) - yield Format(FORMAT_UNKNOWN), NO_ADDRESS + raise ValueError( + "unrecognized file format from the CAPE report; output of file command: {report.target.file.type}" + ) def extract_os(report: CapeReport) -> Iterator[Tuple[Feature, Address]]: @@ -69,8 +71,9 @@ def extract_os(report: CapeReport) -> Iterator[Tuple[Feature, Address]]: elif "kNetBSD" in 
file_output: yield OS("netbsd"), NO_ADDRESS else: + # if the operating system information is missing from the cape report, it's likely a bug logger.warning("unrecognized OS: %s", file_output) - yield OS(OS_ANY), NO_ADDRESS + raise ValueError("unrecognized OS from the CAPE report; output of file command: {file_output}") else: # the sample is shellcode logger.debug("unsupported file format, file command output: %s", file_output) diff --git a/capa/features/extractors/cape/models.py b/capa/features/extractors/cape/models.py index ab479c8d..870afa82 100644 --- a/capa/features/extractors/cape/models.py +++ b/capa/features/extractors/cape/models.py @@ -132,13 +132,21 @@ class DigitalSigner(FlexibleModel): extensions_subjectKeyIdentifier: Optional[str] = None +class AuxSigner(ExactModel): + name: str + issued_to: str = Field(alias="Issued to") + issued_by: str = Field(alias="Issued by") + expires: str = Field(alias="Expires") + sha1_hash: str = Field(alias="SHA1 hash") + + class Signer(ExactModel): - aux_sha1: Optional[TODO] = None - aux_timestamp: Optional[None] = None + aux_sha1: Optional[str] = None + aux_timestamp: Optional[str] = None aux_valid: Optional[bool] = None aux_error: Optional[bool] = None aux_error_desc: Optional[str] = None - aux_signers: Optional[ListTODO] = None + aux_signers: Optional[List[AuxSigner]] = None class Overlay(ExactModel): @@ -197,7 +205,10 @@ class PE(ExactModel): guest_signers: Signer -class File(ExactModel): +# TODO(mr-tz): target.file.dotnet, target.file.extracted_files, target.file.extracted_files_tool, +# target.file.extracted_files_time +# https://github.com/mandiant/capa/issues/1814 +class File(FlexibleModel): type: str cape_type_code: Optional[int] = None cape_type: Optional[str] = None @@ -350,6 +361,7 @@ class Behavior(ExactModel): class Target(ExactModel): category: str file: File + pe: Optional[PE] = None class Static(ExactModel): @@ -385,7 +397,7 @@ class CapeReport(FlexibleModel): # post-processed results: payloads and 
extracted configs CAPE: Optional[Cape] = None dropped: Optional[List[File]] = None - procdump: List[ProcessFile] + procdump: Optional[List[ProcessFile]] = None procmemory: ListTODO # ========================================================================= diff --git a/capa/features/extractors/cape/thread.py b/capa/features/extractors/cape/thread.py index 24c2d3b2..cfdb081c 100644 --- a/capa/features/extractors/cape/thread.py +++ b/capa/features/extractors/cape/thread.py @@ -10,6 +10,7 @@ import logging from typing import Iterator from capa.features.address import DynamicCallAddress +from capa.features.extractors.helpers import is_aw_function from capa.features.extractors.cape.models import Process from capa.features.extractors.base_extractor import CallHandle, ThreadHandle, ProcessHandle @@ -24,5 +25,22 @@ def get_calls(ph: ProcessHandle, th: ThreadHandle) -> Iterator[CallHandle]: if call.thread_id != tid: continue - addr = DynamicCallAddress(thread=th.address, id=call_index) - yield CallHandle(address=addr, inner=call) + for symbol in generate_symbols(call.api): + call.api = symbol + + addr = DynamicCallAddress(thread=th.address, id=call_index) + yield CallHandle(address=addr, inner=call) + + +def generate_symbols(symbol: str) -> Iterator[str]: + """ + for a given symbol name, generate variants. + we over-generate features to make matching easier. 
+ """ + + # CreateFileA + yield symbol + + if is_aw_function(symbol): + # CreateFile + yield symbol[:-1] diff --git a/capa/features/extractors/dnfile_.py b/capa/features/extractors/dnfile_.py index a6cd94c7..72dc9b7e 100644 --- a/capa/features/extractors/dnfile_.py +++ b/capa/features/extractors/dnfile_.py @@ -55,7 +55,7 @@ def extract_file_arch(pe: dnfile.dnPE, **kwargs) -> Iterator[Tuple[Feature, Addr def extract_file_features(pe: dnfile.dnPE) -> Iterator[Tuple[Feature, Address]]: for file_handler in FILE_HANDLERS: - for feature, address in file_handler(pe=pe): # type: ignore + for feature, address in file_handler(pe=pe): yield feature, address diff --git a/capa/features/extractors/dotnetfile.py b/capa/features/extractors/dotnetfile.py index a1c7375f..ff942ae7 100644 --- a/capa/features/extractors/dotnetfile.py +++ b/capa/features/extractors/dotnetfile.py @@ -31,9 +31,9 @@ from capa.features.common import ( Characteristic, ) from capa.features.address import NO_ADDRESS, Address, DNTokenAddress +from capa.features.extractors.dnfile.types import DnType from capa.features.extractors.base_extractor import SampleHashes, StaticFeatureExtractor from capa.features.extractors.dnfile.helpers import ( - DnType, iter_dotnet_table, is_dotnet_mixed_mode, get_dotnet_managed_imports, diff --git a/capa/features/extractors/elf.py b/capa/features/extractors/elf.py index f9558c1b..bad8543e 100644 --- a/capa/features/extractors/elf.py +++ b/capa/features/extractors/elf.py @@ -898,7 +898,7 @@ def guess_os_from_symtab(elf: ELF) -> Optional[OS]: def detect_elf_os(f) -> str: """ - f: type Union[BinaryIO, IDAIO] + f: type Union[BinaryIO, IDAIO, GHIDRAIO] """ try: elf = ELF(f) diff --git a/capa/features/extractors/elffile.py b/capa/features/extractors/elffile.py index fccd40ee..5881c035 100644 --- a/capa/features/extractors/elffile.py +++ b/capa/features/extractors/elffile.py @@ -156,7 +156,7 @@ GLOBAL_HANDLERS = ( class ElfFeatureExtractor(StaticFeatureExtractor): def __init__(self, path: 
Path): - super().__init__(SampleHashes.from_bytes(self.path.read_bytes())) + super().__init__(SampleHashes.from_bytes(path.read_bytes())) self.path: Path = path self.elf = ELFFile(io.BytesIO(path.read_bytes())) diff --git a/capa/features/extractors/ghidra/__init__.py b/capa/features/extractors/ghidra/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/capa/features/extractors/ghidra/basicblock.py b/capa/features/extractors/ghidra/basicblock.py new file mode 100644 index 00000000..b3271586 --- /dev/null +++ b/capa/features/extractors/ghidra/basicblock.py @@ -0,0 +1,152 @@ +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: [package root]/LICENSE.txt +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. 
+ +import string +import struct +from typing import Tuple, Iterator + +import ghidra +from ghidra.program.model.lang import OperandType + +import capa.features.extractors.ghidra.helpers +from capa.features.common import Feature, Characteristic +from capa.features.address import Address +from capa.features.basicblock import BasicBlock +from capa.features.extractors.helpers import MIN_STACKSTRING_LEN +from capa.features.extractors.base_extractor import BBHandle, FunctionHandle + + +def get_printable_len(op: ghidra.program.model.scalar.Scalar) -> int: + """Return string length if all operand bytes are ascii or utf16-le printable""" + op_bit_len = op.bitLength() + op_byte_len = op_bit_len // 8 + op_val = op.getValue() + + if op_bit_len == 8: + chars = struct.pack(" bool: + """verify instruction moves immediate onto stack""" + + # Ghidra will Bitwise OR the OperandTypes to assign multiple + # i.e., the first operand is a stackvar (dynamically allocated), + # and the second is a scalar value (single int/char/float/etc.) 
+ mov_its_ops = [(OperandType.ADDRESS | OperandType.DYNAMIC), OperandType.SCALAR] + found = False + + # MOV dword ptr [EBP + local_*], 0x65 + if insn.getMnemonicString().startswith("MOV"): + found = all(insn.getOperandType(i) == mov_its_ops[i] for i in range(2)) + + return found + + +def bb_contains_stackstring(bb: ghidra.program.model.block.CodeBlock) -> bool: + """check basic block for stackstring indicators + + true if basic block contains enough moves of constant bytes to the stack + """ + count = 0 + for insn in currentProgram().getListing().getInstructions(bb, True): # type: ignore [name-defined] # noqa: F821 + if is_mov_imm_to_stack(insn): + count += get_printable_len(insn.getScalar(1)) + if count > MIN_STACKSTRING_LEN: + return True + return False + + +def _bb_has_tight_loop(bb: ghidra.program.model.block.CodeBlock): + """ + parse tight loops, true if last instruction in basic block branches to bb start + """ + # Reverse Ordered, first InstructionDB + last_insn = currentProgram().getListing().getInstructions(bb, False).next() # type: ignore [name-defined] # noqa: F821 + + if last_insn.getFlowType().isJump(): + return last_insn.getAddress(0) == bb.getMinAddress() + + return False + + +def extract_bb_stackstring(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]: + """extract stackstring indicators from basic block""" + bb: ghidra.program.model.block.CodeBlock = bbh.inner + + if bb_contains_stackstring(bb): + yield Characteristic("stack string"), bbh.address + + +def extract_bb_tight_loop(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]: + """check basic block for tight loop indicators""" + bb: ghidra.program.model.block.CodeBlock = bbh.inner + + if _bb_has_tight_loop(bb): + yield Characteristic("tight loop"), bbh.address + + +BASIC_BLOCK_HANDLERS = ( + extract_bb_tight_loop, + extract_bb_stackstring, +) + + +def extract_features(fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]: + """ + 
extract features from the given basic block. + + args: + bb: the basic block to process. + + yields: + Tuple[Feature, int]: the features and their location found in this basic block. + """ + yield BasicBlock(), bbh.address + for bb_handler in BASIC_BLOCK_HANDLERS: + for feature, addr in bb_handler(fh, bbh): + yield feature, addr + + +def main(): + features = [] + from capa.features.extractors.ghidra.extractor import GhidraFeatureExtractor + + for fh in GhidraFeatureExtractor().get_functions(): + for bbh in capa.features.extractors.ghidra.helpers.get_function_blocks(fh): + features.extend(list(extract_features(fh, bbh))) + + import pprint + + pprint.pprint(features) # noqa: T203 + + +if __name__ == "__main__": + main() diff --git a/capa/features/extractors/ghidra/extractor.py b/capa/features/extractors/ghidra/extractor.py new file mode 100644 index 00000000..0c3db587 --- /dev/null +++ b/capa/features/extractors/ghidra/extractor.py @@ -0,0 +1,93 @@ +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: [package root]/LICENSE.txt +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. 
+from typing import List, Tuple, Iterator + +import capa.features.extractors.ghidra.file +import capa.features.extractors.ghidra.insn +import capa.features.extractors.ghidra.global_ +import capa.features.extractors.ghidra.function +import capa.features.extractors.ghidra.basicblock +from capa.features.common import Feature +from capa.features.address import Address, AbsoluteVirtualAddress +from capa.features.extractors.base_extractor import ( + BBHandle, + InsnHandle, + SampleHashes, + FunctionHandle, + StaticFeatureExtractor, +) + + +class GhidraFeatureExtractor(StaticFeatureExtractor): + def __init__(self): + import capa.features.extractors.ghidra.helpers as ghidra_helpers + + super().__init__( + SampleHashes( + md5=capa.ghidra.helpers.get_file_md5(), + # ghidra doesn't expose this hash. + # https://ghidra.re/ghidra_docs/api/ghidra/program/model/listing/Program.html + # + # the hashes are stored in the database, not computed on the fly, + # so its probably not trivial to add SHA1. + sha1="", + sha256=capa.ghidra.helpers.get_file_sha256(), + ) + ) + + self.global_features: List[Tuple[Feature, Address]] = [] + self.global_features.extend(capa.features.extractors.ghidra.file.extract_file_format()) + self.global_features.extend(capa.features.extractors.ghidra.global_.extract_os()) + self.global_features.extend(capa.features.extractors.ghidra.global_.extract_arch()) + self.imports = ghidra_helpers.get_file_imports() + self.externs = ghidra_helpers.get_file_externs() + self.fakes = ghidra_helpers.map_fake_import_addrs() + + def get_base_address(self): + return AbsoluteVirtualAddress(currentProgram().getImageBase().getOffset()) # type: ignore [name-defined] # noqa: F821 + + def extract_global_features(self): + yield from self.global_features + + def extract_file_features(self): + yield from capa.features.extractors.ghidra.file.extract_features() + + def get_functions(self) -> Iterator[FunctionHandle]: + import capa.features.extractors.ghidra.helpers as ghidra_helpers + + 
for fhandle in ghidra_helpers.get_function_symbols(): + fh: FunctionHandle = FunctionHandle( + address=AbsoluteVirtualAddress(fhandle.getEntryPoint().getOffset()), + inner=fhandle, + ctx={"imports_cache": self.imports, "externs_cache": self.externs, "fakes_cache": self.fakes}, + ) + yield fh + + @staticmethod + def get_function(addr: int) -> FunctionHandle: + func = getFunctionContaining(toAddr(addr)) # type: ignore [name-defined] # noqa: F821 + return FunctionHandle(address=AbsoluteVirtualAddress(func.getEntryPoint().getOffset()), inner=func) + + def extract_function_features(self, fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]: + yield from capa.features.extractors.ghidra.function.extract_features(fh) + + def get_basic_blocks(self, fh: FunctionHandle) -> Iterator[BBHandle]: + import capa.features.extractors.ghidra.helpers as ghidra_helpers + + yield from ghidra_helpers.get_function_blocks(fh) + + def extract_basic_block_features(self, fh: FunctionHandle, bbh: BBHandle) -> Iterator[Tuple[Feature, Address]]: + yield from capa.features.extractors.ghidra.basicblock.extract_features(fh, bbh) + + def get_instructions(self, fh: FunctionHandle, bbh: BBHandle) -> Iterator[InsnHandle]: + import capa.features.extractors.ghidra.helpers as ghidra_helpers + + yield from ghidra_helpers.get_insn_in_range(bbh) + + def extract_insn_features(self, fh: FunctionHandle, bbh: BBHandle, ih: InsnHandle): + yield from capa.features.extractors.ghidra.insn.extract_features(fh, bbh, ih) diff --git a/capa/features/extractors/ghidra/file.py b/capa/features/extractors/ghidra/file.py new file mode 100644 index 00000000..04720502 --- /dev/null +++ b/capa/features/extractors/ghidra/file.py @@ -0,0 +1,202 @@ +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
+import re
+import struct
+from typing import List, Tuple, Iterator
+
+from ghidra.program.model.symbol import SourceType, SymbolType
+
+import capa.features.extractors.common
+import capa.features.extractors.helpers
+import capa.features.extractors.strings
+import capa.features.extractors.ghidra.helpers
+from capa.features.file import Export, Import, Section, FunctionName
+from capa.features.common import FORMAT_PE, FORMAT_ELF, Format, String, Feature, Characteristic
+from capa.features.address import NO_ADDRESS, Address, FileOffsetAddress, AbsoluteVirtualAddress
+
+MAX_OFFSET_PE_AFTER_MZ = 0x200
+
+
+def find_embedded_pe(block_bytez: bytes, mz_xor: List[Tuple[bytes, bytes, int]]) -> Iterator[Tuple[int, int]]:
+    """check segment for embedded PE
+
+    adapted for Ghidra from:
+    https://github.com/vivisect/vivisect/blob/91e8419a861f49779f18316f155311967e696836/PE/carve.py#L25
+    """
+    todo = []
+
+    for mzx, pex, i in mz_xor:
+        for match in re.finditer(re.escape(mzx), block_bytez):
+            todo.append((match.start(), mzx, pex, i))
+
+    seg_max = len(block_bytez)  # noqa: F821
+    while len(todo):
+        off, mzx, pex, i = todo.pop()
+
+        # MZ header has one field we will check e_lfanew is at 0x3c
+        e_lfanew = off + 0x3C
+
+        if seg_max < e_lfanew + 4:
+            continue
+
+        e_lfanew_bytes = block_bytez[e_lfanew : e_lfanew + 4]
+        newoff = struct.unpack("<I", capa.features.extractors.helpers.xor_static(e_lfanew_bytes, i))[0]
+
+        # assume XOR'd "PE" bytes exist within threshold
+        if newoff > MAX_OFFSET_PE_AFTER_MZ:
+            continue
+
+        peoff = off + newoff
+        if seg_max < peoff + 2:
+            continue
+
+        pe_bytes = block_bytez[peoff : peoff + 2]
+        if pe_bytes == pex:
yield off, i + + +def extract_file_embedded_pe() -> Iterator[Tuple[Feature, Address]]: + """extract embedded PE features""" + + # pre-compute XOR pairs + mz_xor: List[Tuple[bytes, bytes, int]] = [ + ( + capa.features.extractors.helpers.xor_static(b"MZ", i), + capa.features.extractors.helpers.xor_static(b"PE", i), + i, + ) + for i in range(256) + ] + + for block in currentProgram().getMemory().getBlocks(): # type: ignore [name-defined] # noqa: F821 + if not all((block.isLoaded(), block.isInitialized(), "Headers" not in block.getName())): + continue + + for off, _ in find_embedded_pe(capa.features.extractors.ghidra.helpers.get_block_bytes(block), mz_xor): + # add offset back to block start + ea: int = block.getStart().add(off).getOffset() + + yield Characteristic("embedded pe"), FileOffsetAddress(ea) + + +def extract_file_export_names() -> Iterator[Tuple[Feature, Address]]: + """extract function exports""" + st = currentProgram().getSymbolTable() # type: ignore [name-defined] # noqa: F821 + for addr in st.getExternalEntryPointIterator(): + yield Export(st.getPrimarySymbol(addr).getName()), AbsoluteVirtualAddress(addr.getOffset()) + + +def extract_file_import_names() -> Iterator[Tuple[Feature, Address]]: + """extract function imports + + 1. imports by ordinal: + - modulename.#ordinal + + 2. 
imports by name, results in two features to support importname-only + matching: + - modulename.importname + - importname + """ + + for f in currentProgram().getFunctionManager().getExternalFunctions(): # type: ignore [name-defined] # noqa: F821 + for r in f.getSymbol().getReferences(): + if r.getReferenceType().isData(): + addr = r.getFromAddress().getOffset() # gets pointer to fake external addr + + fstr = f.toString().split("::") # format: MODULE.dll::import / MODULE::Ordinal_* + if "Ordinal_" in fstr[1]: + fstr[1] = f"#{fstr[1].split('_')[1]}" + + for name in capa.features.extractors.helpers.generate_symbols(fstr[0][:-4], fstr[1]): + yield Import(name), AbsoluteVirtualAddress(addr) + + +def extract_file_section_names() -> Iterator[Tuple[Feature, Address]]: + """extract section names""" + + for block in currentProgram().getMemory().getBlocks(): # type: ignore [name-defined] # noqa: F821 + yield Section(block.getName()), AbsoluteVirtualAddress(block.getStart().getOffset()) + + +def extract_file_strings() -> Iterator[Tuple[Feature, Address]]: + """extract ASCII and UTF-16 LE strings""" + + for block in currentProgram().getMemory().getBlocks(): # type: ignore [name-defined] # noqa: F821 + if block.isInitialized(): + p_bytes = capa.features.extractors.ghidra.helpers.get_block_bytes(block) + + for s in capa.features.extractors.strings.extract_ascii_strings(p_bytes): + offset = block.getStart().getOffset() + s.offset + yield String(s.s), FileOffsetAddress(offset) + + for s in capa.features.extractors.strings.extract_unicode_strings(p_bytes): + offset = block.getStart().getOffset() + s.offset + yield String(s.s), FileOffsetAddress(offset) + + +def extract_file_function_names() -> Iterator[Tuple[Feature, Address]]: + """ + extract the names of statically-linked library functions. 
+ """ + + for sym in currentProgram().getSymbolTable().getAllSymbols(True): # type: ignore [name-defined] # noqa: F821 + # .isExternal() misses more than this config for the function symbols + if sym.getSymbolType() == SymbolType.FUNCTION and sym.getSource() == SourceType.ANALYSIS and sym.isGlobal(): + name = sym.getName() # starts to resolve names based on Ghidra's FidDB + if name.startswith("FID_conflict:"): # format: FID_conflict: + name = name[13:] + addr = AbsoluteVirtualAddress(sym.getAddress().getOffset()) + yield FunctionName(name), addr + if name.startswith("_"): + # some linkers may prefix linked routines with a `_` to avoid name collisions. + # extract features for both the mangled and un-mangled representations. + # e.g. `_fwrite` -> `fwrite` + # see: https://stackoverflow.com/a/2628384/87207 + yield FunctionName(name[1:]), addr + + +def extract_file_format() -> Iterator[Tuple[Feature, Address]]: + ef = currentProgram().getExecutableFormat() # type: ignore [name-defined] # noqa: F821 + if "PE" in ef: + yield Format(FORMAT_PE), NO_ADDRESS + elif "ELF" in ef: + yield Format(FORMAT_ELF), NO_ADDRESS + elif "Raw" in ef: + # no file type to return when processing a binary file, but we want to continue processing + return + else: + raise NotImplementedError(f"unexpected file format: {ef}") + + +def extract_features() -> Iterator[Tuple[Feature, Address]]: + """extract file features""" + for file_handler in FILE_HANDLERS: + for feature, addr in file_handler(): + yield feature, addr + + +FILE_HANDLERS = ( + extract_file_embedded_pe, + extract_file_export_names, + extract_file_import_names, + extract_file_section_names, + extract_file_strings, + extract_file_function_names, + extract_file_format, +) + + +def main(): + """ """ + import pprint + + pprint.pprint(list(extract_features())) # noqa: T203 + + +if __name__ == "__main__": + main() diff --git a/capa/features/extractors/ghidra/function.py b/capa/features/extractors/ghidra/function.py new file mode 100644 
index 00000000..d31ba86a --- /dev/null +++ b/capa/features/extractors/ghidra/function.py @@ -0,0 +1,73 @@ +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: [package root]/LICENSE.txt +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. +from typing import Tuple, Iterator + +import ghidra +from ghidra.program.model.block import BasicBlockModel, SimpleBlockIterator + +import capa.features.extractors.ghidra.helpers +from capa.features.common import Feature, Characteristic +from capa.features.address import Address, AbsoluteVirtualAddress +from capa.features.extractors import loops +from capa.features.extractors.base_extractor import FunctionHandle + + +def extract_function_calls_to(fh: FunctionHandle): + """extract callers to a function""" + f: ghidra.program.database.function.FunctionDB = fh.inner + for ref in f.getSymbol().getReferences(): + if ref.getReferenceType().isCall(): + yield Characteristic("calls to"), AbsoluteVirtualAddress(ref.getFromAddress().getOffset()) + + +def extract_function_loop(fh: FunctionHandle): + f: ghidra.program.database.function.FunctionDB = fh.inner + + edges = [] + for block in SimpleBlockIterator(BasicBlockModel(currentProgram()), f.getBody(), monitor()): # type: ignore [name-defined] # noqa: F821 + dests = block.getDestinations(monitor()) # type: ignore [name-defined] # noqa: F821 + s_addrs = block.getStartAddresses() + + while dests.hasNext(): # For loop throws Python TypeError + for addr in s_addrs: + edges.append((addr.getOffset(), dests.next().getDestinationAddress().getOffset())) + + if 
loops.has_loop(edges): + yield Characteristic("loop"), AbsoluteVirtualAddress(f.getEntryPoint().getOffset()) + + +def extract_recursive_call(fh: FunctionHandle): + f: ghidra.program.database.function.FunctionDB = fh.inner + + for func in f.getCalledFunctions(monitor()): # type: ignore [name-defined] # noqa: F821 + if func.getEntryPoint().getOffset() == f.getEntryPoint().getOffset(): + yield Characteristic("recursive call"), AbsoluteVirtualAddress(f.getEntryPoint().getOffset()) + + +def extract_features(fh: FunctionHandle) -> Iterator[Tuple[Feature, Address]]: + for func_handler in FUNCTION_HANDLERS: + for feature, addr in func_handler(fh): + yield feature, addr + + +FUNCTION_HANDLERS = (extract_function_calls_to, extract_function_loop, extract_recursive_call) + + +def main(): + """ """ + features = [] + for fhandle in capa.features.extractors.ghidra.helpers.get_function_symbols(): + features.extend(list(extract_features(fhandle))) + + import pprint + + pprint.pprint(features) # noqa: T203 + + +if __name__ == "__main__": + main() diff --git a/capa/features/extractors/ghidra/global_.py b/capa/features/extractors/ghidra/global_.py new file mode 100644 index 00000000..0df58a08 --- /dev/null +++ b/capa/features/extractors/ghidra/global_.py @@ -0,0 +1,67 @@ +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: [package root]/LICENSE.txt +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. 
+import logging +import contextlib +from typing import Tuple, Iterator + +import capa.ghidra.helpers +import capa.features.extractors.elf +import capa.features.extractors.ghidra.helpers +from capa.features.common import OS, ARCH_I386, ARCH_AMD64, OS_WINDOWS, Arch, Feature +from capa.features.address import NO_ADDRESS, Address + +logger = logging.getLogger(__name__) + + +def extract_os() -> Iterator[Tuple[Feature, Address]]: + format_name: str = currentProgram().getExecutableFormat() # type: ignore [name-defined] # noqa: F821 + + if "PE" in format_name: + yield OS(OS_WINDOWS), NO_ADDRESS + + elif "ELF" in format_name: + with contextlib.closing(capa.ghidra.helpers.GHIDRAIO()) as f: + os = capa.features.extractors.elf.detect_elf_os(f) + + yield OS(os), NO_ADDRESS + + else: + # we likely end up here: + # 1. handling shellcode, or + # 2. handling a new file format (e.g. macho) + # + # for (1) we can't do much - its shellcode and all bets are off. + # we could maybe accept a further CLI argument to specify the OS, + # but i think this would be rarely used. + # rules that rely on OS conditions will fail to match on shellcode. + # + # for (2), this logic will need to be updated as the format is implemented. + logger.debug("unsupported file format: %s, will not guess OS", format_name) + return + + +def extract_arch() -> Iterator[Tuple[Feature, Address]]: + lang_id = currentProgram().getMetadata().get("Language ID") # type: ignore [name-defined] # noqa: F821 + + if "x86" in lang_id and "64" in lang_id: + yield Arch(ARCH_AMD64), NO_ADDRESS + + elif "x86" in lang_id and "32" in lang_id: + yield Arch(ARCH_I386), NO_ADDRESS + + elif "x86" not in lang_id: + logger.debug("unsupported architecture: non-32-bit nor non-64-bit intel") + return + + else: + # we likely end up here: + # 1. handling a new architecture (e.g. aarch64) + # + # for (1), this logic will need to be updated as the format is implemented. 
+ logger.debug("unsupported architecture: %s", lang_id) + return diff --git a/capa/features/extractors/ghidra/helpers.py b/capa/features/extractors/ghidra/helpers.py new file mode 100644 index 00000000..0f405870 --- /dev/null +++ b/capa/features/extractors/ghidra/helpers.py @@ -0,0 +1,277 @@ +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: [package root]/LICENSE.txt +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. +from typing import Dict, List, Iterator + +import ghidra +import java.lang +from ghidra.program.model.lang import OperandType +from ghidra.program.model.block import BasicBlockModel, SimpleBlockIterator +from ghidra.program.model.symbol import SourceType, SymbolType +from ghidra.program.model.address import AddressSpace + +import capa.features.extractors.helpers +from capa.features.common import THUNK_CHAIN_DEPTH_DELTA +from capa.features.address import AbsoluteVirtualAddress +from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle + + +def ints_to_bytes(bytez: List[int]) -> bytes: + """convert Java signed ints to Python bytes + + args: + bytez: list of Java signed ints + """ + return bytes([b & 0xFF for b in bytez]) + + +def find_byte_sequence(addr: ghidra.program.model.address.Address, seq: bytes) -> Iterator[int]: + """yield all ea of a given byte sequence + + args: + addr: start address + seq: bytes to search e.g. 
b"\x01\x03" + """ + seqstr = "".join([f"\\x{b:02x}" for b in seq]) + eas = findBytes(addr, seqstr, java.lang.Integer.MAX_VALUE, 1) # type: ignore [name-defined] # noqa: F821 + + yield from eas + + +def get_bytes(addr: ghidra.program.model.address.Address, length: int) -> bytes: + """yield length bytes at addr + + args: + addr: Address to begin pull from + length: length of bytes to pull + """ + try: + return ints_to_bytes(getBytes(addr, length)) # type: ignore [name-defined] # noqa: F821 + except RuntimeError: + return b"" + + +def get_block_bytes(block: ghidra.program.model.mem.MemoryBlock) -> bytes: + """yield all bytes in a given block + + args: + block: MemoryBlock to pull from + """ + return get_bytes(block.getStart(), block.getSize()) + + +def get_function_symbols(): + """yield all non-external function symbols""" + yield from currentProgram().getFunctionManager().getFunctionsNoStubs(True) # type: ignore [name-defined] # noqa: F821 + + +def get_function_blocks(fh: FunctionHandle) -> Iterator[BBHandle]: + """yield BBHandle for each bb in a given function""" + + func: ghidra.program.database.function.FunctionDB = fh.inner + for bb in SimpleBlockIterator(BasicBlockModel(currentProgram()), func.getBody(), monitor()): # type: ignore [name-defined] # noqa: F821 + yield BBHandle(address=AbsoluteVirtualAddress(bb.getMinAddress().getOffset()), inner=bb) + + +def get_insn_in_range(bbh: BBHandle) -> Iterator[InsnHandle]: + """yield InshHandle for each insn in a given basicblock""" + for insn in currentProgram().getListing().getInstructions(bbh.inner, True): # type: ignore [name-defined] # noqa: F821 + yield InsnHandle(address=AbsoluteVirtualAddress(insn.getAddress().getOffset()), inner=insn) + + +def get_file_imports() -> Dict[int, List[str]]: + """get all import names & addrs""" + + import_dict: Dict[int, List[str]] = {} + + for f in currentProgram().getFunctionManager().getExternalFunctions(): # type: ignore [name-defined] # noqa: F821 + for r in 
f.getSymbol().getReferences(): + if r.getReferenceType().isData(): + addr = r.getFromAddress().getOffset() # gets pointer to fake external addr + + ex_loc = f.getExternalLocation().getAddress() # map external locations as well (offset into module files) + + fstr = f.toString().split("::") # format: MODULE.dll::import / MODULE::Ordinal_* / ::import + if "Ordinal_" in fstr[1]: + fstr[1] = f"#{fstr[1].split('_')[1]}" + + # mostly shows up in ELF files, otherwise, strip '.dll' w/ [:-4] + fstr[0] = "*" if "" in fstr[0] else fstr[0][:-4] + + for name in capa.features.extractors.helpers.generate_symbols(fstr[0], fstr[1]): + import_dict.setdefault(addr, []).append(name) + if ex_loc: + import_dict.setdefault(ex_loc.getOffset(), []).append(name) + + return import_dict + + +def get_file_externs() -> Dict[int, List[str]]: + """ + Gets function names & addresses of statically-linked library functions + + Ghidra's external namespace is mostly reserved for dynamically-linked + imports. Statically-linked functions are part of the global namespace. + Filtering on the type, source, and namespace of the symbols yield more + statically-linked library functions. + + Example: (PMA Lab 16-01.exe_) 7faafc7e4a5c736ebfee6abbbc812d80:0x407490 + - __aulldiv + - Note: See Symbol Table labels + """ + + extern_dict: Dict[int, List[str]] = {} + + for sym in currentProgram().getSymbolTable().getAllSymbols(True): # type: ignore [name-defined] # noqa: F821 + # .isExternal() misses more than this config for the function symbols + if sym.getSymbolType() == SymbolType.FUNCTION and sym.getSource() == SourceType.ANALYSIS and sym.isGlobal(): + name = sym.getName() # starts to resolve names based on Ghidra's FidDB + if name.startswith("FID_conflict:"): # format: FID_conflict: + name = name[13:] + extern_dict.setdefault(sym.getAddress().getOffset(), []).append(name) + if name.startswith("_"): + # some linkers may prefix linked routines with a `_` to avoid name collisions. 
+ # extract features for both the mangled and un-mangled representations. + # e.g. `_fwrite` -> `fwrite` + # see: https://stackoverflow.com/a/2628384/87207 + extern_dict.setdefault(sym.getAddress().getOffset(), []).append(name[1:]) + + return extern_dict + + +def map_fake_import_addrs() -> Dict[int, List[int]]: + """ + Map ghidra's fake import entrypoints to their + real addresses + + Helps as many Ghidra Scripting API calls end up returning + these external (fake) addresses. + + Undocumented but intended Ghidra behavior: + - Import entryPoint fields are stored in the 'EXTERNAL:' AddressSpace. + 'getEntryPoint()' returns the entryPoint field, which is an offset + from the beginning of the assigned AddressSpace. In the case of externals, + they start from 1 and increment. + https://github.com/NationalSecurityAgency/ghidra/blob/26d4bd9104809747c21f2528cab8aba9aef9acd5/Ghidra/Features/Base/src/test.slow/java/ghidra/program/database/function/ExternalFunctionDBTest.java#L90 + + Example: (mimikatz.exe_) 5f66b82558ca92e54e77f216ef4c066c:0x473090 + - 0x473090 -> PTR_CreateServiceW_00473090 + - 'EXTERNAL:00000025' -> External Address (ghidra.program.model.address.SpecialAddress) + """ + fake_dict: Dict[int, List[int]] = {} + + for f in currentProgram().getFunctionManager().getExternalFunctions(): # type: ignore [name-defined] # noqa: F821 + for r in f.getSymbol().getReferences(): + if r.getReferenceType().isData(): + fake_dict.setdefault(f.getEntryPoint().getOffset(), []).append(r.getFromAddress().getOffset()) + + return fake_dict + + +def check_addr_for_api( + addr: ghidra.program.model.address.Address, + fakes: Dict[int, List[int]], + imports: Dict[int, List[str]], + externs: Dict[int, List[str]], +) -> bool: + offset = addr.getOffset() + + fake = fakes.get(offset) + if fake: + return True + + imp = imports.get(offset) + if imp: + return True + + extern = externs.get(offset) + if extern: + return True + + return False + + +def is_call_or_jmp(insn: 
ghidra.program.database.code.InstructionDB) -> bool: + return any(mnem in insn.getMnemonicString() for mnem in ["CALL", "J"]) # JMP, JNE, JNZ, etc + + +def is_sp_modified(insn: ghidra.program.database.code.InstructionDB) -> bool: + for i in range(insn.getNumOperands()): + if insn.getOperandType(i) == OperandType.REGISTER: + return "SP" in insn.getRegister(i).getName() and insn.getOperandRefType(i).isWrite() + return False + + +def is_stack_referenced(insn: ghidra.program.database.code.InstructionDB) -> bool: + """generic catch-all for stack references""" + for i in range(insn.getNumOperands()): + if insn.getOperandType(i) == OperandType.REGISTER: + if "BP" in insn.getRegister(i).getName(): + return True + else: + continue + + return any(ref.isStackReference() for ref in insn.getReferencesFrom()) + + +def is_zxor(insn: ghidra.program.database.code.InstructionDB) -> bool: + # assume XOR insn + # XOR's against the same operand zero out + ops = [] + operands = [] + for i in range(insn.getNumOperands()): + ops.append(insn.getOpObjects(i)) + + # Operands stored in a 2D array + for j in range(len(ops)): + for k in range(len(ops[j])): + operands.append(ops[j][k]) + + return all(n == operands[0] for n in operands) + + +def handle_thunk(addr: ghidra.program.model.address.Address): + """Follow thunk chains down to a reasonable depth""" + ref = addr + for _ in range(THUNK_CHAIN_DEPTH_DELTA): + thunk_jmp = getInstructionAt(ref) # type: ignore [name-defined] # noqa: F821 + if thunk_jmp and is_call_or_jmp(thunk_jmp): + if OperandType.isAddress(thunk_jmp.getOperandType(0)): + ref = thunk_jmp.getAddress(0) + else: + thunk_dat = getDataContaining(ref) # type: ignore [name-defined] # noqa: F821 + if thunk_dat and thunk_dat.isDefined() and thunk_dat.isPointer(): + ref = thunk_dat.getValue() + break # end of thunk chain reached + return ref + + +def dereference_ptr(insn: ghidra.program.database.code.InstructionDB): + addr_code = OperandType.ADDRESS | OperandType.CODE + to_deref = 
insn.getAddress(0) + dat = getDataContaining(to_deref) # type: ignore [name-defined] # noqa: F821 + + if insn.getOperandType(0) == addr_code: + thfunc = getFunctionContaining(to_deref) # type: ignore [name-defined] # noqa: F821 + if thfunc and thfunc.isThunk(): + return handle_thunk(to_deref) + else: + # if it doesn't poin to a thunk, it's usually a jmp to a label + return to_deref + if not dat: + return to_deref + if dat.isDefined() and dat.isPointer(): + addr = dat.getValue() + # now we need to check the addr space to see if it is truly resolvable + # ghidra sometimes likes to hand us direct RAM addrs, which typically point + # to api calls that we can't actually resolve as such + if addr.getAddressSpace().getType() == AddressSpace.TYPE_RAM: + return to_deref + else: + return addr + else: + return to_deref diff --git a/capa/features/extractors/ghidra/insn.py b/capa/features/extractors/ghidra/insn.py new file mode 100644 index 00000000..2404207c --- /dev/null +++ b/capa/features/extractors/ghidra/insn.py @@ -0,0 +1,521 @@ +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: [package root]/LICENSE.txt +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. 
+from typing import Any, Dict, Tuple, Iterator
+
+import ghidra
+from ghidra.program.model.lang import OperandType
+from ghidra.program.model.block import SimpleBlockModel
+
+import capa.features.extractors.helpers
+import capa.features.extractors.ghidra.helpers
+from capa.features.insn import API, MAX_STRUCTURE_SIZE, Number, Offset, Mnemonic, OperandNumber, OperandOffset
+from capa.features.common import MAX_BYTES_FEATURE_SIZE, Bytes, String, Feature, Characteristic
+from capa.features.address import Address, AbsoluteVirtualAddress
+from capa.features.extractors.base_extractor import BBHandle, InsnHandle, FunctionHandle
+
+# security cookie checks may perform non-zeroing XORs, these are expected within a certain
+# byte range within the first and returning basic blocks, this helps to reduce FP features
+SECURITY_COOKIE_BYTES_DELTA = 0x40
+
+
+def get_imports(ctx: Dict[str, Any]) -> Dict[int, Any]:
+    """Populate the import cache for this context"""
+    # cached on the shared FunctionHandle ctx so the listing is walked only once per program
+    if "imports_cache" not in ctx:
+        ctx["imports_cache"] = capa.features.extractors.ghidra.helpers.get_file_imports()
+    return ctx["imports_cache"]
+
+
+def get_externs(ctx: Dict[str, Any]) -> Dict[int, Any]:
+    """Populate the externs cache for this context"""
+    if "externs_cache" not in ctx:
+        ctx["externs_cache"] = capa.features.extractors.ghidra.helpers.get_file_externs()
+    return ctx["externs_cache"]
+
+
+def get_fakes(ctx: Dict[str, Any]) -> Dict[int, Any]:
+    """Populate the fake import addrs cache for this context"""
+    if "fakes_cache" not in ctx:
+        ctx["fakes_cache"] = capa.features.extractors.ghidra.helpers.map_fake_import_addrs()
+    return ctx["fakes_cache"]
+
+
+def check_for_api_call(
+    insn, externs: Dict[int, Any], fakes: Dict[int, Any], imports: Dict[int, Any], imp_or_ex: bool
+) -> Iterator[Any]:
+    """check instruction for API call
+
+    params:
+        externs - external library functions cache
+        fakes - mapped fake import addresses cache
+        imports - imported functions cache
+        imp_or_ex - flag to check imports or externs
+
+    yields:
+        matched api calls
+    """
+    info = ()
+    # imp_or_ex=True -> look up in the imports cache; False -> the externs cache
+    funcs = imports if imp_or_ex else externs
+
+    # assume only CALLs or JMPs are passed
+    ref_type = insn.getOperandType(0)
+    addr_data = OperandType.ADDRESS | OperandType.DATA  # needs dereferencing
+    addr_code = OperandType.ADDRESS | OperandType.CODE  # needs dereferencing
+
+    if OperandType.isRegister(ref_type):
+        if OperandType.isAddress(ref_type):
+            # If it's an address in a register, check the mapped fake addrs
+            # since they're dereferenced to their fake addrs
+            op_ref = insn.getAddress(0).getOffset()
+            # fakes.get() may yield a single offset or a list of offsets (see isinstance check below)
+            ref = fakes.get(op_ref)  # obtain the real addr
+            if not ref:
+                return
+        else:
+            # register-only operand with no address: nothing to resolve statically
+            return
+    elif ref_type in (addr_data, addr_code) or (OperandType.isIndirect(ref_type) and OperandType.isAddress(ref_type)):
+        # we must dereference and check if the addr is a pointer to an api function
+        addr_ref = capa.features.extractors.ghidra.helpers.dereference_ptr(insn)
+        if not capa.features.extractors.ghidra.helpers.check_addr_for_api(addr_ref, fakes, imports, externs):
+            return
+        ref = addr_ref.getOffset()
+    elif ref_type == OperandType.DYNAMIC | OperandType.ADDRESS or ref_type == OperandType.DYNAMIC:
+        return  # cannot resolve dynamics statically
+    else:
+        # pure address does not need to get dereferenced/ handled
+        addr_ref = insn.getAddress(0)
+        if not addr_ref:
+            # If it returned null, it was an indirect
+            # that had no address reference.
+            # This check is faster than checking for (indirect and not address)
+            return
+        if not capa.features.extractors.ghidra.helpers.check_addr_for_api(addr_ref, fakes, imports, externs):
+            return
+        ref = addr_ref.getOffset()
+
+    if isinstance(ref, list):  # ref from REG | ADDR
+        for r in ref:
+            info = funcs.get(r)  # type: ignore
+            if info:
+                yield info
+    else:
+        info = funcs.get(ref)  # type: ignore
+        if info:
+            yield info
+
+
+def extract_insn_api_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
+    """extract API features (imported and extern function names) for a call/jmp instruction"""
+    insn: ghidra.program.database.code.InstructionDB = ih.inner
+
+    if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
+        return
+
+    externs = get_externs(fh.ctx)
+    fakes = get_fakes(fh.ctx)
+    imports = get_imports(fh.ctx)
+
+    # check calls to imported functions
+    for api in check_for_api_call(insn, externs, fakes, imports, True):
+        for imp in api:
+            yield API(imp), ih.address
+
+    # check calls to extern functions
+    for api in check_for_api_call(insn, externs, fakes, imports, False):
+        for ext in api:
+            yield API(ext), ih.address
+
+
+def extract_insn_number_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
+    """
+    parse instruction number features
+    example:
+        push    3136B0h         ; dwControlCode
+    """
+    insn: ghidra.program.database.code.InstructionDB = ih.inner
+
+    if insn.getMnemonicString().startswith("RET"):
+        # skip things like:
+        #   .text:0042250E retn 8
+        return
+
+    if capa.features.extractors.ghidra.helpers.is_sp_modified(insn):
+        # skip things like:
+        #   .text:00401145 add esp, 0Ch
+        return
+
+    for i in range(insn.getNumOperands()):
+        # Exceptions for LEA insn:
+        # invalid operand encoding, considered numbers instead of offsets
+        # see: mimikatz.exe_:0x4018C0
+        if insn.getOperandType(i) == OperandType.DYNAMIC and insn.getMnemonicString().startswith("LEA"):
+            # Additional check, avoid yielding "wide" values (ex. mimikatz.exe:0x471EE6 LEA EBX, [ECX + EAX*0x4])
+            op_objs = insn.getOpObjects(i)
+            if len(op_objs) == 3:  # ECX, EAX, 0x4
+                continue
+
+            if isinstance(op_objs[-1], ghidra.program.model.scalar.Scalar):
+                const = op_objs[-1].getUnsignedValue()
+                addr = ih.address
+
+                yield Number(const), addr
+                yield OperandNumber(i, const), addr
+        elif not OperandType.isScalar(insn.getOperandType(i)):
+            # skip things like:
+            # references, void types
+            continue
+        else:
+            const = insn.getScalar(i).getUnsignedValue()
+            addr = ih.address
+
+            yield Number(const), addr
+            yield OperandNumber(i, const), addr
+
+            if insn.getMnemonicString().startswith("ADD") and 0 < const < MAX_STRUCTURE_SIZE:
+                # for pattern like:
+                #
+                #     add eax, 0x10
+                #
+                # assume 0x10 is also an offset (imagine eax is a pointer).
+                yield Offset(const), addr
+                yield OperandOffset(i, const), addr
+
+
+def extract_insn_offset_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
+    """
+    parse instruction structure offset features
+
+    example:
+        .text:0040112F cmp [esi+4], ebx
+    """
+    insn: ghidra.program.database.code.InstructionDB = ih.inner
+
+    if insn.getMnemonicString().startswith("LEA"):
+        return
+
+    # ignore any stack references
+    if not capa.features.extractors.ghidra.helpers.is_stack_referenced(insn):
+        # Ghidra stores operands in 2D arrays if they contain offsets
+        for i in range(insn.getNumOperands()):
+            if insn.getOperandType(i) == OperandType.DYNAMIC:  # e.g.
[esi + 4]
+                # manual extraction, since the default api calls only work on the 1st dimension of the array
+                op_objs = insn.getOpObjects(i)
+                if isinstance(op_objs[-1], ghidra.program.model.scalar.Scalar):
+                    op_off = op_objs[-1].getValue()
+                    yield Offset(op_off), ih.address
+                    yield OperandOffset(i, op_off), ih.address
+                else:
+                    # dynamic operand with no scalar displacement, e.g. [eax]: treat as offset 0
+                    yield Offset(0), ih.address
+                    yield OperandOffset(i, 0), ih.address
+
+
+def extract_insn_bytes_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
+    """
+    parse referenced byte sequences
+    example:
+        push    offset iid_004118d4_IShellLinkA ; riid
+    """
+    insn: ghidra.program.database.code.InstructionDB = ih.inner
+
+    if capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn):
+        return
+
+    ref = insn.getAddress()  # init to insn addr
+    # NOTE: if more than one operand carries an address, the last one wins
+    for i in range(insn.getNumOperands()):
+        if OperandType.isAddress(insn.getOperandType(i)):
+            ref = insn.getAddress(i)  # pulls pointer if there is one
+
+    if ref != insn.getAddress():  # bail out if there's no pointer
+        ghidra_dat = getDataAt(ref)  # type: ignore [name-defined] # noqa: F821
+        if (
+            ghidra_dat and not ghidra_dat.hasStringValue() and not ghidra_dat.isPointer()
+        ):  # avoid if the data itself is a pointer
+            extracted_bytes = capa.features.extractors.ghidra.helpers.get_bytes(ref, MAX_BYTES_FEATURE_SIZE)
+            if extracted_bytes and not capa.features.extractors.helpers.all_zeros(extracted_bytes):
+                # don't extract byte features for obvious strings
+                yield Bytes(extracted_bytes), ih.address
+
+
+def extract_insn_string_features(fh: FunctionHandle, bb: BBHandle, ih: InsnHandle) -> Iterator[Tuple[Feature, Address]]:
+    """
+    parse instruction string features
+
+    example:
+        push offset aAcr     ; "ACR  > "
+    """
+    insn: ghidra.program.database.code.InstructionDB = ih.inner
+    dyn_addr = OperandType.DYNAMIC | OperandType.ADDRESS
+
+    ref = insn.getAddress()
+    for i in range(insn.getNumOperands()):
+        if OperandType.isScalarAsAddress(insn.getOperandType(i)):
+            ref = insn.getAddress(i)
+        # strings are also referenced dynamically via pointers & arrays, so we need to deref them
+        if insn.getOperandType(i) == dyn_addr:
+            ref = insn.getAddress(i)
+            dat = getDataAt(ref)  # type: ignore [name-defined] # noqa: F821
+            if dat and dat.isPointer():
+                ref = dat.getValue()
+
+    # only emit when some operand actually produced a reference
+    if ref != insn.getAddress():
+        ghidra_dat = getDataAt(ref)  # type: ignore [name-defined] # noqa: F821
+        if ghidra_dat and ghidra_dat.hasStringValue():
+            yield String(ghidra_dat.getValue()), ih.address
+
+
+def extract_insn_mnemonic_features(
+    fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
+) -> Iterator[Tuple[Feature, Address]]:
+    """parse instruction mnemonic features"""
+    insn: ghidra.program.database.code.InstructionDB = ih.inner
+
+    yield Mnemonic(insn.getMnemonicString().lower()), ih.address
+
+
+def extract_insn_obfs_call_plus_5_characteristic_features(
+    fh: FunctionHandle, bb: BBHandle, ih: InsnHandle
+) -> Iterator[Tuple[Feature, Address]]:
+    """
+    parse call $+5 instruction from the given instruction.
+ """ + insn: ghidra.program.database.code.InstructionDB = ih.inner + + if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn): + return + + code_ref = OperandType.ADDRESS | OperandType.CODE + ref = insn.getAddress() + for i in range(insn.getNumOperands()): + if insn.getOperandType(i) == code_ref: + ref = insn.getAddress(i) + + if insn.getAddress().add(5) == ref: + yield Characteristic("call $+5"), ih.address + + +def extract_insn_segment_access_features( + fh: FunctionHandle, bb: BBHandle, ih: InsnHandle +) -> Iterator[Tuple[Feature, Address]]: + """parse instruction fs or gs access""" + insn: ghidra.program.database.code.InstructionDB = ih.inner + + insn_str = insn.toString() + + if "FS:" in insn_str: + yield Characteristic("fs access"), ih.address + + if "GS:" in insn_str: + yield Characteristic("gs access"), ih.address + + +def extract_insn_peb_access_characteristic_features( + fh: FunctionHandle, bb: BBHandle, ih: InsnHandle +) -> Iterator[Tuple[Feature, Address]]: + """parse instruction peb access + + fs:[0x30] on x86, gs:[0x60] on x64 + + """ + insn: ghidra.program.database.code.InstructionDB = ih.inner + + insn_str = insn.toString() + if insn_str.startswith(("PUSH", "MOV")): + if "FS:[0x30]" in insn_str or "GS:[0x60]" in insn_str: + yield Characteristic("peb access"), ih.address + + +def extract_insn_cross_section_cflow( + fh: FunctionHandle, bb: BBHandle, ih: InsnHandle +) -> Iterator[Tuple[Feature, Address]]: + """inspect the instruction for a CALL or JMP that crosses section boundaries""" + insn: ghidra.program.database.code.InstructionDB = ih.inner + + if not capa.features.extractors.ghidra.helpers.is_call_or_jmp(insn): + return + + externs = get_externs(fh.ctx) + fakes = get_fakes(fh.ctx) + imports = get_imports(fh.ctx) + + # OperandType to dereference + addr_data = OperandType.ADDRESS | OperandType.DATA + addr_code = OperandType.ADDRESS | OperandType.CODE + + ref_type = insn.getOperandType(0) + + # both OperandType flags must be present + 
# bail on REGISTER alone + if OperandType.isRegister(ref_type): + if OperandType.isAddress(ref_type): + ref = insn.getAddress(0) # Ghidra dereferences REG | ADDR + if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs): + return + else: + return + elif ref_type in (addr_data, addr_code) or (OperandType.isIndirect(ref_type) and OperandType.isAddress(ref_type)): + # we must dereference and check if the addr is a pointer to an api function + ref = capa.features.extractors.ghidra.helpers.dereference_ptr(insn) + if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs): + return + elif ref_type == OperandType.DYNAMIC | OperandType.ADDRESS or ref_type == OperandType.DYNAMIC: + return # cannot resolve dynamics statically + else: + # pure address does not need to get dereferenced/ handled + ref = insn.getAddress(0) + if not ref: + # If it returned null, it was an indirect + # that had no address reference. + # This check is faster than checking for (indirect and not address) + return + if capa.features.extractors.ghidra.helpers.check_addr_for_api(ref, fakes, imports, externs): + return + + this_mem_block = getMemoryBlock(insn.getAddress()) # type: ignore [name-defined] # noqa: F821 + ref_block = getMemoryBlock(ref) # type: ignore [name-defined] # noqa: F821 + if ref_block != this_mem_block: + yield Characteristic("cross section flow"), ih.address + + +def extract_function_calls_from( + fh: FunctionHandle, + bb: BBHandle, + ih: InsnHandle, +) -> Iterator[Tuple[Feature, Address]]: + """extract functions calls from features + + most relevant at the function scope, however, its most efficient to extract at the instruction scope + """ + insn: ghidra.program.database.code.InstructionDB = ih.inner + + if insn.getMnemonicString().startswith("CALL"): + # This method of "dereferencing" addresses/ pointers + # is not as robust as methods in other functions, + # but works just fine for this one + reference = 0 + 
for ref in insn.getReferencesFrom(): + addr = ref.getToAddress() + + # avoid returning fake addrs + if not addr.isExternalAddress(): + reference = addr.getOffset() + + # if a reference is < 0, then ghidra pulled an offset from a DYNAMIC | ADDR (usually a stackvar) + # these cannot be resolved to actual addrs + if reference > 0: + yield Characteristic("calls from"), AbsoluteVirtualAddress(reference) + + +def extract_function_indirect_call_characteristic_features( + fh: FunctionHandle, + bb: BBHandle, + ih: InsnHandle, +) -> Iterator[Tuple[Feature, Address]]: + """extract indirect function calls (e.g., call eax or call dword ptr [edx+4]) + does not include calls like => call ds:dword_ABD4974 + + most relevant at the function or basic block scope; + however, its most efficient to extract at the instruction scope + """ + insn: ghidra.program.database.code.InstructionDB = ih.inner + + if insn.getMnemonicString().startswith("CALL"): + if OperandType.isRegister(insn.getOperandType(0)): + yield Characteristic("indirect call"), ih.address + if OperandType.isIndirect(insn.getOperandType(0)): + yield Characteristic("indirect call"), ih.address + + +def check_nzxor_security_cookie_delta( + fh: ghidra.program.database.function.FunctionDB, insn: ghidra.program.database.code.InstructionDB +): + """Get the function containing the insn + Get the last block of the function that contains the insn + + Check the bb containing the insn + Check the last bb of the function containing the insn + """ + + model = SimpleBlockModel(currentProgram()) # type: ignore [name-defined] # noqa: F821 + insn_addr = insn.getAddress() + func_asv = fh.getBody() + first_addr = func_asv.getMinAddress() + last_addr = func_asv.getMaxAddress() + + if model.getFirstCodeBlockContaining( + first_addr, monitor() # type: ignore [name-defined] # noqa: F821 + ) == model.getFirstCodeBlockContaining( + last_addr, monitor() # type: ignore [name-defined] # noqa: F821 + ): + if insn_addr < 
first_addr.add(SECURITY_COOKIE_BYTES_DELTA): + return True + else: + return insn_addr > last_addr.add(SECURITY_COOKIE_BYTES_DELTA * -1) + else: + return False + + +def extract_insn_nzxor_characteristic_features( + fh: FunctionHandle, + bb: BBHandle, + ih: InsnHandle, +) -> Iterator[Tuple[Feature, Address]]: + f: ghidra.program.database.function.FunctionDB = fh.inner + insn: ghidra.program.database.code.InstructionDB = ih.inner + + if "XOR" not in insn.getMnemonicString(): + return + if capa.features.extractors.ghidra.helpers.is_stack_referenced(insn): + return + if capa.features.extractors.ghidra.helpers.is_zxor(insn): + return + if check_nzxor_security_cookie_delta(f, insn): + return + yield Characteristic("nzxor"), ih.address + + +def extract_features( + fh: FunctionHandle, + bb: BBHandle, + insn: InsnHandle, +) -> Iterator[Tuple[Feature, Address]]: + for insn_handler in INSTRUCTION_HANDLERS: + for feature, addr in insn_handler(fh, bb, insn): + yield feature, addr + + +INSTRUCTION_HANDLERS = ( + extract_insn_api_features, + extract_insn_number_features, + extract_insn_bytes_features, + extract_insn_string_features, + extract_insn_offset_features, + extract_insn_nzxor_characteristic_features, + extract_insn_mnemonic_features, + extract_insn_obfs_call_plus_5_characteristic_features, + extract_insn_peb_access_characteristic_features, + extract_insn_cross_section_cflow, + extract_insn_segment_access_features, + extract_function_calls_from, + extract_function_indirect_call_characteristic_features, +) + + +def main(): + """ """ + features = [] + from capa.features.extractors.ghidra.extractor import GhidraFeatureExtractor + + for fh in GhidraFeatureExtractor().get_functions(): + for bb in capa.features.extractors.ghidra.helpers.get_function_blocks(fh): + for insn in capa.features.extractors.ghidra.helpers.get_insn_in_range(bb): + features.extend(list(extract_features(fh, bb, insn))) + + import pprint + + pprint.pprint(features) # noqa: T203 + + +if __name__ == 
"__main__": + main() diff --git a/capa/features/extractors/viv/basicblock.py b/capa/features/extractors/viv/basicblock.py index 46bdb2b0..0a276ee1 100644 --- a/capa/features/extractors/viv/basicblock.py +++ b/capa/features/extractors/viv/basicblock.py @@ -140,7 +140,7 @@ def is_printable_ascii(chars: bytes) -> bool: def is_printable_utf16le(chars: bytes) -> bool: - if all(c == b"\x00" for c in chars[1::2]): + if all(c == 0x0 for c in chars[1::2]): return is_printable_ascii(chars[::2]) return False diff --git a/capa/ghidra/README.md b/capa/ghidra/README.md new file mode 100644 index 00000000..f1e81195 --- /dev/null +++ b/capa/ghidra/README.md @@ -0,0 +1,172 @@ +
+ +
+
+The Ghidra feature extractor is an application of the FLARE team's open-source project, Ghidrathon, to integrate capa with Ghidra using Python 3. capa is a framework that uses a well-defined collection of rules to identify capabilities in a program. You can run capa against a PE file, ELF file, or shellcode and it tells you what it thinks the program can do. For example, it might suggest that the program is a backdoor, can install services, or relies on HTTP to communicate. The Ghidra feature extractor can be used to run capa analysis on your Ghidra databases without needing access to the original binary file.
+
+
+
+## Getting Started
+
+### Installation
+
+Please ensure that you have the following dependencies installed before continuing:
+
+| Dependency | Version | Source |
+|------------|---------|--------|
+| Ghidrathon | `>= 3.0.0` | https://github.com/mandiant/Ghidrathon |
+| Python | `>= 3.8` | https://www.python.org/downloads |
+| Ghidra | `>= 10.2` | https://ghidra-sre.org |
+
+In order to run capa using Ghidra, you must install capa as a library, obtain the official capa rules that match the capa version you have installed, and configure the Python 3 script [capa_ghidra.py](/capa/ghidra/capa_ghidra.py). You can do this by completing the following steps using the Python 3 interpreter that you have configured for your Ghidrathon installation:
+
+1. Install capa and its dependencies from PyPI using the following command:
+```bash
+$ pip install flare-capa
+```
+
+2. Download and extract the [official capa rules](https://github.com/mandiant/capa-rules/releases) that match the capa version you have installed. Use the following command to view the version of capa you have installed:
+```bash
+$ pip show flare-capa
+OR
+$ capa --version
+```
+
+3. Copy [capa_ghidra.py](/capa/ghidra/capa_ghidra.py) to your `$USER_HOME/ghidra_scripts` directory or manually add `capa_ghidra.py` to the Ghidra Script Manager.
+
+## Usage
+
+After completing the installation steps you can execute `capa_ghidra.py` using the Ghidra Script Manager or Headless Analyzer.
+
+### Ghidra Script Manager
+
+To execute `capa_ghidra.py` using the Ghidra Script Manager, first open the Ghidra Script Manager by navigating to `Window > Script Manager` in the Ghidra Code Browser. Next, locate `capa_ghidra.py` by selecting the `Python 3 > capa` category or using the Ghidra Script Manager search functionality. Finally, double-click `capa_ghidra.py` to execute the script. If you don't see `capa_ghidra.py`, make sure you have copied the script to your `$USER_HOME/ghidra_scripts` directory or manually added `capa_ghidra.py` to the Ghidra Script Manager
+
+When executed, `capa_ghidra.py` asks you to provide your capa rules directory and preferred output format. `capa_ghidra.py` supports `default`, `verbose`, and `vverbose` output formats when executed from the Ghidra Script Manager. `capa_ghidra.py` writes output to the Ghidra Console Window.
+
+#### Example
+
+The following is an example of running `capa_ghidra.py` using the Ghidra Script Manager:
+
+Selecting capa rules:
+
+
+Choosing output format:
+
+
+Viewing results in Ghidra Console Window:
+
+
+### Ghidra Headless Analyzer
+
+To execute `capa_ghidra.py` using the Ghidra Headless Analyzer, you can use the Ghidra `analyzeHeadless` script located in your `$GHIDRA_HOME/support` directory. You will need to provide the following arguments to the Ghidra `analyzeHeadless` script:
+
+1. `<ghidra_project_path>`: path to Ghidra project
+2. `<ghidra_project_name>`: name of Ghidra Project
+3. `-process <sample_name>`: name of sample `<sample_name>`
+4. `-ScriptPath <script_path>`: OPTIONAL argument specifying path `<script_path>` to `capa_ghidra.py`
+5. `-PostScript capa_ghidra.py`: executes `capa_ghidra.py` as post-analysis script
+6. `"<capa_args>"`: single, quoted string containing capa arguments that must specify capa rules directory and output format, e.g. `"<rules_path> --verbose"`.
`capa_ghidra.py` supports `default`, `verbose`, `vverbose` and `json` formats when executed using the Ghidra Headless Analyzer. `capa_ghidra.py` writes output to the console window used to execute the Ghidra `analyzeHeadless` script. +7. `-processor `: required ONLY if sample `` is shellcode. More information on specifying the `` can be found in the `$GHIDRA_HOME/support/analyzeHeadlessREADME.html` documentation. + +The following is an example of combining these arguments into a single `analyzeHeadless` script command: + +``` +$GHIDRA_HOME/support/analyzeHeadless -process -PostScript capa_ghidra.py "/path/to/capa/rules/ --verbose" +``` + +You may also want to run capa against a sample that you have not yet imported into your Ghidra project. The following is an example of importing a sample and running `capa_ghidra.py` using a single `analyzeHeadless` script command: + +``` +$GHIDRA_HOME/support/analyzeHeadless -Import -PostScript capa_ghidra.py "/path/to/capa/rules/ --verbose" +``` + +You can also provide `capa_ghidra.py` the single argument `"help"` to view supported arguments when running the script using the Ghidra Headless Analyzer: +``` +$GHIDRA_HOME/support/analyzeHeadless -process -PostScript capa_ghidra.py "help" +``` + +#### Example + +The following is an example of running `capa_ghidra.py` against a shellcode sample using the Ghidra `analyzeHeadless` script: +``` +$ analyzeHeadless /home/wumbo/Desktop/ghidra_projects/ capa_test -process 499c2a85f6e8142c3f48d4251c9c7cd6.raw32 -processor x86:LE:32:default -PostScript capa_ghidra.py "/home/wumbo/capa/rules -vv" +[...] 
+ +INFO REPORT: Analysis succeeded for file: /499c2a85f6e8142c3f48d4251c9c7cd6.raw32 (HeadlessAnalyzer) +INFO SCRIPT: /home/wumbo/ghidra_scripts/capa_ghidra.py (HeadlessAnalyzer) +md5 499c2a85f6e8142c3f48d4251c9c7cd6 +sha1 +sha256 e8e02191c1b38c808d27a899ac164b3675eb5cadd3a8907b0ffa863714000e72 +path /home/wumbo/capa/tests/data/499c2a85f6e8142c3f48d4251c9c7cd6.raw32 +timestamp 2023-08-29 17:57:00.946588 +capa version 6.1.0 +os unknown os +format Raw Binary +arch x86 +extractor ghidra +base address global +rules /home/wumbo/capa/rules +function count 42 +library function count 0 +total feature count 1970 + +contain loop (24 matches, only showing first match of library rule) +author moritz.raabe@mandiant.com +scope function +function @ 0x0 + or: + characteristic: loop @ 0x0 + characteristic: tight loop @ 0x278 + +contain obfuscated stackstrings +namespace anti-analysis/obfuscation/string/stackstring +author moritz.raabe@mandiant.com +scope basic block +att&ck Defense Evasion::Obfuscated Files or Information::Indicator Removal from Tools [T1027.005] +mbc Anti-Static Analysis::Executable Code Obfuscation::Argument Obfuscation [B0032.020], Anti-Static Analysis::Executable Code Obfuscation::Stack Strings [B0032.017] +basic block @ 0x0 in function 0x0 + characteristic: stack string @ 0x0 + +encode data using XOR +namespace data-manipulation/encoding/xor +author moritz.raabe@mandiant.com +scope basic block +att&ck Defense Evasion::Obfuscated Files or Information [T1027] +mbc Defense Evasion::Obfuscated Files or Information::Encoding-Standard Algorithm [E1027.m02], Data::Encode Data::XOR [C0026.002] +basic block @ 0x8AF in function 0x8A1 + and: + characteristic: tight loop @ 0x8AF + characteristic: nzxor @ 0x8C0 + not: = filter for potential false positives + or: + or: = unsigned bitwise negation operation (~i) + number: 0xFFFFFFFF = bitwise negation for unsigned 32 bits + number: 0xFFFFFFFFFFFFFFFF = bitwise negation for unsigned 64 bits + or: = signed bitwise negation 
operation (~i) + number: 0xFFFFFFF = bitwise negation for signed 32 bits + number: 0xFFFFFFFFFFFFFFF = bitwise negation for signed 64 bits + or: = Magic constants used in the implementation of strings functions. + number: 0x7EFEFEFF = optimized string constant for 32 bits + number: 0x81010101 = -0x81010101 = 0x7EFEFEFF + number: 0x81010100 = 0x81010100 = ~0x7EFEFEFF + number: 0x7EFEFEFEFEFEFEFF = optimized string constant for 64 bits + number: 0x8101010101010101 = -0x8101010101010101 = 0x7EFEFEFEFEFEFEFF + number: 0x8101010101010100 = 0x8101010101010100 = ~0x7EFEFEFEFEFEFEFF + +get OS information via KUSER_SHARED_DATA +namespace host-interaction/os/version +author @mr-tz +scope function +att&ck Discovery::System Information Discovery [T1082] +references https://www.geoffchappell.com/studies/windows/km/ntoskrnl/inc/api/ntexapi_x/kuser_shared_data/index.htm +function @ 0x1CA6 + or: + number: 0x7FFE026C = NtMajorVersion @ 0x1D18 + + + +Script /home/wumbo/ghidra_scripts/capa_ghidra.py called exit with code 0 + +[...] +``` diff --git a/capa/ghidra/__init__.py b/capa/ghidra/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/capa/ghidra/capa_ghidra.py b/capa/ghidra/capa_ghidra.py new file mode 100644 index 00000000..99beaffc --- /dev/null +++ b/capa/ghidra/capa_ghidra.py @@ -0,0 +1,166 @@ +# Run capa against loaded Ghidra database +# @author Mike Hunhoff (mehunhoff@google.com) +# @category Python 3.capa + +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: [package root]/LICENSE.txt +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and limitations under the License.
+import sys
+import logging
+import pathlib
+import argparse
+
+import capa
+import capa.main
+import capa.rules
+import capa.ghidra.helpers
+import capa.render.default
+import capa.features.extractors.ghidra.extractor
+
+# NOTE(review): capa.render.json / capa.render.verbose / capa.render.vverbose are referenced
+# below but not imported in this module; presumably resolved transitively via capa.main — verify.
+
+logger = logging.getLogger("capa_ghidra")
+
+
+def run_headless():
+    """run capa under the Ghidra Headless Analyzer; args come from getScriptArgs() as one quoted string"""
+    parser = argparse.ArgumentParser(description="The FLARE team's open-source tool to integrate capa with Ghidra.")
+
+    parser.add_argument(
+        "rules",
+        type=str,
+        help="path to rule file or directory",
+    )
+    parser.add_argument(
+        "-v", "--verbose", action="store_true", help="enable verbose result document (no effect with --json)"
+    )
+    parser.add_argument(
+        "-vv", "--vverbose", action="store_true", help="enable very verbose result document (no effect with --json)"
+    )
+    parser.add_argument("-d", "--debug", action="store_true", help="enable debugging output on STDERR")
+    parser.add_argument("-q", "--quiet", action="store_true", help="disable all output but errors")
+    parser.add_argument("-j", "--json", action="store_true", help="emit JSON instead of text")
+
+    # Ghidra passes all script arguments as a single quoted string; split it ourselves
+    script_args = list(getScriptArgs())  # type: ignore [name-defined] # noqa: F821
+    if not script_args or len(script_args) > 1:
+        script_args = []
+    else:
+        script_args = script_args[0].split()
+        for idx, arg in enumerate(script_args):
+            if arg.lower() == "help":
+                script_args[idx] = "--help"
+
+    args = parser.parse_args(args=script_args)
+
+    if args.quiet:
+        logging.basicConfig(level=logging.WARNING)
+        logging.getLogger().setLevel(logging.WARNING)
+    elif args.debug:
+        logging.basicConfig(level=logging.DEBUG)
+        logging.getLogger().setLevel(logging.DEBUG)
+    else:
+        logging.basicConfig(level=logging.INFO)
+        logging.getLogger().setLevel(logging.INFO)
+
+    logger.debug("running in Ghidra headless mode")
+
+    rules_path = pathlib.Path(args.rules)
+
+    logger.debug("rule path: %s", rules_path)
+    rules = capa.main.get_rules([rules_path])
+
+    meta = capa.ghidra.helpers.collect_metadata([rules_path])
+    extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor()
+
+    # disable_progress=False: show the progress bar on the headless console
+    capabilities, counts = capa.main.find_capabilities(rules, extractor, False)
+
+    meta.analysis.feature_counts = counts["feature_counts"]
+    meta.analysis.library_functions = counts["library_functions"]
+    meta.analysis.layout = capa.main.compute_layout(rules, extractor, capabilities)
+
+    if capa.main.has_file_limitation(rules, capabilities, is_standalone=True):
+        logger.info("capa encountered warnings during analysis")
+
+    if args.json:
+        print(capa.render.json.render(meta, rules, capabilities))  # noqa: T201
+    elif args.vverbose:
+        print(capa.render.vverbose.render(meta, rules, capabilities))  # noqa: T201
+    elif args.verbose:
+        print(capa.render.verbose.render(meta, rules, capabilities))  # noqa: T201
+    else:
+        print(capa.render.default.render(meta, rules, capabilities))  # noqa: T201
+
+    return 0
+
+
+def run_ui():
+    """run capa from the Ghidra Script Manager, prompting for rules directory and verbosity"""
+    logging.basicConfig(level=logging.INFO)
+    logging.getLogger().setLevel(logging.INFO)
+
+    rules_dir: str = ""
+    try:
+        selected_dir = askDirectory("Choose capa rules directory", "Ok")  # type: ignore [name-defined] # noqa: F821
+        if selected_dir:
+            rules_dir = selected_dir.getPath()
+    except RuntimeError:
+        # RuntimeError thrown when user selects "Cancel"
+        pass
+
+    if not rules_dir:
+        logger.info("You must choose a capa rules directory before running capa.")
+        return capa.main.E_MISSING_RULES
+
+    verbose = askChoice(  # type: ignore [name-defined] # noqa: F821
+        "capa output verbosity", "Choose capa output verbosity", ["default", "verbose", "vverbose"], "default"
+    )
+
+    rules_path: pathlib.Path = pathlib.Path(rules_dir)
+    logger.info("running capa using rules from %s", str(rules_path))
+
+    rules = capa.main.get_rules([rules_path])
+
+    meta = capa.ghidra.helpers.collect_metadata([rules_path])
+    extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor()
+
+    # disable_progress=True: no progress bar in the GUI console
+    capabilities, counts = capa.main.find_capabilities(rules, extractor, True)
+
+    meta.analysis.feature_counts = counts["feature_counts"]
+    meta.analysis.library_functions = counts["library_functions"]
+    meta.analysis.layout = capa.main.compute_layout(rules, extractor, capabilities)
+
+    if capa.main.has_file_limitation(rules, capabilities, is_standalone=False):
+        logger.info("capa encountered warnings during analysis")
+
+    if verbose == "vverbose":
+        print(capa.render.vverbose.render(meta, rules, capabilities))  # noqa: T201
+    elif verbose == "verbose":
+        print(capa.render.verbose.render(meta, rules, capabilities))  # noqa: T201
+    else:
+        print(capa.render.default.render(meta, rules, capabilities))  # noqa: T201
+
+    return 0
+
+
+def main():
+    """validate the Ghidra environment, then dispatch to headless or UI mode"""
+    if not capa.ghidra.helpers.is_supported_ghidra_version():
+        return capa.main.E_UNSUPPORTED_GHIDRA_VERSION
+
+    if not capa.ghidra.helpers.is_supported_file_type():
+        return capa.main.E_INVALID_FILE_TYPE
+
+    if not capa.ghidra.helpers.is_supported_arch_type():
+        return capa.main.E_INVALID_FILE_ARCH
+
+    if isRunningHeadless():  # type: ignore [name-defined] # noqa: F821
+        return run_headless()
+    else:
+        return run_ui()
+
+
+if __name__ == "__main__":
+    if sys.version_info < (3, 8):
+        from capa.exceptions import UnsupportedRuntimeError
+
+        raise UnsupportedRuntimeError("This version of capa can only be used with Python 3.8+")
+    sys.exit(main())
diff --git a/capa/ghidra/helpers.py b/capa/ghidra/helpers.py
new file mode 100644
index 00000000..b32c534a
--- /dev/null
+++ b/capa/ghidra/helpers.py
@@ -0,0 +1,160 @@
+# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at: [package root]/LICENSE.txt
+# Unless required by applicable law or agreed to in writing, software distributed under the License
+# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and limitations under the License.
+import logging
+import datetime
+import contextlib
+from typing import List
+from pathlib import Path
+
+import capa
+import capa.version
+import capa.features.common
+import capa.features.freeze
+import capa.render.result_document as rdoc
+import capa.features.extractors.ghidra.helpers
+
+logger = logging.getLogger("capa")
+
+# file type as returned by Ghidra
+SUPPORTED_FILE_TYPES = ("Executable and Linking Format (ELF)", "Portable Executable (PE)", "Raw Binary")
+
+
+class GHIDRAIO:
+    """
+    An object that acts as a file-like object,
+    using bytes from the current Ghidra listing.
+    """
+
+    def __init__(self):
+        super().__init__()
+
+        # offset: current read position; bytes_: entire original file image, loaded once
+        self.offset = 0
+        self.bytes_ = self.get_bytes()
+
+    def seek(self, offset, whence=0):
+        # only SEEK_SET is supported
+        assert whence == 0
+        self.offset = offset
+
+    def read(self, size):
+        # NOTE: returns b"" (not a partial read) when fewer than `size` bytes remain
+        logger.debug("reading 0x%x bytes at 0x%x (ea: 0x%x)", size, self.offset, currentProgram().getImageBase().add(self.offset).getOffset())  # type: ignore [name-defined] # noqa: F821
+
+        if size > len(self.bytes_) - self.offset:
+            logger.debug("cannot read 0x%x bytes at 0x%x (ea: BADADDR)", size, self.offset)
+            return b""
+        else:
+            return self.bytes_[self.offset : self.offset + size]
+
+    def close(self):
+        return
+
+    def get_bytes(self):
+        file_bytes = currentProgram().getMemory().getAllFileBytes()[0]  # type: ignore [name-defined] # noqa: F821
+
+        # getOriginalByte() allows for raw file parsing on the Ghidra side
+        # other functions will fail as Ghidra will think that it's reading uninitialized memory
+        bytes_ = [file_bytes.getOriginalByte(i) for i in range(file_bytes.getSize())]
+
+        return capa.features.extractors.ghidra.helpers.ints_to_bytes(bytes_)
+
+
+def is_supported_ghidra_version():
+    # NOTE(review): float("10.10"[:4]) == 10.1, so a two-digit minor version would compare
+    # incorrectly here — verify before Ghidra 10.10 exists
+    version = float(getGhidraVersion()[:4])  # type: ignore [name-defined] # noqa: F821
+    if version < 10.2:
+        warning_msg = "capa does not support this Ghidra version"
+        logger.warning(warning_msg)
+        logger.warning("Your Ghidra version is: %s. Supported versions are: Ghidra >= 10.2", version)
+        return False
+    return True
+
+
+def is_running_headless():
+    return isRunningHeadless()  # type: ignore [name-defined] # noqa: F821
+
+
+def is_supported_file_type():
+    file_info = currentProgram().getExecutableFormat()  # type: ignore [name-defined] # noqa: F821
+    if file_info not in SUPPORTED_FILE_TYPES:
+        logger.error("-" * 80)
+        logger.error(" Input file does not appear to be a supported file type.")
+        logger.error(" ")
+        logger.error(
+            " capa currently only supports analyzing PE, ELF, or binary files containing x86 (32- and 64-bit) shellcode."
+        )
+        logger.error(" If you don't know the input file type, you can try using the `file` utility to guess it.")
+        logger.error("-" * 80)
+        return False
+    return True
+
+
+def is_supported_arch_type():
+    lang_id = str(currentProgram().getLanguageID()).lower()  # type: ignore [name-defined] # noqa: F821
+
+    # accept only x86 language IDs that also carry a 32/64 bitness marker
+    if not all((lang_id.startswith("x86"), any(arch in lang_id for arch in ("32", "64")))):
+        logger.error("-" * 80)
+        logger.error(" Input file does not appear to target a supported architecture.")
+        logger.error(" ")
+        logger.error(" capa currently only supports analyzing x86 (32- and 64-bit).")
+        logger.error("-" * 80)
+        return False
+    return True
+
+
+def get_file_md5():
+    return currentProgram().getExecutableMD5()  # type: ignore [name-defined] # noqa: F821
+
+
+def get_file_sha256():
+    return currentProgram().getExecutableSHA256()  # type: ignore [name-defined] # noqa: F821
+
+
+def collect_metadata(rules: List[Path]):
+    """build a capa result-document Metadata object from the current Ghidra program"""
+    md5 = get_file_md5()
+    sha256 = get_file_sha256()
+
+    info = currentProgram().getLanguageID().toString()  # type: ignore [name-defined] # noqa: F821
+    if "x86" in info and "64" in info:
+        arch = "x86_64"
+    elif "x86" in info and "32" in info:
+        arch = "x86"
+    else:
+        arch = "unknown arch"
+
+    format_name: str = currentProgram().getExecutableFormat()  # type: ignore [name-defined] # noqa: F821
+    if "PE" in format_name:
+        os = "windows"
+    elif "ELF" in format_name:
+        with contextlib.closing(capa.ghidra.helpers.GHIDRAIO()) as f:
+            # NOTE(review): capa.features.extractors.elf is not imported in this module;
+            # presumably available transitively via another capa import — verify
+            os = capa.features.extractors.elf.detect_elf_os(f)
+    else:
+        os = "unknown os"
+
+    return rdoc.Metadata(
+        timestamp=datetime.datetime.now(),
+        version=capa.version.__version__,
+        argv=(),
+        sample=rdoc.Sample(
+            md5=md5,
+            sha1="",
+            sha256=sha256,
+            path=currentProgram().getExecutablePath(),  # type: ignore [name-defined] # noqa: F821
+        ),
+        flavor=rdoc.Flavor.STATIC,
+        analysis=rdoc.StaticAnalysis(
+            format=currentProgram().getExecutableFormat(),  # type: ignore [name-defined] # noqa: F821
+            arch=arch,
+            os=os,
+            extractor="ghidra",
+            rules=tuple(r.resolve().absolute().as_posix() for r in rules),
+            base_address=capa.features.freeze.Address.from_capa(currentProgram().getImageBase().getOffset()),  # type: ignore [name-defined] # noqa: F821
+            layout=rdoc.StaticLayout(
+                functions=(),
+            ),
+            feature_counts=rdoc.StaticFeatureCounts(file=0, functions=()),
+            library_functions=(),
+        ),
+    )
diff --git a/capa/helpers.py b/capa/helpers.py
index abe839af..a093ef66 100644
--- a/capa/helpers.py
+++ b/capa/helpers.py
@@ -45,6 +45,14 @@ def is_runtime_ida():
     return importlib.util.find_spec("idc") is not None
 
 
+def is_runtime_ghidra():
+    try:
+        currentProgram  # type: ignore [name-defined] # noqa: F821
+    except NameError:
+        return False
+    return True
+
+
 def assert_never(value) -> NoReturn:
     # careful: python -O will remove this assertion.
     # but this is only used for type checking, so it's ok.
@@ -148,9 +156,9 @@ def log_unsupported_format_error(): logger.error("-" * 80) -def log_unsupported_cape_report_error(): +def log_unsupported_cape_report_error(error: str): logger.error("-" * 80) - logger.error(" Input file is not a valid CAPE report.") + logger.error("Input file is not a valid CAPE report: %s", error) logger.error(" ") logger.error(" capa currently only supports analyzing standard CAPE json reports.") logger.error( @@ -159,6 +167,14 @@ def log_unsupported_cape_report_error(): logger.error("-" * 80) +def log_empty_cape_report_error(error: str): + logger.error("-" * 80) + logger.error(" CAPE report is empty or only contains little useful data: %s", error) + logger.error(" ") + logger.error(" Please make sure the sandbox run captures useful behaviour of your sample.") + logger.error("-" * 80) + + def log_unsupported_os_error(): logger.error("-" * 80) logger.error(" Input file does not appear to target a supported OS.") diff --git a/capa/main.py b/capa/main.py index fe6f78b2..64277887 100644 --- a/capa/main.py +++ b/capa/main.py @@ -62,10 +62,17 @@ from capa.helpers import ( log_unsupported_os_error, redirecting_print_to_tqdm, log_unsupported_arch_error, + log_empty_cape_report_error, log_unsupported_format_error, log_unsupported_cape_report_error, ) -from capa.exceptions import UnsupportedOSError, UnsupportedArchError, UnsupportedFormatError, UnsupportedRuntimeError +from capa.exceptions import ( + EmptyReportError, + UnsupportedOSError, + UnsupportedArchError, + UnsupportedFormatError, + UnsupportedRuntimeError, +) from capa.features.common import ( OS_AUTO, OS_LINUX, @@ -112,8 +119,9 @@ E_INVALID_FILE_TYPE = 16 E_INVALID_FILE_ARCH = 17 E_INVALID_FILE_OS = 18 E_UNSUPPORTED_IDA_VERSION = 19 -E_MISSING_CAPE_STATIC_ANALYSIS = 20 -E_MISSING_CAPE_DYNAMIC_ANALYSIS = 21 +E_UNSUPPORTED_GHIDRA_VERSION = 20 +E_MISSING_CAPE_STATIC_ANALYSIS = 21 +E_MISSING_CAPE_DYNAMIC_ANALYSIS = 22 logger = logging.getLogger("capa") @@ -276,6 +284,11 @@ def 
find_static_capabilities( with redirecting_print_to_tqdm(disable_progress): with tqdm.contrib.logging.logging_redirect_tqdm(): pbar = tqdm.tqdm + if capa.helpers.is_runtime_ghidra(): + # Ghidrathon interpreter cannot properly handle + # the TMonitor thread that is created via a monitor_interval + # > 0 + pbar.monitor_interval = 0 if disable_progress: # do not use tqdm to avoid unnecessary side effects when caller intends # to disable progress completely @@ -532,11 +545,13 @@ def find_dynamic_capabilities( return matches, meta -def find_capabilities(ruleset: RuleSet, extractor: FeatureExtractor, **kwargs) -> Tuple[MatchResults, Any]: +def find_capabilities( + ruleset: RuleSet, extractor: FeatureExtractor, disable_progress=None, **kwargs +) -> Tuple[MatchResults, Any]: if isinstance(extractor, StaticFeatureExtractor): - return find_static_capabilities(ruleset, extractor, kwargs) + return find_static_capabilities(ruleset, extractor, disable_progress=disable_progress, **kwargs) elif isinstance(extractor, DynamicFeatureExtractor): - return find_dynamic_capabilities(ruleset, extractor, kwargs) + return find_dynamic_capabilities(ruleset, extractor, disable_progress=disable_progress, **kwargs) else: raise ValueError(f"unexpected extractor type: {extractor.__class__.__name__}") @@ -761,7 +776,8 @@ def get_extractor( sys.path.append(str(bn_api)) try: - from binaryninja import BinaryView, BinaryViewType + import binaryninja + from binaryninja import BinaryView except ImportError: raise RuntimeError( "Cannot import binaryninja module. 
Please install the Binary Ninja Python API first: " @@ -771,7 +787,7 @@ def get_extractor( import capa.features.extractors.binja.extractor with halo.Halo(text="analyzing program", spinner="simpleDots", stream=sys.stderr, enabled=not disable_progress): - bv: BinaryView = BinaryViewType.get_view_of_file(str(path)) + bv: BinaryView = binaryninja.load(str(path)) if bv is None: raise RuntimeError(f"Binary Ninja cannot open file {path}") @@ -814,7 +830,7 @@ def get_file_extractors(sample: Path, format_: str) -> List[FeatureExtractor]: file_extractors.append(capa.features.extractors.pefile.PefileFeatureExtractor(sample)) file_extractors.append(capa.features.extractors.dnfile_.DnfileFeatureExtractor(sample)) - elif format_ == capa.features.extractors.common.FORMAT_ELF: + elif format_ == capa.features.common.FORMAT_ELF: file_extractors.append(capa.features.extractors.elffile.ElfFeatureExtractor(sample)) elif format_ == FORMAT_CAPE: @@ -1007,13 +1023,13 @@ def collect_metadata( os_ = get_os(sample_path) if os_ == OS_AUTO else os_ if isinstance(extractor, StaticFeatureExtractor): - flavor = rdoc.Flavor.STATIC + meta_class: type = rdoc.StaticMetadata elif isinstance(extractor, DynamicFeatureExtractor): - flavor = rdoc.Flavor.DYNAMIC + meta_class = rdoc.DynamicMetadata else: assert_never(extractor) - return rdoc.Metadata( + return meta_class( timestamp=datetime.datetime.now(), version=capa.version.__version__, argv=tuple(argv) if argv else None, @@ -1023,7 +1039,6 @@ def collect_metadata( sha256=sha256, path=Path(sample_path).resolve().as_posix(), ), - flavor=flavor, analysis=get_sample_analysis( format_, arch, @@ -1454,7 +1469,7 @@ def main(argv: Optional[List[str]] = None): # during the load of the RuleSet, we extract subscope statements into their own rules # that are subsequently `match`ed upon. this inflates the total rule count. # so, filter out the subscope rules when reporting total number of loaded rules. 
- len(list(filter(lambda r: not r.is_subscope_rule(), rules.rules.values()))), + len(list(filter(lambda r: not (r.is_subscope_rule()), rules.rules.values()))), ) if args.tag: rules = rules.filter_rules_by_meta(args.tag) @@ -1493,12 +1508,17 @@ def main(argv: Optional[List[str]] = None): except (ELFError, OverflowError) as e: logger.error("Input file '%s' is not a valid ELF file: %s", args.sample, str(e)) return E_CORRUPT_FILE - except UnsupportedFormatError: + except UnsupportedFormatError as e: if format_ == FORMAT_CAPE: - log_unsupported_cape_report_error() + log_unsupported_cape_report_error(str(e)) else: log_unsupported_format_error() return E_INVALID_FILE_TYPE + except EmptyReportError as e: + if format_ == FORMAT_CAPE: + log_empty_cape_report_error(str(e)) + else: + log_unsupported_format_error() for file_extractor in file_extractors: if isinstance(file_extractor, DynamicFeatureExtractor): @@ -1556,6 +1576,9 @@ def main(argv: Optional[List[str]] = None): should_save_workspace = os.environ.get("CAPA_SAVE_WORKSPACE") not in ("0", "no", "NO", "n", None) + # TODO(mr-tz): this should be wrapped and refactored as it's tedious to update everywhere + # see same code and show-features above examples + # https://github.com/mandiant/capa/issues/1813 try: extractor = get_extractor( args.sample, @@ -1566,9 +1589,9 @@ def main(argv: Optional[List[str]] = None): should_save_workspace, disable_progress=args.quiet or args.debug, ) - except UnsupportedFormatError: + except UnsupportedFormatError as e: if format_ == FORMAT_CAPE: - log_unsupported_cape_report_error() + log_unsupported_cape_report_error(str(e)) else: log_unsupported_format_error() return E_INVALID_FILE_TYPE @@ -1644,8 +1667,47 @@ def ida_main(): print(capa.render.default.render(meta, rules, capabilities)) +def ghidra_main(): + import capa.rules + import capa.ghidra.helpers + import capa.render.default + import capa.features.extractors.ghidra.extractor + + logging.basicConfig(level=logging.INFO) + 
logging.getLogger().setLevel(logging.INFO) + + logger.debug("-" * 80) + logger.debug(" Using default embedded rules.") + logger.debug(" ") + logger.debug(" You can see the current default rule set here:") + logger.debug(" https://github.com/mandiant/capa-rules") + logger.debug("-" * 80) + + rules_path = get_default_root() / "rules" + logger.debug("rule path: %s", rules_path) + rules = get_rules([rules_path]) + + meta = capa.ghidra.helpers.collect_metadata([rules_path]) + + capabilities, counts = find_capabilities( + rules, + capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor(), + not capa.ghidra.helpers.is_running_headless(), + ) + + meta.analysis.feature_counts = counts["feature_counts"] + meta.analysis.library_functions = counts["library_functions"] + + if has_file_limitation(rules, capabilities, is_standalone=False): + logger.info("capa encountered warnings during analysis") + + print(capa.render.default.render(meta, rules, capabilities)) + + if __name__ == "__main__": if capa.helpers.is_runtime_ida(): ida_main() + elif capa.helpers.is_runtime_ghidra(): + ghidra_main() else: sys.exit(main()) diff --git a/capa/render/result_document.py b/capa/render/result_document.py index 87790e53..1b1ef479 100644 --- a/capa/render/result_document.py +++ b/capa/render/result_document.py @@ -136,6 +136,16 @@ class Metadata(Model): analysis: Analysis +class StaticMetadata(Metadata): + flavor: Flavor = Flavor.STATIC + analysis: StaticAnalysis + + +class DynamicMetadata(Metadata): + flavor: Flavor = Flavor.DYNAMIC + analysis: DynamicAnalysis + + class CompoundStatementType: AND = "and" OR = "or" @@ -205,7 +215,7 @@ def statement_from_capa(node: capa.engine.Statement) -> Statement: description=node.description, min=node.min, max=node.max, - child=frz.feature_from_capa(node.child), + child=frzf.feature_from_capa(node.child), ) elif isinstance(node, capa.engine.Subscope): @@ -231,7 +241,7 @@ def node_from_capa(node: Union[capa.engine.Statement, capa.engine.Feature]) -> N 
return StatementNode(statement=statement_from_capa(node)) elif isinstance(node, capa.engine.Feature): - return FeatureNode(feature=frz.feature_from_capa(node)) + return FeatureNode(feature=frzf.feature_from_capa(node)) else: assert_never(node) diff --git a/capa/render/verbose.py b/capa/render/verbose.py index ae353855..63b9b845 100644 --- a/capa/render/verbose.py +++ b/capa/render/verbose.py @@ -23,6 +23,7 @@ Unless required by applicable law or agreed to in writing, software distributed See the License for the specific language governing permissions and limitations under the License. """ import enum +from typing import cast import tabulate @@ -61,10 +62,12 @@ def format_address(address: frz.Address) -> str: assert isinstance(pid, int) return f"process ppid: {ppid}, process pid: {pid}" elif address.type == frz.AddressType.THREAD: - assert isinstance(address.value, int) - tid = address.value + assert isinstance(address.value, tuple) + ppid, pid, tid = address.value + assert isinstance(ppid, int) + assert isinstance(pid, int) assert isinstance(tid, int) - return f"thread id: {tid}" + return f"process ppid: {ppid}, process pid: {pid}, thread id: {tid}" elif address.type == frz.AddressType.CALL: assert isinstance(address.value, tuple) ppid, pid, tid, id_ = address.value @@ -75,7 +78,7 @@ def format_address(address: frz.Address) -> str: raise ValueError("unexpected address type") -def render_static_meta(ostream, meta: rd.Metadata): +def render_static_meta(ostream, meta: rd.StaticMetadata): """ like: @@ -96,7 +99,6 @@ def render_static_meta(ostream, meta: rd.Metadata): total feature count 1918 """ - assert isinstance(meta.analysis, rd.StaticAnalysis) rows = [ ("md5", meta.sample.md5), ("sha1", meta.sample.sha1), @@ -122,7 +124,7 @@ def render_static_meta(ostream, meta: rd.Metadata): ostream.writeln(tabulate.tabulate(rows, tablefmt="plain")) -def render_dynamic_meta(ostream, meta: rd.Metadata): +def render_dynamic_meta(ostream, meta: rd.DynamicMetadata): """ like: @@ 
-141,7 +143,6 @@ def render_dynamic_meta(ostream, meta: rd.Metadata): total feature count 1918 """ - assert isinstance(meta.analysis, rd.DynamicAnalysis) rows = [ ("md5", meta.sample.md5), ("sha1", meta.sample.sha1), @@ -166,10 +167,10 @@ def render_dynamic_meta(ostream, meta: rd.Metadata): def render_meta(osstream, doc: rd.ResultDocument): - if isinstance(doc.meta.analysis, rd.StaticAnalysis): - render_static_meta(osstream, doc.meta) - elif isinstance(doc.meta.analysis, rd.DynamicAnalysis): - render_dynamic_meta(osstream, doc.meta) + if doc.meta.flavor is rd.Flavor.STATIC: + render_static_meta(osstream, cast(rd.StaticMetadata, doc.meta)) + elif doc.meta.flavor is rd.Flavor.DYNAMIC: + render_dynamic_meta(osstream, cast(rd.DynamicMetadata, doc.meta)) else: raise ValueError("invalid meta analysis") diff --git a/capa/render/vverbose.py b/capa/render/vverbose.py index 96f589df..ba232884 100644 --- a/capa/render/vverbose.py +++ b/capa/render/vverbose.py @@ -271,7 +271,6 @@ def render_rules(ostream, doc: rd.ResultDocument): """ functions_by_bb: Dict[capa.features.address.Address, capa.features.address.Address] = {} - processes_by_thread: Dict[capa.features.address.Address, capa.features.address.Address] = {} if isinstance(doc.meta.analysis, rd.StaticAnalysis): for finfo in doc.meta.analysis.layout.functions: faddress = finfo.address.to_capa() @@ -280,12 +279,7 @@ def render_rules(ostream, doc: rd.ResultDocument): bbaddress = bb.address.to_capa() functions_by_bb[bbaddress] = faddress elif isinstance(doc.meta.analysis, rd.DynamicAnalysis): - for pinfo in doc.meta.analysis.layout.processes: - paddress = pinfo.address.to_capa() - - for thread in pinfo.matched_threads: - taddress = thread.address.to_capa() - processes_by_thread[taddress] = paddress + pass else: raise ValueError("invalid analysis field in the document's meta") @@ -336,12 +330,11 @@ def render_rules(ostream, doc: rd.ResultDocument): rows.append(("author", ", ".join(rule.meta.authors))) - rows.append(("scopes", 
"")) - if rule.meta.scopes.static: - rows.append((" static:", str(rule.meta.scopes.static))) + if doc.meta.flavor == rd.Flavor.STATIC: + rows.append(("scope", f"{rule.meta.scopes.static}")) - if rule.meta.scopes.dynamic: - rows.append((" dynamic:", str(rule.meta.scopes.dynamic))) + if doc.meta.flavor == rd.Flavor.DYNAMIC: + rows.append(("scope", f"{rule.meta.scopes.dynamic}")) if rule.meta.attack: rows.append(("att&ck", ", ".join([rutils.format_parts_id(v) for v in rule.meta.attack]))) @@ -376,23 +369,18 @@ def render_rules(ostream, doc: rd.ResultDocument): else: capa.helpers.assert_never(doc.meta.flavor) + # TODO(mr-tz): process rendering should use human-readable name + # https://github.com/mandiant/capa/issues/1816 + ostream.write(" @ ") ostream.write(capa.render.verbose.format_address(location)) - if capa.rules.Scope.BASIC_BLOCK in rule.meta.scopes: + if doc.meta.flavor == rd.Flavor.STATIC and rule.meta.scopes.static == capa.rules.Scope.BASIC_BLOCK: ostream.write( " in function " + capa.render.verbose.format_address(frz.Address.from_capa(functions_by_bb[location.to_capa()])) ) - if capa.rules.Scope.THREAD in rule.meta.scopes: - ostream.write( - " in process " - + capa.render.verbose.format_address( - frz.Address.from_capa(processes_by_thread[location.to_capa()]) - ) - ) - ostream.write("\n") render_match(ostream, match, indent=1) if rule.meta.lib: diff --git a/capa/rules/__init__.py b/capa/rules/__init__.py index 35f2a090..e715ae86 100644 --- a/capa/rules/__init__.py +++ b/capa/rules/__init__.py @@ -8,6 +8,8 @@ import io import re +import gzip +import json import uuid import codecs import logging @@ -322,10 +324,72 @@ def ensure_feature_valid_for_scopes(scopes: Scopes, feature: Union[Feature, Stat # features of this scope that are not Characteristics will be Type instances. # check that the given feature is one of these types. 
types_for_scope = filter(lambda t: isinstance(t, type), supported_features) - if not isinstance(feature, tuple(types_for_scope)): # type: ignore + if not isinstance(feature, tuple(types_for_scope)): raise InvalidRule(f"feature {feature} not supported for scopes {scopes}") +class ComType(Enum): + CLASS = "class" + INTERFACE = "interface" + + +# COM data source https://github.com/stevemk14ebr/COM-Code-Helper/tree/master +VALID_COM_TYPES = { + ComType.CLASS: {"db_path": "assets/classes.json.gz", "prefix": "CLSID_"}, + ComType.INTERFACE: {"db_path": "assets/interfaces.json.gz", "prefix": "IID_"}, +} + + +@lru_cache(maxsize=None) +def load_com_database(com_type: ComType) -> Dict[str, List[str]]: + com_db_path: Path = capa.main.get_default_root() / VALID_COM_TYPES[com_type]["db_path"] + + if not com_db_path.exists(): + raise IOError(f"COM database path '{com_db_path}' does not exist or cannot be accessed") + + try: + with gzip.open(com_db_path, "rb") as gzfile: + return json.loads(gzfile.read().decode("utf-8")) + except Exception as e: + raise IOError(f"Error loading COM database from '{com_db_path}'") from e + + +def translate_com_feature(com_name: str, com_type: ComType) -> ceng.Or: + com_db = load_com_database(com_type) + guid_strings: Optional[List[str]] = com_db.get(com_name) + if guid_strings is None or len(guid_strings) == 0: + logger.error(" %s doesn't exist in COM %s database", com_name, com_type) + raise InvalidRule(f"'{com_name}' doesn't exist in COM {com_type} database") + + com_features: List = [] + for guid_string in guid_strings: + hex_chars = guid_string.replace("-", "") + h = [hex_chars[i : i + 2] for i in range(0, len(hex_chars), 2)] + reordered_hex_pairs = [ + h[3], + h[2], + h[1], + h[0], + h[5], + h[4], + h[7], + h[6], + h[8], + h[9], + h[10], + h[11], + h[12], + h[13], + h[14], + h[15], + ] + guid_bytes = bytes.fromhex("".join(reordered_hex_pairs)) + prefix = VALID_COM_TYPES[com_type]["prefix"] + 
com_features.append(capa.features.common.StringFactory(guid_string, f"{prefix+com_name} as GUID string")) + com_features.append(capa.features.common.Bytes(guid_bytes, f"{prefix+com_name} as bytes")) + return ceng.Or(com_features) + + def parse_int(s: str) -> int: if s.startswith("0x"): return int(s, 0x10) @@ -742,6 +806,13 @@ def build_statements(d, scopes: Scopes): ensure_feature_valid_for_scopes(scopes, feature) return feature + elif key.startswith("com/"): + com_type = str(key[len("com/") :]).upper() + if com_type not in [item.name for item in ComType]: + raise InvalidRule(f"unexpected COM type: {com_type}") + value, description = parse_description(d[key], key, d.get("description")) + return translate_com_feature(value, ComType[com_type]) + else: Feature = parse_feature(key) value, description = parse_description(d[key], key, d.get("description")) @@ -931,12 +1002,13 @@ class Rule: def from_dict(cls, d: Dict[str, Any], definition: str) -> "Rule": meta = d["rule"]["meta"] name = meta["name"] + # if scope is not specified, default to function scope. # this is probably the mode that rule authors will start with. # each rule has two scopes, a static-flavor scope, and a # dynamic-flavor one. which one is used depends on the analysis type. if "scope" in meta: - raise InvalidRule("rule is in legacy mode (has scope meta field). please update to the new syntax.") + raise InvalidRule(f"legacy rule detected (rule.meta.scope), please update to the new syntax: {name}") elif "scopes" in meta: scopes_ = meta.get("scopes") else: @@ -983,14 +1055,13 @@ class Rule: # we use the ruamel.yaml parser because it supports roundtripping of documents with comments. y = ruamel.yaml.YAML(typ="rt") - y.register_class(Scope) # use block mode, not inline json-like mode y.default_flow_style = False # leave quotes unchanged. # manually verified this property exists, even if mypy complains. 
- y.preserve_quotes = True # type: ignore + y.preserve_quotes = True # indent lists by two spaces below their parent # @@ -1002,7 +1073,7 @@ class Rule: # avoid word wrapping # manually verified this property exists, even if mypy complains. - y.width = 4096 # type: ignore + y.width = 4096 return y @@ -1063,7 +1134,6 @@ class Rule: meta[k] = v # the name and scope of the rule instance overrides anything in meta. meta["name"] = self.name - meta["scopes"] = asdict(self.scopes) def move_to_end(m, k): # ruamel.yaml uses an ordereddict-like structure to track maps (CommentedMap). diff --git a/doc/capa_quickstart.pdf b/doc/capa_quickstart.pdf new file mode 100644 index 00000000..dc9a5c6c Binary files /dev/null and b/doc/capa_quickstart.pdf differ diff --git a/doc/img/ghidra_backend_logo.png b/doc/img/ghidra_backend_logo.png new file mode 100755 index 00000000..61199d25 Binary files /dev/null and b/doc/img/ghidra_backend_logo.png differ diff --git a/doc/img/ghidra_script_mngr_output.png b/doc/img/ghidra_script_mngr_output.png new file mode 100755 index 00000000..6d3024c9 Binary files /dev/null and b/doc/img/ghidra_script_mngr_output.png differ diff --git a/doc/img/ghidra_script_mngr_rules.png b/doc/img/ghidra_script_mngr_rules.png new file mode 100755 index 00000000..7bce6247 Binary files /dev/null and b/doc/img/ghidra_script_mngr_rules.png differ diff --git a/doc/img/ghidra_script_mngr_verbosity.png b/doc/img/ghidra_script_mngr_verbosity.png new file mode 100755 index 00000000..ae23246c Binary files /dev/null and b/doc/img/ghidra_script_mngr_verbosity.png differ diff --git a/pyproject.toml b/pyproject.toml index 35b5554c..31f5312f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,16 +37,16 @@ dependencies = [ "tabulate==0.9.0", "colorama==0.4.6", "termcolor==2.3.0", - "wcwidth==0.2.6", + "wcwidth==0.2.8", "ida-settings==2.1.0", "viv-utils[flirt]==0.7.9", "halo==0.0.31", "networkx==3.1", - "ruamel.yaml==0.17.32", + "ruamel.yaml==0.17.35", "vivisect==1.1.1", 
"pefile==2023.2.7", - "pyelftools==0.29", - "dnfile==0.13.0", + "pyelftools==0.30", + "dnfile==0.14.1", "dncil==1.0.2", "pydantic==2.1.1", "protobuf==4.23.4", @@ -61,26 +61,26 @@ packages = ["capa"] [project.optional-dependencies] dev = [ - "pre-commit==3.3.3", - "pytest==7.4.0", + "pre-commit==3.5.0", + "pytest==7.4.2", "pytest-sugar==0.9.7", "pytest-instafail==0.5.0", "pytest-cov==4.1.0", "flake8==6.1.0", - "flake8-bugbear==23.7.10", + "flake8-bugbear==23.9.16", "flake8-encodings==0.5.0.post1", "flake8-comprehensions==3.14.0", "flake8-logging-format==0.9.0", "flake8-no-implicit-concat==0.3.4", "flake8-print==5.0.0", "flake8-todos==0.3.0", - "flake8-simplify==0.20.0", + "flake8-simplify==0.21.0", "flake8-use-pathlib==0.3.0", "flake8-copyright==0.2.4", - "ruff==0.0.285", - "black==23.7.0", + "ruff==0.0.291", + "black==23.9.1", "isort==5.11.4", - "mypy==1.5.1", + "mypy==1.6.0", "psutil==5.9.2", "stix2==3.0.1", "requests==2.31.0", @@ -96,9 +96,9 @@ dev = [ "types-protobuf==4.23.0.3", ] build = [ - "pyinstaller==5.10.1", + "pyinstaller==6.1.0", "setuptools==68.0.0", - "build==0.10.0" + "build==1.0.3" ] [project.urls] diff --git a/rules b/rules index d923cf4b..94de0355 160000 --- a/rules +++ b/rules @@ -1 +1 @@ -Subproject commit d923cf4b8f22936e0fde88e490ebf2c02a37f91f +Subproject commit 94de0355cde729b13b4313377d27f17a3ddf2567 diff --git a/scripts/bulk-process.py b/scripts/bulk-process.py index 2196449f..3e3cdfb2 100644 --- a/scripts/bulk-process.py +++ b/scripts/bulk-process.py @@ -112,7 +112,7 @@ def get_capa_results(args): extractor = capa.main.get_extractor( path, format, os_, capa.main.BACKEND_VIV, sigpaths, should_save_workspace, disable_progress=True ) - except capa.main.UnsupportedFormatError: + except capa.exceptions.UnsupportedFormatError: # i'm 100% sure if multiprocessing will reliably raise exceptions across process boundaries. # so instead, return an object with explicit success/failure status. 
# @@ -123,7 +123,7 @@ def get_capa_results(args): "status": "error", "error": f"input file does not appear to be a PE file: {path}", } - except capa.main.UnsupportedRuntimeError: + except capa.exceptions.UnsupportedRuntimeError: return { "path": path, "status": "error", diff --git a/scripts/lint.py b/scripts/lint.py index 9fcebdd0..065e694b 100644 --- a/scripts/lint.py +++ b/scripts/lint.py @@ -216,8 +216,8 @@ class InvalidScopes(Lint): recommendation = "At least one scope (static or dynamic) must be specified" def check_rule(self, ctx: Context, rule: Rule): - return (rule.meta.get("scope").get("static") in ("unspecified", "unsupported")) and ( - rule.meta.get("scope").get("dynamic") in ("unspecified", "unsupported") + return (rule.meta.get("scopes").get("static") in ("unspecified", "unsupported")) and ( + rule.meta.get("scopes").get("dynamic") in ("unspecified", "unsupported") ) @@ -359,7 +359,7 @@ def get_sample_capabilities(ctx: Context, path: Path) -> Set[str]: elif nice_path.name.endswith(capa.helpers.EXTENSIONS_SHELLCODE_64): format_ = "sc64" else: - format_ = capa.main.get_auto_format(nice_path) + format_ = capa.helpers.get_auto_format(nice_path) logger.debug("analyzing sample: %s", nice_path) extractor = capa.main.get_extractor( @@ -979,10 +979,6 @@ def main(argv=None): default_samples_path = str(Path(__file__).resolve().parent.parent / "tests" / "data") - # TODO(yelhamer): remove once support for the legacy scope field has been added - # https://github.com/mandiant/capa/pull/1580 - return 0 - parser = argparse.ArgumentParser(description="Lint capa rules.") capa.main.install_common_args(parser, wanted={"tag"}) parser.add_argument("rules", type=str, action="append", help="Path to rules") diff --git a/scripts/profile-time.py b/scripts/profile-time.py index cb2d9368..9acd60ff 100644 --- a/scripts/profile-time.py +++ b/scripts/profile-time.py @@ -54,7 +54,6 @@ import capa.helpers import capa.features import capa.features.common import capa.features.freeze -from 
capa.features.extractors.base_extractor import FeatureExtractor, StaticFeatureExtractor logger = logging.getLogger("capa.profile") @@ -105,14 +104,12 @@ def main(argv=None): if (args.format == "freeze") or ( args.format == capa.features.common.FORMAT_AUTO and capa.features.freeze.is_freeze(taste) ): - extractor: FeatureExtractor = capa.features.freeze.load(Path(args.sample).read_bytes()) - assert isinstance(extractor, StaticFeatureExtractor) + extractor = capa.features.freeze.load(Path(args.sample).read_bytes()) else: extractor = capa.main.get_extractor( args.sample, args.format, args.os, capa.main.BACKEND_VIV, sig_paths, should_save_workspace=False ) - assert isinstance(extractor, StaticFeatureExtractor) with tqdm.tqdm(total=args.number * args.repeat, leave=False) as pbar: def do_iteration(): diff --git a/scripts/setup-linter-dependencies.py b/scripts/setup-linter-dependencies.py index bc7f9bf0..cc8c0310 100644 --- a/scripts/setup-linter-dependencies.py +++ b/scripts/setup-linter-dependencies.py @@ -47,7 +47,7 @@ from typing import Dict, List from pathlib import Path import requests -from stix2 import Filter, MemoryStore, AttackPattern # type: ignore +from stix2 import Filter, MemoryStore, AttackPattern logging.basicConfig(level=logging.INFO, format="%(asctime)s [%(levelname)s] %(message)s") diff --git a/scripts/show-features.py b/scripts/show-features.py index d909d95b..2d5a3480 100644 --- a/scripts/show-features.py +++ b/scripts/show-features.py @@ -83,7 +83,15 @@ import capa.features.address import capa.features.extractors.pefile from capa.helpers import get_auto_format, log_unsupported_runtime_error from capa.features.insn import API, Number -from capa.features.common import FORMAT_AUTO, FORMAT_FREEZE, DYNAMIC_FORMATS, String, Feature, is_global_feature +from capa.features.common import ( + FORMAT_AUTO, + FORMAT_CAPE, + FORMAT_FREEZE, + DYNAMIC_FORMATS, + String, + Feature, + is_global_feature, +) from capa.features.extractors.base_extractor import 
FunctionHandle, StaticFeatureExtractor, DynamicFeatureExtractor logger = logging.getLogger("capa.show-features") @@ -132,8 +140,11 @@ def main(argv=None): extractor = capa.main.get_extractor( args.sample, format_, args.os, args.backend, sig_paths, should_save_workspace ) - except capa.exceptions.UnsupportedFormatError: - capa.helpers.log_unsupported_format_error() + except capa.exceptions.UnsupportedFormatError as e: + if format_ == FORMAT_CAPE: + capa.helpers.log_unsupported_cape_report_error(str(e)) + else: + capa.helpers.log_unsupported_format_error() return -1 except capa.exceptions.UnsupportedRuntimeError: log_unsupported_runtime_error() @@ -248,13 +259,13 @@ def print_static_features(functions, extractor: StaticFeatureExtractor): def print_dynamic_features(processes, extractor: DynamicFeatureExtractor): for p in processes: - print(f"proc: {p.inner['name']} (ppid={p.address.ppid}, pid={p.address.pid})") + print(f"proc: {p.inner.process_name} (ppid={p.address.ppid}, pid={p.address.pid})") for feature, addr in extractor.extract_process_features(p): if is_global_feature(feature): continue - print(f" proc: {p.inner['name']}: {feature}") + print(f" proc: {p.inner.process_name}: {feature}") for t in extractor.get_threads(p): print(f" thread: {t.address.tid}") @@ -283,7 +294,7 @@ def print_dynamic_features(processes, extractor: DynamicFeatureExtractor): print(f" arguments=[{', '.join(arguments)}]") for cid, api in apis: - print(f"call {cid}: {api}({', '.join(arguments)})") + print(f" call {cid}: {api}({', '.join(arguments)})") def ida_main(): @@ -315,8 +326,25 @@ def ida_main(): return 0 +def ghidra_main(): + import capa.features.extractors.ghidra.extractor + + extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor() + + for feature, addr in extractor.extract_file_features(): + print(f"file: {format_address(addr)}: {feature}") + + function_handles = tuple(extractor.get_functions()) + + print_static_features(function_handles, extractor) + + 
return 0 + + if __name__ == "__main__": if capa.helpers.is_runtime_ida(): ida_main() + elif capa.helpers.is_runtime_ghidra(): + ghidra_main() else: sys.exit(main()) diff --git a/tests/data b/tests/data index 609b57f7..87470600 160000 --- a/tests/data +++ b/tests/data @@ -1 +1 @@ -Subproject commit 609b57f7071e5628dc634c1e38a11a95c636efc0 +Subproject commit 8747060007335e3e8528df947e5bd207ca1b0ce3 diff --git a/tests/fixtures.py b/tests/fixtures.py index af8f3494..1cf095cb 100644 --- a/tests/fixtures.py +++ b/tests/fixtures.py @@ -166,7 +166,8 @@ def get_dnfile_extractor(path: Path): @lru_cache(maxsize=1) def get_binja_extractor(path: Path): - from binaryninja import Settings, BinaryViewType + import binaryninja + from binaryninja import Settings import capa.features.extractors.binja.extractor @@ -175,7 +176,7 @@ def get_binja_extractor(path: Path): if path.name.endswith("kernel32-64.dll_"): old_pdb = settings.get_bool("pdb.loadGlobalSymbols") settings.set_bool("pdb.loadGlobalSymbols", False) - bv = BinaryViewType.get_view_of_file(str(path)) + bv = binaryninja.load(str(path)) if path.name.endswith("kernel32-64.dll_"): settings.set_bool("pdb.loadGlobalSymbols", old_pdb) @@ -201,6 +202,16 @@ def get_cape_extractor(path): return CapeExtractor.from_report(report) +@lru_cache(maxsize=1) +def get_ghidra_extractor(path: Path): + import capa.features.extractors.ghidra.extractor + + extractor = capa.features.extractors.ghidra.extractor.GhidraFeatureExtractor() + setattr(extractor, "path", path.as_posix()) + + return extractor + + def extract_global_features(extractor): features = collections.defaultdict(set) for feature, va in extractor.extract_global_features(): @@ -307,6 +318,8 @@ def get_data_path_by_name(name) -> Path: return CD / "data" / "499c2a85f6e8142c3f48d4251c9c7cd6.raw32" elif name.startswith("9324d"): return CD / "data" / "9324d1a8ae37a36ae560c37448c9705a.exe_" + elif name.startswith("395eb"): + return CD / "data" / "395eb0ddd99d2c9e37b6d0b73485ee9c.exe_" elif 
name.startswith("a1982"): return CD / "data" / "a198216798ca38f280dc413f8c57f2c2.exe_" elif name.startswith("a933a"): @@ -426,7 +439,7 @@ def get_sample_md5_by_name(name): elif name.startswith("3b13b"): # file name is SHA256 hash return "56a6ffe6a02941028cc8235204eef31d" - elif name == "7351f.elf": + elif name.startswith("7351f"): return "7351f8a40c5450557b24622417fc478d" elif name.startswith("79abd"): return "79abd17391adc6251ecdc58d13d76baf" @@ -1278,6 +1291,14 @@ FEATURE_COUNT_TESTS_DOTNET = [ ] +FEATURE_COUNT_TESTS_GHIDRA = [ + # Ghidra may render functions as labels, as well as provide differing amounts of call references + # (Colton) TODO: Add more test cases + ("mimikatz", "function=0x4702FD", capa.features.common.Characteristic("calls from"), 0), + ("mimikatz", "function=0x4556E5", capa.features.common.Characteristic("calls to"), 0), +] + + def do_test_feature_presence(get_extractor, sample, scope, feature, expected): extractor = get_extractor(sample) features = scope(extractor) @@ -1327,6 +1348,11 @@ def z9324d_extractor(): return get_extractor(get_data_path_by_name("9324d...")) +@pytest.fixture +def z395eb_extractor(): + return get_extractor(get_data_path_by_name("395eb...")) + + @pytest.fixture def pma12_04_extractor(): return get_extractor(get_data_path_by_name("pma12-04")) diff --git a/tests/test_binja_features.py b/tests/test_binja_features.py index a2f0cd78..78addff7 100644 --- a/tests/test_binja_features.py +++ b/tests/test_binja_features.py @@ -36,16 +36,10 @@ except ImportError: @pytest.mark.skipif(binja_present is False, reason="Skip binja tests if the binaryninja Python API is not installed") @fixtures.parametrize( "sample,scope,feature,expected", - fixtures.FEATURE_PRESENCE_TESTS, + fixtures.FEATURE_PRESENCE_TESTS + fixtures.FEATURE_SYMTAB_FUNC_TESTS, indirect=["sample", "scope"], ) def test_binja_features(sample, scope, feature, expected): - if isinstance(feature, capa.features.file.Export) and "." 
in str(feature.value): - pytest.xfail("skip Binja unsupported forwarded export feature, see #1646") - - if feature == capa.features.common.Characteristic("forwarded export"): - pytest.xfail("skip Binja unsupported forwarded export feature, see #1646") - fixtures.do_test_feature_presence(fixtures.get_binja_extractor, sample, scope, feature, expected) @@ -69,4 +63,4 @@ def test_standalone_binja_backend(): @pytest.mark.skipif(binja_present is False, reason="Skip binja tests if the binaryninja Python API is not installed") def test_binja_version(): version = binaryninja.core_version_info() - assert version.major == 3 and version.minor == 4 + assert version.major == 3 and version.minor == 5 diff --git a/tests/test_dynamic_freeze.py b/tests/test_dynamic_freeze.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/test_ghidra_features.py b/tests/test_ghidra_features.py new file mode 100644 index 00000000..dc51aef4 --- /dev/null +++ b/tests/test_ghidra_features.py @@ -0,0 +1,98 @@ +# Copyright (C) 2023 Mandiant, Inc. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at: [package root]/LICENSE.txt +# Unless required by applicable law or agreed to in writing, software distributed under the License +# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and limitations under the License. 
Must invoke this script from within the Ghidra Runtime Environment
feature, expected) + + +@pytest.mark.skipif(ghidra_present is False, reason="Ghidra tests must be ran within Ghidra") +@fixtures.parametrize( + "sample,scope,feature,expected", fixtures.FEATURE_COUNT_TESTS_GHIDRA, indirect=["sample", "scope"] +) +def test_ghidra_feature_counts(sample, scope, feature, expected): + try: + check_input_file(sample) + except RuntimeError: + pytest.skip(reason="Test must be ran against sample loaded in Ghidra") + + fixtures.do_test_feature_count(fixtures.get_ghidra_extractor, sample, scope, feature, expected) + + +if __name__ == "__main__": + # No support for faulthandler module in Ghidrathon, see: + # https://github.com/mandiant/Ghidrathon/issues/70 + sys.exit(pytest.main(["--pyargs", "-p no:faulthandler", "test_ghidra_features"])) diff --git a/tests/test_main.py b/tests/test_main.py index e07e05b9..16f61ce5 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -38,7 +38,7 @@ def test_main_single_rule(z9324d_extractor, tmpdir): name: test rule scopes: static: file - dynamic: process + dynamic: file authors: - test features: @@ -401,6 +401,30 @@ def test_byte_matching(z9324d_extractor): assert "byte match test" in capabilities +def test_com_feature_matching(z395eb_extractor): + rules = capa.rules.RuleSet( + [ + capa.rules.Rule.from_yaml( + textwrap.dedent( + """ + rule: + meta: + name: initialize IWebBrowser2 + scope: basic block + features: + - and: + - api: ole32.CoCreateInstance + - com/class: InternetExplorer #bytes: 01 DF 02 00 00 00 00 00 C0 00 00 00 00 00 00 46 = CLSID_InternetExplorer + - com/interface: IWebBrowser2 #bytes: 61 16 0C D3 AF CD D0 11 8A 3E 00 C0 4F C9 E2 6E = IID_IWebBrowser2 + """ + ) + ) + ] + ) + capabilities, meta = capa.main.find_capabilities(rules, z395eb_extractor) + assert "initialize IWebBrowser2" in capabilities + + def test_count_bb(z9324d_extractor): rules = capa.rules.RuleSet( [ diff --git a/tests/test_rules.py b/tests/test_rules.py index 50a978ac..b6c9a9c1 100644 --- a/tests/test_rules.py +++ 
b/tests/test_rules.py @@ -420,8 +420,11 @@ def test_rules_flavor_filtering(): def test_meta_scope_keywords(): - for static_scope in sorted(capa.rules.STATIC_SCOPES): - for dynamic_scope in sorted(capa.rules.DYNAMIC_SCOPES): + static_scopes = sorted([e.value for e in capa.rules.STATIC_SCOPES]) + dynamic_scopes = sorted([e.value for e in capa.rules.DYNAMIC_SCOPES]) + + for static_scope in static_scopes: + for dynamic_scope in dynamic_scopes: _ = capa.rules.Rule.from_yaml( textwrap.dedent( f""" @@ -439,7 +442,7 @@ def test_meta_scope_keywords(): ) # its also ok to specify "unsupported" - for static_scope in sorted(capa.rules.STATIC_SCOPES): + for static_scope in static_scopes: _ = capa.rules.Rule.from_yaml( textwrap.dedent( f""" @@ -455,7 +458,7 @@ def test_meta_scope_keywords(): """ ) ) - for dynamic_scope in sorted(capa.rules.DYNAMIC_SCOPES): + for dynamic_scope in dynamic_scopes: _ = capa.rules.Rule.from_yaml( textwrap.dedent( f""" @@ -473,7 +476,7 @@ def test_meta_scope_keywords(): ) # its also ok to specify "unspecified" - for static_scope in sorted(capa.rules.STATIC_SCOPES): + for static_scope in static_scopes: _ = capa.rules.Rule.from_yaml( textwrap.dedent( f""" @@ -489,7 +492,7 @@ def test_meta_scope_keywords(): """ ) ) - for dynamic_scope in sorted(capa.rules.DYNAMIC_SCOPES): + for dynamic_scope in dynamic_scopes: _ = capa.rules.Rule.from_yaml( textwrap.dedent( f""" @@ -1528,3 +1531,72 @@ def test_property_access_symbol(): ) is True ) + + +def test_translate_com_features(): + r = capa.rules.Rule.from_yaml( + textwrap.dedent( + """ + rule: + meta: + name: test rule + features: + - com/class: WICPngDecoder + # 389ea17b-5078-4cde-b6ef-25c15175c751 WICPngDecoder + # e018945b-aa86-4008-9bd4-6777a1e40c11 WICPngDecoder + """ + ) + ) + com_name = "WICPngDecoder" + com_features = [ + capa.features.common.Bytes(b"{\xa1\x9e8xP\xdeL\xb6\xef%\xc1Qu\xc7Q", f"CLSID_{com_name} as bytes"), + capa.features.common.StringFactory("389ea17b-5078-4cde-b6ef-25c15175c751", 
f"CLSID_{com_name} as GUID string"), + capa.features.common.Bytes(b"[\x94\x18\xe0\x86\xaa\x08@\x9b\xd4gw\xa1\xe4\x0c\x11", f"IID_{com_name} as bytes"), + capa.features.common.StringFactory("e018945b-aa86-4008-9bd4-6777a1e40c11", f"IID_{com_name} as GUID string"), + ] + assert set(com_features) == set(r.statement.get_children()) + + +def test_invalid_com_features(): + # test for unknown COM class + with pytest.raises(capa.rules.InvalidRule): + _ = capa.rules.Rule.from_yaml( + textwrap.dedent( + """ + rule: + meta: + name: test rule + features: + - com/class: invalid_com + """ + ) + ) + + # test for unknown COM interface + with pytest.raises(capa.rules.InvalidRule): + _ = capa.rules.Rule.from_yaml( + textwrap.dedent( + """ + rule: + meta: + name: test rule + features: + - com/interface: invalid_com + """ + ) + ) + + # test for invalid COM type + # valid_com_types = "class", "interface" + with pytest.raises(capa.rules.InvalidRule): + _ = capa.rules.Rule.from_yaml( + textwrap.dedent( + """ + rule: + meta: + name: test rule + features: + - com/invalid_COM_type: WICPngDecoder + """ + ) + ) diff --git a/tests/test_static_freeze.py b/tests/test_static_freeze.py index 6bff6d22..4674afc8 100644 --- a/tests/test_static_freeze.py +++ b/tests/test_static_freeze.py @@ -140,7 +140,7 @@ def test_freeze_bytes_roundtrip(): def roundtrip_feature(feature): - assert feature == capa.features.freeze.feature_from_capa(feature).to_capa() + assert feature == capa.features.freeze.features.feature_from_capa(feature).to_capa() def test_serialize_features():