Mirror of https://github.com/mvt-project/mvt (synced 2025-10-21 22:42:15 +02:00)

Compare commits: v2.2.4...feature/po (29 commits)
| SHA1 |
|---|
| 95b2f04db6 |
| 289b7efdeb |
| 166a63e14c |
| 1b933fdb12 |
| 0c0ff7012b |
| f9b0d07a81 |
| d14bcdd05f |
| e026bb0a76 |
| 253b4f031a |
| ec14297643 |
| 3142d86edd |
| c18998d771 |
| 22fd794fb8 |
| 27c5c76dc2 |
| fafbac3545 |
| bbfaadd297 |
| 85abed55b6 |
| 2fbd7607ef |
| 3787dc48cd |
| f814244ff8 |
| 11730f164f |
| 912fb060cb |
| a9edf4a9fe |
| ea7b9066ba |
| fd81e3aa13 |
| 15477cc187 |
| 551b95b38b |
| d767abb912 |
| 8a507b0a0b |
.github/workflows/scripts/update-ios-releases.py (new file, 80 lines, vendored)
@@ -0,0 +1,80 @@
"""
Python script to download the Apple RSS feed and parse it.
"""

import os
import json
import re
import urllib.request
from xml.dom.minidom import parseString

from packaging import version


def download_apple_rss(feed_url):
    with urllib.request.urlopen(feed_url) as f:
        rss_feed = f.read().decode('utf-8')
    print("Downloaded RSS feed from Apple.")
    return rss_feed


def parse_latest_ios_versions(rss_feed_text):
    latest_ios_versions = []

    parsed_feed = parseString(rss_feed_text)
    for item in parsed_feed.getElementsByTagName("item"):
        title = item.getElementsByTagName("title")[0].firstChild.data
        if not title.startswith("iOS"):
            continue

        build_match = re.match(r"iOS (?P<version>[\d\.]+) (?P<beta>beta )?(\S*)?\((?P<build>.*)\)", title)
        if not build_match:
            print("Could not parse iOS build:", title)
            continue

        release_info = build_match.groupdict()
        if release_info["beta"]:
            print("Skipping beta release:", title)
            continue

        release_info.pop("beta")
        latest_ios_versions.append(release_info)

    return latest_ios_versions


def update_mvt(mvt_checkout_path, latest_ios_versions):
    version_path = os.path.join(mvt_checkout_path, "mvt/ios/data/ios_versions.json")
    with open(version_path, "r") as version_file:
        current_versions = json.load(version_file)

    new_entry_count = 0
    for new_version in latest_ios_versions:
        for current_version in current_versions:
            if new_version["build"] == current_version["build"]:
                break
        else:
            # New version that does not exist in current data
            current_versions.append(new_version)
            new_entry_count += 1

    if not new_entry_count:
        print("No new iOS versions found.")
    else:
        print("Found {} new iOS versions.".format(new_entry_count))
        new_version_list = sorted(current_versions, key=lambda x: version.Version(x["version"]))
        with open(version_path, "w") as version_file:
            json.dump(new_version_list, version_file, indent=4)


def main():
    print("Downloading RSS feed...")
    mvt_checkout_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../"))

    rss_feed = download_apple_rss("https://developer.apple.com/news/releases/rss/releases.rss")
    latest_ios_version = parse_latest_ios_versions(rss_feed)
    update_mvt(mvt_checkout_path, latest_ios_version)


if __name__ == "__main__":
    main()
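
For reference, the `ios_versions.json` records this script reads and appends follow the shape produced by the regex's `version` and `build` groups. A hypothetical excerpt (illustrative values only, not taken from the data file):

```python
# Hypothetical excerpt: the record shape that update-ios-releases.py loads,
# extends with new releases, and sorts by version before writing back.
ios_versions = [
    {"version": "16.4.1", "build": "20E252"},
    {"version": "16.5", "build": "20F66"},
]
```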
.github/workflows/update-ios-data.yml (new file, 29 lines, vendored)
@@ -0,0 +1,29 @@
name: Update iOS releases and version numbers
run-name: ${{ github.actor }} is finding the latest iOS release version and build numbers
on:
  workflow_dispatch:
  schedule:
    # * is a special character in YAML so you have to quote this string
    - cron: '0 */6 * * *'


jobs:
  update-ios-version:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - name: Run script to fetch latest iOS releases from Apple RSS feed.
        run: python3 .github/workflows/scripts/update-ios-releases.py
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v5
        with:
          title: '[auto] Update iOS releases and versions'
          commit-message: Add new iOS versions and build numbers
          branch: auto/add-new-ios-releases
          body: |
            This is an automated pull request to update the iOS releases and version numbers.
          add-paths: |
            *.json
          labels: |
            automated pr
@@ -39,7 +39,9 @@ export MVT_STIX2="/home/user/IOC1.stix2:/home/user/IOC2.stix2"
- The [Amnesty International investigations repository](https://github.com/AmnestyTech/investigations) contains STIX-formatted IOCs for:
    - [Pegasus](https://en.wikipedia.org/wiki/Pegasus_(spyware)) ([STIX2](https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-07-18_nso/pegasus.stix2))
    - [Predator from Cytrox](https://citizenlab.ca/2021/12/pegasus-vs-predator-dissidents-doubly-infected-iphone-reveals-cytrox-mercenary-spyware/) ([STIX2](https://raw.githubusercontent.com/AmnestyTech/investigations/master/2021-12-16_cytrox/cytrox.stix2))
    - [An Android Spyware Campaign Linked to a Mercenary Company](https://github.com/AmnestyTech/investigations/tree/master/2023-03-29_android_campaign) ([STIX2](https://github.com/AmnestyTech/investigations/blob/master/2023-03-29_android_campaign/malware.stix2))
- [This repository](https://github.com/Te-k/stalkerware-indicators) contains IOCs for Android stalkerware, including [a STIX MVT-compatible file](https://raw.githubusercontent.com/Te-k/stalkerware-indicators/master/generated/stalkerware.stix2).
- We are also maintaining [a list of IOCs](https://github.com/mvt-project/mvt-indicators) in STIX format from public spyware campaigns.

You can automatically download the latest public indicator files with the command `mvt-ios download-iocs` or `mvt-android download-iocs`. These commands download the indicator files listed [here](https://github.com/mvt-project/mvt/blob/main/public_indicators.json) and store them in the [appdir](https://pypi.org/project/appdirs/) folder. They are then loaded automatically by MVT.
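
The same download can also be triggered from Python in scripted workflows. This is a minimal sketch, assuming the `IndicatorsUpdates` class from `mvt.common.updates` (imported by the CLI modules in this diff) exposes an `update()` method that performs the download:

```python
# Minimal sketch, assuming IndicatorsUpdates.update() performs the same
# download as `mvt-ios download-iocs` / `mvt-android download-iocs`.
from mvt.common.updates import IndicatorsUpdates

ioc_updates = IndicatorsUpdates()
ioc_updates.update()  # fetch the indicators listed in public_indicators.json into the appdir folder
```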
@@ -16,6 +16,18 @@ If indicators are provided through the command-line, processes and domains are c

---

### `applications.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Applications` module. The module extracts the list of applications installed on the device from the `Info.plist` file in a backup, or from the `iTunesMetadata.plist` files in a file system dump. These records contain detailed information on the source and installation of each app.

If indicators are provided through the command-line, processes and application IDs are checked against the app name of each application. The module also flags any application not installed from the App Store. Any matches are stored in *applications_detected.json*.

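As a rough illustration of the check described above (not MVT's actual implementation), a standalone script over an `applications.json` produced by the `Applications` module could flag sideloaded apps like this; the `sourceApp` and `softwareVersionBundleId` keys match the module code shown later in this diff:

```python
# Rough illustration, not MVT's implementation: flag records in an
# applications.json file whose install source is not the App Store.
import json

with open("applications.json", "r", encoding="utf-8") as handle:
    applications = json.load(handle)

for app in applications:
    if app.get("sourceApp", "com.apple.AppStore") != "com.apple.AppStore":
        print("Not installed from the App Store:", app.get("softwareVersionBundleId"))
```
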
---

### `backup_info.json`

!!! info "Availability"

@@ -38,6 +50,18 @@ If indicators are provided through the command-line, they are checked against th

---

### `calendar.json`

!!! info "Availability"
    Backup: :material-check:
    Full filesystem dump: :material-check:

This JSON file is created by mvt-ios' `Calendar` module. The module extracts all CalendarItems from the `Calendar.sqlitedb` database, which contains the calendar entries from the different calendars configured on the phone.

If indicators are provided through the command-line, email addresses are checked against the inviter's email address of each event. Any matches are stored in *calendar_detected.json*.

---

### `calls.json`

!!! info "Availability"
@@ -6,14 +6,15 @@
import logging

import click
from rich.logging import RichHandler

from mvt.common.cmd_check_iocs import CmdCheckIOCS
from mvt.common.help import (HELP_MSG_FAST, HELP_MSG_HASHES, HELP_MSG_IOC,
                             HELP_MSG_LIST_MODULES, HELP_MSG_MODULE,
-                             HELP_MSG_OUTPUT, HELP_MSG_SERIAL)
+                             HELP_MSG_OUTPUT, HELP_MSG_SERIAL,
+                             HELP_MSG_VERBOSE)
from mvt.common.logo import logo
from mvt.common.updates import IndicatorsUpdates
from mvt.common.utils import init_logging, set_verbose_logging

from .cmd_check_adb import CmdAndroidCheckADB
from .cmd_check_androidqf import CmdAndroidCheckAndroidQF

@@ -25,11 +26,8 @@ from .modules.adb.packages import Packages
from .modules.backup import BACKUP_MODULES
from .modules.bugreport import BUGREPORT_MODULES

-# Setup logging using Rich.
-LOG_FORMAT = "[%(name)s] %(message)s"
-logging.basicConfig(level="INFO", format=LOG_FORMAT, handlers=[
-    RichHandler(show_path=False, log_time_format="%X")])
-log = logging.getLogger(__name__)
+init_logging()
+log = logging.getLogger("mvt")

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

@@ -63,8 +61,10 @@ def version():
@click.option("--from-file", "-f", type=click.Path(exists=True),
              help="Instead of acquiring from phone, load an existing packages.json file for "
                   "lookups (mainly for debug purposes)")
+@click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE)
@click.pass_context
-def download_apks(ctx, all_apks, virustotal, output, from_file, serial):
+def download_apks(ctx, all_apks, virustotal, output, from_file, serial, verbose):
+    set_verbose_logging(verbose)
    try:
        if from_file:
            download = DownloadAPKs.from_json(from_file)

@@ -112,8 +112,10 @@ def download_apks(ctx, all_apks, virustotal, output, from_file, serial):
@click.option("--fast", "-f", is_flag=True, help=HELP_MSG_FAST)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
+@click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE)
@click.pass_context
-def check_adb(ctx, serial, iocs, output, fast, list_modules, module):
+def check_adb(ctx, serial, iocs, output, fast, list_modules, module, verbose):
+    set_verbose_logging(verbose)
    cmd = CmdAndroidCheckADB(results_path=output, ioc_files=iocs,
                             module_name=module, serial=serial, fast_mode=fast)

@@ -141,9 +143,11 @@ def check_adb(ctx, serial, iocs, output, fast, list_modules, module):
              help=HELP_MSG_OUTPUT)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
+@click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE)
@click.argument("BUGREPORT_PATH", type=click.Path(exists=True))
@click.pass_context
-def check_bugreport(ctx, iocs, output, list_modules, module, bugreport_path):
+def check_bugreport(ctx, iocs, output, list_modules, module, verbose, bugreport_path):
+    set_verbose_logging(verbose)
    # Always generate hashes as bug reports are small.
    cmd = CmdAndroidCheckBugreport(target_path=bugreport_path,
                                   results_path=output, ioc_files=iocs,

@@ -172,9 +176,11 @@ def check_bugreport(ctx, iocs, output, list_modules, module, bugreport_path):
@click.option("--output", "-o", type=click.Path(exists=False),
              help=HELP_MSG_OUTPUT)
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
+@click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE)
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
@click.pass_context
-def check_backup(ctx, iocs, output, list_modules, backup_path):
+def check_backup(ctx, iocs, output, list_modules, verbose, backup_path):
+    set_verbose_logging(verbose)
    # Always generate hashes as backups are generally small.
    cmd = CmdAndroidCheckBackup(target_path=backup_path, results_path=output,
                                ioc_files=iocs, hashes=True)

@@ -204,9 +210,11 @@ def check_backup(ctx, iocs, output, list_modules, backup_path):
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.option("--hashes", "-H", is_flag=True, help=HELP_MSG_HASHES)
+@click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE)
@click.argument("ANDROIDQF_PATH", type=click.Path(exists=True))
@click.pass_context
-def check_androidqf(ctx, iocs, output, list_modules, module, hashes, androidqf_path):
+def check_androidqf(ctx, iocs, output, list_modules, module, hashes, verbose, androidqf_path):
+    set_verbose_logging(verbose)
    cmd = CmdAndroidCheckAndroidQF(target_path=androidqf_path,
                                   results_path=output, ioc_files=iocs,
                                   module_name=module, hashes=hashes)
@@ -21,15 +21,13 @@ log = logging.getLogger(__name__)
class DownloadAPKs(AndroidExtraction):
    """DownloadAPKs is the main class operating the download of APKs
    from the device.

    """

    def __init__(
        self,
        results_path: Optional[str] = None,
        all_apks: Optional[bool] = False,
-        packages: Optional[list] = None
+        packages: Optional[list] = None,
    ) -> None:
        """Initialize module.
        :param results_path: Path to the folder where data should be stored
@@ -38,7 +38,7 @@ class SMS(AndroidQFModule):
            if "body" not in message:
                continue

-            if self.indicators.check_domains(message["links"]):
+            if self.indicators.check_domains(message.get("links", [])):
                self.detected.append(message)

    def parse_backup(self, data):
@@ -39,8 +39,9 @@ class Getprop(BugReportModule):

        lines = []
        in_getprop = False

        for line in content.decode(errors="ignore").splitlines():
-            if line.strip() == "------ SYSTEM PROPERTIES (getprop) ------":
+            if line.strip().startswith("------ SYSTEM PROPERTIES"):
                in_getprop = True
                continue

@@ -55,13 +56,14 @@
        self.results = parse_getprop("\n".join(lines))

        # Alert if phone is outdated.
-        security_patch = self.results.get("ro.build.version.security_patch", "")
-        if security_patch:
-            patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
-            if (datetime.now() - patch_date) > timedelta(days=6*30):
-                self.log.warning("This phone has not received security updates "
-                                 "for more than six months (last update: %s)",
-                                 security_patch)
+        for entry in self.results:
+            if entry["name"] == "ro.build.version.security_patch":
+                security_patch = entry["value"]
+                patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
+                if (datetime.now() - patch_date) > timedelta(days=6*30):
+                    self.log.warning("This phone has not received security updates "
+                                     "for more than six months (last update: %s)",
+                                     security_patch)

        self.log.info("Extracted %d Android system properties",
                      len(self.results))
@@ -7,6 +7,7 @@ import logging
import os
from typing import Optional

+from mvt.common.module import PostAnalysisModule
from mvt.common.command import Command

log = logging.getLogger(__name__)

@@ -32,6 +33,7 @@ class CmdCheckIOCS(Command):
    def run(self) -> None:
        assert self.target_path is not None
        all_modules = []
+        post_modules = []
        for entry in self.modules:
            if entry not in all_modules:
                all_modules.append(entry)

@@ -43,18 +45,24 @@ class CmdCheckIOCS(Command):
            name_only, _ = os.path.splitext(file_name)
            file_path = os.path.join(self.target_path, file_name)

-            for iocs_module in all_modules:
-                if self.module_name and iocs_module.__name__ != self.module_name:
+            for module in all_modules:
+                if self.module_name and module.__name__ != self.module_name:
                    continue

-                if iocs_module().get_slug() != name_only:
+                # Handle post-analysis modules at the end
+                if issubclass(module, PostAnalysisModule) and module not in post_modules:
+                    post_modules.append(module)
+                    continue
+
+                # Skip if the current result file does not match the module name
+                if module().get_slug() != name_only:
                    continue

                log.info("Loading results from \"%s\" with module %s",
-                         file_name, iocs_module.__name__)
+                         file_name, module.__name__)

-                m = iocs_module.from_json(file_path,
-                                          log=logging.getLogger(iocs_module.__module__))
+                m = module.from_json(file_path,
+                                     log=logging.getLogger(module.__module__))
                if self.iocs.total_ioc_count > 0:
                    m.indicators = self.iocs
                    m.indicators.log = m.log

@@ -66,6 +74,13 @@ class CmdCheckIOCS(Command):
                else:
                    total_detections += len(m.detected)

+        # Run post-analysis modules at end
+        for post_module in post_modules:
+            m = post_module.from_results(self.target_path, log=log)
+            m.run()
+            total_detections += len(m.detected)
+
        if total_detections > 0:
            log.warning("The check of the results produced %d detections!",
                        total_detections)
@@ -33,6 +33,7 @@ class Command:
    ) -> None:
        self.name = ""
        self.modules = []
+        self.modules_post = []

        self.target_path = target_path
        self.results_path = results_path

@@ -42,20 +43,21 @@ class Command:
        self.fast_mode = fast_mode
        self.log = log

-        self.iocs = Indicators(log=log)
-        self.iocs.load_indicators_files(self.ioc_files)
-
        # This list will contain all executed modules.
        # We can use this to reference e.g. self.executed[0].results.
        self.executed = []

        self.detected_count = 0

        self.hashes = hashes
        self.hash_values = []
        self.timeline = []
        self.timeline_detected = []

+        # Load IOCs
+        self._create_storage()
+        self._setup_logging()
+        self.iocs = Indicators(log=log)
+        self.iocs.load_indicators_files(self.ioc_files)

    def _create_storage(self) -> None:
        if self.results_path and not os.path.exists(self.results_path):
            try:

@@ -65,10 +67,11 @@ class Command:
                          self.results_path, exc)
            sys.exit(1)

-    def _add_log_file_handler(self, logger: logging.Logger) -> None:
+    def _setup_logging(self):
+        if not self.results_path:
+            return
+
+        logger = logging.getLogger("mvt")
        file_handler = logging.FileHandler(os.path.join(self.results_path,
                                                        "command.log"))
        formatter = logging.Formatter("%(asctime)s - %(name)s - "

@@ -137,7 +140,7 @@ class Command:
    def list_modules(self) -> None:
        self.log.info("Following is the list of available %s modules:",
                      self.name)
-        for module in self.modules:
+        for module in (self.modules + self.modules_post):
            self.log.info(" - %s", module.__name__)

    def init(self) -> None:

@@ -150,8 +153,6 @@ class Command:
        raise NotImplementedError

    def run(self) -> None:
-        self._create_storage()
-        self._add_log_file_handler(self.log)

        try:
            self.init()

@@ -162,8 +163,8 @@ class Command:
            if self.module_name and module.__name__ != self.module_name:
                continue

+            # FIXME: do we need the logger here
            module_logger = logging.getLogger(module.__module__)
-            self._add_log_file_handler(module_logger)

            m = module(target_path=self.target_path,
                       results_path=self.results_path,
@@ -10,6 +10,7 @@ HELP_MSG_FAST = "Avoid running time/resource consuming features"
HELP_MSG_LIST_MODULES = "Print list of available modules and exit"
HELP_MSG_MODULE = "Name of a single module you would like to run instead of all"
HELP_MSG_HASHES = "Generate hashes of all the files analyzed"
+HELP_MSG_VERBOSE = "Verbose mode"

# Android-specific.
HELP_MSG_SERIAL = "Specify a device serial number or HOST:PORT connection string"
@@ -7,6 +7,7 @@ import csv
import logging
import os
import re
+import glob
from typing import Any, Dict, List, Optional, Union

import simplejson as json

@@ -198,6 +199,9 @@ def run_module(module: MVTModule) -> None:
        module.to_timeline()
    except NotImplementedError:
        pass
+    except Exception as exc:
+        module.log.exception("Error when serializing data from module %s: %s",
+                             module.__class__.__name__, exc)

    module.save_to_json()

@@ -211,7 +215,7 @@ def save_timeline(timeline: list, timeline_path: str) -> None:
    """
    with open(timeline_path, "a+", encoding="utf-8") as handle:
        csvoutput = csv.writer(handle, delimiter=",", quotechar="\"",
-                               quoting=csv.QUOTE_ALL)
+                               quoting=csv.QUOTE_ALL, escapechar='\\')
        csvoutput.writerow(["UTC Timestamp", "Plugin", "Event", "Description"])

    for event in sorted(timeline, key=lambda x: x["timestamp"]

@@ -222,3 +226,51 @@ def save_timeline(timeline: list, timeline_path: str) -> None:
            event.get("event"),
            event.get("data"),
        ])


class PostAnalysisModule(MVTModule):
    """
    Base module for implementing post-processing rules against the output of
    multiple MVT modules.
    """
    @classmethod
    def from_results(cls, results_path: str, log: logging.Logger):
        results = cls.load_results(results_path, log=log)
        return cls(results=results, log=log)

    @classmethod
    def load_results(cls, results_path: str, log: logging.Logger):
        """Load the results from a directory of JSON files."""
        # TODO: Move this to run once before loading all post-processing modules
        module_results = {}
        for json_path in glob.glob(os.path.join(results_path, "*.json")):
            module_name, _ = os.path.splitext(os.path.basename(json_path))
            with open(json_path, "r", encoding="utf-8") as handle:
                try:
                    module_results[module_name] = json.load(handle)
                except Exception as exc:
                    log.error("Unable to load results from file %s: %s",
                              json_path, exc)

        if not module_results:
            log.error("Did not find any MVT results at %s", results_path)

        return module_results

    def load_timeline(self):
        """Load timeline from the CSV file."""
        timeline = []
        timeline_path = os.path.join(self.results_path, "timeline.csv")
        with open(timeline_path, "r", encoding="utf-8") as handle:
            csvinput = csv.reader(handle, delimiter=",", quotechar="\"",
                                  quoting=csv.QUOTE_ALL, escapechar='\\')
            for row in csvinput:
                if row[0] == "UTC Timestamp":
                    continue
                timeline.append({
                    "timestamp": row[0],
                    "module": row[1],
                    "event": row[2],
                    "data": row[3],
                })
        return timeline
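
To illustrate how the `PostAnalysisModule` base class added above is intended to be used, here is a hypothetical post-analysis rule (not part of this diff); it assumes a results directory written by a previous scan and reuses the `sourceApp` key from the `Applications` records:

```python
# Hypothetical example, not part of this diff: a post-analysis rule built on
# PostAnalysisModule that re-examines results saved by earlier modules.
import logging

from mvt.common.module import PostAnalysisModule


class PostSideloadedApps(PostAnalysisModule):
    """Flag application records whose install source is not the App Store."""

    def run(self) -> None:
        # self.results maps each result file name (e.g. "applications") to its
        # parsed JSON content, as loaded by PostAnalysisModule.load_results().
        for record in self.results.get("applications", []):
            if record.get("sourceApp", "com.apple.AppStore") != "com.apple.AppStore":
                self.detected.append(record)


# Usage sketch: point the module at a directory of *.json results.
module = PostSideloadedApps.from_results("/path/to/results", log=logging.getLogger("mvt"))
module.run()
print(f"{len(module.detected)} application record(s) flagged")
```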
@@ -5,10 +5,13 @@

import datetime
import hashlib
+import logging
import os
import re
from typing import Any, Iterator, Union

+from rich.logging import RichHandler
+

def convert_chrometime_to_datetime(timestamp: int) -> datetime.datetime:
    """Converts Chrome timestamp to a datetime.

@@ -197,3 +200,28 @@ def generate_hashes_from_path(path: str, log) -> Iterator[dict]:
            continue

        yield {"file_path": file_path, "sha256": sha256}


def init_logging(verbose: bool = False):
    """
    Initialise logging for the MVT module.
    """
    # Setup logging using Rich.
    log = logging.getLogger("mvt")
    log.setLevel(logging.DEBUG)
    consoleHandler = RichHandler(show_path=False, log_time_format="%X")
    consoleHandler.setFormatter(logging.Formatter("[%(name)s] %(message)s"))
    if verbose:
        consoleHandler.setLevel(logging.DEBUG)
    else:
        consoleHandler.setLevel(logging.INFO)
    log.addHandler(consoleHandler)


def set_verbose_logging(verbose: bool = False):
    log = logging.getLogger("mvt")
    handler = log.handlers[0]
    if verbose:
        handler.setLevel(logging.DEBUG)
    else:
        handler.setLevel(logging.INFO)
@@ -3,4 +3,4 @@
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/

-MVT_VERSION = "2.2.4"
+MVT_VERSION = "2.2.6"
@@ -8,17 +8,17 @@ import logging
import os

import click
-from rich.logging import RichHandler
from rich.prompt import Prompt

from mvt.common.cmd_check_iocs import CmdCheckIOCS
from mvt.common.help import (HELP_MSG_FAST, HELP_MSG_HASHES, HELP_MSG_IOC,
                             HELP_MSG_LIST_MODULES, HELP_MSG_MODULE,
-                             HELP_MSG_OUTPUT)
+                             HELP_MSG_OUTPUT, HELP_MSG_VERBOSE)
from mvt.common.logo import logo
from mvt.common.options import MutuallyExclusiveOption
from mvt.common.updates import IndicatorsUpdates
-from mvt.common.utils import generate_hashes_from_path
+from mvt.common.utils import (generate_hashes_from_path, init_logging,
+                              set_verbose_logging)

from .cmd_check_backup import CmdIOSCheckBackup
from .cmd_check_fs import CmdIOSCheckFS

@@ -26,12 +26,10 @@ from .decrypt import DecryptBackup
from .modules.backup import BACKUP_MODULES
from .modules.fs import FS_MODULES
from .modules.mixed import MIXED_MODULES
+from .modules.post_analysis import POST_ANALYSIS_MODULES

-# Setup logging using Rich.
-LOG_FORMAT = "[%(name)s] %(message)s"
-logging.basicConfig(level="INFO", format=LOG_FORMAT, handlers=[
-    RichHandler(show_path=False, log_time_format="%X")])
-log = logging.getLogger(__name__)
+init_logging()
+log = logging.getLogger("mvt")

# Set this environment variable to a password if needed.
MVT_IOS_BACKUP_PASSWORD = "MVT_IOS_BACKUP_PASSWORD"

@@ -166,9 +164,12 @@ def extract_key(password, key_file, backup_path):
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.option("--hashes", "-H", is_flag=True, help=HELP_MSG_HASHES)
+@click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE)
@click.argument("BACKUP_PATH", type=click.Path(exists=True))
@click.pass_context
-def check_backup(ctx, iocs, output, fast, list_modules, module, hashes, backup_path):
+def check_backup(ctx, iocs, output, fast, list_modules, module, hashes, verbose, backup_path):
+    set_verbose_logging(verbose)
+
    cmd = CmdIOSCheckBackup(target_path=backup_path, results_path=output,
                            ioc_files=iocs, module_name=module, fast_mode=fast,
                            hashes=hashes)

@@ -199,9 +200,11 @@ def check_backup(ctx, iocs, output, fast, list_modules, module, hashes, backup_p
@click.option("--list-modules", "-l", is_flag=True, help=HELP_MSG_LIST_MODULES)
@click.option("--module", "-m", help=HELP_MSG_MODULE)
@click.option("--hashes", "-H", is_flag=True, help=HELP_MSG_HASHES)
+@click.option("--verbose", "-v", is_flag=True, help=HELP_MSG_VERBOSE)
@click.argument("DUMP_PATH", type=click.Path(exists=True))
@click.pass_context
-def check_fs(ctx, iocs, output, fast, list_modules, module, hashes, dump_path):
+def check_fs(ctx, iocs, output, fast, list_modules, module, hashes, verbose, dump_path):
+    set_verbose_logging(verbose)
    cmd = CmdIOSCheckFS(target_path=dump_path, results_path=output,
                        ioc_files=iocs, module_name=module, fast_mode=fast,
                        hashes=hashes)

@@ -232,7 +235,7 @@ def check_fs(ctx, iocs, output, fast, list_modules, module, hashes, dump_path):
@click.pass_context
def check_iocs(ctx, iocs, list_modules, module, folder):
    cmd = CmdCheckIOCS(target_path=folder, ioc_files=iocs, module_name=module)
-    cmd.modules = BACKUP_MODULES + FS_MODULES + MIXED_MODULES
+    cmd.modules = BACKUP_MODULES + FS_MODULES + MIXED_MODULES + POST_ANALYSIS_MODULES

    if list_modules:
        cmd.list_modules()
mvt/ios/data/ios_models.json (new file, 166 lines)
@@ -0,0 +1,166 @@
[
    {
        "identifier": "iPhone4,1",
        "description": "iPhone 4S"
    },
    {
        "identifier": "iPhone5,1",
        "description": "iPhone 5"
    },
    {
        "identifier": "iPhone5,2",
        "description": "iPhone 5"
    },
    {
        "identifier": "iPhone5,3",
        "description": "iPhone 5c"
    },
    {
        "identifier": "iPhone5,4",
        "description": "iPhone 5c"
    },
    {
        "identifier": "iPhone6,1",
        "description": "iPhone 5s"
    },
    {
        "identifier": "iPhone6,2",
        "description": "iPhone 5s"
    },
    {
        "identifier": "iPhone7,1",
        "description": "iPhone 6 Plus"
    },
    {
        "identifier": "iPhone7,2",
        "description": "iPhone 6"
    },
    {
        "identifier": "iPhone8,1",
        "description": "iPhone 6s"
    },
    {
        "identifier": "iPhone8,2",
        "description": "iPhone 6s Plus"
    },
    {
        "identifier": "iPhone8,4",
        "description": "iPhone SE (1st generation)"
    },
    {
        "identifier": "iPhone9,1",
        "description": "iPhone 7"
    },
    {
        "identifier": "iPhone9,2",
        "description": "iPhone 7 Plus"
    },
    {
        "identifier": "iPhone9,3",
        "description": "iPhone 7"
    },
    {
        "identifier": "iPhone9,4",
        "description": "iPhone 7 Plus"
    },
    {
        "identifier": "iPhone10,1",
        "description": "iPhone 8"
    },
    {
        "identifier": "iPhone10,2",
        "description": "iPhone 8 Plus"
    },
    {
        "identifier": "iPhone10,3",
        "description": "iPhone X"
    },
    {
        "identifier": "iPhone10,4",
        "description": "iPhone 8"
    },
    {
        "identifier": "iPhone10,5",
        "description": "iPhone 8 Plus"
    },
    {
        "identifier": "iPhone10,6",
        "description": "iPhone X"
    },
    {
        "identifier": "iPhone11,2",
        "description": "iPhone XS"
    },
    {
        "identifier": "iPhone11,4",
        "description": "iPhone XS Max"
    },
    {
        "identifier": "iPhone11,6",
        "description": "iPhone XS Max"
    },
    {
        "identifier": "iPhone11,8",
        "description": "iPhone XR"
    },
    {
        "identifier": "iPhone12,1",
        "description": "iPhone 11"
    },
    {
        "identifier": "iPhone12,3",
        "description": "iPhone 11 Pro"
    },
    {
        "identifier": "iPhone12,5",
        "description": "iPhone 11 Pro Max"
    },
    {
        "identifier": "iPhone12,8",
        "description": "iPhone SE (2nd generation)"
    },
    {
        "identifier": "iPhone13,1",
        "description": "iPhone 12 mini"
    },
    {
        "identifier": "iPhone13,2",
        "description": "iPhone 12"
    },
    {
        "identifier": "iPhone13,3",
        "description": "iPhone 12 Pro"
    },
    {
        "identifier": "iPhone13,4",
        "description": "iPhone 12 Pro Max"
    },
    {
        "identifier": "iPhone14,4",
        "description": "iPhone 13 Mini"
    },
    {
        "identifier": "iPhone14,5",
        "description": "iPhone 13"
    },
    {
        "identifier": "iPhone14,2",
        "description": "iPhone 13 Pro"
    },
    {
        "identifier": "iPhone14,3",
        "description": "iPhone 13 Pro Max"
    },
    {
        "identifier": "iPhone14,8",
        "description": "iPhone 14 Plus"
    },
    {
        "identifier": "iPhone15,2",
        "description": "iPhone 14 Pro"
    },
    {
        "identifier": "iPhone15,3",
        "description": "iPhone 14 Pro Max"
    }
]
mvt/ios/data/ios_versions.json (new file, 923 lines; diff suppressed because it is too large)
@@ -158,6 +158,7 @@ class Manifest(IOSExtraction):
                    "mode": oct(self._get_key(file_metadata, "Mode")),
                    "owner": self._get_key(file_metadata, "UserID"),
                    "size": self._get_key(file_metadata, "Size"),
+                    "type": "file" if file_data["flags"] == 1 else "directory",
                })
            except Exception:
                self.log.exception("Error reading manifest file metadata for file with ID %s "
@@ -61,8 +61,8 @@ class Applications(IOSExtraction):
                self.detected.append(result)
                continue

-            if result.get("sourceApp", "com.apple.AppStore") != "com.apple.AppStore":
-                self.log.warning("Suspicious app not installed from the App Store: %s", result["softwareVersionBundleId"])
+            if result.get("sourceApp", "com.apple.AppStore") not in ["com.apple.AppStore", "com.apple.dmd", "dmd"]:
+                self.log.warning("Suspicious app not installed from the App Store or MDM: %s", result["softwareVersionBundleId"])
                self.detected.append(result)

    def _parse_itunes_timestamp(self, entry: Dict[str, Any]) -> None:
@@ -59,7 +59,7 @@ class Calendar(IOSExtraction):

    def check_indicators(self) -> None:
        for result in self.results:
-            if result["participant_email"]:
+            if result["participant_email"] and self.indicators:
                ioc = self.indicators.check_email(result["participant_email"])
                if ioc:
                    result["matched_indicator"] = ioc
@@ -38,7 +38,7 @@ class Whatsapp(IOSExtraction):
    def serialize(self, record: dict) -> Union[dict, list]:
        text = record.get("ZTEXT", "").replace("\n", "\\n")
        links_text = ""
-        if record["links"]:
+        if record.get("links"):
            links_text = " - Embedded links: " + ", ".join(record["links"])

        return {
mvt/ios/modules/post_analysis/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from .attachment_deletion import PostAttachmentDeletion

POST_ANALYSIS_MODULES = [PostAttachmentDeletion]
mvt/ios/modules/post_analysis/attachment_deletion.py (new file, 101 lines)
@@ -0,0 +1,101 @@
import logging
import datetime
from typing import Optional

from mvt.common.module import PostAnalysisModule


class PostAttachmentDeletion(PostAnalysisModule):
    """
    Heuristic detection of attachment deletion within a short time period.

    This module implements a heuristic detection for multiple iOS SMS attachments being deleted
    in a short period of time. This is a similar concept to the following script used
    by Kaspersky Labs to detect infections with the Triangulation iOS malware:
    https://github.com/KasperskyLab/triangle_check/blob/main/triangle_check/__init__.py
    """
    def __init__(
        self,
        file_path: Optional[str] = None,
        target_path: Optional[str] = None,
        results_path: Optional[str] = None,
        fast_mode: Optional[bool] = False,
        log: logging.Logger = logging.getLogger(__name__),
        results: Optional[list] = None,
    ) -> None:
        super().__init__(file_path=file_path, target_path=target_path,
                         results_path=results_path, fast_mode=fast_mode,
                         log=log, results=results)

        self.required_modules = ["manifest"]

    def load_locationd_events(self):
        locationd_clients = self.results["locationd_clients"]
        locations_stopped_events = [event for event in locationd_clients if "LocationTimeStopped" in event]
        return locations_stopped_events

    def run(self) -> None:
        """
        Run the post-processing module.

        The logic is to look for SMS attachment directories which were created only
        shortly before their last modified time, but which no longer contain any files.
        """
        for module in self.required_modules:
            if module not in self.results:
                raise Exception(f"Required module {module} was not found in results. Did you run the required modules?")

        locationd_events = []
        locationd_client_iocs = [
            "com.apple.locationd.bundle-/System/Library/LocationBundles/IonosphereHarvest.bundle",
            "com.apple.locationd.bundle-/System/Library/LocationBundles/WRMLinkSelection.bundle"
        ]
        for event in self.load_locationd_events():
            for ioc in locationd_client_iocs:
                if ioc in event["Registered"]:
                    locationd_events.append(event)
                    print(event)

        # Filter the relevant events from the manifest:
        events_by_time = {}
        sms_files = [event for event in self.results["manifest"] if event["relative_path"].startswith("Library/SMS/Attachments/")]
        attachment_folders = {}
        for record in sorted(sms_files, key=lambda x: x["relative_path"]):
            num_path_segments = record["relative_path"].count('/')
            # Skip entries with a full-path
            # if not (num_path_segments == 3 or num_path_segments == 4):
            #     continue

            attachment_root = "/".join(record["relative_path"].split('/', 5)[:5])
            attachment_folder = attachment_folders.get(attachment_root, [])
            attachment_folder.append(record)
            attachment_folders[attachment_root] = attachment_folder

        # Look for directories containing no files, which had a short lifespan
        for key, items in attachment_folders.items():
            has_files = any([item["flags"] == 1 for item in items])
            if has_files:
                continue

            for item in sorted(items, key=lambda x: x["created"]):
                item_created = datetime.datetime.strptime(item["created"], "%Y-%m-%d %H:%M:%S.%f")  # B
                item_modified = datetime.datetime.strptime(item["modified"], "%Y-%m-%d %H:%M:%S.%f")  # M
                status_changed = datetime.datetime.strptime(item["status_changed"], "%Y-%m-%d %H:%M:%S.%f")  # C

                # self.append_timeline(fs_stat['LastModified'], ('M', relativePath))
                # self.append_timeline(fs_stat['LastStatusChange'], ('C', relativePath))
                # self.append_timeline(fs_stat['Birth'], ('B', relativePath))

                # Skip items which were created and modified at the same time, as they likely never had files.
                # print(item["relative_path"], status_changed, item_modified)
                if item_modified == status_changed:
                    print("changed == modified", item["relative_path"], status_changed, item_modified)
                    continue

                if (item_modified - status_changed) < datetime.timedelta(minutes=10):
                    self.log.info(f"Possible attachment deletion. Attachment folder '{key}' with no files, created and modified within 10 minutes. '{item['relative_path']}' created {item_created}, modified {item_modified}")
File diff suppressed because it is too large
@@ -42,6 +42,9 @@ console_scripts =
    mvt-ios = mvt.ios:cli
    mvt-android = mvt.android:cli

+[options.package_data]
+mvt = ios/data/*.json
+
[flake8]
max-complexity = 10
max-line-length = 1000
tests/android_bugreport/__init__.py (new, empty file)
@@ -8,6 +8,7 @@ from pathlib import Path

from mvt.android.modules.bugreport.appops import Appops
from mvt.android.modules.bugreport.packages import Packages
+from mvt.android.modules.bugreport.getprop import Getprop
from mvt.common.module import run_module

from ..utils import get_artifact_folder

@@ -40,3 +41,7 @@ class TestBugreportAnalysis:
        assert m.results[1]["package_name"] == "com.instagram.android"
        assert len(m.results[0]["permissions"]) == 4
        assert len(m.results[1]["permissions"]) == 32

+    def test_getprop_module(self):
+        m = self.launch_bug_report_module(Getprop)
+        assert len(m.results) == 0