Mirror of https://github.com/mvt-project/mvt

Compare commits


23 Commits

Author SHA1 Message Date
Nex
f1d039346d Bumped version 2021-09-14 14:33:17 +02:00
Nex
ccdfd92d4a Merge branch 'dozenfossil-main' 2021-09-14 14:29:21 +02:00
Nex
032b229eb8 Minor changes for consistency 2021-09-14 14:29:04 +02:00
Nex
93936976c7 Merge branch 'main' of https://github.com/dozenfossil/mvt into dozenfossil-main 2021-09-14 14:26:37 +02:00
Nex
f3a4e9d108 Merge pull request #186 from beneficentboast/main
fix error for manipulated entries in DataUsage/NetUsage
2021-09-14 14:26:00 +02:00
Nex
93a9735b5e Reordering 2021-09-14 14:21:54 +02:00
Nex
7b0e2d4564 Added version 2021-09-14 14:20:54 +02:00
beneficentboast
725a99bcd5 fix error for manipulated entries in DataUsage 2021-09-13 20:13:43 +02:00
dozenfossil
35a6f6ec9a fix multi path/file issue 2021-09-13 20:02:48 +02:00
Nex
3f9809f36c Formatting docstrings 2021-09-11 02:39:33 +02:00
Nex
6da6595108 More docstrings 2021-09-10 20:09:37 +02:00
Nex
35dfeaccee Re-ordered list of shortener domains 2021-09-10 15:21:02 +02:00
Nex
e5f2aa3c3d Standardizing reST docstrings 2021-09-10 15:18:13 +02:00
Nex
3236c1b390 Added new TCC module 2021-09-09 12:00:48 +02:00
Nex
80a670273d Added additional locationd path 2021-09-07 15:18:00 +02:00
Nex
969b5cc506 Fixed bug in locationd module 2021-09-07 15:06:19 +02:00
Nex
ef8622d4c3 Changed event name 2021-09-03 14:49:04 +02:00
Nex
e39e9e6f92 Cleaned up and simplified module 2021-09-03 14:48:24 +02:00
Nex
7b32ed3179 Compacted record data 2021-09-03 14:41:55 +02:00
Nex
315317863e Fixed documentation 2021-09-03 14:06:01 +02:00
Nex
08d35b056a Merge branch 'guitarsinger-main' 2021-09-03 13:35:59 +02:00
Nex
3e679312d1 Renamed module 2021-09-03 13:35:27 +02:00
guitarsinger
be4f1afed6 add OSAnalyticsADDAILY 2021-09-03 11:59:44 +02:00
35 changed files with 541 additions and 176 deletions

View File

@@ -148,6 +148,18 @@ If indicators are provided through the command-line, they are checked against th
---
### `os_analytics_ad_daily.json`
!!! info "Availability"
Backup: :material-check:
Full filesystem dump: :material-check:
This JSON file is created by mvt-ios' `OSAnalyticsADDaily` module. The module extracts records from a plist located at *private/var/mobile/Library/Preferences/com.apple.osanalytics.addaily.plist*, which contains a history of data usage by processes running on the system. Besides the network statistics, these records are particularly important because they might show traces of malicious process executions along with the relevant timeframes.
If indicators are provided through the command-line, they are checked against the process names. Any matches are stored in *os_analytics_ad_daily_detected.json*.
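For reference, a minimal sketch of how such records can be read on their own, assuming a local copy of the plist and mirroring the field order used by the `OSAnalyticsADDaily` module added later in this diff (the file path and variable names are illustrative):

```python
import plistlib

# Hypothetical local copy of the plist, e.g. pulled from a decrypted backup
# or a full filesystem dump.
PLIST_PATH = "com.apple.osanalytics.addaily.plist"

with open(PLIST_PATH, "rb") as handle:
    file_plist = plistlib.load(handle)

# netUsageBaseline maps a process or bundle name to a list of
# [timestamp, wifi_in, wifi_out, wwan_in, wwan_out].
for package, values in file_plist.get("netUsageBaseline", {}).items():
    timestamp, wifi_in, wifi_out, wwan_in, wwan_out = values[:5]
    print(f"{timestamp} {package}: wifi {wifi_in}/{wifi_out} wwan {wwan_in}/{wwan_out}")
```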
---
### `datausage.json`
!!! info "Availability"
@@ -240,6 +252,16 @@ This JSON file is created by mvt-ios' `SMSAttachments` module. The module extrac
---
### `tcc.json`
!!! info "Availability"
Backup: :material-check:
Full filesystem dump: :material-check:
This JSON file is created by mvt-ios' `TCC` module. The module extracts records from a SQLite database located at */private/var/mobile/Library/TCC/TCC.db*, which records which services (such as microphone, camera, or location) each app has been granted or denied access to.
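A minimal sketch of how those records can be inspected directly with Python's `sqlite3` module, assuming a local copy of *TCC.db*; the query and the meaning of `auth_value` follow the `TCC` module added further down in this diff:

```python
import sqlite3

# Hypothetical local copy of TCC.db, e.g. extracted from a backup.
conn = sqlite3.connect("TCC.db")
cur = conn.cursor()
cur.execute("SELECT service, client, auth_value FROM access;")

# auth_value codes as used by the TCC module below.
AUTH_VALUES = {0: "denied", 1: "unknown", 2: "allowed", 3: "limited"}

for service, client, auth_value in cur:
    print(f"{client}: {service} -> {AUTH_VALUES.get(auth_value, 'unknown')}")

cur.close()
conn.close()
```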
---
### `version_history.json`
!!! info "Availability"

View File

@@ -32,7 +32,10 @@ class PullProgress(tqdm):
class DownloadAPKs(AndroidExtraction):
"""DownloadAPKs is the main class operating the download of APKs
from the device."""
from the device.
"""
def __init__(self, output_folder=None, all_apks=False, log=None,
packages=None):
@@ -51,7 +54,9 @@ class DownloadAPKs(AndroidExtraction):
@classmethod
def from_json(cls, json_path):
"""Initialize this class from an existing apks.json file.
:param json_path: Path to the apks.json file to parse.
"""
with open(json_path, "r") as handle:
packages = json.load(handle)
@@ -59,9 +64,11 @@ class DownloadAPKs(AndroidExtraction):
def pull_package_file(self, package_name, remote_path):
"""Pull files related to specific package from the device.
:param package_name: Name of the package to download
:param remote_path: Path to the file to download
:returns: Path to the local copy
"""
log.info("Downloading %s ...", remote_path)
@@ -101,6 +108,8 @@ class DownloadAPKs(AndroidExtraction):
def get_packages(self):
"""Use the Packages adb module to retrieve the list of packages.
We reuse the same extraction logic to then download the APKs.
"""
self.log.info("Retrieving list of installed packages...")
@@ -111,8 +120,7 @@ class DownloadAPKs(AndroidExtraction):
self.packages = m.results
def pull_packages(self):
"""Download all files of all selected packages from the device.
"""
"""Download all files of all selected packages from the device."""
log.info("Starting extraction of installed APKs at folder %s", self.output_folder)
if not os.path.exists(self.output_folder):
@@ -185,15 +193,13 @@ class DownloadAPKs(AndroidExtraction):
log.info("Download of selected packages completed")
def save_json(self):
"""Save the results to the package.json file.
"""
"""Save the results to the package.json file."""
json_path = os.path.join(self.output_folder, "apks.json")
with open(json_path, "w") as handle:
json.dump(self.packages, handle, indent=4)
def run(self):
"""Run all steps of fetch-apk.
"""
"""Run all steps of fetch-apk."""
self.get_packages()
self._adb_connect()
self.pull_packages()

View File

@@ -39,8 +39,7 @@ class AndroidExtraction(MVTModule):
@staticmethod
def _adb_check_keys():
"""Make sure Android adb keys exist.
"""
"""Make sure Android adb keys exist."""
if not os.path.isdir(os.path.dirname(ADB_KEY_PATH)):
os.path.makedirs(os.path.dirname(ADB_KEY_PATH))
@@ -51,8 +50,7 @@ class AndroidExtraction(MVTModule):
write_public_keyfile(ADB_KEY_PATH, ADB_PUB_KEY_PATH)
def _adb_connect(self):
"""Connect to the device over adb.
"""
"""Connect to the device over adb."""
self._adb_check_keys()
with open(ADB_KEY_PATH, "rb") as handle:
@@ -94,47 +92,53 @@ class AndroidExtraction(MVTModule):
break
def _adb_disconnect(self):
"""Close adb connection to the device.
"""
"""Close adb connection to the device."""
self.device.close()
def _adb_reconnect(self):
"""Reconnect to device using adb.
"""
"""Reconnect to device using adb."""
log.info("Reconnecting ...")
self._adb_disconnect()
self._adb_connect()
def _adb_command(self, command):
"""Execute an adb shell command.
:param command: Shell command to execute
:returns: Output of command
"""
return self.device.shell(command)
def _adb_check_if_root(self):
"""Check if we have a `su` binary on the Android device.
:returns: Boolean indicating whether a `su` binary is present or not
"""
return bool(self._adb_command("command -v su"))
def _adb_root_or_die(self):
"""Check if we have a `su` binary, otherwise raise an Exception.
"""
"""Check if we have a `su` binary, otherwise raise an Exception."""
if not self._adb_check_if_root():
raise InsufficientPrivileges("This module is optionally available in case the device is already rooted. Do NOT root your own device!")
def _adb_command_as_root(self, command):
"""Execute an adb shell command.
:param command: Shell command to execute as root
:returns: Output of command
"""
return self._adb_command(f"su -c {command}")
def _adb_check_file_exists(self, file):
"""Verify that a file exists.
:param file: Path of the file
:returns: Boolean indicating whether the file exists or not
"""
# TODO: Need to support checking files without root privileges as well.
@@ -148,9 +152,12 @@ class AndroidExtraction(MVTModule):
def _adb_download(self, remote_path, local_path, progress_callback=None, retry_root=True):
"""Download a file form the device.
:param remote_path: Path to download from the device
:param local_path: Path to where to locally store the copy of the file
:param progress_callback: Callback for download progress bar
:param progress_callback: Callback for download progress bar (Default value = None)
:param retry_root: Default value = True)
"""
try:
self.device.pull(remote_path, local_path, progress_callback)
@@ -191,9 +198,11 @@ class AndroidExtraction(MVTModule):
def _adb_process_file(self, remote_path, process_routine):
"""Download a local copy of a file which is only accessible as root.
This is a wrapper around process_routine.
:param remote_path: Path of the file on the device to process
:param process_routine: Function to be called on the local copy of the
downloaded file
"""
# Connect to the device over adb.
self._adb_connect()
@@ -227,6 +236,5 @@ class AndroidExtraction(MVTModule):
self._adb_disconnect()
def run(self):
"""Run the main procedure.
"""
"""Run the main procedure."""
raise NotImplementedError

View File

@@ -35,7 +35,9 @@ class ChromeHistory(AndroidExtraction):
def _parse_db(self, db_path):
"""Parse a Chrome History database file.
:param db_path: Path to the History database to process.
"""
conn = sqlite3.connect(db_path)
cur = conn.cursor()

View File

@@ -71,7 +71,9 @@ class SMS(AndroidExtraction):
def _parse_db(self, db_path):
"""Parse an Android bugle_db SMS database file.
:param db_path: Path to the Android SMS database file to process
"""
conn = sqlite3.connect(db_path)
cur = conn.cursor()

View File

@@ -48,7 +48,9 @@ class Whatsapp(AndroidExtraction):
def _parse_db(self, db_path):
"""Parse an Android msgstore.db WhatsApp database file.
:param db_path: Path to the Android WhatsApp database file to process
"""
conn = sqlite3.connect(db_path)
cur = conn.cursor()

View File

@@ -32,6 +32,9 @@ class Indicators:
def parse_stix2(self, file_path):
"""Extract indicators from a STIX2 file.
:param file_path: Path to the STIX2 file to parse
:type file_path: str
"""
self.log.info("Parsing STIX2 indicators file at path %s",
file_path)
@@ -64,9 +67,18 @@ class Indicators:
self._add_indicator(ioc=value,
iocs_list=self.ioc_files)
def check_domain(self, url):
def check_domain(self, url) -> bool:
"""Check if a given URL matches any of the provided domain indicators.
:param url: URL to match against domain indicators
:type url: str
:returns: True if the URL matched an indicator, otherwise False
:rtype: bool
"""
# TODO: If the IOC domain contains a subdomain, it is not currently
# being matched.
if not url:
return False
try:
# First we use the provided URL.
@@ -124,18 +136,33 @@ class Indicators:
return True
def check_domains(self, urls):
"""Check the provided list of (suspicious) domains against a list of URLs.
:param urls: List of URLs to check
return False
def check_domains(self, urls) -> bool:
"""Check a list of URLs against the provided list of domain indicators.
:param urls: List of URLs to check against domain indicators
:type urls: list
:returns: True if any URL matched an indicator, otherwise False
:rtype: bool
"""
if not urls:
return False
for url in urls:
if self.check_domain(url):
return True
def check_process(self, process):
return False
def check_process(self, process) -> bool:
"""Check the provided process name against the list of process
indicators.
:param process: Process name to check
:param process: Process name to check against process indicators
:type process: str
:returns: True if process matched an indicator, otherwise False
:rtype: bool
"""
if not process:
return False
@@ -151,18 +178,33 @@ class Indicators:
self.log.warning("Found a truncated known suspicious process name \"%s\"", process)
return True
def check_processes(self, processes):
return False
def check_processes(self, processes) -> bool:
"""Check the provided list of processes against the list of
process indicators.
:param processes: List of processes to check
:param processes: List of processes to check against process indicators
:type processes: list
:returns: True if process matched an indicator, otherwise False
:rtype: bool
"""
if not processes:
return False
for process in processes:
if self.check_process(process):
return True
def check_email(self, email):
return False
def check_email(self, email) -> bool:
"""Check the provided email against the list of email indicators.
:param email: Suspicious email to check
:param email: Email address to check against email indicators
:type email: str
:returns: True if email address matched an indicator, otherwise False
:rtype: bool
"""
if not email:
return False
@@ -171,9 +213,16 @@ class Indicators:
self.log.warning("Found a known suspicious email address: \"%s\"", email)
return True
def check_file(self, file_path):
return False
def check_file(self, file_path) -> bool:
"""Check the provided file path against the list of file indicators.
:param file_path: Path or name of the file to check
:param file_path: File path or file name to check against file
indicators
:type file_path: str
:returns: True if the file path matched an indicator, otherwise False
:rtype: bool
"""
if not file_path:
return False
@@ -182,3 +231,5 @@ class Indicators:
if file_name in self.ioc_files:
self.log.warning("Found a known suspicious file: \"%s\"", file_path)
return True
return False

View File

@@ -23,7 +23,8 @@ class InsufficientPrivileges(Exception):
pass
class MVTModule(object):
"""This class provides a base for all extraction modules."""
"""This class provides a base for all extraction modules.
"""
enabled = True
slug = None
@@ -31,12 +32,18 @@ class MVTModule(object):
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):
"""Initialize module.
:param file_path: Path to the module's database file, if there is any.
:param file_path: Path to the module's database file, if there is any
:type file_path: str
:param base_folder: Path to the base folder (backup or filesystem dump)
:type file_path: str
:param output_folder: Folder where results will be stored
:type output_folder: str
:param fast_mode: Flag to enable or disable slow modules
:type fast_mode: bool
:param log: Handle to logger
:param results: Provided list of results entries
:type results: list
"""
self.file_path = file_path
self.base_folder = base_folder
@@ -59,18 +66,18 @@ class MVTModule(object):
return cls(results=results, log=log)
def get_slug(self):
"""Use the module's class name to retrieve a slug
"""
if self.slug:
return self.slug
sub = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", self.__class__.__name__)
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", sub).lower()
def load_indicators(self, file_path):
self.indicators = Indicators(file_path, self.log)
def check_indicators(self):
"""Check the results of this module against a provided list of
indicators."""
indicators.
"""
raise NotImplementedError
def save_to_json(self):
@@ -102,15 +109,17 @@ class MVTModule(object):
@staticmethod
def _deduplicate_timeline(timeline):
"""Serialize entry as JSON to deduplicate repeated entries"""
"""Serialize entry as JSON to deduplicate repeated entries
:param timeline: List of entries from timeline to deduplicate
"""
timeline_set = set()
for record in timeline:
timeline_set.add(json.dumps(record, sort_keys=True))
return [json.loads(record) for record in timeline_set]
def to_timeline(self):
"""Convert results into a timeline.
"""
"""Convert results into a timeline."""
for result in self.results:
record = self.serialize(result)
if record:
@@ -178,8 +187,9 @@ def run_module(module):
def save_timeline(timeline, timeline_path):
"""Save the timeline in a csv file.
:param timeline: List of records to order and store.
:param timeline_path: Path to the csv file to store the timeline to.
:param timeline: List of records to order and store
:param timeline_path: Path to the csv file to store the timeline to
"""
with io.open(timeline_path, "a+", encoding="utf-8") as handle:
csvoutput = csv.writer(handle, delimiter=",", quotechar="\"")
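Taken together, the `__init__` parameters, `check_indicators()`, and `run()` shown above describe the contract that concrete modules implement. A minimal sketch of a custom module, assuming the base class is importable as `mvt.common.module.MVTModule` (the module name and body are illustrative, not part of this diff):

```python
from mvt.common.module import MVTModule  # assumed import path

class ExampleProcesses(MVTModule):
    """Toy module illustrating the MVTModule contract."""

    def run(self):
        # A real module would parse a database or plist from base_folder here.
        self.results = [{"proc_name": "cfprefsd"}, {"proc_name": "locationd"}]

    def check_indicators(self):
        if not self.indicators:
            return
        for result in self.results:
            if self.indicators.check_process(result["proc_name"]):
                self.detected.append(result)
```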

View File

@@ -9,8 +9,7 @@ from click import Option, UsageError
class MutuallyExclusiveOption(Option):
"""This class extends click to support mutually exclusive options.
"""
"""This class extends click to support mutually exclusive options."""
def __init__(self, *args, **kwargs):
self.mutually_exclusive = set(kwargs.pop("mutually_exclusive", []))

File diff suppressed because it is too large

View File

@@ -10,8 +10,12 @@ import re
def convert_mactime_to_unix(timestamp, from_2001=True):
"""Converts Mac Standard Time to a Unix timestamp.
:param timestamp: MacTime timestamp (either int or float)
:returns: Unix epoch timestamp
:param timestamp: MacTime timestamp (either int or float).
:type timestamp: int
:param from_2001: bool: Whether to (Default value = True)
:param from_2001: Default value = True)
:returns: Unix epoch timestamp.
"""
if not timestamp:
return None
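For reference, the epoch arithmetic behind this helper, assuming `from_2001` refers to Apple's Core Foundation absolute time epoch of 2001-01-01 UTC (978307200 seconds after the Unix epoch); the function below is an illustrative sketch, not MVT's implementation:

```python
import datetime

# Seconds between the Unix epoch (1970-01-01 UTC) and the
# Core Foundation epoch (2001-01-01 UTC).
MAC_EPOCH_OFFSET = 978307200

def mactime_to_unix(timestamp, from_2001=True):
    """Shift a Mac absolute timestamp onto the Unix epoch."""
    return timestamp + MAC_EPOCH_OFFSET if from_2001 else timestamp

# 0 seconds of Mac absolute time corresponds to 2001-01-01 00:00:00 UTC.
print(datetime.datetime.utcfromtimestamp(mactime_to_unix(0)))
```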
@@ -34,8 +38,10 @@ def convert_mactime_to_unix(timestamp, from_2001=True):
def convert_chrometime_to_unix(timestamp):
"""Converts Chrome timestamp to a Unix timestamp.
:param timestamp: Chrome timestamp as int
:returns: Unix epoch timestamp
:param timestamp: Chrome timestamp as int.
:type timestamp: int
:returns: Unix epoch timestamp.
"""
epoch_start = datetime.datetime(1601, 1 , 1)
delta = datetime.timedelta(microseconds=timestamp)
@@ -44,8 +50,11 @@ def convert_chrometime_to_unix(timestamp):
def convert_timestamp_to_iso(timestamp):
"""Converts Unix timestamp to ISO string.
:param timestamp: Unix timestamp
:returns: ISO timestamp string in YYYY-mm-dd HH:MM:SS.ms format
:param timestamp: Unix timestamp.
:type timestamp: int
:returns: ISO timestamp string in YYYY-mm-dd HH:MM:SS.ms format.
:rtype: str
"""
try:
return timestamp.strftime("%Y-%m-%d %H:%M:%S.%f")
@@ -54,15 +63,19 @@ def convert_timestamp_to_iso(timestamp):
def check_for_links(text):
"""Checks if a given text contains HTTP links.
:param text: Any provided text
:returns: Search results
:param text: Any provided text.
:type text: str
:returns: Search results.
"""
return re.findall("(?P<url>https?://[^\s]+)", text, re.IGNORECASE)
def get_sha256_from_file_path(file_path):
"""Calculate the SHA256 hash of a file from a file path.
:param file_path: Path to the file to hash
:returns: The SHA256 hash string
"""
sha256_hash = hashlib.sha256()
with open(file_path, "rb") as handle:
@@ -75,8 +88,10 @@ def get_sha256_from_file_path(file_path):
# https://stackoverflow.com/questions/57014259/json-dumps-on-dictionary-with-bytes-for-keys
def keys_bytes_to_string(obj):
"""Convert object keys from bytes to string.
:param obj: Object to convert from bytes to string.
:returns: Converted object.
:returns: Object converted to string.
:rtype: str
"""
new_obj = {}
if not isinstance(obj, dict):

View File

@@ -6,7 +6,7 @@
import requests
from packaging import version
MVT_VERSION = "1.2.6"
MVT_VERSION = "1.2.7"
def check_for_updates():
res = requests.get("https://pypi.org/pypi/mvt/json")

View File

@@ -17,6 +17,8 @@ log = logging.getLogger(__name__)
class DecryptBackup:
"""This class provides functions to decrypt an encrypted iTunes backup
using either a password or a key file.
"""
def __init__(self, backup_path, dest_path=None):
@@ -35,7 +37,9 @@ class DecryptBackup:
@staticmethod
def is_encrypted(backup_path) -> bool:
"""Query Manifest.db file to see if it's encrypted or not.
:param backup_path: Path to the backup to decrypt
"""
conn = sqlite3.connect(os.path.join(backup_path, "Manifest.db"))
cur = conn.cursor()
@@ -95,7 +99,9 @@ class DecryptBackup:
def decrypt_with_password(self, password):
"""Decrypts an encrypted iOS backup.
:param password: Password to use to decrypt the original backup
"""
log.info("Decrypting iOS backup at path %s with password", self.backup_path)
@@ -131,7 +137,9 @@ class DecryptBackup:
def decrypt_with_key_file(self, key_file):
"""Decrypts an encrypted iOS backup using a key file.
:param key_file: File to read the key bytes to decrypt the backup
"""
log.info("Decrypting iOS backup at path %s with key file %s",
self.backup_path, key_file)
@@ -158,8 +166,7 @@ class DecryptBackup:
log.critical("Failed to decrypt backup. Did you provide the correct key file?")
def get_key(self):
"""Retrieve and prints the encryption key.
"""
"""Retrieve and prints the encryption key."""
if not self._backup:
return
@@ -169,7 +176,9 @@ class DecryptBackup:
def write_key(self, key_path):
"""Save extracted key to file.
:param key_path: Path to the file where to write the derived decryption key.
"""
if not self._decryption_key:
return

View File

@@ -11,8 +11,7 @@ from ..base import IOSExtraction
CONF_PROFILES_DOMAIN = "SysSharedContainerDomain-systemgroup.com.apple.configurationprofiles"
class ConfigurationProfiles(IOSExtraction):
"""This module extracts the full plist data from configuration profiles.
"""
"""This module extracts the full plist data from configuration profiles."""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):

View File

@@ -27,12 +27,19 @@ class Manifest(IOSExtraction):
def _get_key(self, dictionary, key):
"""Unserialized plist objects can have keys which are str or byte types
This is a helper to try fetch a key as both a byte or string type.
:param dictionary: param key:
:param key:
"""
return dictionary.get(key.encode("utf-8"), None) or dictionary.get(key, None)
@staticmethod
def _convert_timestamp(timestamp_or_unix_time_int):
"""Older iOS versions stored the manifest times as unix timestamps.
:param timestamp_or_unix_time_int:
"""
if isinstance(timestamp_or_unix_time_int, datetime.datetime):
return convert_timestamp_to_iso(timestamp_or_unix_time_int)

View File

@@ -14,6 +14,8 @@ CONF_PROFILES_EVENTS_RELPATH = "Library/ConfigurationProfiles/MCProfileEvents.pl
class ProfileEvents(IOSExtraction):
"""This module extracts events related to the installation of configuration
profiles.
"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,

View File

@@ -28,7 +28,9 @@ class IOSExtraction(MVTModule):
def _recover_sqlite_db_if_needed(self, file_path):
"""Tries to recover a malformed database by running a .clone command.
:param file_path: Path to the malformed database file.
"""
# TODO: Find a better solution.
conn = sqlite3.connect(file_path)
@@ -65,8 +67,10 @@ class IOSExtraction(MVTModule):
def _get_backup_files_from_manifest(self, relative_path=None, domain=None):
"""Locate files from Manifest.db.
:param relative_path: Relative path to use as filter from Manifest.db.
:param domain: Domain to use as filter from Manifest.db.
:param relative_path: Relative path to use as filter from Manifest.db. (Default value = None)
:param domain: Domain to use as filter from Manifest.db. (Default value = None)
"""
manifest_db_path = os.path.join(self.base_folder, "Manifest.db")
if not os.path.exists(manifest_db_path):
@@ -116,8 +120,11 @@ class IOSExtraction(MVTModule):
modules that expect to work with a single SQLite database.
If a module requires to process multiple databases or files,
you should use the helper functions above.
:param backup_id: iTunes backup database file's ID (or hash).
:param root_paths: Glob patterns for files to seek in filesystem dump.
:param root_paths: Glob patterns for files to seek in filesystem dump. (Default value = [])
:param backup_ids: Default value = None)
"""
file_path = None
# First we check if there was an explicit file path specified.

View File

@@ -13,7 +13,10 @@ from ..base import IOSExtraction
class Filesystem(IOSExtraction):
"""This module extracts creation and modification date of files from a
full file-system dump."""
full file-system dump.
"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):

View File

@@ -14,7 +14,10 @@ NETUSAGE_ROOT_PATHS = [
class Netusage(NetBase):
"""This class extracts data from netusage.sqlite and attempts to identify
any suspicious processes if running on a full filesystem dump."""
any suspicious processes if running on a full filesystem dump.
"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):

View File

@@ -11,7 +11,10 @@ WEBKIT_INDEXEDDB_ROOT_PATHS = [
class WebkitIndexedDB(WebkitBase):
"""This module looks extracts records from WebKit IndexedDB folders,
and checks them against any provided list of suspicious domains."""
and checks them against any provided list of suspicious domains.
"""
slug = "webkit_indexeddb"

View File

@@ -11,7 +11,10 @@ WEBKIT_LOCALSTORAGE_ROOT_PATHS = [
class WebkitLocalStorage(WebkitBase):
"""This module looks extracts records from WebKit LocalStorage folders,
and checks them against any provided list of suspicious domains."""
and checks them against any provided list of suspicious domains.
"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):

View File

@@ -11,7 +11,10 @@ WEBKIT_SAFARIVIEWSERVICE_ROOT_PATHS = [
class WebkitSafariViewService(WebkitBase):
"""This module looks extracts records from WebKit LocalStorage folders,
and checks them against any provided list of suspicious domains."""
and checks them against any provided list of suspicious domains.
"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):

View File

@@ -13,15 +13,18 @@ from .idstatuscache import IDStatusCache
from .interactionc import InteractionC
from .locationd import LocationdClients
from .net_datausage import Datausage
from .osanalytics_addaily import OSAnalyticsADDaily
from .safari_browserstate import SafariBrowserState
from .safari_history import SafariHistory
from .sms import SMS
from .sms_attachments import SMSAttachments
from .tcc import TCC
from .webkit_resource_load_statistics import WebkitResourceLoadStatistics
from .webkit_session_resource_log import WebkitSessionResourceLog
from .whatsapp import Whatsapp
MIXED_MODULES = [Calls, ChromeFavicon, ChromeHistory, Contacts, FirefoxFavicon,
FirefoxHistory, IDStatusCache, InteractionC, LocationdClients,
Datausage, SafariBrowserState, SafariHistory, SMS, SMSAttachments,
WebkitResourceLoadStatistics, WebkitSessionResourceLog, Whatsapp,]
OSAnalyticsADDaily, Datausage, SafariBrowserState, SafariHistory,
TCC, SMS, SMSAttachments, WebkitResourceLoadStatistics,
WebkitSessionResourceLog, Whatsapp,]

View File

@@ -19,7 +19,10 @@ FIREFOX_HISTORY_ROOT_PATHS = [
class FirefoxHistory(IOSExtraction):
"""This module extracts all Firefox visits and tries to detect potential
network injection attacks."""
network injection attacks.
"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):

View File

@@ -51,12 +51,8 @@ class IDStatusCache(IOSExtraction):
result.get("user"))
self.detected.append(result)
def run(self):
self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS,
root_paths=IDSTATUSCACHE_ROOT_PATHS)
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
with open(self.file_path, "rb") as handle:
def _extract_idstatuscache_entries(self, file_path):
with open(file_path, "rb") as handle:
file_plist = plistlib.load(handle)
id_status_cache_entries = []
@@ -84,4 +80,16 @@ class IDStatusCache(IOSExtraction):
entry["occurrences"] = entry_counter[entry["user"]]
self.results.append(entry)
def run(self):
if self.is_backup:
self._find_ios_database(backup_ids=IDSTATUSCACHE_BACKUP_IDS)
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
self._extract_idstatuscache_entries(self.file_path)
elif self.is_fs_dump:
for idstatuscache_path in self._get_fs_files_from_patterns(IDSTATUSCACHE_ROOT_PATHS):
self.file_path = idstatuscache_path
self.log.info("Found IDStatusCache plist at path: %s", self.file_path)
self._extract_idstatuscache_entries(self.file_path)
self.log.info("Extracted a total of %d ID Status Cache entries", len(self.results))

View File

@@ -14,10 +14,11 @@ LOCATIOND_BACKUP_IDS = [
]
LOCATIOND_ROOT_PATHS = [
"private/var/mobile/Library/Caches/locationd/clients.plist",
"private/var/root/Library/Caches/locationd/clients.plist"
]
class LocationdClients(IOSExtraction):
"""Extract information from apps who used geolocation"""
"""Extract information from apps who used geolocation."""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):
@@ -50,22 +51,37 @@ class LocationdClients(IOSExtraction):
return records
def run(self):
self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS,
root_paths=LOCATIOND_ROOT_PATHS)
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
def check_indicators(self):
for result in self.results:
parts = result["package"].split("/")
proc_name = parts[len(parts)-1]
with open(self.file_path, "rb") as handle:
if self.indicators.check_process(proc_name):
self.detected.append(result)
def _extract_locationd_entries(self, file_path):
with open(file_path, "rb") as handle:
file_plist = plistlib.load(handle)
for app in file_plist:
if file_plist[app] is dict:
result = file_plist[app]
result["package"] = app
for ts in self.timestamps:
if ts in result.keys():
result[ts] = convert_timestamp_to_iso(convert_mactime_to_unix(result[ts]))
for key, values in file_plist.items():
result = file_plist[key]
result["package"] = key
for ts in self.timestamps:
if ts in result.keys():
result[ts] = convert_timestamp_to_iso(convert_mactime_to_unix(result[ts]))
self.results.append(result)
self.results.append(result)
def run(self):
if self.is_backup:
self._find_ios_database(backup_ids=LOCATIOND_BACKUP_IDS)
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
self._extract_locationd_entries(self.file_path)
elif self.is_fs_dump:
for locationd_path in self._get_fs_files_from_patterns(LOCATIOND_ROOT_PATHS):
self.file_path = locationd_path
self.log.info("Found Locationd Clients plist at path: %s", self.file_path)
self._extract_locationd_entries(self.file_path)
self.log.info("Extracted a total of %d Locationd Clients entries", len(self.results))

View File

@@ -14,7 +14,10 @@ DATAUSAGE_ROOT_PATHS = [
class Datausage(NetBase):
"""This class extracts data from DataUsage.sqlite and attempts to identify
any suspicious processes if running on a full filesystem dump."""
any suspicious processes if running on a full filesystem dump.
"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):

View File

@@ -0,0 +1,64 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import plistlib
from mvt.common.utils import convert_timestamp_to_iso
from ..base import IOSExtraction
OSANALYTICS_ADDAILY_BACKUP_IDS = [
"f65b5fafc69bbd3c60be019c6e938e146825fa83",
]
OSANALYTICS_ADDAILY_ROOT_PATHS = [
"private/var/mobile/Library/Preferences/com.apple.osanalytics.addaily.plist",
]
class OSAnalyticsADDaily(IOSExtraction):
"""Extract network usage information by process, from com.apple.osanalytics.addaily.plist"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):
super().__init__(file_path=file_path, base_folder=base_folder,
output_folder=output_folder, fast_mode=fast_mode,
log=log, results=results)
def serialize(self, record):
record_data = f"{record['package']} WIFI IN: {record['wifi_in']}, WIFI OUT: {record['wifi_out']} - " \
f"WWAN IN: {record['wwan_in']}, WWAN OUT: {record['wwan_out']}"
return {
"timestamp": record["ts"],
"module": self.__class__.__name__,
"event": "osanalytics_addaily",
"data": record_data,
}
def check_indicators(self):
if not self.indicators:
return
for result in self.results:
if self.indicators.check_process(result["package"]):
self.detected.append(result)
def run(self):
self._find_ios_database(backup_ids=OSANALYTICS_ADDAILY_BACKUP_IDS,
root_paths=OSANALYTICS_ADDAILY_ROOT_PATHS)
self.log.info("Found com.apple.osanalytics.addaily plist at path: %s", self.file_path)
with open(self.file_path, "rb") as handle:
file_plist = plistlib.load(handle)
for app, values in file_plist.get("netUsageBaseline", {}).items():
self.results.append({
"package": app,
"ts": convert_timestamp_to_iso(values[0]),
"wifi_in": values[1],
"wifi_out": values[2],
"wwan_in": values[3],
"wwan_out": values[4],
})
self.log.info("Extracted a total of %d com.apple.osanalytics.addaily entries", len(self.results))

View File

@@ -13,9 +13,6 @@ from mvt.common.utils import (convert_mactime_to_unix,
from ..base import IOSExtraction
SAFARI_BROWSER_STATE_BACKUP_IDS = [
"3a47b0981ed7c10f3e2800aa66bac96a3b5db28e",
]
SAFARI_BROWSER_STATE_BACKUP_RELPATH = "Library/Safari/BrowserState.db"
SAFARI_BROWSER_STATE_ROOT_PATHS = [
"private/var/mobile/Library/Safari/BrowserState.db",
@@ -101,12 +98,17 @@ class SafariBrowserState(IOSExtraction):
})
def run(self):
# TODO: Is there really only one BrowserState.db in a device?
self._find_ios_database(backup_ids=SAFARI_BROWSER_STATE_BACKUP_IDS,
root_paths=SAFARI_BROWSER_STATE_ROOT_PATHS)
self.log.info("Found Safari browser state database at path: %s", self.file_path)
self._process_browser_state_db(self.file_path)
if self.is_backup:
for backup_file in self._get_backup_files_from_manifest(relative_path=SAFARI_BROWSER_STATE_BACKUP_RELPATH):
self.file_path = self._get_backup_file_from_id(backup_file["file_id"])
self.log.info("Found Safari browser state database at path: %s", self.file_path)
self._process_browser_state_db(self.file_path)
elif self.is_fs_dump:
for safari_browserstate_path in self._get_fs_files_from_patterns(SAFARI_BROWSER_STATE_ROOT_PATHS):
self.file_path = safari_browserstate_path
self.log.info("Found Safari browser state database at path: %s", self.file_path)
self._process_browser_state_db(self.file_path)
self.log.info("Extracted a total of %d tab records and %d session history entries",
len(self.results), self._session_history_count)

View File

@@ -19,7 +19,10 @@ SAFARI_HISTORY_ROOT_PATHS = [
class SafariHistory(IOSExtraction):
"""This module extracts all Safari visits and tries to detect potential
network injection attacks."""
network injection attacks.
"""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):

View File

@@ -0,0 +1,90 @@
# Mobile Verification Toolkit (MVT)
# Copyright (c) 2021 The MVT Project Authors.
# Use of this software is governed by the MVT License 1.1 that can be found at
# https://license.mvt.re/1.1/
import sqlite3
from datetime import datetime
from mvt.common.utils import convert_timestamp_to_iso
from ..base import IOSExtraction
TCC_BACKUP_IDS = [
"64d0019cb3d46bfc8cce545a8ba54b93e7ea9347",
]
TCC_ROOT_PATHS = [
"private/var/mobile/Library/TCC/TCC.db",
]
AUTH_VALUES = {
0: "denied",
1: "unknown",
2: "allowed",
3: "limited",
}
AUTH_REASONS = {
1: "error",
2: "user_consent",
3: "user_set",
4: "system_set",
5: "service_policy",
6: "mdm_policy",
7: "override_policy",
8: "missing_usage_string",
9: "prompt_timeout",
10: "preflight_unknown",
11: "entitled",
12: "app_type_policy",
}
class TCC(IOSExtraction):
"""This module extracts records from the TCC.db SQLite database."""
def __init__(self, file_path=None, base_folder=None, output_folder=None,
fast_mode=False, log=None, results=[]):
super().__init__(file_path=file_path, base_folder=base_folder,
output_folder=output_folder, fast_mode=fast_mode,
log=log, results=results)
def process_db(self, file_path):
conn = sqlite3.connect(file_path)
cur = conn.cursor()
cur.execute("""SELECT
service, client, client_type, auth_value, auth_reason, last_modified
FROM access;""")
for row in cur:
service = row[0]
client = row[1]
client_type = row[2]
client_type_desc = "bundle_id" if client_type == 0 else "absolute_path"
auth_value = row[3]
auth_value_desc = AUTH_VALUES.get(auth_value, "")
auth_reason = row[4]
auth_reason_desc = AUTH_REASONS.get(auth_reason, "unknown")
last_modified = convert_timestamp_to_iso(datetime.utcfromtimestamp((row[5])))
if service in ["kTCCServiceMicrophone", "kTCCServiceCamera"]:
device = "microphone" if service == "kTCCServiceMicrophone" else "camera"
self.log.info("Found client \"%s\" with access %s to %s on %s by %s",
client, auth_value_desc, device, last_modified, auth_reason_desc)
self.results.append({
"service": service,
"client": client,
"client_type": client_type_desc,
"auth_value": auth_value_desc,
"auth_reason_desc": auth_reason_desc,
"last_modified": last_modified,
})
cur.close()
conn.close()
def run(self):
self._find_ios_database(backup_ids=TCC_BACKUP_IDS, root_paths=TCC_ROOT_PATHS)
self.log.info("Found TCC database at path: %s", self.file_path)
self.process_db(self.file_path)
self.log.info("Extracted a total of %d TCC items", len(self.results))

View File

@@ -18,8 +18,7 @@ WEBKIT_RESOURCELOADSTATICS_ROOT_PATHS = [
]
class WebkitResourceLoadStatistics(IOSExtraction):
"""This module extracts records from WebKit ResourceLoadStatistics observations.db.
"""
"""This module extracts records from WebKit ResourceLoadStatistics observations.db."""
# TODO: Add serialize().
def __init__(self, file_path=None, base_folder=None, output_folder=None,

Some files were not shown because too many files have changed in this diff