Mirror of https://github.com/mvt-project/mvt
Synced 2025-10-21 22:42:15 +02:00

Compare commits: v2.2.6 ... feature/po (22 commits)

| SHA1 |
|---|
| 95b2f04db6 |
| 289b7efdeb |
| 166a63e14c |
| 1b933fdb12 |
| 0c0ff7012b |
| f9b0d07a81 |
| d14bcdd05f |
| e026bb0a76 |
| 253b4f031a |
| ec14297643 |
| 3142d86edd |
| c18998d771 |
| 22fd794fb8 |
| 27c5c76dc2 |
| fafbac3545 |
| bbfaadd297 |
| 85abed55b6 |
| 2fbd7607ef |
| 3787dc48cd |
| f814244ff8 |
| 11730f164f |
| 912fb060cb |
.github/workflows/scripts/update-ios-releases.py (vendored, new file, 80 lines)
@@ -0,0 +1,80 @@
"""
Python script to download the Apple RSS feed and parse it.
"""

import os
import re
import json
import urllib.request
from xml.dom.minidom import parseString

from packaging import version


def download_apple_rss(feed_url):
    with urllib.request.urlopen(feed_url) as f:
        rss_feed = f.read().decode('utf-8')
    print("Downloaded RSS feed from Apple.")
    return rss_feed


def parse_latest_ios_versions(rss_feed_text):
    latest_ios_versions = []

    parsed_feed = parseString(rss_feed_text)
    for item in parsed_feed.getElementsByTagName("item"):
        title = item.getElementsByTagName("title")[0].firstChild.data
        if not title.startswith("iOS"):
            continue

        build_match = re.match(r"iOS (?P<version>[\d\.]+) (?P<beta>beta )?(\S*)?\((?P<build>.*)\)", title)
        if not build_match:
            print("Could not parse iOS build:", title)
            continue

        release_info = build_match.groupdict()
        if release_info["beta"]:
            print("Skipping beta release:", title)
            continue

        release_info.pop("beta")
        latest_ios_versions.append(release_info)

    return latest_ios_versions


def update_mvt(mvt_checkout_path, latest_ios_versions):
    version_path = os.path.join(mvt_checkout_path, "mvt/ios/data/ios_versions.json")
    with open(version_path, "r") as version_file:
        current_versions = json.load(version_file)

    new_entry_count = 0
    for new_version in latest_ios_versions:
        for current_version in current_versions:
            if new_version["build"] == current_version["build"]:
                break
        else:
            # New version that does not exist in the current data.
            current_versions.append(new_version)
            new_entry_count += 1

    if not new_entry_count:
        print("No new iOS versions found.")
    else:
        print("Found {} new iOS versions.".format(new_entry_count))
        new_version_list = sorted(current_versions, key=lambda x: version.Version(x["version"]))
        with open(version_path, "w") as version_file:
            json.dump(new_version_list, version_file, indent=4)


def main():
    print("Downloading RSS feed...")
    mvt_checkout_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../"))

    rss_feed = download_apple_rss("https://developer.apple.com/news/releases/rss/releases.rss")
    latest_ios_versions = parse_latest_ios_versions(rss_feed)
    update_mvt(mvt_checkout_path, latest_ios_versions)


if __name__ == "__main__":
    main()
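As an editorial aside, here is a minimal sketch of what the title-parsing regular expression above captures; the feed titles below are invented examples, not entries from the live RSS feed:

```python
import re

# Pattern copied from update-ios-releases.py; titles are hypothetical samples.
PATTERN = r"iOS (?P<version>[\d\.]+) (?P<beta>beta )?(\S*)?\((?P<build>.*)\)"

for title in ["iOS 16.5.1 (20F75)", "iOS 16.6 beta (20G5026e)"]:
    match = re.match(PATTERN, title)
    info = match.groupdict()
    print(title, "->", info)
    # iOS 16.5.1 (20F75)       -> {'version': '16.5.1', 'beta': None, 'build': '20F75'}
    # iOS 16.6 beta (20G5026e) -> {'version': '16.6', 'beta': 'beta ', 'build': '20G5026e'}
    # Entries with a truthy "beta" group are skipped by parse_latest_ios_versions().
```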
.github/workflows/update-ios-data.yml (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
name: Update iOS releases and version numbers
run-name: ${{ github.actor }} is finding the latest iOS release version and build numbers
on:
  workflow_dispatch:
  schedule:
    # * is a special character in YAML so you have to quote this string
    - cron: '0 */6 * * *'


jobs:
  update-ios-version:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
      - name: Run script to fetch latest iOS releases from Apple RSS feed.
        run: python3 .github/workflows/scripts/update-ios-releases.py
      - name: Create Pull Request
        uses: peter-evans/create-pull-request@v5
        with:
          title: '[auto] Update iOS releases and versions'
          commit-message: Add new iOS versions and build numbers
          branch: auto/add-new-ios-releases
          body: |
            This is an automated pull request to update the iOS releases and version numbers.
          add-paths: |
            *.json
          labels: |
            automated pr
@@ -38,7 +38,7 @@ class SMS(AndroidQFModule):
             if "body" not in message:
                 continue

-            if self.indicators.check_domains(message["links"]):
+            if self.indicators.check_domains(message.get("links", [])):
                 self.detected.append(message)

     def parse_backup(self, data):
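The change above guards against SMS records that carry no "links" key at all. A tiny illustrative sketch (the message dict is invented):

```python
message = {"body": "hello"}  # hypothetical record without extracted links

# message["links"] would raise KeyError here; .get() returns a safe default instead.
links = message.get("links", [])
print(links)  # []
```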
@@ -39,8 +39,9 @@ class Getprop(BugReportModule):

         lines = []
         in_getprop = False

         for line in content.decode(errors="ignore").splitlines():
-            if line.strip() == "------ SYSTEM PROPERTIES (getprop) ------":
+            if line.strip().startswith("------ SYSTEM PROPERTIES"):
                 in_getprop = True
                 continue

@@ -55,13 +56,14 @@ class Getprop(BugReportModule):
         self.results = parse_getprop("\n".join(lines))

         # Alert if phone is outdated.
-        security_patch = self.results.get("ro.build.version.security_patch", "")
-        if security_patch:
-            patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
-            if (datetime.now() - patch_date) > timedelta(days=6*30):
-                self.log.warning("This phone has not received security updates "
-                                 "for more than six months (last update: %s)",
-                                 security_patch)
+        for entry in self.results:
+            if entry["name"] == "ro.build.version.security_patch":
+                security_patch = entry["value"]
+                patch_date = datetime.strptime(security_patch, "%Y-%m-%d")
+                if (datetime.now() - patch_date) > timedelta(days=6*30):
+                    self.log.warning("This phone has not received security updates "
+                                     "for more than six months (last update: %s)",
+                                     security_patch)

         self.log.info("Extracted %d Android system properties",
                       len(self.results))
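The rewritten check apparently reflects that self.results now holds a list of name/value entries rather than a flat dictionary keyed by property name. A minimal sketch of the same age check over that shape, with invented sample data:

```python
from datetime import datetime, timedelta

# Hypothetical getprop-style output: a list of name/value entries.
results = [
    {"name": "ro.build.version.release", "value": "13"},
    {"name": "ro.build.version.security_patch", "value": "2022-11-05"},
]

for entry in results:
    if entry["name"] == "ro.build.version.security_patch":
        patch_date = datetime.strptime(entry["value"], "%Y-%m-%d")
        if (datetime.now() - patch_date) > timedelta(days=6 * 30):
            print("Security patch level is older than six months:", entry["value"])
```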
@@ -7,6 +7,7 @@ import logging
 import os
 from typing import Optional

+from mvt.common.module import PostAnalysisModule
 from mvt.common.command import Command

 log = logging.getLogger(__name__)

@@ -32,6 +33,7 @@ class CmdCheckIOCS(Command):
     def run(self) -> None:
         assert self.target_path is not None
         all_modules = []
+        post_modules = []
         for entry in self.modules:
             if entry not in all_modules:
                 all_modules.append(entry)

@@ -43,18 +45,24 @@ class CmdCheckIOCS(Command):
             name_only, _ = os.path.splitext(file_name)
             file_path = os.path.join(self.target_path, file_name)

-            for iocs_module in all_modules:
-                if self.module_name and iocs_module.__name__ != self.module_name:
+            for module in all_modules:
+                if self.module_name and module.__name__ != self.module_name:
                     continue

-                if iocs_module().get_slug() != name_only:
+                # Handle post-analysis modules at the end
+                if issubclass(module, PostAnalysisModule) and module not in post_modules:
+                    post_modules.append(module)
+                    continue
+
+                # Skip if the current result file does not match the module name
+                if module().get_slug() != name_only:
                     continue

                 log.info("Loading results from \"%s\" with module %s",
-                         file_name, iocs_module.__name__)
+                         file_name, module.__name__)

-                m = iocs_module.from_json(file_path,
-                                          log=logging.getLogger(iocs_module.__module__))
+                m = module.from_json(file_path,
+                                     log=logging.getLogger(module.__module__))
                 if self.iocs.total_ioc_count > 0:
                     m.indicators = self.iocs
                     m.indicators.log = m.log

@@ -66,6 +74,13 @@ class CmdCheckIOCS(Command):
             else:
                 total_detections += len(m.detected)

+        # Run post-analysis modules at end
+        for post_module in post_modules:
+            m = post_module.from_results(self.target_path, log=log)
+            m.run()
+            total_detections += len(m.detected)
+
+
         if total_detections > 0:
             log.warning("The check of the results produced %d detections!",
                         total_detections)
@@ -33,6 +33,7 @@ class Command:
     ) -> None:
         self.name = ""
         self.modules = []
+        self.modules_post = []

         self.target_path = target_path
         self.results_path = results_path

@@ -139,7 +140,7 @@ class Command:
     def list_modules(self) -> None:
         self.log.info("Following is the list of available %s modules:",
                       self.name)
-        for module in self.modules:
+        for module in (self.modules + self.modules_post):
             self.log.info(" - %s", module.__name__)

     def init(self) -> None:
@@ -7,6 +7,7 @@ import csv
 import logging
 import os
 import re
+import glob
 from typing import Any, Dict, List, Optional, Union

 import simplejson as json

@@ -214,7 +215,7 @@ def save_timeline(timeline: list, timeline_path: str) -> None:
     """
     with open(timeline_path, "a+", encoding="utf-8") as handle:
         csvoutput = csv.writer(handle, delimiter=",", quotechar="\"",
-                               quoting=csv.QUOTE_ALL)
+                               quoting=csv.QUOTE_ALL, escapechar='\\')
         csvoutput.writerow(["UTC Timestamp", "Plugin", "Event", "Description"])

     for event in sorted(timeline, key=lambda x: x["timestamp"]

@@ -225,3 +226,51 @@ def save_timeline(timeline: list, timeline_path: str) -> None:
             event.get("event"),
             event.get("data"),
         ])
+
+
+class PostAnalysisModule(MVTModule):
+    """
+    Base module for implementing post-processing rules against the output of
+    multiple MVT modules
+    """
+    @classmethod
+    def from_results(cls, results_path: str, log: logging.Logger):
+        results = cls.load_results(results_path, log=log)
+        return cls(results=results, log=log)
+
+    @classmethod
+    def load_results(cls, results_path: str, log: logging.Logger):
+        """Load the results from a directory of JSON files."""
+        # TODO: Move this to run once before loading all post-processing modules
+        module_results = {}
+        for json_path in glob.glob(os.path.join(results_path, "*.json")):
+            module_name, _ = os.path.splitext(os.path.basename(json_path))
+            with open(json_path, "r", encoding="utf-8") as handle:
+                try:
+                    module_results[module_name] = json.load(handle)
+                except Exception as exc:
+                    log.error("Unable to load results from file %s: %s",
+                              json_path, exc)
+
+        if not module_results:
+            log.error("Did not find any MVT results at %s", results_path)
+
+        return module_results
+
+    def load_timeline(self):
+        """Load timeline from CSV file"""
+        timeline = []
+        timeline_path = os.path.join(self.results_path, "timeline.csv")
+        with open(timeline_path, "r", encoding="utf-8") as handle:
+            csvinput = csv.reader(handle, delimiter=",", quotechar="\"",
+                                  quoting=csv.QUOTE_ALL, escapechar='\\')
+            for row in csvinput:
+                if row[0] == "UTC Timestamp":
+                    continue
+                timeline.append({
+                    "timestamp": row[0],
+                    "module": row[1],
+                    "event": row[2],
+                    "data": row[3],
+                })
+        return timeline
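A hedged sketch of how a post-analysis module built on this base class might be driven, based only on the methods shown above; the subclass and the results directory path are placeholders:

```python
import logging

from mvt.common.module import PostAnalysisModule

log = logging.getLogger("mvt")


class MyPostRule(PostAnalysisModule):
    """Hypothetical post-analysis rule, for illustration only."""

    def run(self) -> None:
        # self.results maps each result file name (e.g. "manifest") to its parsed JSON.
        manifest = self.results.get("manifest", [])
        log.info("Loaded %d manifest records", len(manifest))


# from_results() loads every *.json file in the results directory into self.results.
m = MyPostRule.from_results("/path/to/results", log=log)
m.run()
```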
@@ -26,6 +26,7 @@ from .decrypt import DecryptBackup
 from .modules.backup import BACKUP_MODULES
 from .modules.fs import FS_MODULES
 from .modules.mixed import MIXED_MODULES
+from .modules.post_analysis import POST_ANALYSIS_MODULES

 init_logging()
 log = logging.getLogger("mvt")

@@ -234,7 +235,7 @@ def check_fs(ctx, iocs, output, fast, list_modules, module, hashes, verbose, dum
 @click.pass_context
 def check_iocs(ctx, iocs, list_modules, module, folder):
     cmd = CmdCheckIOCS(target_path=folder, ioc_files=iocs, module_name=module)
-    cmd.modules = BACKUP_MODULES + FS_MODULES + MIXED_MODULES
+    cmd.modules = BACKUP_MODULES + FS_MODULES + MIXED_MODULES + POST_ANALYSIS_MODULES

     if list_modules:
         cmd.list_modules()
mvt/ios/data/ios_models.json (new file, 166 lines)
@@ -0,0 +1,166 @@
[
    {"identifier": "iPhone4,1", "description": "iPhone 4S"},
    {"identifier": "iPhone5,1", "description": "iPhone 5"},
    {"identifier": "iPhone5,2", "description": "iPhone 5"},
    {"identifier": "iPhone5,3", "description": "iPhone 5c"},
    {"identifier": "iPhone5,4", "description": "iPhone 5c"},
    {"identifier": "iPhone6,1", "description": "iPhone 5s"},
    {"identifier": "iPhone6,2", "description": "iPhone 5s"},
    {"identifier": "iPhone7,1", "description": "iPhone 6 Plus"},
    {"identifier": "iPhone7,2", "description": "iPhone 6"},
    {"identifier": "iPhone8,1", "description": "iPhone 6s"},
    {"identifier": "iPhone8,2", "description": "iPhone 6s Plus"},
    {"identifier": "iPhone8,4", "description": "iPhone SE (1st generation)"},
    {"identifier": "iPhone9,1", "description": "iPhone 7"},
    {"identifier": "iPhone9,2", "description": "iPhone 7 Plus"},
    {"identifier": "iPhone9,3", "description": "iPhone 7"},
    {"identifier": "iPhone9,4", "description": "iPhone 7 Plus"},
    {"identifier": "iPhone10,1", "description": "iPhone 8"},
    {"identifier": "iPhone10,2", "description": "iPhone 8 Plus"},
    {"identifier": "iPhone10,3", "description": "iPhone X"},
    {"identifier": "iPhone10,4", "description": "iPhone 8"},
    {"identifier": "iPhone10,5", "description": "iPhone 8 Plus"},
    {"identifier": "iPhone10,6", "description": "iPhone X"},
    {"identifier": "iPhone11,2", "description": "iPhone XS"},
    {"identifier": "iPhone11,4", "description": "iPhone XS Max"},
    {"identifier": "iPhone11,6", "description": "iPhone XS Max"},
    {"identifier": "iPhone11,8", "description": "iPhone XR"},
    {"identifier": "iPhone12,1", "description": "iPhone 11"},
    {"identifier": "iPhone12,3", "description": "iPhone 11 Pro"},
    {"identifier": "iPhone12,5", "description": "iPhone 11 Pro Max"},
    {"identifier": "iPhone12,8", "description": "iPhone SE (2nd generation)"},
    {"identifier": "iPhone13,1", "description": "iPhone 12 mini"},
    {"identifier": "iPhone13,2", "description": "iPhone 12"},
    {"identifier": "iPhone13,3", "description": "iPhone 12 Pro"},
    {"identifier": "iPhone13,4", "description": "iPhone 12 Pro Max"},
    {"identifier": "iPhone14,4", "description": "iPhone 13 Mini"},
    {"identifier": "iPhone14,5", "description": "iPhone 13"},
    {"identifier": "iPhone14,2", "description": "iPhone 13 Pro"},
    {"identifier": "iPhone14,3", "description": "iPhone 13 Pro Max"},
    {"identifier": "iPhone14,8", "description": "iPhone 14 Plus"},
    {"identifier": "iPhone15,2", "description": "iPhone 14 Pro"},
    {"identifier": "iPhone15,3", "description": "iPhone 14 Pro Max"}
]
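For illustration, a small sketch of consuming this lookup table; the path is the in-repo location, and the loading code is only an example rather than how MVT necessarily reads it internally:

```python
import json

# Load the identifier -> description mapping shipped with MVT.
with open("mvt/ios/data/ios_models.json", "r", encoding="utf-8") as handle:
    models = json.load(handle)

lookup = {entry["identifier"]: entry["description"] for entry in models}
print(lookup.get("iPhone10,3"))  # "iPhone X"
```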
mvt/ios/data/ios_versions.json (new file, 923 lines)
(File diff suppressed because it is too large.)
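The version data itself is suppressed above, but the update script earlier writes entries with at least "version" and "build" keys. A hedged sketch of mapping a build number back to a version under that assumption:

```python
import json

with open("mvt/ios/data/ios_versions.json", "r", encoding="utf-8") as handle:
    versions = json.load(handle)

# Assumes each entry has "version" and "build" keys, as written by update-ios-releases.py.
build_to_version = {entry["build"]: entry["version"] for entry in versions}
print(build_to_version.get("20F75"))  # e.g. "16.5.1", if that build is present
```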
@@ -158,6 +158,7 @@ class Manifest(IOSExtraction):
                         "mode": oct(self._get_key(file_metadata, "Mode")),
                         "owner": self._get_key(file_metadata, "UserID"),
                         "size": self._get_key(file_metadata, "Size"),
+                        "type": "file" if file_data["flags"] == 1 else "directory",
                     })
                 except Exception:
                     self.log.exception("Error reading manifest file metadata for file with ID %s "
mvt/ios/modules/post_analysis/__init__.py (new file, 3 lines)
@@ -0,0 +1,3 @@
from .attachment_deletion import PostAttachmentDeletion

POST_ANALYSIS_MODULES = [PostAttachmentDeletion]
mvt/ios/modules/post_analysis/attachment_deletion.py (new file, 101 lines)
@@ -0,0 +1,101 @@
import logging
import datetime
from typing import Optional

from mvt.common.module import PostAnalysisModule


class PostAttachmentDeletion(PostAnalysisModule):
    """
    Heuristic detection of attachment deletion within a short time period.

    This module implements a heuristic detection for multiple iOS SMS attachments being deleted
    in a short period of time. This is a similar concept to the following script used
    by Kaspersky Labs to detect infections with the Triangulation iOS malware:
    https://github.com/KasperskyLab/triangle_check/blob/main/triangle_check/__init__.py
    """

    def __init__(
        self,
        file_path: Optional[str] = None,
        target_path: Optional[str] = None,
        results_path: Optional[str] = None,
        fast_mode: Optional[bool] = False,
        log: logging.Logger = logging.getLogger(__name__),
        results: Optional[list] = None
    ) -> None:
        super().__init__(file_path=file_path, target_path=target_path,
                         results_path=results_path, fast_mode=fast_mode,
                         log=log, results=results)

        self.required_modules = ["manifest"]

    def load_locationd_events(self):
        locationd_clients = self.results["locationd_clients"]
        locations_stopped_event = [event for event in locationd_clients if "LocationTimeStopped" in event]
        return locations_stopped_event

    def run(self) -> None:
        """
        Run the post-processing module.

        The logic is to look for SMS attachment directories which were created shortly
        before their last modified time, but which contain no files.
        """
        for module in self.required_modules:
            if module not in self.results:
                raise Exception(f"Required module {module} was not found in results. Did you run the required modules?")

        locationd_events = []
        locationd_client_iocs = [
            "com.apple.locationd.bundle-/System/Library/LocationBundles/IonosphereHarvest.bundle",
            "com.apple.locationd.bundle-/System/Library/LocationBundles/WRMLinkSelection.bundle"
        ]
        for event in self.load_locationd_events():
            for ioc in locationd_client_iocs:
                if ioc in event["Registered"]:
                    locationd_events.append(event)
                    print(event)

        # Filter the relevant events from the manifest:
        events_by_time = {}
        sms_files = [event for event in self.results["manifest"] if event["relative_path"].startswith("Library/SMS/Attachments/")]
        attachment_folders = {}
        for record in sorted(sms_files, key=lambda x: x["relative_path"]):
            num_path_segments = record["relative_path"].count('/')
            # Skip entries with a full path
            # if not (num_path_segments == 3 or num_path_segments == 4):
            #     continue

            attachment_root = "/".join(record["relative_path"].split('/', 5)[:5])
            attachment_folder = attachment_folders.get(attachment_root, [])
            attachment_folder.append(record)
            attachment_folders[attachment_root] = attachment_folder

        # Look for directories containing no files, which had a short lifespan
        for key, items in attachment_folders.items():
            has_files = any([item["flags"] == 1 for item in items])
            if has_files:
                continue

            for item in sorted(items, key=lambda x: x["created"]):
                item_created = datetime.datetime.strptime(item["created"], "%Y-%m-%d %H:%M:%S.%f")  # B (Birth)
                item_modified = datetime.datetime.strptime(item["modified"], "%Y-%m-%d %H:%M:%S.%f")  # M (LastModified)
                status_changed = datetime.datetime.strptime(item["status_changed"], "%Y-%m-%d %H:%M:%S.%f")  # C (LastStatusChange)

                # Skip items which were created and modified at the same time; they likely never had files.
                if item_modified == status_changed:
                    print("changed == modified", item["relative_path"], status_changed, item_modified)
                    continue

                if (item_modified - status_changed) < datetime.timedelta(minutes=10):
                    self.log.info(f"Possible attachment deletion. Attachment folder '{key}' with no files, "
                                  f"created and modified within 10 minutes. '{item['relative_path']}' "
                                  f"created {item_created}, modified {item_modified}")
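To make the timestamp heuristic concrete, a toy sketch over invented manifest-style records (field names follow the Manifest module output shown earlier; the values are made up):

```python
import datetime

# Invented records for an attachment folder that contains only directories (flags != 1).
items = [
    {"relative_path": "Library/SMS/Attachments/ab/08", "flags": 2,
     "created": "2023-01-10 10:00:00.000000",
     "modified": "2023-01-10 10:04:00.000000",
     "status_changed": "2023-01-10 10:01:00.000000"},
]

fmt = "%Y-%m-%d %H:%M:%S.%f"
for item in items:
    modified = datetime.datetime.strptime(item["modified"], fmt)
    changed = datetime.datetime.strptime(item["status_changed"], fmt)
    # A short gap between status change and last modification, with no files left behind,
    # is what the module flags as a possible attachment deletion.
    if modified != changed and (modified - changed) < datetime.timedelta(minutes=10):
        print("Possible attachment deletion:", item["relative_path"])
```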
(File diff suppressed because it is too large.)
@@ -42,6 +42,9 @@ console_scripts =
     mvt-ios = mvt.ios:cli
     mvt-android = mvt.android:cli

+[options.package_data]
+mvt = ios/data/*.json
+
 [flake8]
 max-complexity = 10
 max-line-length = 1000
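The new package_data stanza ships the JSON data files with the installed mvt package. A hedged sketch of reading one of them via the standard importlib.resources API; this is only an illustration, not necessarily how MVT itself loads its data:

```python
import json
from importlib.resources import files

# Read a bundled data file from the installed "mvt" package.
resource = files("mvt") / "ios" / "data" / "ios_versions.json"
versions = json.loads(resource.read_text(encoding="utf-8"))
print(len(versions), "iOS versions bundled")
```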
tests/android_bugreport/__init__.py (new empty file)
@@ -8,6 +8,7 @@ from pathlib import Path

 from mvt.android.modules.bugreport.appops import Appops
 from mvt.android.modules.bugreport.packages import Packages
+from mvt.android.modules.bugreport.getprop import Getprop
 from mvt.common.module import run_module

 from ..utils import get_artifact_folder

@@ -40,3 +41,7 @@ class TestBugreportAnalysis:
         assert m.results[1]["package_name"] == "com.instagram.android"
         assert len(m.results[0]["permissions"]) == 4
         assert len(m.results[1]["permissions"]) == 32
+
+    def test_getprop_module(self):
+        m = self.launch_bug_report_module(Getprop)
+        assert len(m.results) == 0