# © 2023 AO Kaspersky Lab. All Rights Reserved.
# Extracts and parses Shutdown.log forensic artifact
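# Example invocation (based on the argparse options defined in main() below;
# the archive name and output directory are placeholders):
#   python3 iShutdown_parse.py -e sysdiagnose.tar.gz -p -o ./results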
import tarfile
import hashlib
import os
import csv
import re
from datetime import datetime
import argparse
import shutil
import tempfile


def extract_log(tar_path, output_path):
    # Locate shutdown.log inside the .tar.gz archive, extract it to a temporary
    # directory, copy it into output_path, and return the copied file's path.
    temp_dir = os.path.join(tempfile.gettempdir(), "shutdown_log_extraction")
    if not os.path.exists(temp_dir):
        os.makedirs(temp_dir)
    with tarfile.open(tar_path, 'r:gz') as archive:
        for member in archive.getmembers():
            if 'shutdown.log' in member.name:
                member.name = os.path.basename(member.name)  # Rename file to avoid long path
                archive.extract(member, path=temp_dir)
                log_path = os.path.join(temp_dir, member.name)
                shutil.copy(log_path, output_path)
                return os.path.join(output_path, os.path.basename(member.name))
    raise Exception("The specific log file was not found in the archive.")


def get_file_hashes(file_path):
    hasher_md5 = hashlib.md5()
    hasher_sha1 = hashlib.sha1()
    hasher_sha256 = hashlib.sha256()
    with open(file_path, 'rb') as file:
        buf = file.read()
        hasher_md5.update(buf)
        hasher_sha1.update(buf)
        hasher_sha256.update(buf)
    return hasher_md5.hexdigest(), hasher_sha1.hexdigest(), hasher_sha256.hexdigest()


def parse_log(log_path, output_path):
    # Collect every "remaining client pid" entry until the next SIGTERM
    # timestamp, then write those entries to CSV with the reboot time derived
    # from that timestamp.
    with open(log_path, 'r') as log_file:
        log_content = log_file.readlines()
    csv_path = os.path.join(output_path, 'parsed_shutdown.csv')
    with open(csv_path, 'w', newline='') as csvfile:
        log_md5, log_sha1, log_sha256 = get_file_hashes(log_path)
        csvfile.write(f"Log MD5: {log_md5}\n")
        csvfile.write(f"Log SHA1: {log_sha1}\n")
        csvfile.write(f"Log SHA256: {log_sha256}\n")
        csvfile.write(
            "Parsing Completion: "
            f"{datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC')}\n\n"
        )
        fieldnames = ['entry number', 'reboot time', 'client pid', 'path']
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
        writer.writeheader()
        entry_num = 1
        entries = []
        for line in log_content:
            pid_match = re.search(r'remaining client pid: (\d+) \((.*?)\)', line)
            if pid_match:
                pid, path = pid_match.groups()
                entries.append((pid, path))
            sigterm_match = re.search(r'SIGTERM: \[(\d+)\]', line)
            if sigterm_match:
                timestamp = int(sigterm_match.group(1))
                reboot_time = datetime.utcfromtimestamp(timestamp).strftime(
                    '%Y-%m-%d %H:%M:%S UTC'
                )
                for pid, path in entries:
                    writer.writerow({
                        'entry number': entry_num,
                        'reboot time': reboot_time,
                        'client pid': pid,
                        'path': path
                    })
                    entry_num += 1
                entries = []


def main():
    parser = argparse.ArgumentParser(
        description=(
            'A tool to extract and parse iOS shutdown logs from a .tar.gz archive. '
            'Expected output is a csv file, summary file, and the log file.'
        )
    )
    parser.add_argument(
        '-e', '--extract',
        help='Path to the .tar.gz archive for extracting shutdown.log file.',
        required=True
    )
    parser.add_argument(
        '-p', '--parse',
        action='store_true',
        help='Flag to indicate if the extracted log should be parsed.',
        required=False
    )
    parser.add_argument(
        '-o', '--output',
        help='Path to save the output.',
        default=".",
        required=False
    )
    args = parser.parse_args()

    print("Starting extraction process...")
    log_path = extract_log(args.extract, args.output)
    print(f"File extracted to {log_path}.")

    # Rename the extracted log to its SHA1 hash so the output name is unique.
    _, log_sha1, _ = get_file_hashes(log_path)
    renamed_path = os.path.join(args.output, f"{log_sha1}.log")
    os.rename(log_path, renamed_path)

    # Write an extraction summary with hashes of both the archive and the log.
    md5, sha1, sha256 = get_file_hashes(args.extract)
    summary_path = os.path.join(args.output, 'extraction_summary.txt')
    with open(summary_path, 'w') as summary_file:
        summary_file.write(
            "Extraction Completion: "
            f"{datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S UTC')}\n\n"
        )
        summary_file.write(f'Original Archive: {args.extract}\n')
        summary_file.write(f'File Size: {os.path.getsize(args.extract)} bytes\n')
        summary_file.write(f'MD5: {md5}\n')
        summary_file.write(f'SHA1: {sha1}\n')
        summary_file.write(f'SHA256: {sha256}\n\n')
        summary_file.write(f'Extracted Log (Renamed to SHA1 hash): {renamed_path}\n')
        summary_file.write(f'File Size: {os.path.getsize(renamed_path)} bytes\n')
        log_md5, log_sha1, log_sha256 = get_file_hashes(renamed_path)
        summary_file.write(f'MD5: {log_md5}\n')
        summary_file.write(f'SHA1: {log_sha1}\n')
        summary_file.write(f'SHA256: {log_sha256}\n')

    if args.parse:
        print("Starting parsing process...")
        parse_log(renamed_path, args.output)
        print("Parsing completed.")


if __name__ == '__main__':
    main()
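
# Minimal usage sketch (assumption: a shutdown.log file has already been pulled
# out of the archive by other means), parsing it directly without re-extracting:
#
#   from iShutdown_parse import parse_log
#   parse_log('shutdown.log', '.')  # writes parsed_shutdown.csv to the current directory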