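"""Parse gzipped IPFIX export packets from the cache directory.

Each JSON message is flattened into per-minute flow rows (direction, port,
octets, packets), appended to the ParsedExportPacket table, and the source
file is archived under parsed/<year>/<month>/<day>/<hour>/.
"""
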
import datetime
import gzip
import ipaddress
import json
import logging
import os
import shutil
import time
import uuid
from json.decoder import JSONDecodeError
import pandas
import pytz
from cachetools import cached, TTLCache
from helpers.dns import get_domain_name
from helpers.time import ms_to_ts, ts_to_ms, to_minute_start, to_minute_end
from models import Session
from models.export_packet import ParsedExportPacket
logging.basicConfig(level=logging.DEBUG)

# constants
EPHEMERAL_PORT_RANGE_START = 32768
EPHEMERAL_PORT_RANGE_END = 65535
PROTOCOL_MAP = {
    # https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml
    1: "ICMP",
    6: "TCP",
    17: "UDP",
}
LOCAL_ADDRESS = [192, 168, 42]
VPN_ADDRESS = [192, 168, 43]
PROCESS_ID = uuid.uuid4().hex
CACHE_DIR = "cache/"
PROCESSING_DIR = os.path.join(CACHE_DIR, "processing-" + PROCESS_ID)
PARSED_DIR = "parsed/"
IPFIX_TO_DB_MAP = {
    "id": "id",
    "intervalStart": "unix_start_ms",
    "intervalEnd": "unix_end_ms",
    "year": "year",
    "month": "month",
    "day": "day",
    "hour": "hour",
    "minute": "minute",
    "offset": "offset",
    "ipVersion": "ip_version",
    "protocolIdentifier": "protocol",
    "localAddress": "local_address",
    "localName": "local_name",
    "remoteAddress": "remote_address",
    "remoteName": "remote_name",
    "port": "port",
    "direction": "direction",
    "connections": "connections",
    "packets": "packets",
    "octets": "octets",
}

# transformation helper functions
@cached(cache=TTLCache(maxsize=1024 * 1024, ttl=900))
def convert_decimal_ip_address_to_string(ip):
    if ip is None:
        return ip
    try:
        ip = int(ip)
    except (TypeError, ValueError):
        logging.error("Cannot parse IP address: " + str(ip))
        return ip
    return str(ipaddress.ip_address(ip))
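
# example: IPv4 addresses arrive as decimal integers, so
# convert_decimal_ip_address_to_string(3232246273) returns "192.168.42.1"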

def return_local_and_remote_address(row):
    if row["ipVersion"] == 4:
        split_source_address = [int(i) for i in row["sourceAddress"].split(".")]
        split_destination_address = [
            int(i) for i in row["destinationAddress"].split(".")
        ]
        source_is_local = (
            (split_source_address[: len(LOCAL_ADDRESS)] == LOCAL_ADDRESS)
            or (split_source_address[: len(VPN_ADDRESS)] == VPN_ADDRESS)
            or (split_source_address == [0, 0, 0, 0])
        )
        destination_is_local = (
            (split_destination_address[: len(LOCAL_ADDRESS)] == LOCAL_ADDRESS)
            or (split_destination_address[: len(VPN_ADDRESS)] == VPN_ADDRESS)
            or (split_destination_address == [0, 0, 0, 0])
        )
        if source_is_local and destination_is_local:
            return row["sourceAddress"], row["destinationAddress"], "internal"
        elif source_is_local:
            return row["sourceAddress"], row["destinationAddress"], "upload"
        elif destination_is_local:
            return row["destinationAddress"], row["sourceAddress"], "download"
        else:
            logging.error(
                "Cannot find local address, "
                "keeping source as local and destination as remote: " + str(row)
            )
            return row["sourceAddress"], row["destinationAddress"], "unknown"
    elif row["ipVersion"] == 6:
        # TODO: make this more accurate
        logging.warning(
            "IPv6 has no local address detection yet, "
            "keeping source as local and destination as remote: " + str(row)
        )
        return row["sourceAddress"], row["destinationAddress"], "unknown"
    else:
        # fall through for unexpected IP versions instead of returning None
        logging.error("Unsupported ipVersion: " + str(row))
        return row["sourceAddress"], row["destinationAddress"], "unknown"
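
# example: a row with sourceAddress "192.168.42.10" and destinationAddress
# "8.8.8.8" is classified as ("192.168.42.10", "8.8.8.8", "upload")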

@cached(cache=TTLCache(maxsize=1024 * 1024, ttl=900))
def convert_decimal_mac_address_to_string(mac):
    try:
        mac = int(mac)
    except (TypeError, ValueError):
        return mac
    # zero-pad to the full 6 bytes (12 hex digits) so that leading zero
    # octets are preserved before grouping into colon-separated pairs
    return ":".join(format(s, "02x") for s in bytes.fromhex(format(mac, "012x")))

def convert_ephemeral_ports(port):
    try:
        port = int(port)
    except (TypeError, ValueError):
        return port
    if EPHEMERAL_PORT_RANGE_START <= port <= EPHEMERAL_PORT_RANGE_END:
        return 0
    else:
        return port
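
# example: convert_ephemeral_ports(443) returns 443, while a client-side
# port such as 51234 falls in the ephemeral range and returns 0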

def set_main_port(row):
    if row["sourceTransportPort"] == 0:
        return row["destinationTransportPort"]
    elif row["destinationTransportPort"] == 0:
        return row["sourceTransportPort"]
    else:
        return min(row["sourceTransportPort"], row["destinationTransportPort"])

def convert_protocol(protocol):
    try:
        return PROTOCOL_MAP[protocol]
    except KeyError:
        return "UNKNOWN"

def calculate_interval(ms_start, ms_end):
    interval_start = [ts_to_ms(to_minute_start(ms_to_ts(ms_start)))]
    interval_end = ts_to_ms(to_minute_end(ms_to_ts(ms_start)))
    while interval_end < ms_end:
        interval_start.append(interval_start[-1] + 60 * 1000)
        interval_end += 60 * 1000
    return interval_start
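
# example (assuming to_minute_end returns the last instant of the minute):
# a flow running 00:00:30-00:02:10 UTC spans three minutes, so this returns
# the epoch milliseconds of 00:00, 00:01 and 00:02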

# process_message function
def process_message(message):
    flows = []
    # filter out records without an ipVersion and add per-minute intervals
    for flow in message["flows"]:
        if "ipVersion" not in flow.keys():
            continue
        flow["intervalStart"] = calculate_interval(
            flow["flowStartMilliseconds"], flow["flowEndMilliseconds"]
        )
        flows.append(flow)
    # convert to dataframe
    flows = pandas.DataFrame(flows)
    if len(flows) == 0:
        return
    logging.info(
        "[{time}] Parsing: processing {flows} flows".format(
            time=pytz.utc.localize(datetime.datetime.utcnow()).isoformat(),
            flows=len(flows),
        )
    )
    # convert decimal IP addresses to strings
    ip_address_columns = [
        "sourceIPv4Address",
        "destinationIPv4Address",
        "sourceIPv6Address",
        "destinationIPv6Address",
    ]
    for col in ip_address_columns:
        try:
            flows[col] = flows[col].apply(convert_decimal_ip_address_to_string)
        except KeyError:
            flows[col] = None
    # set source and destination address
    flows["sourceAddress"] = flows["sourceIPv4Address"].fillna(
        flows["sourceIPv6Address"]
    )
    flows["destinationAddress"] = flows["destinationIPv4Address"].fillna(
        flows["destinationIPv6Address"]
    )
    # set local and remote address
    flows["localAddress"] = None
    flows["remoteAddress"] = None
    flows["direction"] = None
    flows.loc[:, ["localAddress", "remoteAddress", "direction"]] = (
        flows.loc[:, ["ipVersion", "sourceAddress", "destinationAddress"]]
        .apply(return_local_and_remote_address, axis="columns", result_type="expand")
        .rename(columns={0: "localAddress", 1: "remoteAddress", 2: "direction"})
    )
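    # note: result_type="expand" yields integer columns 0/1/2; renaming them
    # lets the .loc assignment above align by column name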
    # add DNS
    flows["localName"] = flows["localAddress"].apply(get_domain_name)
    flows["remoteName"] = flows["remoteAddress"].apply(get_domain_name)
    # convert decimal MAC addresses to strings
    mac_address_columns = [
        "sourceMacAddress",
        "destinationMacAddress",
        "postDestinationMacAddress",
    ]
    for col in mac_address_columns:
        try:
            flows[col] = flows[col].apply(convert_decimal_mac_address_to_string)
        except KeyError:
            flows[col] = None
    flows["destinationMacAddress"] = flows["destinationMacAddress"].fillna(
        flows["postDestinationMacAddress"]
    )
    # fill missing ports (e.g. ICMP)
    port_columns = [
        "sourceTransportPort",
        "destinationTransportPort",
    ]
    for col in port_columns:
        try:
            flows[col] = flows[col].fillna(0)
        except KeyError:
            flows[col] = 0
        # zero out ephemeral ports so the service port is kept below
        flows[col] = flows[col].apply(convert_ephemeral_ports)
    # set main port
    flows["port"] = flows[["sourceTransportPort", "destinationTransportPort"]].apply(
        set_main_port, axis="columns"
    )
    # set readable protocol
    try:
        flows["protocolIdentifier"] = flows["protocolIdentifier"].apply(
            convert_protocol
        )
    except KeyError:
        flows["protocolIdentifier"] = convert_protocol(None)
    # explode intervals
    flows = flows.explode("intervalStart")
    flows["intervalEnd"] = flows["intervalStart"] + 60 * 1000
    # set start and end timestamps within the time window
    flows["timestampStart"] = flows["intervalStart"]
    flows["timestampStart"] = flows["timestampStart"].clip(
        lower=flows["flowStartMilliseconds"]
    )
    flows["timestampEnd"] = flows["intervalEnd"]
    flows["timestampEnd"] = flows["timestampEnd"].clip(
        upper=flows["flowEndMilliseconds"]
    )
    # calculate octets and packets within the time window
    # note: might be NaN if durationMilliseconds = 0, so we fill with the original value
    flows["durationMilliseconds"] = (
        flows["flowEndMilliseconds"] - flows["flowStartMilliseconds"]
    )
    flows["octets"] = (
        (flows["octetDeltaCount"] / flows["durationMilliseconds"])
        * (flows["timestampEnd"] - flows["timestampStart"])
    ).fillna(flows["octetDeltaCount"])
    flows["packets"] = (
        (flows["packetDeltaCount"] / flows["durationMilliseconds"])
        * (flows["timestampEnd"] - flows["timestampStart"])
    ).fillna(flows["packetDeltaCount"])
    flows["connections"] = 1
    # rounding
    flows["octets"] = flows["octets"].apply(lambda n: int(round(n, 0)))
    flows["packets"] = flows["packets"].apply(lambda n: int(round(n, 0)))
    # generate a fresh id per interval row and use it as the index
    flows["id"] = [uuid.uuid4().hex for _ in range(len(flows))]
    flows = flows.set_index(["id"])
    # set timestamp info (wall-clock fields in the Europe/Brussels timezone)
    brussels = pytz.timezone("Europe/Brussels")
    local_start = flows["timestampStart"].apply(
        lambda t: ms_to_ts(t).astimezone(brussels)
    )
    flows["year"] = local_start.apply(lambda t: t.year)
    flows["month"] = local_start.apply(lambda t: t.month)
    flows["day"] = local_start.apply(lambda t: t.day)
    flows["hour"] = local_start.apply(lambda t: t.hour)
    flows["minute"] = local_start.apply(lambda t: t.minute)
    flows["offset"] = local_start.apply(lambda t: t.utcoffset().total_seconds())
    # remove unnecessary columns and rename the rest to database column names
    flow_columns = set(flows.columns)
    for col in flow_columns.difference(IPFIX_TO_DB_MAP.keys()):
        del flows[col]
    flows.columns = [IPFIX_TO_DB_MAP[col] for col in flows.columns]
    return flows
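
# illustrative input for process_message (field names follow the IPFIX
# information elements referenced above; the values are made up):
# process_message({
#     "flows": [{
#         "ipVersion": 4,
#         "protocolIdentifier": 6,
#         "sourceIPv4Address": 3232246273,       # 192.168.42.1
#         "destinationIPv4Address": 134744072,   # 8.8.8.8
#         "sourceTransportPort": 51234,
#         "destinationTransportPort": 443,
#         "octetDeltaCount": 12345,
#         "packetDeltaCount": 42,
#         "flowStartMilliseconds": 1700000000000,
#         "flowEndMilliseconds": 1700000065000,
#     }]
# })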

# main function
def main():
    session = Session()
    while True:
        files = [
            f
            for f in os.listdir(CACHE_DIR)
            if os.path.isfile(os.path.join(CACHE_DIR, f))
        ]
        # process at most 32 files per iteration
        files = files[0:32]
        if len(files) > 0:
            now = datetime.datetime.utcnow()
            flows = pandas.DataFrame()
            # claim the files by moving them into this process's directory;
            # iterate over a copy so entries can be removed while looping
            for f in files[:]:
                os.makedirs(PROCESSING_DIR, exist_ok=True)
                try:
                    shutil.move(
                        os.path.join(CACHE_DIR, f), os.path.join(PROCESSING_DIR, f),
                    )
                except FileNotFoundError:
                    # the file vanished (e.g. claimed by another process)
                    files.remove(f)
            for f in files:
                # a freshly moved file may still be incomplete; retry briefly
                for _ in range(10):
                    with gzip.open(os.path.join(PROCESSING_DIR, f), "rb") as ifile:
                        try:
                            message = json.loads(ifile.read().decode())
                        except JSONDecodeError:
                            time.sleep(0.1)
                        else:
                            flows = pandas.concat([flows, process_message(message)])
                            break
            if len(flows) > 0:
                logging.info("Storing parsed export packets to database")
                flows["unix_updated_ms"] = ts_to_ms(
                    pytz.utc.localize(datetime.datetime.utcnow())
                )
                flows.to_sql(
                    ParsedExportPacket.__tablename__, session.bind, if_exists="append"
                )
            # archive the processed files under parsed/<year>/<month>/<day>/<hour>/
            for f in files:
                dst_dir = os.path.join(
                    PARSED_DIR,
                    str(now.year).zfill(4),
                    str(now.month).zfill(2),
                    str(now.day).zfill(2),
                    str(now.hour).zfill(2),
                )
                os.makedirs(dst_dir, exist_ok=True)
                shutil.move(
                    os.path.join(PROCESSING_DIR, f), os.path.join(dst_dir, f),
                )
            logging.info("Moved processed files from cache to archive")
        time.sleep(1)
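
# crash recovery: if main() raises, any files still in this process's
# processing directory are moved back to the cache so a later run retries
# them; the (then empty) processing directory is removed on the way out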
if __name__ == "__main__":
    try:
        main()
    except BaseException:
        try:
            for pf in os.listdir(PROCESSING_DIR):
                shutil.move(
                    os.path.join(PROCESSING_DIR, pf), os.path.join(CACHE_DIR, pf),
                )
        except BaseException:
            logging.error("Could not clean processing directory")
        raise
    finally:
        try:
            os.rmdir(PROCESSING_DIR)
        except OSError:
            pass