-
Notifications
You must be signed in to change notification settings - Fork 27
/
Copy pathevtxtoelk.py
146 lines (133 loc) · 7.06 KB
/
evtxtoelk.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
import contextlib
import mmap
import traceback
import json
import argparse
from collections import OrderedDict
from datetime import datetime
from Evtx.Evtx import FileHeader
from Evtx.Views import evtx_file_xml_view
from elasticsearch import Elasticsearch, helpers
import xmltodict
import sys
class EvtxToElk:
    """Parse a Windows EVTX event log and bulk-index its records into Elasticsearch."""

    @staticmethod
    def bulk_to_elasticsearch(es, bulk_queue):
        """Send one batch of records to Elasticsearch via the bulk helper.

        :param es: connected ``Elasticsearch`` client
        :param bulk_queue: list of action dicts (each carries ``_index``/``_type``)
        :return: True on success, False if the bulk call raised (traceback printed)
        """
        try:
            helpers.bulk(es, bulk_queue)
            return True
        except Exception:
            # BUGFIX: was `print(traceback.print_exc())`, which printed the
            # traceback and then a spurious "None" (print_exc returns None).
            # Also narrowed the bare `except:` so KeyboardInterrupt etc. escape.
            traceback.print_exc()
            return False

    @staticmethod
    def evtx_to_elk(filename, elk_ip, elk_index="hostlogs", bulk_queue_len_threshold=500, metadata=None):
        """Stream every record of an EVTX file into Elasticsearch.

        :param filename: path to the .evtx file
        :param elk_ip: Elasticsearch host (and port), e.g. "localhost:9200"
        :param elk_index: index (and legacy ``_type``) to write records into
        :param bulk_queue_len_threshold: flush to ES once this many records queue up
        :param metadata: optional dict attached to every record under ``meta``
        """
        # BUGFIX: default was the mutable `metadata={}`, shared across calls.
        if metadata is None:
            metadata = {}
        bulk_queue = []
        es = Elasticsearch([elk_ip])
        # BUGFIX: EVTX is binary — open in "rb" instead of text mode before mmap.
        with open(filename, "rb") as infile:
            with contextlib.closing(mmap.mmap(infile.fileno(), 0, access=mmap.ACCESS_READ)) as buf:
                fh = FileHeader(buf, 0x0)
                data = ""
                for xml, record in evtx_file_xml_view(fh):
                    try:
                        contains_event_data = False
                        log_line = xmltodict.parse(xml)
                        # Normalize the event timestamp: SystemTime may or may not
                        # carry fractional seconds, so pick the matching format.
                        date = log_line.get("Event").get("System").get("TimeCreated").get("@SystemTime")
                        if "." not in str(date):
                            date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S")
                        else:
                            date = datetime.strptime(date, "%Y-%m-%d %H:%M:%S.%f")
                        log_line['@timestamp'] = str(date.isoformat())
                        log_line["Event"]["System"]["TimeCreated"]["@SystemTime"] = str(date.isoformat())
                        # Flatten Event/EventData/Data into searchable key/value
                        # pairs; anything non-list is stashed under RawData so the
                        # field mapping in ES stays consistent across records.
                        data = ""
                        if log_line.get("Event") is not None:
                            data = log_line.get("Event")
                            if log_line.get("Event").get("EventData") is not None:
                                data = log_line.get("Event").get("EventData")
                                if log_line.get("Event").get("EventData").get("Data") is not None:
                                    data = log_line.get("Event").get("EventData").get("Data")
                                    if isinstance(data, list):
                                        # Typical case: a list of {"@Name": ..., "#text": ...}
                                        # items — turn it into a flat dict.
                                        contains_event_data = True
                                        data_vals = {}
                                        for dataitem in data:
                                            try:
                                                if dataitem.get("@Name") is not None:
                                                    data_vals[str(dataitem.get("@Name"))] = str(dataitem.get("#text"))
                                            except Exception:
                                                # Item without dict interface — skip it.
                                                pass
                                        log_line["Event"]["EventData"]["Data"] = data_vals
                                    else:
                                        # Single Data node: serialize instead of mapping fields.
                                        if isinstance(data, OrderedDict):
                                            log_line["Event"]["EventData"]["RawData"] = json.dumps(data)
                                        else:
                                            log_line["Event"]["EventData"]["RawData"] = str(data)
                                        del log_line["Event"]["EventData"]["Data"]
                                else:
                                    # EventData exists but has no Data child.
                                    if isinstance(data, OrderedDict):
                                        log_line["Event"]["RawData"] = json.dumps(data)
                                    else:
                                        log_line["Event"]["RawData"] = str(data)
                                    del log_line["Event"]["EventData"]
                            else:
                                # No EventData at all — hoist the Event body.
                                if isinstance(data, OrderedDict):
                                    log_line = dict(data)
                                else:
                                    log_line["RawData"] = str(data)
                                    del log_line["Event"]
                        else:
                            pass
                        # Queue the record for bulk insertion.
                        event_data = json.loads(json.dumps(log_line))
                        event_data["_index"] = elk_index
                        event_data["_type"] = elk_index
                        event_data["meta"] = metadata
                        bulk_queue.append(event_data)
                        # BUGFIX: was `==`; `>=` guarantees a flush even if an
                        # exception path ever lets the queue grow past the mark.
                        if len(bulk_queue) >= bulk_queue_len_threshold:
                            print('Bulkingrecords to ES: ' + str(len(bulk_queue)))
                            # start parallel bulking to ElasticSearch, default 500 chunks;
                            if EvtxToElk.bulk_to_elasticsearch(es, bulk_queue):
                                bulk_queue = []
                            else:
                                print('Failed to bulk data to Elasticsearch')
                                sys.exit(1)
                    except Exception:
                        # Best-effort: log the record that failed and keep going.
                        print("***********")
                        print("Parsing Exception")
                        traceback.print_exc()
                        print(json.dumps(log_line, indent=2))
                        print("***********")
                # Flush any remaining records in the bulk queue.
                if len(bulk_queue) > 0:
                    print('Bulking final set of records to ES: ' + str(len(bulk_queue)))
                    if EvtxToElk.bulk_to_elasticsearch(es, bulk_queue):
                        bulk_queue = []
                    else:
                        print('Failed to bulk data to Elasticsearch')
                        sys.exit(1)
if __name__ == "__main__":
    # CLI entry point: evtxtoelk.py <evtxfile> [elk_ip] [-i index] [-s size] [-meta json]
    parser = argparse.ArgumentParser()
    parser.add_argument('evtxfile', help="Evtx file to parse")
    # BUGFIX: a required positional ignores `default=`; nargs='?' makes the
    # "localhost" default actually reachable while keeping every previously
    # valid invocation working unchanged.
    parser.add_argument('elk_ip', nargs='?', default="localhost", help="IP (and port) of ELK instance")
    parser.add_argument('-i', default="hostlogs", help="ELK index to load data into")
    parser.add_argument('-s', default=500, type=int, help="Size of queue")
    parser.add_argument('-meta', default={}, type=json.loads, help="Metadata to add to records")
    # Parse arguments and hand off to the loader.
    args = parser.parse_args()
    EvtxToElk.evtx_to_elk(args.evtxfile, args.elk_ip, elk_index=args.i, bulk_queue_len_threshold=int(args.s), metadata=args.meta)