import re
import binascii
import mmap
from functools import wraps

from BinaryParser import ParseException
from BinaryParser import Block
from BinaryParser import debug
from BinaryParser import warning
from Nodes import NameStringNode
from Nodes import TemplateNode
from Nodes import InvalidRecordException
from Nodes import RootNode


class Evtx(object):
    """
    A convenience class that makes it easy to open an
      EVTX file and start iterating the important structures.
    Note, this class must be used in a context statement
      (see the `with` keyword).
    Note, this class will mmap the target file, so ensure
      your platform supports this operation.
    """
50 """
51 @type filename: str
52 @param filename: A string that contains the path
53 to the EVTX file to open.
54 """
55 self._filename = filename
56 self._buf = None
57 self._f = None
58 self._fh = None
59
    def __enter__(self):
        self._f = open(self._filename, "rb")
        self._buf = mmap.mmap(self._f.fileno(), 0, access=mmap.ACCESS_READ)
        self._fh = FileHeader(self._buf, 0x0)
        return self

    def __exit__(self, type, value, traceback):
        self._buf.close()
        self._f.close()
        self._fh = None

72 """
73 This decorator ensure that an instance of the
74 Evtx class is used within a context statement. That is,
75 that the `with` statement is used, or `__enter__()`
76 and `__exit__()` are called explicitly.
77 """
78 @wraps(func)
79 def wrapped(self, *args, **kwargs):
80 if self._buf is None:
81 raise TypeError("An Evtx object must be used with"
82 " a context (see the `with` statement).")
83 else:
84 return func(self, *args, **kwargs)
85 return wrapped
86
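    # A minimal sketch of what ensure_contexted guards against (the filename
    # below is hypothetical): calling a decorated method before __enter__()
    # has run leaves self._buf as None, so the wrapper raises TypeError.
    #
    #     log = Evtx("System.evtx")
    #     log.records()       # raises TypeError: not inside a `with` block
    #     with Evtx("System.evtx") as log:
    #         log.records()   # OK: __enter__() has mmap'd the file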
    @ensure_contexted
    def chunks(self):
        """
        Get each of the ChunkHeaders from within this EVTX file.

        @rtype generator of ChunkHeader
        @return A generator of ChunkHeaders from this EVTX file.
        """
        for chunk in self._fh.chunks():
            yield chunk

    @ensure_contexted
    def records(self):
        """
        Get each of the Records from within this EVTX file.

        @rtype generator of Record
        @return A generator of Records from this EVTX file.
        """
        for chunk in self.chunks():
            for record in chunk.records():
                yield record

    @ensure_contexted
    def get_record(self, record_num):
        """
        Get a Record by record number.

        @type record_num: int
        @param record_num: The record number of the record to fetch.
        @rtype Record or None
        @return The record requested by record number, or None if
          the record is not found.
        """
        return self._fh.get_record(record_num)

    @ensure_contexted
    def get_file_header(self):
        return self._fh

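# A short usage sketch (the filename is hypothetical): Evtx must be used as a
# context manager so that the target file is mmap'd by __enter__() and
# released by __exit__(); within the block, chunks and records can be
# iterated, or a single record fetched by record number.
#
#     with Evtx("System.evtx") as log:
#         for chunk in log.chunks():
#             print chunk.log_last_record_number()
#         for record in log.records():
#             print record.record_num()
#         if log.get_record(12) is None:
#             print "record 12 not present"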

class FileHeader(Block):
    def __init__(self, buf, offset):
        debug("FILE HEADER at %s." % (hex(offset)))
        super(FileHeader, self).__init__(buf, offset)
        self.declare_field("string", "magic", 0x0, length=8)
        self.declare_field("qword", "oldest_chunk")
        self.declare_field("qword", "current_chunk_number")
        self.declare_field("qword", "next_record_number")
        self.declare_field("dword", "header_size")
        self.declare_field("word", "minor_version")
        self.declare_field("word", "major_version")
        self.declare_field("word", "header_chunk_size")
        self.declare_field("word", "chunk_count")
        self.declare_field("binary", "unused1", length=0x4c)
        self.declare_field("dword", "flags")
        self.declare_field("dword", "checksum")

    def __repr__(self):
        return "FileHeader(buf=%r, offset=%r)" % (self._buf, self._offset)

    def __str__(self):
        return "FileHeader(offset=%s)" % (hex(self._offset))

152 """
153 @return A boolean that indicates if the first eight bytes of
154 the FileHeader match the expected magic value.
155 """
156 return self.magic() == "ElfFile\x00"
157
159 """
160 @return A integer in the range of an unsigned int that
161 is the calculated CRC32 checksum off the first 0x78 bytes.
162 This is consistent with the checksum stored by the FileHeader.
163 """
164 return binascii.crc32(self.unpack_binary(0, 0x78)) & 0xFFFFFFFF
165
167 """
168 @return A boolean that indicates that the FileHeader
169 successfully passes a set of heuristic checks that
170 all EVTX FileHeaders should pass.
171 """
172 return self.check_magic() and \
173 self.major_version() == 0x3 and \
174 self.minor_version() == 0x1 and \
175 self.header_chunk_size() == 0x1000 and \
176 self.checksum() == self.calculate_checksum()
177
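    # For example (a sketch; `buf` is the mmap'd file contents): the stored
    # checksum covers only the first 0x78 header bytes, so it can also be
    # recomputed without this class and compared against checksum():
    #
    #     binascii.crc32(buf[0x0:0x78]) & 0xFFFFFFFF == FileHeader(buf, 0x0).checksum()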
179 """
180 @return A boolean that indicates that the log has been
181 opened and was changed, though not all changes might be
182 reflected in the file header.
183 """
184 return self.flags() & 0x1 == 0x1
185
187 """
188 @return A boolean that indicates that the log
189 has reached its maximum configured size and the retention
190 policy in effect does not allow to reclaim a suitable amount
191 of space from the oldest records and an event message could
192 not be written to the log file.
193 """
194 return self.flags() & 0x2
195
197 """
198 @return A ChunkHeader instance that is the first chunk
199 in the log file, which is always found directly after
200 the FileHeader.
201 """
202 ofs = self._offset + self.header_chunk_size()
203 return ChunkHeader(self._buf, ofs)
204
206 """
207 @return A ChunkHeader instance that is the current chunk
208 indicated by the FileHeader.
209 """
210 ofs = self._offset + self.header_chunk_size()
211 ofs += (self.current_chunk_number() * 0x10000)
212 return ChunkHeader(self._buf, ofs)
213
215 """
216 @return A generator that yields the chunks of the log file
217 starting with the first chunk, which is always found directly
218 after the FileHeader, and continuing to the end of the file.
219 """
220 ofs = self._offset + self.header_chunk_size()
221 while ofs + 0x10000 < len(self._buf):
222 yield ChunkHeader(self._buf, ofs)
223 ofs += 0x10000
224
226 """
227 Get a Record by record number.
228
229 @type record_num: int
230 @param record_num: The record number of the the record to fetch.
231 @rtype Record or None
232 @return The record request by record number, or None if the
233 record is not found.
234 """
235 for chunk in self.chunks():
236 first_record = chunk.log_first_record_number()
237 last_record = chunk.log_last_record_number()
238 if not (first_record <= record_num <= last_record):
239 continue
240 for record in chunk.records():
241 if record.record_num() == record_num:
242 return record
243 return None
244
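# A sketch of using FileHeader directly (the filename is hypothetical): the
# header lives at offset 0x0, verify() applies the magic/version/checksum
# heuristics above, and chunks() walks the 0x10000-byte chunks that follow
# the 0x1000-byte header.
#
#     with open("System.evtx", "rb") as f:
#         buf = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
#         fh = FileHeader(buf, 0x0)
#         if fh.verify() and not fh.is_dirty():
#             print "chunk count: %d" % fh.chunk_count()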

class Template(object):
    def __init__(self, template_node):
        self._template_node = template_node
        self._xml = None

252 """
253 TODO(wb): One day, nodes should generate format strings
254 instead of the XML format made-up abomination.
255 """
256 if self._xml is not None:
257 return
258 matcher = "\[(?:Normal|Conditional) Substitution\(index=(\d+), type=\d+\)\]"
259 self._xml = re.sub(matcher, "{\\1:}",
260 self._template_node.template_format().replace("{", "{{").replace("}", "}}"))
261
263 """
264
265 @type substitutions: list of VariantTypeNode
266 """
267 self._load_xml()
268 return self._xml.format(*map(lambda n: n.xml(), substitutions))
269
    def node(self):
        return self._template_node

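# A sketch of what Template does with a (hypothetical) template format string:
# _load_xml() escapes literal braces and rewrites each
# "[Normal Substitution(index=N, type=T)]" placeholder into a "{N:}" format
# field, and make_substitutions() then fills those fields with the xml() of
# each VariantTypeNode.
#
#     fmt = "<Data>[Normal Substitution(index=0, type=4)]</Data>"
#     re.sub("\[(?:Normal|Conditional) Substitution\(index=(\d+), type=\d+\)\]",
#            "{\\1:}", fmt.replace("{", "{{").replace("}", "}}"))
#     # -> "<Data>{0:}</Data>", which str.format() can fill positionally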

class ChunkHeader(Block):
    def __init__(self, buf, offset):
        debug("CHUNK HEADER at %s." % (hex(offset)))
        super(ChunkHeader, self).__init__(buf, offset)
        self._strings = None
        self._templates = None

        self.declare_field("string", "magic", 0x0, length=8)
        self.declare_field("qword", "file_first_record_number")
        self.declare_field("qword", "file_last_record_number")
        self.declare_field("qword", "log_first_record_number")
        self.declare_field("qword", "log_last_record_number")
        self.declare_field("dword", "header_size")
        self.declare_field("dword", "last_record_offset")
        self.declare_field("dword", "next_record_offset")
        self.declare_field("dword", "data_checksum")
        self.declare_field("binary", "unused", length=0x44)
        self.declare_field("dword", "header_checksum")

    def __repr__(self):
        return "ChunkHeader(buf=%r, offset=%r)" % (self._buf, self._offset)

    def __str__(self):
        return "ChunkHeader(offset=%s)" % (hex(self._offset))

300 """
301 @return A boolean that indicates if the first eight bytes of
302 the ChunkHeader match the expected magic value.
303 """
304 return self.magic() == "ElfChnk\x00"
305
307 """
308 @return A integer in the range of an unsigned int that
309 is the calculated CRC32 checksum of the ChunkHeader fields.
310 """
311 data = self.unpack_binary(0x0, 0x78)
312 data += self.unpack_binary(0x80, 0x180)
313 return binascii.crc32(data) & 0xFFFFFFFF
314
316 """
317 @return A integer in the range of an unsigned int that
318 is the calculated CRC32 checksum of the Chunk data.
319 """
320 data = self.unpack_binary(0x200, self.next_record_offset() - 0x200)
321 return binascii.crc32(data) & 0xFFFFFFFF
322
324 """
325 @return A boolean that indicates that the FileHeader
326 successfully passes a set of heuristic checks that
327 all EVTX ChunkHeaders should pass.
328 """
329 return self.check_magic() and \
330 self.calculate_header_checksum() == self.header_checksum() and \
331 self.calculate_data_checksum() == self.data_checksum()
332
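    # For example (a sketch), the two checksums cover disjoint regions of the
    # chunk: the header CRC32 covers bytes 0x0-0x78 and 0x80-0x200 (skipping
    # the region that holds header_checksum), while the data CRC32 covers
    # 0x200 up to next_record_offset(). A simple triage pass over an mmap'd
    # buffer might count chunks that fail these checks:
    #
    #     bad = sum(1 for c in FileHeader(buf, 0x0).chunks() if not c.verify())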
    def _load_strings(self):
        if self._strings is None:
            self._strings = {}
        for i in xrange(64):
            ofs = self.unpack_dword(0x80 + (i * 4))
            while ofs > 0:
                string_node = self.add_string(ofs)
                ofs = string_node.next_offset()

343 """
344 @return A dict(offset --> NameStringNode)
345 """
346 if not self._strings:
347 self._load_strings()
348 return self._strings
349
351 """
352 @param offset An integer offset that is relative to the start of
353 this chunk.
354 @param parent (Optional) The parent of the newly created
355 NameStringNode instance. (Default: this chunk).
356 @return None
357 """
358 if self._strings is None:
359 self._load_strings()
360 string_node = NameStringNode(self._buf, self._offset + offset,
361 self, parent or self)
362 self._strings[offset] = string_node
363 return string_node
364
366 """
367 @return None
368 """
369 if self._templates is None:
370 self._templates = {}
371 for i in xrange(32):
372 ofs = self.unpack_dword(0x180 + (i * 4))
373 while ofs > 0:
374
375
376 token = self.unpack_byte(ofs - 10)
377 pointer = self.unpack_dword(ofs - 4)
378 if token != 0x0c or pointer != ofs:
379 warning("Unexpected token encountered")
380 ofs = 0
381 continue
382 template = self.add_template(ofs)
383 ofs = template.next_offset()
384
386 """
387 @param offset An integer which contains the chunk-relative offset
388 to a template to load into this Chunk.
389 @param parent (Optional) The parent of the newly created
390 TemplateNode instance. (Default: this chunk).
391 @return Newly added TemplateNode instance.
392 """
393 if self._templates is None:
394 self._load_templates()
395
396 node = TemplateNode(self._buf, self._offset + offset,
397 self, parent or self)
398 self._templates[offset] = node
399 return node
400
402 """
403 @return A dict(offset --> Template) of all encountered
404 templates in this Chunk.
405 """
406 if not self._templates:
407 self._load_templates()
408 return self._templates
409
    def first_record(self):
        return Record(self._buf, self._offset + 0x200, self)

    def records(self):
        record = self.first_record()
        while record._offset < self._offset + self.next_record_offset():
            yield record
            try:
                record = Record(self._buf,
                                record._offset + record.length(),
                                self)
            except InvalidRecordException:
                return

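# A sketch of working with a single chunk (`buf` is an mmap of a hypothetical
# EVTX file): chunks are 0x10000 bytes, the first one starts right after the
# 0x1000-byte file header, and verify() recomputes both the header and data
# CRC32s before the records are walked.
#
#     chunk = ChunkHeader(buf, 0x1000)
#     if chunk.verify():
#         for record in chunk.records():
#             print record.record_num()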

class Record(Block):
    def __init__(self, buf, offset, chunk):
        debug("Record at %s." % (hex(offset)))
        super(Record, self).__init__(buf, offset)
        self._chunk = chunk

        self.declare_field("dword", "magic", 0x0)  # 0x00002a2a
        self.declare_field("dword", "size")
        self.declare_field("qword", "record_num")
        self.declare_field("filetime", "timestamp")

        if self.size() > 0x10000:
            raise InvalidRecordException()

        self.declare_field("dword", "size2", self.size() - 4)

    def __repr__(self):
        return "Record(buf=%r, offset=%r)" % (self._buf, self._offset)

    def __str__(self):
        return "Record(offset=%s)" % (hex(self._offset))

    def root(self):
        return RootNode(self._buf, self._offset + 0x18, self._chunk, self)

    def length(self):
        return self.size()

    def verify(self):
        return self.size() == self.size2()

457 """
458 Return the raw data block which makes up this record as a bytestring.
459
460 @rtype str
461 @return A string that is a copy of the buffer that makes
462 up this record.
463 """
464 return self._buf[self.offset():self.offset() + self.size()]
465
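# A sketch of typical per-record handling (filename hypothetical, error
# handling omitted): verify() confirms the trailing size field matches the
# leading one, data() hands back the record's raw bytes for hashing or
# carving, and root() returns the binary-XML RootNode from which an XML
# rendering can be built by other tooling.
#
#     with Evtx("System.evtx") as log:
#         for record in log.records():
#             if not record.verify():
#                 continue
#             raw = record.data()
#             root = record.root()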