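# EventTranscriptParser.py
# Extracts forensically useful records from EventTranscript.db (Windows
# Diagnostic Data): MS Edge browser history, installed software, WLAN scan
# results, WiFi connection events, PnP device installs, user defaults and
# physical disk information, written out as CSV/TXT reports.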
__author__ = "Abhiram Kumar" | ||
|
||
import json | ||
import pandas as pd | ||
import sqlite3 | ||
import argparse | ||
import os | ||
|
||
def BrowserHistoryParse(f):
    """Extract MS Edge browsing history (navigationUrl events, tag_id 1) as a dict of lists."""
    conn = sqlite3.connect(f)
    cursor = conn.cursor()
    BrowserHistoryTable = pd.read_sql_query("SELECT events_persisted.sid, events_persisted.payload from events_persisted inner join event_tags on events_persisted.full_event_name_hash = event_tags.full_event_name_hash inner join tag_descriptions on event_tags.tag_id = tag_descriptions.tag_id where tag_descriptions.tag_id = 1", conn)
    payload = BrowserHistoryTable['payload'].values.tolist()
    sid = BrowserHistoryTable['sid'].values.tolist()
    payload_navigation_URL = []
    payload_navigation_URL_time = []
    payload_navigation_URL_date = []
    true_sid = []
    for i in range(len(payload)):
        temp = json.loads(payload[i])
        if "navigationUrl" in temp['data'] and len(temp['data']['navigationUrl']) > 0:
            payload_navigation_URL.append(temp['data']['navigationUrl'])
            true_sid.append(sid[i])
            # Timestamps are ISO 8601 UTC ("...T...Z"); split into date and time parts.
            timestamp = (temp['data']['Timestamp']).replace("T", " ").replace("Z", "")
            timestamp = timestamp.split(" ")
            payload_navigation_URL_date.append(timestamp[0])
            payload_navigation_URL_time.append(timestamp[1] + " UTC")

    temp_dict = {'SID': true_sid, 'Date': payload_navigation_URL_date, 'Time': payload_navigation_URL_time, 'VisitedURL': payload_navigation_URL}
    return temp_dict

def SoftwareInventory(f):
    """Extract the installed-software inventory (InventoryApplicationAdd events) as a dict of lists."""
    conn = sqlite3.connect(f)
    SoftwareInventoryTable = pd.read_sql_query("""SELECT events_persisted.sid, events_persisted.payload from events_persisted inner join event_tags on events_persisted.full_event_name_hash = event_tags.full_event_name_hash inner join tag_descriptions on event_tags.tag_id = tag_descriptions.tag_id where (tag_descriptions.tag_id = 31 and events_persisted.full_event_name="Microsoft.Windows.Inventory.Core.InventoryApplicationAdd")""", conn)
    payload = SoftwareInventoryTable['payload'].values.tolist()
    sid = SoftwareInventoryTable['sid'].values.tolist()
    Program_Name = []
    Path = []
    OSVersionAtInstallTime = []
    InstallDate = []
    AppVersion = []
    true_sid = []

    for i in range(len(payload)):
        temp = json.loads(payload[i])
        Program_Name.append(temp['data']['Name'])
        Path.append(temp['data']['RootDirPath'])
        OSVersionAtInstallTime.append(temp['data']['OSVersionAtInstallTime'])
        if len(temp['data']['InstallDate']) > 0:
            InstallDate.append(temp['data']['InstallDate'] + " UTC")
        else:
            InstallDate.append("NULL")
        AppVersion.append(temp['data']['Version'])
        true_sid.append(sid[i])

    SoftwareInventorydict = {'SID': true_sid, 'Program Name': Program_Name, 'Install Path': Path, 'Install Date': InstallDate, 'Program Version': AppVersion, 'OS Version at Install Time': OSVersionAtInstallTime}
    return SoftwareInventorydict

def WlanScanResults(f):
    """Extract wireless scan results (WlanMSM.WirelessScanResults events) as a dict of lists."""
    conn = sqlite3.connect(f)
    cursor = conn.cursor()
    wlan_scan_results_table = pd.read_sql_query("""SELECT events_persisted.sid, events_persisted.payload from events_persisted inner join event_tags on events_persisted.full_event_name_hash = event_tags.full_event_name_hash inner join tag_descriptions on event_tags.tag_id = tag_descriptions.tag_id where (tag_descriptions.tag_id = 11 and events_persisted.full_event_name = "WlanMSM.WirelessScanResults")""", conn)
    payload = wlan_scan_results_table['payload'].values.tolist()
    sid = wlan_scan_results_table['sid'].values.tolist()
    ssid = []
    mac_addr = []
    time = []
    true_sid = []

    for i in range(len(payload)):
        temp = json.loads(payload[i])
        # ScanResults is a newline-separated list of tab-separated fields (SSID first, MAC third).
        scan_results_list = temp['data']['ScanResults'].split('\n')
        for j in range(len(scan_results_list) - 1):
            temp_list = scan_results_list[j].split('\t')
            ssid.append(temp_list[0])
            mac_addr.append(temp_list[2])
            time.append(temp['time'])
            true_sid.append(sid[i])

    WlanScanDict = {'SID': true_sid, 'Time': time, 'SSID': ssid, 'MAC Address': mac_addr}
    return WlanScanDict

def UserDefault(f, file):
    """Write user default settings (Census.Userdefault events) to a text report and return the file handle."""
    conn = sqlite3.connect(f)
    user_default_table = pd.read_sql_query("""SELECT events_persisted.sid, events_persisted.payload from events_persisted inner join event_tags on events_persisted.full_event_name_hash = event_tags.full_event_name_hash inner join tag_descriptions on event_tags.tag_id = tag_descriptions.tag_id where (tag_descriptions.tag_id = 11 and events_persisted.full_event_name = "Census.Userdefault")""", conn)
    payload = user_default_table['payload'].values.tolist()
    sid = user_default_table['sid'].values.tolist()
    true_sid = []
    temp_file = open(file, "w")
    for i in range(len(payload)):
        temp = json.loads(payload[i])
        temp_file.write("Device Make: " + temp['ext']['protocol']['devMake'] + "\n")
        temp_file.write("Device Model: " + temp['ext']['protocol']['devModel'] + "\n")
        temp_file.write("Timezone: " + temp['ext']['loc']['tz'] + "\n")
        true_sid.append(sid[i])
        temp_file.write("Default Browser: " + temp['data']['DefaultBrowserProgId'] + "\n")
        # DefaultApp is a pipe-separated list of per-protocol/extension default handlers.
        temp_list = temp['data']['DefaultApp'].split('|')
        for j in range(len(temp_list)):
            temp_file.write(temp_list[j] + "\n")
        temp_file.write("----------------------------------\n\n")
    return temp_file

def PhysicalDiskInfo(f, file):
    """Write physical disk details (InventoryMiscellaneousPhysicalDiskInfoAdd events) to a text report."""
    conn = sqlite3.connect(f)
    physicaldisk_info_table = pd.read_sql_query("""SELECT events_persisted.payload from events_persisted inner join event_tags on events_persisted.full_event_name_hash = event_tags.full_event_name_hash inner join tag_descriptions on event_tags.tag_id = tag_descriptions.tag_id where (tag_descriptions.tag_id = 11 and events_persisted.full_event_name = "Microsoft.Windows.Inventory.General.InventoryMiscellaneousPhysicalDiskInfoAdd")""", conn)
    payload = physicaldisk_info_table['payload'].values.tolist()
    temp_file = open(file, "w")
    for i in range(len(payload)):
        temp = json.loads(payload[i])
        temp_file.write("Device Id: " + temp['data']['DeviceId'] + "\n")
        temp_file.write("Serial Number: " + temp['data']['SerialNumber'] + "\n")
        temp_file.write("Size (in bytes): " + temp['data']['Size'] + "\n")
        temp_file.write("Number of partitions: " + str(temp['data']['NumPartitions']) + "\n")
        temp_file.write("Bytes per sector: " + str(temp['data']['BytesPerSector']) + "\n")
        temp_file.write("Media type: " + temp['data']['MediaType'] + "\n")
        temp_file.write("----------------------------------\n\n")
    return temp_file

def WiFiConnectedEvents(f):
    """Extract WiFi connection events (WiFiConnectedEvent) as a dict of lists."""
    conn = sqlite3.connect(f)
    wifi_connected_events_table = pd.read_sql_query("""SELECT events_persisted.sid, events_persisted.payload from events_persisted inner join event_tags on events_persisted.full_event_name_hash = event_tags.full_event_name_hash inner join tag_descriptions on event_tags.tag_id = tag_descriptions.tag_id where (tag_descriptions.tag_id = 11 and events_persisted.full_event_name = "Microsoft.OneCore.NetworkingTriage.GetConnected.WiFiConnectedEvent")""", conn)
    payload = wifi_connected_events_table['payload'].values.tolist()
    sid = wifi_connected_events_table['sid'].values.tolist()
    interfaceGuid = []
    interfaceType = []
    interfaceDescription = []
    ssid = []
    authAlgo = []
    bssid = []
    apManufacturer = []
    apModelName = []
    apModelNum = []
    true_sid = []

    for i in range(len(payload)):
        temp = json.loads(payload[i])
        interfaceGuid.append(temp['data']['interfaceGuid'])
        interfaceType.append(temp['data']['interfaceType'])
        interfaceDescription.append(temp['data']['interfaceDescription'])
        ssid.append(temp['data']['ssid'])
        authAlgo.append(temp['data']['authAlgo'])
        bssid.append(temp['data']['bssid'])
        apManufacturer.append(temp['data']['apManufacturer'])
        apModelName.append(temp['data']['apModelName'])
        apModelNum.append(temp['data']['apModelNum'])
        true_sid.append(sid[i])

    wifi_connected_results_dict = {'SID': true_sid, 'SSID': ssid, 'BSSID': bssid, 'AP Manufacturer': apManufacturer, 'AP Model Name': apModelName, 'AP Model No.': apModelNum, 'Interface Type': interfaceType, 'Interface GUID': interfaceGuid, 'Interface Description': interfaceDescription}
    return wifi_connected_results_dict

def PnPDeviceParse(f):
    """Extract PnP device installation records (InventoryDevicePnpAdd events) as a dict of lists."""
    conn = sqlite3.connect(f)
    pnp_device_table = pd.read_sql_query("""SELECT events_persisted.sid, events_persisted.payload from events_persisted inner join event_tags on events_persisted.full_event_name_hash = event_tags.full_event_name_hash inner join tag_descriptions on event_tags.tag_id = tag_descriptions.tag_id where (tag_descriptions.tag_id = 11 and events_persisted.full_event_name = "Microsoft.Windows.Inventory.Core.InventoryDevicePnpAdd")""", conn)
    payload = pnp_device_table['payload'].values.tolist()
    sid = pnp_device_table['sid'].values.tolist()
    true_sid = []
    installdate = []
    firstinstalldate = []
    model = []
    manufacturer = []
    service = []
    parent_id = []
    object_id = []

    for i in range(len(payload)):
        # PnP payloads can contain backslashes (e.g. in device paths) that break
        # json.loads on the raw string, so escape the payload to bytes first.
        temp = json.loads(payload[i].encode('unicode_escape'))
        true_sid.append(sid[i])
        parent_id.append(temp['data']['ParentId'])
        object_id.append(temp['data']['baseData']['objectInstanceId'])
        installdate.append(temp['data']['InstallDate'])
        firstinstalldate.append(temp['data']['FirstInstallDate'])
        model.append(temp['data']['Model'])
        manufacturer.append(temp['data']['Manufacturer'])
        service.append(temp['data']['Service'])

    pnp_device_dict = {'SID': true_sid, 'Object ID': object_id, 'Install Date': installdate, 'First Install Date': firstinstalldate, 'Model': model, 'Manufacturer': manufacturer, 'Service': service, 'Parent ID': parent_id}
    return pnp_device_dict

if __name__=="__main__": | ||
|
||
event_transcript_parser=argparse.ArgumentParser( | ||
description='''EventTranscript.db parser by Abhiram Kumar.''', | ||
epilog= '''For any queries, please reach out to me via Twitter - @_abhiramkumar''') | ||
|
||
event_transcript_parser.add_argument('-f','--file', required=True, help="Please specify the path to EventTranscript.db") | ||
|
||
parser, empty_list = event_transcript_parser.parse_known_args() | ||
|
||
if os.path.exists(parser.file): | ||
BrowsingHistory = BrowserHistoryParse(parser.file) | ||
df = pd.DataFrame(BrowsingHistory) | ||
outfile = "BrowserHistory.csv" | ||
df.to_csv(outfile, index=False) | ||
print ("Output written to " + outfile) | ||
|
||
software_inventory = SoftwareInventory(parser.file) | ||
df = pd.DataFrame(software_inventory) | ||
outfile = "SoftwareInventory.csv" | ||
df.to_csv(outfile, index=False) | ||
print ("Output written to " + outfile) | ||
|
||
WlanScan = WlanScanResults(parser.file) | ||
df = pd.DataFrame(WlanScan) | ||
outfile = "WlanScan.csv" | ||
df.to_csv(outfile, index=False) | ||
print ("Output written to " + outfile) | ||
|
||
pnp_device = PnPDeviceParse(parser.file) | ||
df = pd.DataFrame(pnp_device) | ||
outfile = "PnpDeviceInstall.csv" | ||
df.to_csv(outfile, index=False) | ||
print ("Output written to " + outfile) | ||
|
||
wificonnectedevents = WiFiConnectedEvents(parser.file) | ||
df = pd.DataFrame(wificonnectedevents) | ||
outfile = "WiFiConnectedEvents.csv" | ||
df.to_csv(outfile, index=False) | ||
print ("Output written to " + outfile) | ||
|
||
outfile = "UserDefaults.txt" | ||
userdefaults = UserDefault(os.path.abspath(parser.file), outfile) | ||
print("Output written to "+ outfile) | ||
userdefaults.close() | ||
|
||
outfile = "PhysicalDiskInfo.txt" | ||
physical_disk_info = PhysicalDiskInfo(parser.file, outfile) | ||
print("Output written to "+ outfile) | ||
physical_disk_info.close() | ||
|
||
else: | ||
print(parser.print_help()) |
# EventTranscriptParser

**EventTranscriptParser** is a Python-based tool to extract forensically useful details from EventTranscript.db (the Windows Diagnostic Data database).

The database is found on Windows 10 systems at `C:\ProgramData\Microsoft\Diagnosis\EventTranscript\EventTranscript.db`.
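
For orientation, here is a minimal sketch (assuming you are working on a local copy of the database; the path below is only illustrative) that lists its tables. The parser relies on `events_persisted`, `event_tags` and `tag_descriptions`:

```python
import sqlite3

# Illustrative path -- point this at your copy of EventTranscript.db.
conn = sqlite3.connect(r"C:\Cases\EventTranscript.db")
for (name,) in conn.execute("SELECT name FROM sqlite_master WHERE type='table'"):
    print(name)
conn.close()
```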

The tool currently supports the following features:

+ Extracting MS Edge browser history.
+ Extracting the list of software/programs installed on the host system.
+ Extracting wireless scan results.
+ Extracting WiFi connection details (SSIDs, device manufacturers, etc.).
+ Extracting physical disk information (disk size, number of partitions, etc.).
+ Extracting PnP device installation information (install time, model, manufacturer, etc.).
+ MORE COMING SOON!!

### Requirements

Python 3.8 or above. Older versions of Python 3.x should work fine as well.

#### Dependencies

These are the libraries/modules needed to run the script (see the installation note below the list):
+ json
+ sqlite3
+ pandas
+ os
+ argparse
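
pandas is the only third-party dependency; json, sqlite3, os and argparse ship with the Python standard library. If needed, install pandas with `pip install pandas`.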

### Usage

The tool is completely CLI-based.

```bash
python EventTranscriptParser.py -f <Path-To-EventTranscript.db>
```
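
The script writes its output to the working directory: `BrowserHistory.csv`, `SoftwareInventory.csv`, `WlanScan.csv`, `PnpDeviceInstall.csv`, `WiFiConnectedEvents.csv`, `UserDefaults.txt` and `PhysicalDiskInfo.txt`. As a sketch of follow-on analysis (the file name and columns come from the script's output; everything else is just an example), the CSVs can be loaded back into pandas:

```python
import pandas as pd

# Example: review the extracted Edge history (columns: SID, Date, Time, VisitedURL).
history = pd.read_csv("BrowserHistory.csv")
print(history.head())
```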

**Tip**: Before running the tool against the database, make sure the **-wal (Write Ahead Log)** file data is merged into the main database; otherwise you might miss crucial data.
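
One way to do this (a sketch, assuming you are working on a copy of `EventTranscript.db` together with its `-wal` file) is to force a WAL checkpoint with SQLite:

```python
import sqlite3

# Copy EventTranscript.db and EventTranscript.db-wal side by side, then
# checkpoint so the WAL contents are written into the main database file.
conn = sqlite3.connect("EventTranscript.db")
conn.execute("PRAGMA wal_checkpoint(TRUNCATE);")
conn.close()
```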

![usage](./img/usage.png)

### Acknowledgements

This tool wouldn't have been possible without the excellent research and hard work put in by my colleagues [Andrew Rathbun](https://twitter.com/bunsofwrath12) & [Josh Mitchell](https://www.linkedin.com/in/josh-mitchell-0990ba6a/) in investigating Windows Diagnostic Data.

Read more about their research here - https://github.com/rathbuna/EventTranscript.db-Research

### Author

Abhiram Kumar

+ Twitter: [@_abhiramkumar](https://www.twitter.com/_abhiramkumar)
+ Personal blog: https://stuxnet999.github.io