From 18b1d539286b7e9715d4c158adb3e55203f1dbc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Sebastian=20Velschow=20S=C3=B8gaard?= <46562365+seba7236@users.noreply.github.com> Date: Sun, 20 Oct 2024 19:27:50 +0200 Subject: [PATCH] Added files --- lib/a_pb2.py | 474 +++++++++++++++++++++++++++++++++++++++++++ lib/brave.py | 313 ++++++++++++++++++++++++++++ lib/chrome.py | 317 +++++++++++++++++++++++++++++ lib/detectBrowser.py | 69 +++++++ lib/edge.py | 313 ++++++++++++++++++++++++++++ lib/firefox.py | 449 ++++++++++++++++++++++++++++++++++++++++ lib/opera.py | 312 ++++++++++++++++++++++++++++ lib/output.py | 51 +++++ lib/yandex.py | 313 ++++++++++++++++++++++++++++ main.py | 59 ++++++ requirements.txt | 2 + 11 files changed, 2672 insertions(+) create mode 100644 lib/a_pb2.py create mode 100644 lib/brave.py create mode 100644 lib/chrome.py create mode 100644 lib/detectBrowser.py create mode 100644 lib/edge.py create mode 100644 lib/firefox.py create mode 100644 lib/opera.py create mode 100644 lib/output.py create mode 100644 lib/yandex.py create mode 100644 main.py create mode 100644 requirements.txt diff --git a/lib/a_pb2.py b/lib/a_pb2.py new file mode 100644 index 0000000..4ec2655 --- /dev/null +++ b/lib/a_pb2.py @@ -0,0 +1,474 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: a.proto + +from google.protobuf import descriptor as _descriptor # type: ignore +from google.protobuf import message as _message # type: ignore +from google.protobuf import reflection as _reflection # type: ignore +from google.protobuf import symbol_database as _symbol_database # type: ignore +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='a.proto', + package='content', + syntax='proto2', + serialized_options=b'H\003', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\x07\x61.proto\x12\x07\x63ontent\"\xc4\n\n\x1dNotificationDatabaseDataProto\x12\"\n\x1apersistent_notification_id\x18\x01 \x01(\x03\x12\x17\n\x0fnotification_id\x18\x05 \x01(\t\x12\x0e\n\x06origin\x18\x02 \x01(\t\x12&\n\x1eservice_worker_registration_id\x18\x03 \x01(\x03\x12&\n\x1ereplaced_existing_notification\x18\x06 \x01(\x08\x12\x12\n\nnum_clicks\x18\x07 \x01(\x05\x12 \n\x18num_action_button_clicks\x18\x08 \x01(\x05\x12\x1c\n\x14\x63reation_time_millis\x18\t \x01(\x03\x12%\n\x1dtime_until_first_click_millis\x18\n \x01(\x03\x12$\n\x1ctime_until_last_click_millis\x18\x0b \x01(\x03\x12\x1f\n\x17time_until_close_millis\x18\x0c \x01(\x03\x12J\n\rclosed_reason\x18\r \x01(\x0e\x32\x33.content.NotificationDatabaseDataProto.ClosedReason\x12R\n\x11notification_data\x18\x04 \x01(\x0b\x32\x37.content.NotificationDatabaseDataProto.NotificationData\x12\x15\n\rhas_triggered\x18\x0e \x01(\x08\x12\x1b\n\x13is_shown_by_browser\x18\x0f \x01(\x08\x1a\xc2\x01\n\x12NotificationAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12\r\n\x05title\x18\x02 \x01(\t\x12\x0c\n\x04icon\x18\x03 \x01(\t\x12L\n\x04type\x18\x04 \x01(\x0e\x32>.content.NotificationDatabaseDataProto.NotificationAction.Type\x12\x13\n\x0bplaceholder\x18\x05 \x01(\t\"\x1c\n\x04Type\x12\n\n\x06\x42UTTON\x10\x00\x12\x08\n\x04TEXT\x10\x01\x1a\xf4\x03\n\x10NotificationData\x12\r\n\x05title\x18\x01 \x01(\t\x12T\n\tdirection\x18\x02 
\x01(\x0e\x32\x41.content.NotificationDatabaseDataProto.NotificationData.Direction\x12\x0c\n\x04lang\x18\x03 \x01(\t\x12\x0c\n\x04\x62ody\x18\x04 \x01(\t\x12\x0b\n\x03tag\x18\x05 \x01(\t\x12\r\n\x05image\x18\x0f \x01(\t\x12\x0c\n\x04icon\x18\x06 \x01(\t\x12\r\n\x05\x62\x61\x64ge\x18\x0e \x01(\t\x12\x1d\n\x11vibration_pattern\x18\t \x03(\x05\x42\x02\x10\x01\x12\x11\n\ttimestamp\x18\x0c \x01(\x03\x12\x10\n\x08renotify\x18\r \x01(\x08\x12\x0e\n\x06silent\x18\x07 \x01(\x08\x12\x1b\n\x13require_interaction\x18\x0b \x01(\x08\x12\x0c\n\x04\x64\x61ta\x18\x08 \x01(\x0c\x12J\n\x07\x61\x63tions\x18\n \x03(\x0b\x32\x39.content.NotificationDatabaseDataProto.NotificationAction\x12\x1e\n\x16show_trigger_timestamp\x18\x10 \x01(\x03\";\n\tDirection\x12\x11\n\rLEFT_TO_RIGHT\x10\x00\x12\x11\n\rRIGHT_TO_LEFT\x10\x01\x12\x08\n\x04\x41UTO\x10\x02\"4\n\x0c\x43losedReason\x12\x08\n\x04USER\x10\x00\x12\r\n\tDEVELOPER\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x42\x02H\x03' # noqa +) + + +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION_TYPE = _descriptor.EnumDescriptor( + name='Type', + full_name='content.NotificationDatabaseDataProto.NotificationAction.Type', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='BUTTON', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='TEXT', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=784, + serialized_end=812, +) +_sym_db.RegisterEnumDescriptor(_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION_TYPE) + +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA_DIRECTION = _descriptor.EnumDescriptor( + name='Direction', + full_name='content.NotificationDatabaseDataProto.NotificationData.Direction', + filename=None, + file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='LEFT_TO_RIGHT', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='RIGHT_TO_LEFT', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='AUTO', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=1256, + serialized_end=1315, +) +_sym_db.RegisterEnumDescriptor(_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA_DIRECTION) + +_NOTIFICATIONDATABASEDATAPROTO_CLOSEDREASON = _descriptor.EnumDescriptor( + name='ClosedReason', + full_name='content.NotificationDatabaseDataProto.ClosedReason', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='USER', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='DEVELOPER', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='UNKNOWN', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=1317, + serialized_end=1369, +) +_sym_db.RegisterEnumDescriptor(_NOTIFICATIONDATABASEDATAPROTO_CLOSEDREASON) + + +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION = _descriptor.Descriptor( + name='NotificationAction', + full_name='content.NotificationDatabaseDataProto.NotificationAction', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + 
name='action', full_name='content.NotificationDatabaseDataProto.NotificationAction.action', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='title', full_name='content.NotificationDatabaseDataProto.NotificationAction.title', index=1, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='icon', full_name='content.NotificationDatabaseDataProto.NotificationAction.icon', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='type', full_name='content.NotificationDatabaseDataProto.NotificationAction.type', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='placeholder', full_name='content.NotificationDatabaseDataProto.NotificationAction.placeholder', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION_TYPE, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=618, + serialized_end=812, +) + +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA = _descriptor.Descriptor( + name='NotificationData', + full_name='content.NotificationDatabaseDataProto.NotificationData', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='title', full_name='content.NotificationDatabaseDataProto.NotificationData.title', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='direction', full_name='content.NotificationDatabaseDataProto.NotificationData.direction', index=1, + number=2, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='lang', full_name='content.NotificationDatabaseDataProto.NotificationData.lang', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='body', 
full_name='content.NotificationDatabaseDataProto.NotificationData.body', index=3, + number=4, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='tag', full_name='content.NotificationDatabaseDataProto.NotificationData.tag', index=4, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='image', full_name='content.NotificationDatabaseDataProto.NotificationData.image', index=5, + number=15, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='icon', full_name='content.NotificationDatabaseDataProto.NotificationData.icon', index=6, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='badge', full_name='content.NotificationDatabaseDataProto.NotificationData.badge', index=7, + number=14, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='vibration_pattern', full_name='content.NotificationDatabaseDataProto.NotificationData.vibration_pattern', index=8, + number=9, type=5, cpp_type=1, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='timestamp', full_name='content.NotificationDatabaseDataProto.NotificationData.timestamp', index=9, + number=12, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='renotify', full_name='content.NotificationDatabaseDataProto.NotificationData.renotify', index=10, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='silent', full_name='content.NotificationDatabaseDataProto.NotificationData.silent', index=11, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='require_interaction', full_name='content.NotificationDatabaseDataProto.NotificationData.require_interaction', index=12, + number=11, type=8, cpp_type=7, label=1, + 
has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='data', full_name='content.NotificationDatabaseDataProto.NotificationData.data', index=13, + number=8, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=b"", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='actions', full_name='content.NotificationDatabaseDataProto.NotificationData.actions', index=14, + number=10, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='show_trigger_timestamp', full_name='content.NotificationDatabaseDataProto.NotificationData.show_trigger_timestamp', index=15, + number=16, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA_DIRECTION, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=815, + serialized_end=1315, +) + +_NOTIFICATIONDATABASEDATAPROTO = _descriptor.Descriptor( + name='NotificationDatabaseDataProto', + full_name='content.NotificationDatabaseDataProto', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='persistent_notification_id', full_name='content.NotificationDatabaseDataProto.persistent_notification_id', index=0, + number=1, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='notification_id', full_name='content.NotificationDatabaseDataProto.notification_id', index=1, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='origin', full_name='content.NotificationDatabaseDataProto.origin', index=2, + number=2, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='service_worker_registration_id', full_name='content.NotificationDatabaseDataProto.service_worker_registration_id', index=3, + number=3, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='replaced_existing_notification', full_name='content.NotificationDatabaseDataProto.replaced_existing_notification', index=4, + number=6, type=8, cpp_type=7, label=1, + 
has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='num_clicks', full_name='content.NotificationDatabaseDataProto.num_clicks', index=5, + number=7, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='num_action_button_clicks', full_name='content.NotificationDatabaseDataProto.num_action_button_clicks', index=6, + number=8, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='creation_time_millis', full_name='content.NotificationDatabaseDataProto.creation_time_millis', index=7, + number=9, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='time_until_first_click_millis', full_name='content.NotificationDatabaseDataProto.time_until_first_click_millis', index=8, + number=10, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='time_until_last_click_millis', 
full_name='content.NotificationDatabaseDataProto.time_until_last_click_millis', index=9, + number=11, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='time_until_close_millis', full_name='content.NotificationDatabaseDataProto.time_until_close_millis', index=10, + number=12, type=3, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='closed_reason', full_name='content.NotificationDatabaseDataProto.closed_reason', index=11, + number=13, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='notification_data', full_name='content.NotificationDatabaseDataProto.notification_data', index=12, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='has_triggered', full_name='content.NotificationDatabaseDataProto.has_triggered', index=13, + number=14, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='is_shown_by_browser', full_name='content.NotificationDatabaseDataProto.is_shown_by_browser', index=14, + number=15, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION, _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA, ], + enum_types=[ + _NOTIFICATIONDATABASEDATAPROTO_CLOSEDREASON, + ], + serialized_options=None, + is_extendable=False, + syntax='proto2', + extension_ranges=[], + oneofs=[ + ], + serialized_start=21, + serialized_end=1369, +) + +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION.fields_by_name['type'].enum_type = _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION_TYPE +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION.containing_type = _NOTIFICATIONDATABASEDATAPROTO +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION_TYPE.containing_type = _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA.fields_by_name['direction'].enum_type = _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA_DIRECTION +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA.fields_by_name['actions'].message_type = _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA.containing_type = _NOTIFICATIONDATABASEDATAPROTO +_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA_DIRECTION.containing_type = _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA +_NOTIFICATIONDATABASEDATAPROTO.fields_by_name['closed_reason'].enum_type = _NOTIFICATIONDATABASEDATAPROTO_CLOSEDREASON +_NOTIFICATIONDATABASEDATAPROTO.fields_by_name['notification_data'].message_type = _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA 
# ==== tail of lib/a_pb2.py (generated by protoc — DO NOT EDIT; regenerate from a.proto) ====
_NOTIFICATIONDATABASEDATAPROTO_CLOSEDREASON.containing_type = _NOTIFICATIONDATABASEDATAPROTO
DESCRIPTOR.message_types_by_name['NotificationDatabaseDataProto'] = _NOTIFICATIONDATABASEDATAPROTO
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

NotificationDatabaseDataProto = _reflection.GeneratedProtocolMessageType('NotificationDatabaseDataProto', (_message.Message,), {

    'NotificationAction': _reflection.GeneratedProtocolMessageType('NotificationAction', (_message.Message,), {
        'DESCRIPTOR': _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONACTION,
        '__module__': 'a_pb2'
        # @@protoc_insertion_point(class_scope:content.NotificationDatabaseDataProto.NotificationAction)
    }),

    'NotificationData': _reflection.GeneratedProtocolMessageType('NotificationData', (_message.Message,), {
        'DESCRIPTOR': _NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA,
        '__module__': 'a_pb2'
        # @@protoc_insertion_point(class_scope:content.NotificationDatabaseDataProto.NotificationData)
    }),
    'DESCRIPTOR': _NOTIFICATIONDATABASEDATAPROTO,
    '__module__': 'a_pb2'
    # @@protoc_insertion_point(class_scope:content.NotificationDatabaseDataProto)
})
_sym_db.RegisterMessage(NotificationDatabaseDataProto)
_sym_db.RegisterMessage(NotificationDatabaseDataProto.NotificationAction)
_sym_db.RegisterMessage(NotificationDatabaseDataProto.NotificationData)


DESCRIPTOR._options = None
_NOTIFICATIONDATABASEDATAPROTO_NOTIFICATIONDATA.fields_by_name['vibration_pattern']._options = None
# @@protoc_insertion_point(module_scope)


# ==== patch boundary: diff --git a/lib/brave.py b/lib/brave.py (new file, index 0000000..b938e33) ====
from datetime import datetime, timedelta
import sqlite3
import os
import json
import plyvel  # type: ignore
from lib.a_pb2 import NotificationDatabaseDataProto  # type: ignore
from lib.output import outputWriter  # type: ignore

# NOTE: `out` (parsed CLI args with .csv/.json flags) and `braveWriter` (outputWriter)
# are module-level globals assigned by parse_brave_data() before any parser runs.


def connect_database(database_path: str):
    """Open a read-only connection to *database_path*, or return None if unavailable.

    mode=ro&immutable=1 lets us read databases that a running browser still holds open.
    """
    try:
        if os.path.exists(database_path):
            return sqlite3.connect(f"file:{database_path}?mode=ro&immutable=1", uri=True)
        return None
    except Exception as e:  # Don't raise if a file is missing / damaged / etc. Just keep parsing the other files
        print(f"[!] Database connection error: {e}")
        return None


def convert_time(timestamp):
    """Convert a Chromium timestamp (microseconds since 1601-01-01 UTC) to a datetime string."""
    chromium_base_date = datetime(1601, 1, 1)
    return str(chromium_base_date + timedelta(microseconds=timestamp))


def parse_downloads(database):
    """Parse the `downloads` table from the History database and write it out.

    Column indices follow the Chromium `downloads` table layout — verify against
    the schema version of the profile being parsed.
    """
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM downloads").fetchall()
    output = [
        [
            "filename",
            "current_path",
            "target_path",
            "start_time",
            "received_bytes",
            "total_bytes",
            "end_time",
            "opened",
            "last_access_time",
            "mime_type",
        ]
    ]
    for entry in entries:
        # current_path is a Windows path; keep only the basename for the filename column.
        filename = entry[2].split("\\")[-1]
        output.append(
            [
                filename,
                entry[2],
                entry[3],
                convert_time(entry[4]),
                entry[5],
                entry[6],
                convert_time(entry[11]),
                str(bool(entry[12])),
                convert_time(entry[13]),
                entry[25],
            ]
        )
    if out.csv:
        braveWriter.write_csv("downloads", output)
    if out.json:
        braveWriter.write_json("downloads", output)


def parse_history(database):
    """Parse the `urls` table (browsing history) and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM urls").fetchall()
    output = [["url", "title", "visit_count", "last_visit_time"]]
    for entry in entries:
        # Local file:// visits have no page title; fall back to the file name.
        title = (
            entry[2]
            if not entry[1].startswith("file://")
            else entry[1].split("/")[-1]
        )
        output.append([entry[1], title, entry[3], convert_time(entry[5])])
    if out.csv:
        braveWriter.write_csv("history", output)
    if out.json:
        braveWriter.write_json("history", output)


def parse_visited_links(database):
    """Parse the `visited_links` table and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM visited_links").fetchall()
    output = [["top_level_url", "frame_url", "visit_count"]]
    for entry in entries:
        output.append([entry[2], entry[3], entry[4]])
    if out.csv:
        braveWriter.write_csv("visited_links", output)
    if out.json:
        braveWriter.write_json("visited_links", output)


def parse_searches(database):
    """Parse the `keyword_search_terms` table and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM keyword_search_terms").fetchall()
    output = [["term", "normalized_term"]]
    for entry in entries:
        output.append([entry[2], entry[3]])
    if out.csv:
        braveWriter.write_csv("searches", output)
    if out.json:
        braveWriter.write_json("searches", output)


def parse_favicons(database):
    """Parse the `favicons` table and write the favicon URLs out."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM favicons").fetchall()
    output = [["url"]]
    for entry in entries:
        output.append([entry[1]])
    if out.csv:
        braveWriter.write_csv("favicons", output)
    if out.json:
        braveWriter.write_json("favicons", output)


def parse_cookies(database):
    """Parse the `cookies` table from the Network/Cookies database and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM cookies").fetchall()
    output = [
        [
            "created_utc",
            "host_key",
            "name",
            "value",
            "expires_utc",
            "last_access_utc",
            "source_port",
            "last_update_utc",
        ]
    ]
    for entry in entries:
        output.append(
            [
                convert_time(entry[0]),
                entry[1],
                entry[3],
                entry[4],  # NOTE(review): plaintext `value`; encrypted_value is not decrypted here
                convert_time(entry[7]),
                convert_time(entry[10]),
                entry[16],
                convert_time(entry[17]),
            ]
        )
    if out.csv:
        braveWriter.write_csv("cookies", output)
    if out.json:
        braveWriter.write_json("cookies", output)


def parse_shortcuts(database):
    """Parse the `omni_box_shortcuts` table (omnibox autocomplete shortcuts) and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM omni_box_shortcuts").fetchall()
    output = [
        [
            "text",
            "fill_into_edit",
            "contents",
            "keyword",
            "last_access_time",
            "number_of_hits",
        ]
    ]
    for entry in entries:
        output.append(
            [
                entry[1],
                entry[2],
                entry[5],
                entry[11],
                convert_time(entry[12]),
                entry[13],
            ]
        )
    if out.csv:
        braveWriter.write_csv("shortcuts", output)
    if out.json:
        braveWriter.write_json("shortcuts", output)


def parse_chromium_notifications(database):
    """Parse the Platform Notifications LevelDB store and write the notifications out.

    Records are protobuf `content.NotificationDatabaseDataProto` messages; see
    chromium: content/browser/notifications/notification_database_data.proto.
    """
    class ClosedReason:
        USER = "0"
        DEVELOPER = "1"
        UNKNOWN = "2"

    # Found in chromium source-code (https://source.chromium.org/chromium/chromium/src/+/main:content/browser/notifications/notification_database_data.proto) Line 16-20
    map_field_to_reason = {
        ClosedReason.USER: "USER",
        ClosedReason.DEVELOPER: "DEVELOPER",
        ClosedReason.UNKNOWN: "UNKNOWN",  # was "UKNOWN" — fixed typo in the emitted value
    }

    if not os.path.exists(database):
        return None
    # A LevelDB directory with fewer than 4 files has never stored any records;
    # plyvel would fail to open it, so bail out early.
    if len(os.listdir(database)) < 4:
        return None
    try:
        db = plyvel.DB(database, create_if_missing=False)
    except Exception as e:
        print(e)
        return None
    try:
        output = [["title", "lang", "body", "tag", "icon", "is_silent",
                   "require_interaction", "time", "badge", "image", "numClicks",
                   "creation_time_millis", "closed_reason", "has_triggered", "origin"]]
        for _key, value in db:
            # Parse each raw LevelDB value as a NotificationDatabaseDataProto message.
            notification_data = NotificationDatabaseDataProto()
            notification_data.ParseFromString(value)
            inner = notification_data.notification_data
            output.append([
                inner.title,
                inner.lang,
                inner.body,
                inner.tag,
                inner.icon,
                inner.silent,
                inner.require_interaction,
                convert_time(int(inner.timestamp)),
                inner.badge,
                inner.image,
                notification_data.num_clicks,
                notification_data.creation_time_millis,
                map_field_to_reason.get(str(notification_data.closed_reason)),
                notification_data.has_triggered,
                notification_data.origin,
            ])
        if out.csv:
            braveWriter.write_csv("notifications", output)
        if out.json:
            braveWriter.write_json("notifications", output)
    except Exception as e:
        print(e)
    finally:
        db.close()  # was leaked previously; release the LevelDB lock


def parse_extensions(path):
    """Parse every installed extension's manifest.json and write the summary out."""
    if not os.path.exists(path):
        return None
    output = [["name", "author", "version", "description", "developer"]]
    for extension in os.listdir(path):
        try:
            # Each extension dir contains one version subdirectory holding manifest.json.
            version_dir = os.listdir(f"{path}/{extension}")[0]
            with open(f"{path}/{extension}/{version_dir}/manifest.json", "r", encoding="utf-8") as f:
                manifest = json.load(f)
            output.append([
                manifest.get("name", "No name specified"),
                manifest.get("author", "No author specified"),
                manifest.get("manifest_version", "No version specified"),
                manifest.get("description", "No description specified"),
                manifest.get("developer", "No developer specified"),
            ])
        except (OSError, IndexError, json.JSONDecodeError, KeyError):
            # Best-effort: a malformed extension dir must not abort the whole run.
            # (Previously only KeyError was caught — which could never fire, since
            # every lookup uses .get() — while real I/O/JSON errors crashed.)
            print(f"[!] Error parsing {braveWriter.browser} extensions, check them manually!")
    if out.csv:
        braveWriter.write_csv("extensions", output)
    if out.json:
        braveWriter.write_json("extensions", output)


def parse_brave_data(user, directory, output, args):
    """Entry point: parse all Brave artifacts in *directory* for *user*.

    Publishes `out` (arg flags) and `braveWriter` (outputWriter) as module globals
    consumed by the individual parse_* helpers above.
    """
    print("[*] Starting to parse Brave")
    global out
    out = args
    global braveWriter
    braveWriter = outputWriter(output, user, "brave")
    parse_downloads(f"{directory}/History")
    parse_history(f"{directory}/History")
    parse_visited_links(f"{directory}/History")
    parse_searches(f"{directory}/History")
    parse_favicons(f"{directory}/Favicons")
    parse_cookies(f"{directory}/Network/Cookies")
    parse_shortcuts(f"{directory}/Shortcuts")
    parse_chromium_notifications(f"{directory}/Platform Notifications")
    parse_extensions(f"{directory}/Extensions")


# ==== patch boundary: diff --git a/lib/chrome.py b/lib/chrome.py (new file, index 0000000..19005b4) ====
from datetime import datetime, timedelta
import sqlite3
import os
import json
import plyvel  # type: ignore
from lib.a_pb2 import NotificationDatabaseDataProto  # type: ignore
from lib.output import outputWriter  # type: ignore


def connect_database(database_path: str):
    """Open a read-only connection to *database_path*, or return None if unavailable."""
    # NOTE(review): chrome.py is truncated at this point in the visible patch chunk;
    # the tail of this function is reconstructed to mirror the byte-identical helper
    # in lib/brave.py above — confirm against the full patch.
    try:
        if os.path.exists(database_path):
            return sqlite3.connect(f"file:{database_path}?mode=ro&immutable=1", uri=True)
        return None
    except Exception as e:  # Don't raise if a file is missing / damaged / etc. Just keep parsing the other files
        print(f"[!] Database connection error: {e}")
        return None
def convert_time(timestamp):
    """Convert a Chromium/WebKit timestamp (microseconds since 1601-01-01) to text."""
    chromium_base_date = datetime(1601, 1, 1)
    return str(chromium_base_date + timedelta(microseconds=timestamp))


def parse_downloads(database):
    """Parse the downloads table from the History database and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM downloads").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [[
        "filename", "current_path", "target_path", "start_time",
        "received_bytes", "total_bytes", "end_time", "opened",
        "last_access_time", "mime_type",
    ]]
    for entry in entries:
        # Paths are recorded with Windows separators regardless of host OS.
        filename = entry[2].split("\\")[-1]
        output.append([
            filename,
            entry[2],
            entry[3],
            convert_time(entry[4]),
            entry[5],
            entry[6],
            convert_time(entry[11]),
            str(bool(entry[12])),
            convert_time(entry[13]),
            entry[25],
        ])
    if out.csv:
        chromeWriter.write_csv("downloads", output)
    if out.json:
        chromeWriter.write_json("downloads", output)


def parse_history(database):
    """Parse the urls table (browsing history) and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM urls").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["url", "title", "visit_count", "last_visit_time"]]
    for entry in entries:
        # Local files carry no page title; fall back to the file name.
        title = (
            entry[2]
            if not entry[1].startswith("file://")
            else entry[1].split("/")[-1]
        )
        output.append([entry[1], title, entry[3], convert_time(entry[5])])
    if out.csv:
        chromeWriter.write_csv("history", output)
    if out.json:
        chromeWriter.write_json("history", output)


def parse_visited_links(database):
    """Parse the visited_links table from the History database and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM visited_links").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["top_level_url", "frame_url", "visit_count"]]
    for entry in entries:
        output.append([entry[2], entry[3], entry[4]])
    if out.csv:
        chromeWriter.write_csv("visited_links", output)
    if out.json:
        chromeWriter.write_json("visited_links", output)


def parse_searches(database):
    """Parse keyword search terms from the History database and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM keyword_search_terms").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["term", "normalized_term"]]
    for entry in entries:
        output.append([entry[2], entry[3]])
    if out.csv:
        chromeWriter.write_csv("searches", output)
    if out.json:
        chromeWriter.write_json("searches", output)


def parse_favicons(database):
    """Parse favicon URLs from the Favicons database and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM favicons").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["url"]]
    for entry in entries:
        output.append([entry[1]])
    if out.csv:
        chromeWriter.write_csv("favicons", output)
    if out.json:
        chromeWriter.write_json("favicons", output)


def parse_cookies(database):
    """Parse cookie metadata from the Cookies database (values stay as stored)."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM cookies").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [[
        "created_utc", "host_key", "name", "value",
        "expires_utc", "last_access_utc", "source_port", "last_update_utc",
    ]]
    for entry in entries:
        output.append([
            convert_time(entry[0]),
            entry[1],
            entry[3],
            entry[4],
            convert_time(entry[7]),
            convert_time(entry[10]),
            entry[16],
            convert_time(entry[17]),
        ])
    if out.csv:
        chromeWriter.write_csv("cookies", output)
    if out.json:
        chromeWriter.write_json("cookies", output)


def parse_shortcuts(database):
    """Parse omnibox shortcuts from the Shortcuts database and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM omni_box_shortcuts").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [[
        "text", "fill_into_edit", "contents",
        "keyword", "last_access_time", "number_of_hits",
    ]]
    for entry in entries:
        output.append([
            entry[1],
            entry[2],
            entry[5],
            entry[11],
            convert_time(entry[12]),
            entry[13],
        ])
    if out.csv:
        chromeWriter.write_csv("shortcuts", output)
    if out.json:
        chromeWriter.write_json("shortcuts", output)


def parse_chromium_notifications(database):
    """Parse the Platform Notifications LevelDB store (protobuf records)."""

    class ClosedReason:
        USER = "0"
        DEVELOPER = "1"
        UNKNOWN = "2"

    # Values defined in the Chromium source:
    # https://source.chromium.org/chromium/chromium/src/+/main:content/browser/notifications/notification_database_data.proto (lines 16-20)
    map_field_to_reason = {
        ClosedReason.USER: "USER",
        ClosedReason.DEVELOPER: "DEVELOPER",
        ClosedReason.UNKNOWN: "UNKNOWN",  # fix: was misspelled "UKNOWN"
    }

    if not os.path.exists(database):
        return None
    # A LevelDB directory with fewer than 4 files has never held data;
    # plyvel would fail trying to open it.
    if len(os.listdir(database)) < 4:
        return None
    db = None
    try:
        db = plyvel.DB(database, create_if_missing=False)
        output = [[
            "title", "lang", "body", "tag", "icon", "is_silent",
            "require_interaction", "time", "badge", "image", "numClicks",
            "creation_time_millis", "closed_reason", "has_triggered", "origin",
        ]]
        for _key, value in db:
            record = NotificationDatabaseDataProto()
            record.ParseFromString(value)
            data = record.notification_data
            output.append([
                data.title, data.lang, data.body, data.tag, data.icon,
                data.silent, data.require_interaction,
                convert_time(int(data.timestamp)),
                data.badge, data.image,
                record.num_clicks, record.creation_time_millis,
                map_field_to_reason.get(str(record.closed_reason)),
                record.has_triggered, record.origin,
            ])
        if out.csv:
            chromeWriter.write_csv("notifications", output)
        if out.json:
            chromeWriter.write_json("notifications", output)
    except Exception as e:
        # Best effort: a damaged store must not abort the whole run.
        print(e)
    finally:
        if db is not None:
            db.close()  # fix: LevelDB handle was never released


def parse_extensions(path):
    """Parse each installed extension's manifest.json and write a summary."""
    if not os.path.exists(path):
        return None
    output = [["name", "author", "version", "description", "developer"]]
    # fix: removed leftover debug print(os.listdir(...)) calls
    for extension in os.listdir(path):
        if extension == "Temp":
            continue  # fix: `break` aborted the listing at "Temp", skipping all later extensions
        try:
            version_id = os.listdir(f"{path}/{extension}")[0]
            # fix: file handle was leaked; read via context manager
            with open(f"{path}/{extension}/{version_id}/manifest.json", "r") as f:
                manifest = json.load(f)
            output.append([
                manifest.get("name", "No name specified"),
                manifest.get("author", "No author specified"),
                manifest.get("manifest_version", "No version specified"),
                manifest.get("description", "No description specified"),
                manifest.get("developer", "No developer specified"),
            ])
        # fix: the original caught KeyError, which dict.get() never raises
        except (OSError, IndexError, json.JSONDecodeError):
            print(f"[!] Error parsing {chromeWriter.browser} extensions, check them manually!")
    if out.csv:
        chromeWriter.write_csv("extensions", output)
    if out.json:
        chromeWriter.write_json("extensions", output)


def parse_chrome_data(user, directory, output, args):
    """Entry point: parse every supported Chrome artifact for one user profile."""
    print("[*] Starting to parse Chrome")
    global out
    out = args
    global chromeWriter
    chromeWriter = outputWriter(output, user, "chrome")
    parse_downloads(f"{directory}/History")
    parse_history(f"{directory}/History")
    parse_visited_links(f"{directory}/History")
    parse_searches(f"{directory}/History")
    parse_favicons(f"{directory}/Favicons")
    parse_cookies(f"{directory}/Network/Cookies")
    parse_shortcuts(f"{directory}/Shortcuts")
    parse_chromium_notifications(f"{directory}/Platform Notifications")
    parse_extensions(f"{directory}/Extensions")


# --- lib/detectBrowser.py ---
import os
import string


def find_single_letter_directory(path):
    """Return the first sub-directory of *path* named with one capital letter.

    Mounted forensic images often place the volume under a drive-letter
    directory (e.g. "C"); returns the joined path, or None if absent.
    """
    for directory in os.listdir(path):
        if (
            os.path.isdir(os.path.join(path, directory))
            and len(directory) == 1
            and directory in string.ascii_uppercase
        ):
            return os.path.join(path, directory)
    return None
+ """ + for directory in os.listdir(path): + if os.path.isdir(os.path.join(path, directory)) and len(directory) == 1 and directory in string.ascii_uppercase: + return os.path.join(path, directory) + return None + + +def find_usernames(folder_path): + usernames = [] + for username in os.listdir(folder_path + "/Users"): + if username == "Default": + pass + else: + usernames.append(username) + return usernames + + +def determine_browser(path): + files = os.listdir(path) + for file in files: + if file.lower() == "chrome icon.ico": + return "chrome" + elif file.lower() == "edge icon.ico": + return "edge" + elif "firefox" in path.lower(): + return "firefox" + elif file.lower() == "readme": + if "Opera" in file.readlines(): + return "opera" + else: + return "unknown" + + +def locate_browser_directories(path): + + # Check for a single letter directory first, and use that as the "starting point" + letter_dir = find_single_letter_directory(path) + + if letter_dir: + path = letter_dir # Update path to the single letter directory + + + user_dir = {} + for username in find_usernames(path): + appdata = f"{path}/Users/{username}/AppData" + return_val = {} + if os.path.exists(f"{appdata}/Roaming/Mozilla/Firefox/Profiles/"): + user_profiles = [a for a in os.listdir(f"{appdata}/Roaming/Mozilla/Firefox/Profiles/")] + directories = [] + for profile in user_profiles: + directories.append(f"{appdata}/Roaming/Mozilla/Firefox/Profiles/{profile}") + + return_val["Firefox"] = tuple(directories) + if os.path.exists(f"{appdata}/Local/Google/Chrome/User Data/Default"): + return_val["Chrome"] = f"{appdata}/Local/Google/Chrome/User Data/Default" + if os.path.exists(f"{appdata}/Local/Microsoft/Edge/User Data/Default"): + return_val["Edge"] = f"{appdata}/Local/Microsoft/Edge/User Data/Default" + + if os.path.exists(f"{appdata}/Roaming/Opera Software/Opera Stable/Default"): + return_val["Opera"] = f"{appdata}/Roaming/Opera Software/Opera Stable/Default" + + user_dir[username] = return_val + return 
# --- lib/edge.py ---
from datetime import datetime, timedelta
import sqlite3
import os
import json

# Robustness fix: tolerate a missing optional/project dependency at import
# time so only the parsers that need it are disabled, not the whole tool.
try:
    import plyvel  # type: ignore
    from lib.a_pb2 import NotificationDatabaseDataProto  # type: ignore
    from lib.output import outputWriter  # type: ignore
except ImportError as _dep_err:
    plyvel = None
    NotificationDatabaseDataProto = None
    outputWriter = None
    print(f"[!] Missing dependency for Chromium parsing: {_dep_err}")


def connect_database(database_path: str):
    """Open *database_path* read-only; return a sqlite3 connection or None.

    mode=ro&immutable=1 lets us read a database still locked by a running
    browser without modifying it.
    """
    try:
        if os.path.exists(database_path):
            return sqlite3.connect(f"file:{database_path}?mode=ro&immutable=1", uri=True)
        return None
    except Exception as e:
        # Don't abort the whole run over one damaged/missing artifact.
        print(f"[!] Database connection error: {e}")
        return None


def convert_time(timestamp):
    """Convert a Chromium/WebKit timestamp (microseconds since 1601-01-01) to text."""
    chromium_base_date = datetime(1601, 1, 1)
    return str(chromium_base_date + timedelta(microseconds=timestamp))


def parse_downloads(database):
    """Parse the downloads table from the History database and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM downloads").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [[
        "filename", "current_path", "target_path", "start_time",
        "received_bytes", "total_bytes", "end_time", "opened",
        "last_access_time", "mime_type",
    ]]
    for entry in entries:
        # Paths are recorded with Windows separators regardless of host OS.
        filename = entry[2].split("\\")[-1]
        output.append([
            filename,
            entry[2],
            entry[3],
            convert_time(entry[4]),
            entry[5],
            entry[6],
            convert_time(entry[11]),
            str(bool(entry[12])),
            convert_time(entry[13]),
            entry[25],
        ])
    if out.csv:
        edgeWriter.write_csv("downloads", output)
    if out.json:
        edgeWriter.write_json("downloads", output)


def parse_history(database):
    """Parse the urls table (browsing history) and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM urls").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["url", "title", "visit_count", "last_visit_time"]]
    for entry in entries:
        # Local files carry no page title; fall back to the file name.
        title = (
            entry[2]
            if not entry[1].startswith("file://")
            else entry[1].split("/")[-1]
        )
        output.append([entry[1], title, entry[3], convert_time(entry[5])])
    if out.csv:
        edgeWriter.write_csv("history", output)
    if out.json:
        edgeWriter.write_json("history", output)


def parse_visited_links(database):
    """Parse the visited_links table from the History database and write it out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM visited_links").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["top_level_url", "frame_url", "visit_count"]]
    for entry in entries:
        output.append([entry[2], entry[3], entry[4]])
    if out.csv:
        edgeWriter.write_csv("visited_links", output)
    if out.json:
        edgeWriter.write_json("visited_links", output)


def parse_searches(database):
    """Parse keyword search terms from the History database and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM keyword_search_terms").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["term", "normalized_term"]]
    for entry in entries:
        output.append([entry[2], entry[3]])
    if out.csv:
        edgeWriter.write_csv("searches", output)
    if out.json:
        edgeWriter.write_json("searches", output)


def parse_favicons(database):
    """Parse favicon URLs from the Favicons database and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM favicons").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["url"]]
    for entry in entries:
        output.append([entry[1]])
    if out.csv:
        edgeWriter.write_csv("favicons", output)
    if out.json:
        edgeWriter.write_json("favicons", output)


def parse_cookies(database):
    """Parse cookie metadata from the Cookies database (values stay as stored)."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM cookies").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [[
        "created_utc", "host_key", "name", "value",
        "expires_utc", "last_access_utc", "source_port", "last_update_utc",
    ]]
    for entry in entries:
        output.append([
            convert_time(entry[0]),
            entry[1],
            entry[3],
            entry[4],
            convert_time(entry[7]),
            convert_time(entry[10]),
            entry[16],
            convert_time(entry[17]),
        ])
    if out.csv:
        edgeWriter.write_csv("cookies", output)
    if out.json:
        edgeWriter.write_json("cookies", output)


def parse_shortcuts(database):
    """Parse omnibox shortcuts from the Shortcuts database and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM omni_box_shortcuts").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [[
        "text", "fill_into_edit", "contents",
        "keyword", "last_access_time", "number_of_hits",
    ]]
    for entry in entries:
        output.append([
            entry[1],
            entry[2],
            entry[5],
            entry[11],
            convert_time(entry[12]),
            entry[13],
        ])
    if out.csv:
        edgeWriter.write_csv("shortcuts", output)
    if out.json:
        edgeWriter.write_json("shortcuts", output)


def parse_chromium_notifications(database):
    """Parse the Platform Notifications LevelDB store (protobuf records)."""

    class ClosedReason:
        USER = "0"
        DEVELOPER = "1"
        UNKNOWN = "2"

    # Values defined in the Chromium source:
    # https://source.chromium.org/chromium/chromium/src/+/main:content/browser/notifications/notification_database_data.proto (lines 16-20)
    map_field_to_reason = {
        ClosedReason.USER: "USER",
        ClosedReason.DEVELOPER: "DEVELOPER",
        ClosedReason.UNKNOWN: "UNKNOWN",  # fix: was misspelled "UKNOWN"
    }

    if not os.path.exists(database):
        return None
    # A LevelDB directory with fewer than 4 files has never held data;
    # plyvel would fail trying to open it.
    if len(os.listdir(database)) < 4:
        return None
    db = None
    try:
        db = plyvel.DB(database, create_if_missing=False)
        output = [[
            "title", "lang", "body", "tag", "icon", "is_silent",
            "require_interaction", "time", "badge", "image", "numClicks",
            "creation_time_millis", "closed_reason", "has_triggered", "origin",
        ]]
        for _key, value in db:
            record = NotificationDatabaseDataProto()
            record.ParseFromString(value)
            data = record.notification_data
            output.append([
                data.title, data.lang, data.body, data.tag, data.icon,
                data.silent, data.require_interaction,
                convert_time(int(data.timestamp)),
                data.badge, data.image,
                record.num_clicks, record.creation_time_millis,
                map_field_to_reason.get(str(record.closed_reason)),
                record.has_triggered, record.origin,
            ])
        if out.csv:
            edgeWriter.write_csv("notifications", output)
        if out.json:
            edgeWriter.write_json("notifications", output)
    except Exception as e:
        # Best effort: a damaged store must not abort the whole run.
        print(e)
    finally:
        if db is not None:
            db.close()  # fix: LevelDB handle was never released


def parse_extensions(path):
    """Parse each installed extension's manifest.json and write a summary."""
    if not os.path.exists(path):
        return None
    output = [["name", "author", "version", "description", "developer"]]
    for extension in os.listdir(path):
        try:
            version_id = os.listdir(f"{path}/{extension}")[0]
            # fix: file handle was leaked; read via context manager
            with open(f"{path}/{extension}/{version_id}/manifest.json", "r") as f:
                manifest = json.load(f)
            output.append([
                manifest.get("name", "No name specified"),
                manifest.get("author", "No author specified"),
                manifest.get("manifest_version", "No version specified"),
                manifest.get("description", "No description specified"),
                manifest.get("developer", "No developer specified"),
            ])
        # fix: the original caught KeyError, which dict.get() never raises
        except (OSError, IndexError, json.JSONDecodeError):
            print(f"[!] Error parsing {edgeWriter.browser} extensions, check them manually!")
    if out.csv:
        edgeWriter.write_csv("extensions", output)
    if out.json:
        edgeWriter.write_json("extensions", output)


def parse_edge_data(user, directory, output, args):
    """Entry point: parse every supported Edge artifact for one user profile."""
    print("[*] Starting to parse Edge")
    global out
    out = args
    global edgeWriter
    edgeWriter = outputWriter(output, user, "edge")
    parse_downloads(f"{directory}/History")
    parse_history(f"{directory}/History")
    parse_visited_links(f"{directory}/History")
    parse_searches(f"{directory}/History")
    parse_favicons(f"{directory}/Favicons")
    parse_cookies(f"{directory}/Network/Cookies")
    parse_shortcuts(f"{directory}/Shortcuts")
    parse_chromium_notifications(f"{directory}/Platform Notifications")
    parse_extensions(f"{directory}/Extensions")


# --- lib/firefox.py ---
from datetime import datetime
import sqlite3
import json
import os
from itertools import groupby
from operator import itemgetter

# Robustness fix: tolerate a missing project dependency at import time.
try:
    from lib.output import outputWriter  # type: ignore
except ImportError as _dep_err:
    outputWriter = None
    print(f"[!] Missing dependency for Firefox parsing: {_dep_err}")


def parse_firefox_data(user, directory, profile, output, args):
    """Entry point: parse every supported Firefox artifact for one profile."""
    print(f"[*] Starting to parse firefox profile {profile}")
    # Filled by parse_history (place id -> [url, title, host, visit_count])
    # and reused by the later parsers to resolve URLs.
    places_dict = {}
    global firefoxWriter
    global out
    out = args
    firefoxWriter = outputWriter(output, user, "firefox", profile)
    parse_cookies(f"{directory}/cookies.sqlite")
    parse_formhistory(f"{directory}/formhistory.sqlite")
    parse_perms(f"{directory}/permissions.sqlite")
    parse_bookmarks(f"{directory}/places.sqlite")
    parse_history(f"{directory}/places.sqlite", places_dict)
    enrich_history(f"{directory}/places.sqlite", places_dict)
    parse_inputhistory(f"{directory}/places.sqlite", places_dict)
    parse_history_metadata(f"{directory}/places.sqlite", places_dict)
    parse_extensions(f"{directory}/extensions.json")
    parse_logins(f"{directory}/logins.json")
    parse_downloads(f"{directory}/places.sqlite", places_dict)
    parse_favicons(f"{directory}/favicons.sqlite")
    parse_notifications(f"{directory}/notificationstore.json")
def connect_database(database_path: str):
    """Open *database_path* read-only; return a sqlite3 connection or None.

    mode=ro&immutable=1 lets us read a database still locked by a running
    browser without modifying it.
    """
    try:
        if os.path.exists(database_path):
            return sqlite3.connect(f"file:{database_path}?mode=ro&immutable=1", uri=True)
        return None
    except Exception as e:
        # Don't abort the whole run over one damaged/missing artifact.
        print(f"[!] Database connection error: {e}")
        return None


def convert_firefox_time(timestamp: int):
    """Convert a Firefox timestamp to "YYYY-MM-DDTHH:MM:SS" local time.

    Firefox stores times in three resolutions; the digit count of the value
    disambiguates: 10 digits = seconds, 13 = milliseconds, otherwise
    microseconds (PRTime).
    """
    length = len(str(timestamp))
    if length == 10:    # seconds
        dt = datetime.fromtimestamp(timestamp)
    elif length == 13:  # milliseconds
        dt = datetime.fromtimestamp(timestamp / 1000)
    else:               # microseconds (PRTime)
        dt = datetime.fromtimestamp(timestamp / 1_000_000)
    return dt.strftime("%Y-%m-%dT%H:%M:%S")


def parse_favicons(database: str):
    """Parse pages that have a favicon (moz_pages_w_icons) and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_pages_w_icons").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["page_url"]]
    for entry in entries:
        output.append([entry[1]])
    if out.csv:
        firefoxWriter.write_csv("favicon", output)
    if out.json:
        firefoxWriter.write_json("favicon", output)


def parse_extensions(database: str):
    """Parse installed add-ons from extensions.json and write a summary."""
    if not os.path.exists(database):
        return None
    with open(database, "r", encoding="UTF-8") as fh:  # fix: file handle was leaked
        data = json.load(fh)
    addons = data.get("addons")
    output = [[
        "id", "sourceURI", "version", "type", "name", "description", "creator",
        "installDate", "updateDate", "userPermissions", "optionalPermissions",
    ]]
    for addon in addons:
        # .get() yields a readable placeholder instead of a KeyError for
        # fields an add-on doesn't define.
        locale = addon.get("defaultLocale") or {}  # fix: missing defaultLocale crashed
        output.append([
            addon.get("id", "No id specified"),
            addon.get("sourceURI", "No source URI specified"),
            addon.get("version", "No version specified"),
            addon.get("type", "No type specified"),
            locale.get("name", "No name specified"),
            locale.get("description", "No description"),
            locale.get("creator", "No creator specified"),
            convert_firefox_time(addon.get("installDate", 0)),
            convert_firefox_time(addon.get("updateDate", 0)),
            addon.get("userPermissions", "No user permissions specified"),
            addon.get("optionalPermissions", "No optional permissions specified"),
        ])
    if out.csv:
        firefoxWriter.write_csv("extensions", output)
    if out.json:
        firefoxWriter.write_json("extensions", output)


def parse_cookies(database: str):
    """Parse cookies from cookies.sqlite and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_cookies").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["name", "value", "host", "expiry", "lastAccessed", "creationTime"]]
    for entry in entries:
        output.append([
            entry[2],
            entry[3],
            entry[4],
            convert_firefox_time(entry[6]),
            convert_firefox_time(entry[7]),
            convert_firefox_time(entry[8]),
        ])
    if out.csv:
        firefoxWriter.write_csv("cookies", output)
    if out.json:
        firefoxWriter.write_json("cookies", output)


def parse_formhistory(database: str):
    """Parse saved form-field values from formhistory.sqlite and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_formhistory").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["fieldname", "value", "timesUsed", "firstUsed", "lastUsed"]]
    for entry in entries:
        output.append([
            entry[1],
            entry[2],
            entry[3],
            convert_firefox_time(entry[4]),
            convert_firefox_time(entry[5]),
        ])
    if out.csv:
        firefoxWriter.write_csv("formhistory", output)
    if out.json:
        firefoxWriter.write_json("formhistory", output)


def parse_perms(database: str):
    """Parse per-site permissions from permissions.sqlite and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_perms").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["origin", "type", "expireTime", "modificationTime"]]
    for entry in entries:
        output.append([
            entry[1],
            entry[2],
            convert_firefox_time(entry[5]),
            convert_firefox_time(entry[6]),
        ])
    if out.csv:
        firefoxWriter.write_csv("permissions", output)
    if out.json:
        firefoxWriter.write_json("permissions", output)


def parse_bookmarks(database: str):
    """Parse bookmarks from places.sqlite and write them out."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_bookmarks").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["title", "dateAdded", "lastModified"]]
    for entry in entries:
        output.append([
            entry[5],                        # title
            convert_firefox_time(entry[8]),  # dateAdded
            convert_firefox_time(entry[9]),  # lastModified
        ])
    if out.csv:
        firefoxWriter.write_csv("bookmarks", output)
    if out.json:
        firefoxWriter.write_json("bookmarks", output)


def parse_inputhistory(database: str, places_dict: dict):
    """Parse URL-bar input history, resolving place ids via *places_dict*."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_inputhistory").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [["places_id", "url", "input"]]
    for entry in entries:
        output.append([entry[0], places_dict[entry[0]][0], entry[1]])
    if out.csv:
        firefoxWriter.write_csv("inputhistory", output)
    if out.json:
        firefoxWriter.write_json("inputhistory", output)


def parse_history(database: str, places_dict: dict):
    """Parse moz_places; also fills *places_dict* (id -> [url, title, host, visits])."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_places").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [[
        "id", "url", "title", "host", "visit_count",
        "last_visit_date", "description", "preview_image_url",
    ]]
    for entry in entries:
        # rev_host is stored reversed with a trailing dot; undo both.
        host = entry[3][::-1][1::]
        places_dict[entry[0]] = [entry[1], entry[2], host, entry[4]]
        # fix: the two near-identical branches are merged; only the timestamp
        # column differed (NULL/0 last_visit_date has no valid conversion).
        last_visit = convert_firefox_time(entry[8]) if entry[8] else "Invalid timestamp"
        output.append([
            entry[0], entry[1], entry[2], host, entry[4],
            last_visit, entry[12], entry[13],
        ])
    if out.csv:
        firefoxWriter.write_csv("history", output)
    if out.json:
        firefoxWriter.write_json("history", output)


def enrich_history(database: str, places_dict: dict):
    """Parse moz_historyvisits and join each visit back to its moz_places URL."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_historyvisits").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    # Transition reasons defined in the Firefox source:
    # https://searchfox.org/mozilla-central/source/toolkit/components/places/History.sys.mjs (line 762)
    # "1" is the default when no transition reason was recorded.
    visit_types = {
        "1": "TRANSITION_LINK (User followed a link and got a new toplevel window)",
        "2": "TRANSITION_TYPED (User typed the pages url in the URL bar or selected it from the URL Bar autocomplete result)",
        "3": "TRANSITION_BOOKMARK (User followed a bookmark to get to the page)",
        "4": "TRANSITION_EMBED (User followed a link on a page that was embedded in another page (iframe))",
        "5": "TRANSITION_REDIRECT_PERMANENT (Permanent redirect)",  # fix: closing paren was missing
        "6": "TRANSITION_REDIRECT_TEMPORARY (Temporary redirect)",
        "7": "TRANSITION_DOWNLOAD (User downloaded the file)",
        "8": "TRANSITION_FRAMED_LINK (User followed a link and got a visit in a frame)",
        "9": "TRANSITION_RELOAD (User reloaded the page)",
    }
    entries.sort(key=itemgetter(2))  # group visits by place_id
    output = [["from_visit", "place_id", "url", "visit_date", "visit_type"]]
    for _place_id, group in groupby(entries, key=itemgetter(2)):
        for item in group:
            output.append([
                item[1],
                item[2],
                places_dict[item[2]][0],
                convert_firefox_time(item[3]),
                # robustness: don't KeyError on a transition value newer than this table
                visit_types.get(str(item[4]), f"UNKNOWN ({item[4]})"),
            ])
    if out.csv:
        firefoxWriter.write_csv("historyvisits", output)
    if out.json:
        firefoxWriter.write_json("historyvisits", output)


def parse_history_metadata(database: str, places_dict: dict):
    """Aggregate per-page interaction metrics from moz_places_metadata."""
    connection = connect_database(database)
    if not connection:
        return None
    try:
        entries = connection.execute("SELECT * FROM moz_places_metadata").fetchall()
    finally:
        connection.close()  # fix: connection was never released
    output = [[
        "places_id", "url", "title", "host", "visit_count",
        "total_view_time_miliseconds", "typing_time_miliseconds",
        "key_presses", "scrolling_time_miliseconds", "scrolling_distance_mm",
    ]]
    # A page can have several metadata rows (one per interaction session);
    # sum each metric per place id.
    times = {}
    for entry in entries:
        if entry[1] in times:
            for i, value in enumerate(entry[5:10]):
                times[entry[1]][i] += value
        else:
            times[entry[1]] = [entry[5], entry[6], entry[7], entry[8], entry[9]]
    # Emit aggregated rows, resolving page details through places_dict.
    for place_id, time_data in times.items():
        if place_id in places_dict:  # skip rows whose page is unknown
            places_data = places_dict[place_id]
            output.append([
                place_id,
                places_data[0],
                places_data[1],
                places_data[2],
                places_data[3],
                *time_data,
            ])
    if out.csv:
        firefoxWriter.write_csv("metadata", output)
    if out.json:
        firefoxWriter.write_json("metadata", output)


def parse_logins(database: str):
    """Parse saved-login metadata from logins.json.

    Passwords are deliberately NOT decrypted; only usage metadata is exported.
    """
    if not os.path.exists(database):
        return None
    with open(database, "r") as fh:  # fix: file handle was leaked
        data = json.load(fh)
    output = [[
        "hostname", "formSubmitUrl", "timeCreated", "timeLastUsed",
        "timePasswordChanged", "timesUsed",
    ]]
    for login in data.get("logins"):
        output.append([
            login.get("hostname", "No hostname specified"),
            login.get("formSubmitURL", "No submit URL specified"),
            convert_firefox_time(login.get("timeCreated", 0)),
            convert_firefox_time(login.get("timeLastUsed", 0)),
            convert_firefox_time(login.get("timePasswordChanged", 0)),
            login.get("timesUsed", "No times used specified"),
        ])
    if out.csv:
        firefoxWriter.write_csv("logins", output)
    if out.json:
        firefoxWriter.write_json("logins", output)
+ for login in data.get("logins"): + output.append([login.get("hostname", "No hostname specified"), + login.get("formSubmitURL", "No submit URL specified"), + convert_firefox_time(login.get("timeCreated", 0)), + convert_firefox_time(login.get("timeLastUsed", 0)), + convert_firefox_time(login.get("timePasswordChanged", 0)), + login.get("timesUsed", "No times used specified") + ]) + if out.csv: + firefoxWriter.write_csv("logins", output) + if out.json: + firefoxWriter.write_json("logins", output) + + +def parse_downloads(database: str, places_dict: dict): + # Parse downloads in firefox + connection = connect_database(database) + if not connection: + return None + cursor = connection.execute("SELECT * FROM moz_annos") + entries = cursor.fetchall() + entries.sort(key=itemgetter(1)) + output = [] + # Write names of fields + output.append(["places_id", "filename", "download_url", "endTime", "size", "deleted", "canceled"]) + # Grouping the entries in downloads with those in places.sqlite. With this, we can correlate activity using the "places_id", to get url's, etc. + for key, group in groupby(entries, key=itemgetter(1)): + for item in group: + if item[2] == 2 or item[2] == 1: + if not item[3].startswith("file:"): # We're not interested in the file:// entries, they dont have enough metadata to be useful. 
+ json_data = json.loads(item[3]) + output.append([key, + places_dict.get(key)[1], + places_dict.get(key)[0], + convert_firefox_time(json_data.get("endTime")), + json_data.get("fileSize"), + json_data.get("deleted"), + bool(json_data.get("state"))]) + # Write uo output, either CSV or JSON + if out.csv: + firefoxWriter.write_csv("downloads", output) + if out.json: + firefoxWriter.write_json("downloads", output) + + +def parse_notifications(database: str): + if os.path.exists(database): + # Load json + output = [] + with open(f"{database}", "r") as f: + notificationstore = json.loads(f.read()) + output.append(["site", "id", "title", "body", "icon", "alertName", "timestamp", "origin", "mozbehavior"]) + # Look through the items in the notificationstore.json file + for site, notifications in notificationstore.items(): + for _, notification in notifications.items(): + output.append([site, + notification.get("id", "No id specified"), + notification.get("title", "No title specified"), + notification.get("body", "No body specified"), + notification.get("icon", "No icon specified"), + notification.get("alertName", "No alert name specified"), + convert_firefox_time(notification.get("timestamp", "0")), + notification.get("origin", "No origin specified"), + notification.get("mozbehavior", "No behaviour specified") + ]) + if out.csv: + firefoxWriter.write_csv("notifications", output) + if out.json: + firefoxWriter.write_json("notifications", output) diff --git a/lib/opera.py b/lib/opera.py new file mode 100644 index 0000000..661589e --- /dev/null +++ b/lib/opera.py @@ -0,0 +1,312 @@ +from datetime import datetime, timedelta +import sqlite3 +import os +import json +import plyvel # type: ignore +from lib.a_pb2 import NotificationDatabaseDataProto # type: ignore +from lib.output import outputWriter # type: ignore + + +def connect_database(database_path: str): + """Establish connection to the SQLite database.""" + try: + if os.path.exists(database_path): + connection = 
def convert_time(timestamp):
    """Convert a Chromium/WebKit timestamp (microseconds since 1601-01-01) to a datetime string."""
    chromium_base_date = datetime(1601, 1, 1)
    return str(chromium_base_date + timedelta(microseconds=timestamp))


def parse_downloads(database):
    """Parse the downloads table from the History SQLite database."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM downloads").fetchall()
    output = [
        [
            "filename",
            "current_path",
            "target_path",
            "start_time",
            "received_bytes",
            "total_bytes",
            "end_time",
            "opened",
            "last_access_time",
            "mime_type",
        ]
    ]
    for entry in entries:
        # current_path is a Windows path; keep only the basename as the filename.
        filename = entry[2].split("\\")[-1]
        output.append(
            [
                filename,
                entry[2],
                entry[3],
                convert_time(entry[4]),
                entry[5],
                entry[6],
                convert_time(entry[11]),
                str(bool(entry[12])),
                convert_time(entry[13]),
                entry[25],
            ]
        )
    if out.csv:
        operaWriter.write_csv("downloads", output)
    if out.json:
        operaWriter.write_json("downloads", output)


def parse_history(database):
    """Parse the urls table (browsing history) from the History SQLite database."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM urls").fetchall()
    output = [["url", "title", "visit_count", "last_visit_time"]]
    for entry in entries:
        # Local file:// entries have no page title; use the filename instead.
        if entry[1].startswith("file://"):
            title = entry[1].split("/")[-1]
        else:
            title = entry[2]
        output.append([entry[1], title, entry[3], convert_time(entry[5])])
    if out.csv:
        operaWriter.write_csv("history", output)
    if out.json:
        operaWriter.write_json("history", output)


def parse_visited_links(database):
    """Parse the visited_links table from the History SQLite database."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM visited_links").fetchall()
    output = [["top_level_url", "frame_url", "visit_count"]]
    for entry in entries:
        output.append([entry[2], entry[3], entry[4]])
    if out.csv:
        operaWriter.write_csv("visited_links", output)
    if out.json:
        operaWriter.write_json("visited_links", output)


def parse_searches(database):
    """Parse the keyword_search_terms table (omnibox searches)."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM keyword_search_terms").fetchall()
    output = [["term", "normalized_term"]]
    for entry in entries:
        output.append([entry[2], entry[3]])
    if out.csv:
        operaWriter.write_csv("searches", output)
    if out.json:
        operaWriter.write_json("searches", output)


def parse_favicons(database):
    """Parse the favicons table (URLs the browser fetched icons for)."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM favicons").fetchall()
    output = [["url"]]
    for entry in entries:
        output.append([entry[1]])
    if out.csv:
        operaWriter.write_csv("favicons", output)
    if out.json:
        operaWriter.write_json("favicons", output)


def parse_cookies(database):
    """Parse the cookies table from the Network/Cookies SQLite database."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM cookies").fetchall()
    output = [
        [
            "created_utc",
            "host_key",
            "name",
            "value",
            "expires_utc",
            "last_access_utc",
            "source_port",
            "last_update_utc",
        ]
    ]
    for entry in entries:
        # NOTE: entry[4] is the *encrypted* value column in modern Chromium
        # schemas; it is reported as-is, no decryption is attempted.
        output.append(
            [
                convert_time(entry[0]),
                entry[1],
                entry[3],
                entry[4],
                convert_time(entry[7]),
                convert_time(entry[10]),
                entry[16],
                convert_time(entry[17]),
            ]
        )
    if out.csv:
        operaWriter.write_csv("cookies", output)
    if out.json:
        operaWriter.write_json("cookies", output)


def parse_shortcuts(database):
    """Parse the omni_box_shortcuts table (omnibox suggestion shortcuts)."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM omni_box_shortcuts").fetchall()
    output = [
        [
            "text",
            "fill_into_edit",
            "contents",
            "keyword",
            "last_access_time",
            "number_of_hits",
        ]
    ]
    for entry in entries:
        output.append(
            [
                entry[1],
                entry[2],
                entry[5],
                entry[11],
                convert_time(entry[12]),
                entry[13],
            ]
        )
    if out.csv:
        operaWriter.write_csv("shortcuts", output)
    if out.json:
        operaWriter.write_json("shortcuts", output)


def parse_chromium_notifications(database):
    """Parse the 'Platform Notifications' LevelDB; each record is a
    NotificationDatabaseDataProto protobuf message."""
    # Closed reasons, found in the chromium source-code (lines 16-20):
    # https://source.chromium.org/chromium/chromium/src/+/main:content/browser/notifications/notification_database_data.proto
    map_field_to_reason = {"0": "USER", "1": "DEVELOPER", "2": "UNKNOWN"}  # fixed "UKNOWN" typo

    if not os.path.exists(database):
        return None
    # A LevelDB directory with fewer than 4 files is empty/uninitialised and
    # plyvel errors out on it, so skip it up front.
    if len(os.listdir(database)) < 4:
        return None
    db = None
    try:
        db = plyvel.DB(database, create_if_missing=False)
        output = [
            ["title", "lang", "body", "tag", "icon", "is_silent", "require_interaction",
             "time", "badge", "image", "numClicks", "creation_time_millis",
             "closed_reason", "has_triggered", "origin"]
        ]
        for _key, value in db:
            notification_data = NotificationDatabaseDataProto()
            notification_data.ParseFromString(value)
            inner = notification_data.notification_data
            output.append(
                [
                    inner.title,
                    inner.lang,
                    inner.body,
                    inner.tag,
                    inner.icon,
                    inner.silent,
                    inner.require_interaction,
                    convert_time(int(inner.timestamp)),
                    inner.badge,
                    inner.image,
                    notification_data.num_clicks,
                    notification_data.creation_time_millis,
                    map_field_to_reason.get(str(notification_data.closed_reason)),
                    notification_data.has_triggered,
                    notification_data.origin,
                ]
            )
        if out.csv:
            operaWriter.write_csv("notifications", output)
        if out.json:
            operaWriter.write_json("notifications", output)
    except Exception as e:  # damaged/locked DB: report and keep parsing other artifacts
        print(e)
    finally:
        if db is not None:
            db.close()  # release the LevelDB lock (was leaked before)


def parse_extensions(path):
    """Parse each installed extension's manifest.json for name/author/version info."""
    if not os.path.exists(path):
        return None
    output = [["name", "author", "version", "description", "developer"]]
    for extension in os.listdir(path):
        try:
            # Each extension directory contains a single version subdirectory.
            version_dir = os.listdir(f"{path}/{extension}")[0]
            with open(f"{path}/{extension}/{version_dir}/manifest.json", "r", encoding="UTF-8") as f:
                manifest = json.load(f)
            output.append(
                [
                    manifest.get("name", "No name specified"),
                    manifest.get("author", "No author specified"),
                    manifest.get("manifest_version", "No version specified"),
                    manifest.get("description", "No description specified"),
                    manifest.get("developer", "No developer specified"),
                ]
            )
        except (OSError, IndexError, json.JSONDecodeError):
            # Missing/unreadable manifest -- the old `except KeyError` could
            # never fire since every lookup above uses .get() with a default.
            print(f"[!] Error parsing {operaWriter.browser} extensions, check them manually!")
    if out.csv:
        operaWriter.write_csv("extensions", output)
    if out.json:
        operaWriter.write_json("extensions", output)


def parse_opera_data(user, directory, output, args):
    """Entry point: parse all Opera artifacts under ``directory`` for ``user``."""
    print("[*] Starting to parse Opera")
    global out
    out = args
    global operaWriter
    operaWriter = outputWriter(output, user, "opera")
    parse_downloads(f"{directory}/History")
    parse_history(f"{directory}/History")
    parse_visited_links(f"{directory}/History")
    parse_searches(f"{directory}/History")
    parse_favicons(f"{directory}/Favicons")
    parse_cookies(f"{directory}/Network/Cookies")
    parse_shortcuts(f"{directory}/Shortcuts")
    parse_chromium_notifications(f"{directory}/Platform Notifications")
    parse_extensions(f"{directory}/Extensions")


# --- lib/output.py ---
import csv
import json


class outputWriter:
    """
    This class is made to write the output of parse_ functions to either CSV or JSON.
    write_csv takes a list like [["column1","column2"],["row1_col1","row1_col2"]]
    (header row first) and writes it to a csv like this:
        column1,column2
        row1_col1,row1_col2

    This is to reduce code reuse in the parsing functions.
    """

    def __init__(self, output: str, user: str, browser: str, profile: str = ""):
        self.output_directory = output
        self.user = user
        self.browser = browser
        self.profile = profile
        # Base path: <output><user>_<browser>[_<profile>]; the write_* methods
        # append the datatype and extension.
        if self.profile:
            self.filename = f"{self.output_directory}{self.user}_{self.browser}_{self.profile}"
        else:
            self.filename = f"{self.output_directory}{self.user}_{self.browser}"

    def write_csv(self, datatype: str, content: list):
        """Write all rows (header row included) to <filename>_<datatype>.csv."""
        with open(f"{self.filename}_{datatype}.csv", "w", newline="", encoding="UTF-8") as csvfile:
            csv.writer(csvfile).writerows(content)

    def write_json(self, datatype: str, content: list):
        """Append rows to <filename>_<datatype>.json as a list of objects.

        The first element of ``content`` is the header row; any JSON already
        in the target file is preserved and extended.
        """
        if not content:
            return
        # Slice instead of pop(0): don't mutate the caller's list.
        headers, rows = content[0], content[1:]
        try:
            with open(f"{self.filename}_{datatype}.json", "r", encoding="UTF-8") as file:
                existing_json = json.load(file)
        except (FileNotFoundError, json.JSONDecodeError):
            # Missing or corrupt previous output: start fresh.
            existing_json = []
        existing_json.extend(dict(zip(headers, row)) for row in rows)
        with open(f"{self.filename}_{datatype}.json", "w", encoding="UTF-8") as file:
            json.dump(existing_json, file, indent=4)
# --- lib/yandex.py ---
from datetime import datetime, timedelta
import sqlite3
import os
import json
import plyvel  # type: ignore
from lib.a_pb2 import NotificationDatabaseDataProto  # type: ignore
from lib.output import outputWriter  # type: ignore


def connect_database(database_path: str):
    """Open a read-only, immutable connection to a SQLite database.

    Returns None (instead of raising) when the file is missing or damaged, so
    the caller can keep parsing the other artifacts.
    """
    try:
        if os.path.exists(database_path):
            # mode=ro&immutable=1: never write to (or lock) the evidence file.
            return sqlite3.connect(f"file:{database_path}?mode=ro&immutable=1", uri=True)
        return None
    except Exception as e:
        print(f"[!] Database connection error: {e}")
        return None


def convert_time(timestamp):
    """Convert a Chromium/WebKit timestamp (microseconds since 1601-01-01) to a datetime string."""
    chromium_base_date = datetime(1601, 1, 1)
    return str(chromium_base_date + timedelta(microseconds=timestamp))


def parse_downloads(database):
    """Parse the downloads table from the History SQLite database."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM downloads").fetchall()
    output = [
        [
            "filename",
            "current_path",
            "target_path",
            "start_time",
            "received_bytes",
            "total_bytes",
            "end_time",
            "opened",
            "last_access_time",
            "mime_type",
        ]
    ]
    for entry in entries:
        # current_path is a Windows path; keep only the basename as the filename.
        filename = entry[2].split("\\")[-1]
        output.append(
            [
                filename,
                entry[2],
                entry[3],
                convert_time(entry[4]),
                entry[5],
                entry[6],
                convert_time(entry[11]),
                str(bool(entry[12])),
                convert_time(entry[13]),
                entry[25],
            ]
        )
    if out.csv:
        yandexWriter.write_csv("downloads", output)
    if out.json:
        yandexWriter.write_json("downloads", output)


def parse_history(database):
    """Parse the urls table (browsing history) from the History SQLite database."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM urls").fetchall()
    output = [["url", "title", "visit_count", "last_visit_time"]]
    for entry in entries:
        # Local file:// entries have no page title; use the filename instead.
        if entry[1].startswith("file://"):
            title = entry[1].split("/")[-1]
        else:
            title = entry[2]
        output.append([entry[1], title, entry[3], convert_time(entry[5])])
    if out.csv:
        yandexWriter.write_csv("history", output)
    if out.json:
        yandexWriter.write_json("history", output)


def parse_visited_links(database):
    """Parse the visited_links table from the History SQLite database."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM visited_links").fetchall()
    output = [["top_level_url", "frame_url", "visit_count"]]
    for entry in entries:
        output.append([entry[2], entry[3], entry[4]])
    if out.csv:
        yandexWriter.write_csv("visited_links", output)
    if out.json:
        yandexWriter.write_json("visited_links", output)


def parse_searches(database):
    """Parse the keyword_search_terms table (omnibox searches)."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM keyword_search_terms").fetchall()
    output = [["term", "normalized_term"]]
    for entry in entries:
        output.append([entry[2], entry[3]])
    if out.csv:
        yandexWriter.write_csv("searches", output)
    if out.json:
        yandexWriter.write_json("searches", output)


def parse_favicons(database):
    """Parse the favicons table (URLs the browser fetched icons for)."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM favicons").fetchall()
    output = [["url"]]
    for entry in entries:
        output.append([entry[1]])
    if out.csv:
        yandexWriter.write_csv("favicons", output)
    if out.json:
        yandexWriter.write_json("favicons", output)


def parse_cookies(database):
    """Parse the cookies table from the Network/Cookies SQLite database."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM cookies").fetchall()
    output = [
        [
            "created_utc",
            "host_key",
            "name",
            "value",
            "expires_utc",
            "last_access_utc",
            "source_port",
            "last_update_utc",
        ]
    ]
    for entry in entries:
        # NOTE: entry[4] is the *encrypted* value column in modern Chromium
        # schemas; it is reported as-is, no decryption is attempted.
        output.append(
            [
                convert_time(entry[0]),
                entry[1],
                entry[3],
                entry[4],
                convert_time(entry[7]),
                convert_time(entry[10]),
                entry[16],
                convert_time(entry[17]),
            ]
        )
    if out.csv:
        yandexWriter.write_csv("cookies", output)
    if out.json:
        yandexWriter.write_json("cookies", output)


def parse_shortcuts(database):
    """Parse the omni_box_shortcuts table (omnibox suggestion shortcuts)."""
    connection = connect_database(database)
    if not connection:
        return None
    entries = connection.execute("SELECT * FROM omni_box_shortcuts").fetchall()
    output = [
        [
            "text",
            "fill_into_edit",
            "contents",
            "keyword",
            "last_access_time",
            "number_of_hits",
        ]
    ]
    for entry in entries:
        output.append(
            [
                entry[1],
                entry[2],
                entry[5],
                entry[11],
                convert_time(entry[12]),
                entry[13],
            ]
        )
    if out.csv:
        yandexWriter.write_csv("shortcuts", output)
    if out.json:
        yandexWriter.write_json("shortcuts", output)


def parse_chromium_notifications(database):
    """Parse the 'Platform Notifications' LevelDB; each record is a
    NotificationDatabaseDataProto protobuf message."""
    # Closed reasons, found in the chromium source-code (lines 16-20):
    # https://source.chromium.org/chromium/chromium/src/+/main:content/browser/notifications/notification_database_data.proto
    map_field_to_reason = {"0": "USER", "1": "DEVELOPER", "2": "UNKNOWN"}  # fixed "UKNOWN" typo

    if not os.path.exists(database):
        return None
    # A LevelDB directory with fewer than 4 files is empty/uninitialised and
    # plyvel errors out on it, so skip it up front.
    if len(os.listdir(database)) < 4:
        return None
    db = None
    try:
        db = plyvel.DB(database, create_if_missing=False)
        output = [
            ["title", "lang", "body", "tag", "icon", "is_silent", "require_interaction",
             "time", "badge", "image", "numClicks", "creation_time_millis",
             "closed_reason", "has_triggered", "origin"]
        ]
        for _key, value in db:
            notification_data = NotificationDatabaseDataProto()
            notification_data.ParseFromString(value)
            inner = notification_data.notification_data
            output.append(
                [
                    inner.title,
                    inner.lang,
                    inner.body,
                    inner.tag,
                    inner.icon,
                    inner.silent,
                    inner.require_interaction,
                    convert_time(int(inner.timestamp)),
                    inner.badge,
                    inner.image,
                    notification_data.num_clicks,
                    notification_data.creation_time_millis,
                    map_field_to_reason.get(str(notification_data.closed_reason)),
                    notification_data.has_triggered,
                    notification_data.origin,
                ]
            )
        if out.csv:
            yandexWriter.write_csv("notifications", output)
        if out.json:
            yandexWriter.write_json("notifications", output)
    except Exception as e:  # damaged/locked DB: report and keep parsing other artifacts
        print(e)
    finally:
        if db is not None:
            db.close()  # release the LevelDB lock (was leaked before)


def parse_extensions(path):
    """Parse each installed extension's manifest.json for name/author/version info."""
    if not os.path.exists(path):
        return None
    output = [["name", "author", "version", "description", "developer"]]
    for extension in os.listdir(path):
        try:
            # Each extension directory contains a single version subdirectory.
            version_dir = os.listdir(f"{path}/{extension}")[0]
            with open(f"{path}/{extension}/{version_dir}/manifest.json", "r", encoding="UTF-8") as f:
                manifest = json.load(f)
            output.append(
                [
                    manifest.get("name", "No name specified"),
                    manifest.get("author", "No author specified"),
                    manifest.get("manifest_version", "No version specified"),
                    manifest.get("description", "No description specified"),
                    manifest.get("developer", "No developer specified"),
                ]
            )
        except (OSError, IndexError, json.JSONDecodeError):
            # Missing/unreadable manifest -- the old `except KeyError` could
            # never fire since every lookup above uses .get() with a default.
            print(f"[!] Error parsing {yandexWriter.browser} extensions, check them manually!")
    if out.csv:
        yandexWriter.write_csv("extensions", output)
    if out.json:
        yandexWriter.write_json("extensions", output)


def parse_yandex_data(user, directory, output, args):
    """Entry point: parse all Yandex artifacts under ``directory`` for ``user``."""
    print("[*] Starting to parse Yandex")
    global out
    out = args
    global yandexWriter
    yandexWriter = outputWriter(output, user, "yandex")
    # NOTE: Yandex uses different names than default chromium. Check the filenames and update accordingly
    parse_downloads(f"{directory}/History")
    parse_history(f"{directory}/History")
    parse_visited_links(f"{directory}/History")
    parse_searches(f"{directory}/History")
    parse_favicons(f"{directory}/Favicons")
    parse_cookies(f"{directory}/Network/Cookies")
    parse_shortcuts(f"{directory}/Shortcuts")
    parse_chromium_notifications(f"{directory}/Platform Notifications")
    parse_extensions(f"{directory}/Extensions")


# --- main.py ---
import argparse
from lib.detectBrowser import locate_browser_directories
from lib.firefox import parse_firefox_data
from lib.edge import parse_edge_data
from lib.chrome import parse_chrome_data
from lib.opera import parse_opera_data
from lib.brave import parse_brave_data
from lib.yandex import parse_yandex_data


def main(args):
    """Locate per-user browser directories and dispatch each one to its parser."""
    directories = locate_browser_directories(args.directory)
    output = args.output
    if not output.endswith("/"):
        output += "/"
    for user in directories:
        for directory in directories[user]:
            if directory == "Firefox":
                # Firefox maps to a list of profile directories.
                for profile in directories[user][directory]:
                    parse_firefox_data(user, profile.replace("//", "/"), profile.split("/")[-1], output, args)
                    print(f"[*] Finished parsing Firefox profile {profile.split('/')[-1]}")
            if directory == "Chrome":
                parse_chrome_data(user, directories[user][directory].replace("//", "/"), output, args)
                print("[*] Finished parsing Chrome")
            if directory == "Edge":
                parse_edge_data(user, directories[user][directory].replace("//", "/"), output, args)
                print("[*] Finished parsing Edge")
            # BUG FIX for the next three branches: .replace("//", "/") was
            # applied to the dict *key* (e.g. "Opera".replace(...)) instead of
            # the directory *path*, so the path was never normalised.
            if directory == "Opera":
                parse_opera_data(user, directories[user][directory].replace("//", "/"), output, args)
                print("[*] Finished parsing Opera")
            if directory == "Brave":
                parse_brave_data(user, directories[user][directory].replace("//", "/"), output, args)
                print("[*] Finished parsing Brave")
            if directory == "Yandex":
                parse_yandex_data(user, directories[user][directory].replace("//", "/"), output, args)
                print("[*] Finished parsing Yandex")
if __name__ == "__main__":
    # CLI entry point: two positional paths plus exactly one of --csv / --json
    # to select the output format.
    parser = argparse.ArgumentParser(
        description="BrowserParser parses most of all relevant browser artifacts, from the output of KAPE"
    )
    for positional in ("directory", "output"):
        parser.add_argument(positional)

    # The output format flags are mutually exclusive and one is required.
    format_group = parser.add_mutually_exclusive_group(required=True)
    format_group.add_argument("--csv", action="store_true", help="Output in CSV format")
    format_group.add_argument("--json", action="store_true", help="Output in JSON format")

    main(parser.parse_args())
    print("[*] Done parsing!")