#!/usr/bin/env python2
# -*- coding: utf-8 -*-
from __future__ import print_function
import itertools as it, operator as op, functools as ft
from os.path import ( join, exists, isfile, isdir, splitext,
expanduser, dirname, basename, realpath, abspath )
from contextlib import contextmanager, closing
from collections import OrderedDict
from hashlib import sha384
import os, sys, logging, io, types, re, random, json
import sqlite3, ConfigParser
import lya, pyaml
# Internal control-flow marker (goto-like), used in get_profile_dir to share
# an except-clause with ConfigParser.NoOptionError; never propagates to callers.
class AdHocException(Exception): pass
def get_profile_dir(profile):
    """Resolve a firefox profile spec to an absolute profile directory path.

    *profile* may be: an absolute path (returned as-is, no checks), a profile
    Name from profiles.ini (matched case-insensitively), a fragment of the
    profile directory name (fallback match), or falsy/None to pick the entry
    flagged Default=1 in profiles.ini.

    Raises AssertionError if nothing matches or the result is not a directory.
    """
    if profile:
        # Absolute path given - trust it verbatim, skip profiles.ini entirely.
        if profile.startswith(os.sep): return profile
        profile = profile.lower() # for ci comparisons
    ff_home = expanduser('~/.mozilla/firefox')
    profiles = ConfigParser.RawConfigParser()
    profiles.read(join(ff_home, 'profiles.ini'))
    profile_path, paths = None, list()
    for k in profiles.sections():
        if not k.startswith('Profile'): continue
        name = profiles.get(k, 'Name').strip().lower()
        path = profiles.get(k, 'Path')
        # Exact (case-insensitive) Name match wins immediately - break skips
        # the for/else fragment-matching fallback below.
        if profile is not None and name == profile:
            profile_path = path
            break
        if profile:
            # Candidate for the dir-name-fragment fallback in the else clause.
            paths.append(path)
            continue
        # No profile requested: keep the first section flagged Default=1.
        if profile_path: continue
        try:
            # AdHocException funnels "Default present but false" into the same
            # "not the default profile" handling as a missing Default option.
            if not profiles.getboolean(k, 'Default'): raise AdHocException
        except (ConfigParser.NoOptionError, AdHocException): pass
        else: profile_path = path
    else:
        # Loop finished without an exact-name break: fall back to substring
        # match of the requested name against profile dir names.
        for path in paths:
            if profile in path.lower():
                profile_path = path
                break
    assert profile_path, profile
    profile_path = join(ff_home, profile_path)
    assert isdir(profile_path), [name, profile_path]
    return profile_path
# Expected, user-reportable backup failure (e.g. no session-store file found).
class BackupError(Exception): pass
class BackupStuff(object):
    """Extracts backup-worthy data from a firefox profile directory.

    One <type>_get() method per data type configured under cfg.data_types;
    main() dispatches to these by name and dumps the results as YAML.
    """

    def __init__(self, cfg):
        self.cfg = cfg
        self.cfg_data = self.cfg.data_types # per-data-type config subtree

    def sqlite_dict_row(self, cursor, row):
        """sqlite3 row_factory that yields plain {column: value} dicts."""
        row = sqlite3.Row(cursor, row)
        return dict((k, row[k]) for k in row.keys())

    def sqlite_conn(self, *db_path):
        """Open an sqlite db under the profile dir with dict rows and lock timeout."""
        db_path = join(self.cfg.common.profile_dir, *db_path)
        assert isfile(db_path), db_path
        conn = sqlite3.connect(db_path, timeout=self.cfg.common.sqlite_timeout)
        conn.row_factory = self.sqlite_dict_row
        return conn

    def sane_hash(self, data, trim=20, prefix='ff_stuff_backup:'):
        """Short yaml-safe content hash: base64 of sha384, '/' -> '-', trimmed."""
        return sha384('{}\0{}'.format(prefix, bytes(data)))\
            .digest().encode('base64').rstrip('=\n').replace('/', '-')[:trim]

    def profile_file(self, *path, **opts):
        """Open a file under the profile dir for binary reading."""
        return io.open(join(self.cfg.common.profile_dir, *path), 'rb', **opts)

    def _xml_to_py(self, tree):
        """Recursively convert an ElementTree node to a plain-dict structure.

        Attributes go under '_attrs', stripped text under '_text'; repeated
        child tags collapse into a list under the shared tag key.
        """
        root = dict()
        if tree.items():
            root['_attrs'] = dict(tree.items())
        for node in tree:
            ext_node = self._xml_to_py(node)
            if node.tag in root:
                if isinstance(root[node.tag], (list, tuple)):
                    root[node.tag].append(ext_node)
                else:
                    # Second occurrence of the tag - promote to a list.
                    root[node.tag] = [root[node.tag], ext_node]
            else: root[node.tag] = ext_node
        if tree.text is not None:
            text = tree.text.strip()
            root['_text'] = text
        return root

    def xml_to_py(self, xml_str):
        """Parse an xml string into {root_tag: nested-dict} via _xml_to_py."""
        from xml.etree import ElementTree
        tree = ElementTree.fromstring(xml_str)
        return {tree.tag: self._xml_to_py(tree)}

    def ytcenter_clean(self, data, strip, log=None):
        """Recursively remove keys listed in the *strip* spec from *data* (in-place).

        strip maps key -> None (drop the key at this level) or -> nested spec
        (recurse into data[key]). Returns the same (mutated) data dict.
        """
        # Fix: default the logger up-front. Previously, a missing cleanup key
        # with log=None crashed with AttributeError on log.debug() - the
        # log-or-getLogger fallback only happened on the recursive call.
        if log is None: log = logging.getLogger('ytcenter')
        assert isinstance(data, dict), [type(data), data]
        if not strip: return data
        for k, v in strip.viewitems():
            if v is None:
                if k not in data:
                    log.debug('Missing cleanup key %r, keys: %r', k, data.keys())
                data.pop(k, None)
            elif k not in data: continue
            else:
                self.ytcenter_clean(data[k], v, log=log)
        return data

    def ytcenter_get(self):
        """Dump YouTube Center settings, from either greasemonkey db or addon json.

        Keys listed in conf.db.process are parsed and cleaned; leftover keys in
        conf.db.skip can be recorded as content hashes under '_hashes' (set to
        None when empty). Unknown leftover keys are only warned about.
        """
        conf, log = self.cfg_data.ytcenter, logging.getLogger('ytcenter')
        data = OrderedDict(_hashes=OrderedDict())
        if conf.type == 'greasemonkey':
            with self.sqlite_conn(conf.db.path) as conn:
                with closing(conn.cursor()) as c:
                    c.execute('select * from scriptvals')
                    db_dump = c.fetchall()
            if db_dump:
                assert len(db_dump[0]) == 2\
                    and not set(db_dump[0].viewkeys()).difference(['name', 'value']),\
                    db_dump[0]
            # Fix: convert unconditionally - with an empty table db_dump used
            # to stay a list, crashing on db_dump.viewitems() below.
            db_dump = dict(it.imap(op.itemgetter('name', 'value'), db_dump))
            for name in conf.db.process:
                if name not in db_dump:
                    log.error('Unable to find db key: %r', name)
                    continue
                val = json.loads(json.loads(db_dump.pop(name))) # double-json \o/
                assert name not in data, [name, data.keys()]
                data[name] = self.ytcenter_clean(val, conf.strip, log=log)
            for name, val in db_dump.viewitems():
                if name not in conf.db.skip:
                    log.warn('Unmatched db key: %r', name)
                    continue
                skip_conf = conf.db.skip[name]
                if skip_conf.get('hash'):
                    data['_hashes'][name] = self.sane_hash(val)
        elif conf.type == 'addon':
            with self.profile_file(conf.addon.path) as src: val = json.load(src)
            val = self.ytcenter_clean(val, conf.strip, log=log)
            data[conf.addon.key] = val
        data['_hashes'] = data['_hashes'] or None
        return data

    def convergence_get(self):
        """Dump Convergence extension fingerprints (sqlite) and notaries (xml)."""
        conf = self.cfg_data.convergence
        with self.sqlite_conn(conf.fingerprint_db) as conn:
            with closing(conn.cursor()) as c:
                c.execute('select * from fingerprints')
                fp_list = sorted(c.fetchall(), key=op.itemgetter('id'))
        # Auto-increment ids aren't stable across profiles - drop them.
        for fp in fp_list: del fp['id']
        with self.profile_file(conf.notaries_xml) as src:
            notaries = self.xml_to_py(src.read())
        return dict(fingerprints=fp_list, notaries=notaries)

    def tabs_get(self):
        """Return OrderedDict of {title-or-url: url} for all visible open tabs.

        Loads the first readable session-store json from conf.file path(s).
        Raises BackupError if none of the candidate paths can be read.
        """
        conf = self.cfg_data.tabs
        paths = conf.file
        if isinstance(paths, types.StringTypes): paths = [paths]
        for p in paths:
            try:
                with self.profile_file(p) as src: session = json.load(src)
            except (OSError, IOError): pass
            else: break
        else:
            raise BackupError( 'Failed to find sessions storage'
                ' file path, checked: {}'.format(', '.join(paths)), paths )
        tabs = OrderedDict()
        for win in session['windows']:
            for tab in win.get('tabs', list()):
                if tab.get('hidden'): continue
                entries = tab.get('entries')
                if not entries: continue
                last_entry = entries[tab['index'] - 1] # index is 1-base there
                title, url = (last_entry.get(k) for k in ['title', 'url'])
                assert url, tab
                tabs[title or url] = url
        return tabs
def main(args=None):
    """CLI entry point: parse options, layer YAML configs, dump enabled data types.

    args: argv-style list (default: sys.argv[1:]). Collected data is dumped as
    YAML to stdout; return value becomes the exit status via sys.exit(main()).
    NOTE: python2-only code (py2 raise syntax below); lya/pyaml are third-party.
    """
    import argparse
    parser = argparse.ArgumentParser(
        description='Tool to export various data from firefox profile dir.')
    parser.add_argument('type', nargs='?',
        help='Dump only specified data type, ignoring "enabled" parameters in config file.'
            ' Name corresponds directly to a config file section under "data_types"'
            ' - e.g. ytcenter, convergence, tabs.'
            ' Use-case is getting specific data from ff,'
            ' e.g. "ff_backup tabs" to get all currently open tabs.')
    parser.add_argument('-P', '--profile', metavar='name/key/path',
        help='Full firefox profile name, profile directory name'
            ' or its fragment, or a full path to profile dir (default: use default profile).')
    parser.add_argument('-t', '--db-lock-timeout', type=float, metavar='seconds',
        help='Timeout to acquire sqlite transaction locks (default: %(default)ss).')
    parser.add_argument('-c', '--config',
        action='append', metavar='path', default=list(),
        help='YAML configuration files to process.'
            ' Can be specified more than once.'
            ' Values from the latter ones override values in the former.'
            ' Available CLI options override the values in any config.')
    parser.add_argument('-d', '--debug', action='store_true', help='Verbose operation mode.')
    opts = parser.parse_args(sys.argv[1:] if args is None else args)

    # Base config is <script-path>.yaml next to this script; -c files layer on top.
    cfg = lya.AttrDict.from_yaml('{}.yaml'.format(splitext(realpath(__file__))[0]))
    for k in opts.config: cfg.update_yaml(k)

    global log
    import logging
    lya.configure_logging(cfg.get('logging'), debug=opts.debug)
    log = logging.getLogger()

    # CLI options override any config-file values.
    if opts.profile is not None:
        cfg.common.profile_dir = opts.profile
    if opts.db_lock_timeout is not None:
        cfg.common.sqlite_timeout = opts.db_lock_timeout

    if opts.type:
        # Explicit positional type: enable only that one, ignoring config flags.
        if opts.type not in cfg.data_types:
            parser.error(( 'Specified data type ({!r}) is not supported.'
                ' Supported ones: {}' ).format(opts.type, ', '.join(cfg.data_types)))
        for k, v in cfg.data_types.viewitems():
            v.enabled = k == opts.type

    if opts.debug:
        global pyaml, dump
        try:
            import pyaml
        except ImportError:
            # Only crash when/if this debug printer actually gets used
            err_type, err_val, err_tb = sys.exc_info()
            def pyaml_dump(*args,**kws): raise err_type, err_val, err_tb
            pyaml = type('pyaml_mock', (object,), dict(dump=pyaml_dump))()
        else:
            # Teach pyaml to render sqlite3.Row objects as plain dicts.
            pyaml.UnsafePrettyYAMLDumper.add_representer(
                sqlite3.Row, lambda s,o: s.represent_dict(dict((k, o[k]) for k in o.keys())) )
            dump = ft.partial(pyaml.dump, dst=sys.stdout, force_embed=True)

    cfg.common.profile_dir = get_profile_dir(cfg.common.profile_dir)
    log.debug('Using ff profile dir: %s', cfg.common.profile_dir)

    backup = BackupStuff(cfg)
    data = OrderedDict()
    # Dispatch each enabled data type to the matching BackupStuff.<type>_get().
    for k, v in cfg.data_types.viewitems():
        if not v.enabled: continue
        data[k] = getattr(backup, '{}_get'.format(k))()
    pyaml.dump(data, sys.stdout)
# Script entry point; main()'s return value becomes the process exit status.
if __name__ == '__main__': sys.exit(main())