preprocess.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2020-present, Baidu, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
# Acknowledgement: The code is modified based on Facebook AI's XLM.
#
#
"""
Example: python preprocess.py data/vocab.txt data/train.txt
vocab.txt: one "<word> <count>" pair per line (1st field = word, 2nd field = count)
"""
import os
import sys
from src.data.dictionary import Dictionary
from src.logger import create_logger
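
# The indexed corpus is written next to the input text file as "<txt_path>.pth"
# (see bin_path below); the vocabulary is loaded with Dictionary.read_vocab.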
if __name__ == '__main__':

    logger = create_logger(None, 0)

    # command-line arguments: vocabulary file and text file to index
    voc_path = sys.argv[1]
    txt_path = sys.argv[2]
    bin_path = sys.argv[2] + '.pth'
    assert os.path.isfile(voc_path)
    assert os.path.isfile(txt_path)

    # load the vocabulary and index the corpus, writing the result to bin_path
    dico = Dictionary.read_vocab(voc_path)
    logger.info("")

    data = Dictionary.index_data(txt_path, bin_path, dico)
    logger.info("%i words (%i unique) in %i sentences." % (
        len(data['sentences']) - len(data['positions']),
        len(data['dico']),
        len(data['positions'])
    ))

    # report unknown-word statistics, and list the unknown words if there are only a few
    if len(data['unk_words']) > 0:
        logger.info("%i unknown words (%i unique), covering %.2f%% of the data." % (
            sum(data['unk_words'].values()),
            len(data['unk_words']),
            sum(data['unk_words'].values()) * 100. / (len(data['sentences']) - len(data['positions']))
        ))
        if len(data['unk_words']) < 30:
            for w, c in sorted(data['unk_words'].items(), key=lambda x: x[1])[::-1]:
                logger.info("%s: %i" % (w, c))
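
# Illustrative sketch (an assumption, not part of the original script): if
# Dictionary.index_data serializes the same data it returns (as in Facebook
# AI's XLM, via torch.save), the produced file can be inspected like this:
#
#   import torch
#   data = torch.load('data/train.txt.pth')
#   print('%i unique words, %i sentences' % (len(data['dico']), len(data['positions'])))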