Initialization.py
import numpy as np


def initialize_wrd_emb(vocab_size, emb_size):
    """
    Initialize the word embedding matrix with small random values.

    :param vocab_size: int. vocabulary size of the training corpus
    :param emb_size: int. the dimension of the word embeddings we want to have
    :return: a matrix of dimensions (vocab_size, emb_size)
    """
    WRD_EMB = np.random.randn(vocab_size, emb_size) * 0.01
    return WRD_EMB


def initialize_dense(input_size, output_size):
    """
    Initialize the dense (output) layer weights with small random values.

    :param input_size: int. size of the input to the dense layer
    :param output_size: int. size of the output from the dense layer
    :return: a matrix of dimensions (output_size, input_size)
    """
    W = np.random.randn(output_size, input_size) * 0.01
    return W


def initialize_parameters(vocab_size, emb_size):
    """
    Initialize all the training parameters and collect them in a dict.
    """
    WRD_EMB = initialize_wrd_emb(vocab_size, emb_size)
    W = initialize_dense(emb_size, vocab_size)

    parameters = {}
    parameters['WRD_EMB'] = WRD_EMB
    parameters['W'] = W
    return parameters
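

# Minimal usage sketch (not part of the original file; the vocabulary and
# embedding sizes below are assumed example values). With a 5000-word
# vocabulary and 50-dimensional embeddings, WRD_EMB has shape (5000, 50)
# and the dense-layer weight matrix W has shape (5000, 50), i.e.
# (vocab_size, emb_size) for both, since W maps embeddings back to
# vocabulary-sized scores.
if __name__ == '__main__':
    params = initialize_parameters(vocab_size=5000, emb_size=50)
    print(params['WRD_EMB'].shape)  # (5000, 50)
    print(params['W'].shape)        # (5000, 50)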