
Commit 2362a18
update linear weight initialization
ne7ermore committed Nov 7, 2017
1 parent 3d82d65

Showing 2 changed files with 11 additions and 3 deletions.

cbow/main.py: 10 changes (9 additions, 1 deletion)
@@ -29,17 +29,25 @@ def __init__(self, vocab_size, ebd_size, cont_size):
         super(CBOW, self).__init__()
 
         self.ebd = nn.Embedding(vocab_size, ebd_size)
-        self.ebd.weight.data.uniform_(-0.1, 0.1)
         self.lr1 = nn.Linear(ebd_size*cont_size*2, 128)
         self.lr2 = nn.Linear(128, vocab_size)
 
+        self._init_weight()
+
     def forward(self, inputs):
         out = self.ebd(inputs).view(1, -1)
         out = F.relu(self.lr1(out))
         out = self.lr2(out)
         out = F.log_softmax(out)
         return out
 
+    def _init_weight(self, scope=0.1):
+        self.ebd.weight.data.uniform_(-scope, scope)
+        self.lr1.weight.data.uniform_(0, scope)
+        self.lr1.bias.data.fill_(0)
+        self.lr2.weight.data.uniform_(0, scope)
+        self.lr2.bias.data.fill_(0)
+
 def make_context_vector(context, word_to_ix):
     idxs = [word_to_ix[w] for w in context]
     tensor = torch.LongTensor(idxs)
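
To sanity-check the rewritten initialization path, the CBOW module above can be exercised with a quick forward pass. A minimal smoke-test sketch, assuming a recent PyTorch (no Variable wrapping needed); the toy vocabulary, words, and sizes are illustrative, not taken from the repository:

    import torch

    vocab = ["we", "are", "about", "to", "study"]
    word_to_ix = {w: i for i, w in enumerate(vocab)}

    # cont_size counts context words on each side of the target,
    # so forward() consumes 2 * cont_size word indices per example.
    model = CBOW(vocab_size=len(vocab), ebd_size=10, cont_size=2)

    context = ["we", "are", "to", "study"]  # 2 * cont_size = 4 words
    idxs = torch.LongTensor([word_to_ix[w] for w in context])

    log_probs = model(idxs)  # log-probabilities over the vocab, shape (1, 5)
    print(log_probs.size())  # torch.Size([1, 5])
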
ngram/main.py: 4 changes (2 additions, 2 deletions)
@@ -48,9 +48,9 @@ def forward(self, inputs):

     def _init_weight(self, scope=0.1):
         self.embeddings.weight.data.uniform_(-scope, scope)
-        self.l1.weight.data.uniform_(-scope, scope)
+        self.l1.weight.data.uniform_(0, scope)
         self.l1.bias.data.fill_(0)
-        self.l2.weight.data.uniform_(-scope, scope)
+        self.l2.weight.data.uniform_(0, scope)
         self.l2.bias.data.fill_(0)
 
 criterion = nn.NLLLoss()
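
As an aside, writing to .data works but bypasses autograd tracking; on a recent PyTorch the same scheme is more idiomatically expressed with the torch.nn.init helpers, which run under no_grad internally. A minimal sketch of an equivalent _init_weight for the n-gram model, assumed rather than taken from this repository:

    import torch.nn as nn

    def _init_weight(self, scope=0.1):
        # embeddings: symmetric uniform in [-scope, scope]
        nn.init.uniform_(self.embeddings.weight, -scope, scope)
        # linear layers: non-negative uniform in [0, scope], as in this commit
        nn.init.uniform_(self.l1.weight, 0, scope)
        nn.init.zeros_(self.l1.bias)
        nn.init.uniform_(self.l2.weight, 0, scope)
        nn.init.zeros_(self.l2.bias)
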
