Skip to content

Commit

Permalink
fix bigrams gradInput
Browse files Browse the repository at this point in the history
  • Loading branch information
nicholas-leonard committed Aug 3, 2016
1 parent 81e2f4f commit 1afea7e
Showing 1 changed file with 4 additions and 1 deletion.
5 changes: 4 additions & 1 deletion Bigrams.lua
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,9 @@ function Bigrams:updateOutput(input)

for i=1,batchsize do
local am = self.bigrams[input[i]]
if not am then
error("Missing index "..input[i]..". Only have bigrams for "..#self.bigrams.." words")
end
am:batchdraw(self._output[i])
self.output[i]:index(am.index, 1, self._output[i])
end
Expand All @@ -38,7 +41,7 @@ function Bigrams:updateOutput(input)
end

function Bigrams:updateGradInput(input, gradOutput)
   -- Bigrams draws discrete samples, so there is no meaningful gradient
   -- w.r.t. the (integer index) input: return an all-zero LongTensor
   -- shaped like `input`, as downstream modules expect a gradInput.
   --
   -- Reuse the cached self.gradInput only when it is already a LongTensor;
   -- otherwise allocate a fresh one. The `cond and a or b` ternary is safe
   -- here because `a` (self.gradInput, a tensor) can never be false/nil
   -- when the type check succeeds.
   self.gradInput = torch.type(self.gradInput) == 'torch.LongTensor' and self.gradInput or torch.LongTensor()
   self.gradInput:resizeAs(input):fill(0)
   return self.gradInput
end
Expand Down

0 comments on commit 1afea7e

Please sign in to comment.