From 1afea7e6bee5b30afa5e57faa982ae762b0b982c Mon Sep 17 00:00:00 2001
From: nicholas-leonard
Date: Wed, 3 Aug 2016 15:43:05 -0400
Subject: [PATCH] fix bigrams gradInput

---
 Bigrams.lua | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/Bigrams.lua b/Bigrams.lua
index f27004f..87a079a 100644
--- a/Bigrams.lua
+++ b/Bigrams.lua
@@ -30,6 +30,9 @@ function Bigrams:updateOutput(input)
 
    for i=1,batchsize do
       local am = self.bigrams[input[i]]
+      if not am then
+         error("Missing index "..input[i]..". Only have bigrams for "..#self.bigrams.." words")
+      end
       am:batchdraw(self._output[i])
       self.output[i]:index(am.index, 1, self._output[i])
    end
@@ -38,7 +41,7 @@
 end
 
 function Bigrams:updateGradInput(input, gradOutput)
-   self.gradInput = torch.type(self.gradInput) == 'torch.LongTensor' or torch.LongTensor()
+   self.gradInput = torch.type(self.gradInput) == 'torch.LongTensor' and self.gradInput or torch.LongTensor()
    self.gradInput:resizeAs(input):fill(0)
    return self.gradInput
 end