Implements NormStabilizer layer and adds tests
Jonathan Uesato committed on May 2, 2016
1 parent 4731643 · commit 2cf512b
Showing 4 changed files with 199 additions and 1 deletion.
New file defining nn.CopyGrad (@@ -0,0 +1,6 @@):
local CopyGrad, _ = torch.class('nn.CopyGrad', 'nn.Identity')

function CopyGrad:updateGradInput(input, gradOutput)
   self.gradInput:resizeAs(gradOutput):copy(gradOutput)
   return self.gradInput
end
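nn.Identity:updateGradInput returns gradOutput by reference, whereas CopyGrad copies it into its own gradInput buffer. NormStabilizer relies on this: it later adds the stabilizer gradient to CopyGrad.gradInput in place, which must not corrupt the gradOutput tensor owned by the module above. A minimal illustrative sketch of the behaviour (assuming the module is loaded through the rnn package, as in this commit):

require 'rnn'  -- assumed entry point; the commit registers the class as nn.CopyGrad
local m = nn.CopyGrad()
local input = torch.randn(2, 3)
local gradOutput = torch.randn(2, 3)
m:forward(input)                                 -- forward pass is plain identity
local gradInput = m:backward(input, gradOutput)
-- gradInput holds the same values as gradOutput but lives in a separate tensor,
-- so in-place additions to gradInput leave gradOutput untouched
print(torch.pointer(gradInput) ~= torch.pointer(gradOutput))  -- true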
New file defining nn.NormStabilizer (@@ -0,0 +1,76 @@):
------------------------------------------------------------------------
--[[ Norm Stabilization ]]--
-- Regularizing RNNs by Stabilizing Activations
-- Ref. A: http://arxiv.org/abs/1511.08400
------------------------------------------------------------------------

local NS, parent = torch.class("nn.NormStabilizer", "nn.AbstractRecurrent")

function NS:__init(beta, rho)
   parent.__init(self, rho or 9999)
   self.recurrentModule = nn.CopyGrad()
   self.beta = beta
end

function NS:_accGradParameters(input, gradOutput, scale)
   -- No parameters to update
end

function NS:updateOutput(input)
   local output
   if self.train ~= false then
      self:recycle()
      local recurrentModule = self:getStepModule(self.step)
      output = recurrentModule:updateOutput(input)
   else
      output = self.recurrentModule:updateOutput(input)
   end

   self.outputs[self.step] = output

   self.output = output
   self.step = self.step + 1
   self.gradPrevOutput = nil
   self.updateGradInputStep = nil
   self.accGradParametersStep = nil

   return self.output
end

function NS:_updateGradInput(input, gradOutput)
   -- First grab h[t] and h[t+1] :
   -- backward propagate through this step
   local gradInput = self.recurrentModule:updateGradInput(input, gradOutput)
   local curStep = self.updateGradInputStep - 1
   local hiddenModule = self:getStepModule(curStep)
   local hiddenState = hiddenModule.output
   hiddenModule.gradInput = gradInput

   if curStep < self.step then
      local batchSize = hiddenState:size(1)
      if curStep > 1 then
         local prevHiddenModule = self:getStepModule(curStep - 1)
         local prevHiddenState = prevHiddenModule.output
         -- Add norm stabilizer cost function directly to respective CopyGrad.gradInput tensors
         for i = 1, batchSize do
            local dRegdNorm = self.beta * 2 * (hiddenState[i]:norm() - prevHiddenState[i]:norm()) / batchSize
            local dNormdHid = torch.div(hiddenState[i], hiddenState[i]:norm())
            hiddenModule.gradInput[i]:add(torch.mul(dNormdHid, dRegdNorm))
         end
      end
      if curStep < self.step - 1 then
         local nextHiddenModule = self:getStepModule(curStep + 1)
         local nextHiddenState = nextHiddenModule.output
         for i = 1, batchSize do
            local dRegdNorm = self.beta * -2 * (nextHiddenState[i]:norm() - hiddenState[i]:norm()) / batchSize
            local dNormdHid = torch.div(hiddenState[i], hiddenState[i]:norm())
            hiddenModule.gradInput[i]:add(torch.mul(dNormdHid, dRegdNorm))
         end
      end
   end
   return hiddenModule.gradInput
end

function NS:__tostring__()
   return "nn.NormStabilizer"
end
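The backward pass above implements the gradient of the norm-stabilizer penalty from the referenced paper: beta times the batch-averaged sum over time of (||h_t|| - ||h_{t-1}||)^2, where h_t is the hidden state at step t. Differentiating that sum with respect to h_t yields two terms, and the two loops add exactly those: 2 * beta * (||h_t|| - ||h_{t-1}||) * h_t / ||h_t|| from the (t-1, t) pair (skipped at the first step), and -2 * beta * (||h_{t+1}|| - ||h_t||) * h_t / ||h_t|| from the (t, t+1) pair (skipped at the last step), each divided by the batch size. As a rough sketch (not part of this commit; the function and variable names are illustrative), the penalty whose gradient is being added is:

-- hiddenStates[t] : batchSize x hiddenSize tensor for time step t
-- beta            : same meaning as NS.beta above
local function normStabilizerPenalty(hiddenStates, beta)
   local penalty = 0
   for t = 2, #hiddenStates do
      local batchSize = hiddenStates[t]:size(1)
      for i = 1, batchSize do
         local diff = hiddenStates[t][i]:norm() - hiddenStates[t-1][i]:norm()
         penalty = penalty + beta * diff * diff / batchSize
      end
   end
   return penalty
end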
(The remaining two changed files in this commit are not shown here.)
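The tests mentioned in the commit message are among the files not shown. For orientation, a rough usage sketch, assuming the standard Element-Research rnn API of the time (nn.Sequencer, nn.LSTM); only nn.NormStabilizer itself comes from this commit, the rest of the stack is illustrative:

require 'rnn'

local inputSize, hiddenSize, outputSize, beta = 5, 10, 3, 1
local step = nn.Sequential()
   :add(nn.LSTM(inputSize, hiddenSize))
   :add(nn.NormStabilizer(beta))        -- penalizes step-to-step changes in hidden-state norm
   :add(nn.Linear(hiddenSize, outputSize))
local seq = nn.Sequencer(step)

-- a table of 4 time steps, batch size 2
local inputs, gradOutputs = {}, {}
for t = 1, 4 do
   inputs[t] = torch.randn(2, inputSize)
   gradOutputs[t] = torch.randn(2, outputSize)
end
local outputs = seq:forward(inputs)
seq:backward(inputs, gradOutputs)       -- NormStabilizer adds its gradient during BPTT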