
Commit 98e3a55

refactoring & some changes
1 parent bd49bd4 commit 98e3a55

8 files changed, +60 -28 lines

.gitignore  (+11 -1)

@@ -105,4 +105,14 @@ venv.bak/
 .mypy_cache/
 
 #nohup
-nohup.out
+nohup.out
+
+#others
+notebooks/*
+LICENSE-MIT
+pyproject.toml
+tox.ini
+tests/*
+.coveragerc
+MANIFEST.in
+setup.py

nalu/__init__.py  (new file, +4)

@@ -0,0 +1,4 @@
+__version__ = '0.0.1'
+
+from .core import *
+from .layers import *

nalu/core/__init__.py  (new file, +2)

@@ -0,0 +1,2 @@
+from .nac_cell import NacCell
+from .nalu_cell import NaluCell
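With these __init__.py files in place, the cells resolve through the package rather than through top-level scripts. A minimal import sketch, assuming the nalu package is installed or on the path:

    from nalu.core import NacCell, NaluCell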

NeuralAccumulator.py → nalu/core/nac_cell.py  (+9 -12)

@@ -1,27 +1,24 @@
-import torch
-
-from math import sqrt
-from torch import Tensor, exp, log, nn
+from torch import Tensor, nn
 from torch.nn.parameter import Parameter
 from torch.nn.init import xavier_uniform_
 from torch.nn.functional import tanh, sigmoid, linear
 
 
-class NeuralAccumulator(nn.Module):
+class NacCell(nn.Module):
     """Basic NAC unit implementation
     from https://arxiv.org/pdf/1808.00508.pdf
     """
 
-    def __init__(self, inputs, outputs):
+    def __init__(self, in_shape, out_shape):
         """
-        inputs: input sample size
-        outputs: output sample size
+        in_shape: input sample dimension
+        out_shape: output sample dimension
         """
         super().__init__()
-        self.inputs = inputs
-        self.outputs = outputs
-        self.W_ = Parameter(Tensor(outputs, inputs))
-        self.M_ = Parameter(Tensor(outputs, inputs))
+        self.in_shape = in_shape
+        self.out_shape = out_shape
+        self.W_ = Parameter(Tensor(out_shape, in_shape))
+        self.M_ = Parameter(Tensor(out_shape, in_shape))
         self.W = Parameter(tanh(self.W_) * sigmoid(self.M_))
         xavier_uniform_(self.W_), xavier_uniform_(self.M_)
         self.register_parameter('bias', None)
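The cell implements the NAC weight constraint from the paper, W = tanh(W_) * sigmoid(M_), which saturates entries toward {-1, 0, 1} so the unit learns additions and subtractions. Since the diff shows self.W materialised once in __init__ (before W_ and M_ are re-initialised), here is a minimal sketch, not part of this commit, of a forward pass that recomputes the constraint from the trainable parameters:

    import torch
    from torch.nn.functional import linear

    def nac_forward(cell, x):
        # Recompute the constrained weight from the raw parameters so
        # gradients flow into W_ and M_ rather than a frozen snapshot.
        W = torch.tanh(cell.W_) * torch.sigmoid(cell.M_)
        return linear(x, W, cell.bias)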

NALU.py → nalu/core/nalu_cell.py  (+12 -15)

@@ -1,32 +1,29 @@
-import torch
-
-from math import sqrt
 from torch import Tensor, exp, log, nn
 from torch.nn.parameter import Parameter
 from torch.nn.init import xavier_uniform_
-from torch.nn.functional import tanh, sigmoid, linear
-from NeuralAccumulator import NeuralAccumulator
+from torch.nn.functional import sigmoid, linear
+from .nac_cell import NacCell
 
 
-class NALU(nn.Module):
+class NaluCell(nn.Module):
     """Basic NALU unit implementation
     from https://arxiv.org/pdf/1808.00508.pdf
     """
 
-    def __init__(self, inputs, outputs):
+    def __init__(self, in_shape, out_shape):
         """
-        inputs: input sample size
-        outputs: output sample size
+        in_shape: input sample dimension
+        out_shape: output sample dimension
         """
         super().__init__()
-        self.inputs = inputs
-        self.outputs = outputs
-        self.G = Parameter(Tensor(outputs, inputs))
-        self.W = Parameter(Tensor(outputs, inputs))
-        self.nac = NeuralAccumulator(outputs, inputs)
+        self.in_shape = in_shape
+        self.out_shape = out_shape
+        self.G = Parameter(Tensor(out_shape, in_shape))
+        self.W = Parameter(Tensor(out_shape, in_shape))
+        self.nac = NacCell(out_shape, in_shape)
+        xavier_uniform_(self.G), xavier_uniform_(self.W)
         self.eps = 1e-5
         self.register_parameter('bias', None)
-        xavier_uniform_(self.G), xavier_uniform_(self.W)
 
     def forward(self, input):
         a = self.nac(input)
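The hunk cuts off after a = self.nac(input). Per the paper, the rest of forward gates between the NAC's additive path and a log-space multiplicative path; a rough sketch of that computation using the names defined above (not the commit's exact code):

    import torch
    from torch.nn.functional import linear

    def nalu_forward(cell, x):
        a = cell.nac(x)                                  # additive path (NAC)
        g = torch.sigmoid(linear(x, cell.G, cell.bias))  # learned gate
        log_x = torch.log(torch.abs(x) + cell.eps)       # log-space input
        m = torch.exp(linear(log_x, cell.W, cell.bias))  # multiplicative path
        return g * a + (1 - g) * m                       # gated combination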

nalu/layers/__init__.py  (new file, +1)

@@ -0,0 +1 @@
+from .nalu_layer import NaluLayer

nalu/layers/nalu_layer.py  (new file, +18)

@@ -0,0 +1,18 @@
+from torch.nn import Sequential
+from torch import nn
+from nalu.core.nalu_cell import NaluCell
+
+
+class NaluLayer(nn.Module):
+    def __init__(self, input_shape, output_shape, n_layers, hidden_shape):
+        super().__init__()
+        self.input_shape = input_shape
+        self.output_shape = output_shape
+        self.n_layers = n_layers
+        self.hidden_shape = hidden_shape
+        layers = [NaluCell(hidden_shape if n > 0 else input_shape,
+                           hidden_shape if n < n_layers - 1 else output_shape) for n in range(n_layers)]
+        self.model = Sequential(*layers)
+
+    def forward(self, data):
+        return self.model(data)
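A hypothetical usage sketch for the new NaluLayer; square shapes are the safe choice here, since the commit constructs NacCell(out_shape, in_shape) inside NaluCell:

    import torch
    from nalu.layers import NaluLayer

    # Two stacked NALU cells: 8 -> 8 -> 8
    layer = NaluLayer(input_shape=8, output_shape=8, n_layers=2, hidden_shape=8)
    x = torch.randn(64, 8)   # batch of 64 samples, 8 features each
    y = layer(x)             # -> shape (64, 8)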

requirements.txt  (new file, +3)

@@ -0,0 +1,3 @@
+torch
+numpy
+
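With the new requirements file in place, the usual install step applies:

    pip install -r requirements.txt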