Commit 65a7594

Get Demo_DFFN conversions working after incorporating tips from:
ethereon#114 ethereon#123 ethereon#104
1 parent ccd1a52 commit 65a7594

File tree

kaffe/layers.py
kaffe/shapes.py
kaffe/tensorflow/transformer.py

3 files changed: +20 -4 lines changed


kaffe/layers.py

Lines changed: 17 additions & 3 deletions
@@ -4,6 +4,8 @@
 
 from .shapes import *
 
+import pdb
+
 LAYER_DESCRIPTORS = {
 
     # Caffe Types
@@ -103,6 +105,7 @@ class LayerAdapter(object):
     def __init__(self, layer, kind):
         self.layer = layer
         self.kind = kind
+        self._input_shape = None
 
     @property
     def parameters(self):
@@ -114,7 +117,7 @@ def parameters(self):
         raise NodeDispatchError('Caffe parameters not found for layer kind: %s' % (self.kind))
 
     @staticmethod
-    def get_kernel_value(scalar, repeated, idx, default=None):
+    def get_kernel_value(scalar, repeated, idx, default=None, params=None):
         if scalar:
             return scalar
         if repeated:
@@ -127,15 +130,26 @@ def get_kernel_value(scalar, repeated, idx, default=None):
             # Extract the value for the given spatial dimension
             return repeated[idx]
         if default is None:
+            #pdb.set_trace()
             raise ValueError('Unable to determine kernel parameter!')
         return default
 
+    def set_input_shape(self, input_shape):
+        self._input_shape = input_shape
+
     @property
     def kernel_parameters(self):
         assert self.kind in (NodeKind.Convolution, NodeKind.Pooling)
         params = self.parameters
-        k_h = self.get_kernel_value(params.kernel_h, params.kernel_size, 0)
-        k_w = self.get_kernel_value(params.kernel_w, params.kernel_size, 1)
+        global_pool = hasattr(params, 'global_pooling')
+        if params.kernel_size:
+            k_h = self.get_kernel_value(params.kernel_h, params.kernel_size, 0)
+            k_w = self.get_kernel_value(params.kernel_w, params.kernel_size, 1)
+        elif self._input_shape:
+            k_h, k_w = [self._input_shape.height, self._input_shape.width]
+        else:  # errors out in get_kernel_value
+            k_h = self.get_kernel_value(params.kernel_h, params.kernel_size, 0)
+            k_w = self.get_kernel_value(params.kernel_w, params.kernel_size, 1)
         s_h = self.get_kernel_value(params.stride_h, params.stride, 0, default=1)
         s_w = self.get_kernel_value(params.stride_w, params.stride, 1, default=1)
         p_h = self.get_kernel_value(params.pad_h, params.pad, 0, default=0)
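For readers following along: the new branch handles Caffe pooling layers that set global_pooling: true and therefore omit kernel_size; for those, the kernel is taken to span the entire spatial input, which is why the parent's output shape is threaded in via set_input_shape (see the shapes.py change below). A minimal standalone sketch of the fallback, using hypothetical InputShape/PoolParams namedtuples in place of kaffe's real types:

    from collections import namedtuple

    # Hypothetical stand-ins; kaffe's real shape type also carries
    # batch_size and channels.
    InputShape = namedtuple('InputShape', ['height', 'width'])
    PoolParams = namedtuple('PoolParams', ['kernel_size', 'global_pooling'])

    def resolve_kernel_hw(params, input_shape):
        # Prefer an explicit kernel_size (a repeated field in Caffe protos).
        if params.kernel_size:
            return params.kernel_size[0], params.kernel_size[-1]
        # global_pooling: true => the kernel covers the whole input plane.
        if input_shape is not None:
            return input_shape.height, input_shape.width
        raise ValueError('Unable to determine kernel parameter!')

    # A 7x7 global average pool, as emitted by many classification nets:
    print(resolve_kernel_hw(PoolParams([], True), InputShape(7, 7)))  # (7, 7)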

kaffe/shapes.py

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@ def get_filter_output_shape(i_h, i_w, params, round_func):
 def get_strided_kernel_output_shape(node, round_func):
     assert node.layer is not None
     input_shape = node.get_only_parent().output_shape
+    node.layer.set_input_shape(input_shape)
     o_h, o_w = get_filter_output_shape(input_shape.height, input_shape.width,
                                        node.layer.kernel_parameters, round_func)
     params = node.layer.parameters
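Worth noting: this one-line change is what enables the layers.py fallback above, and the ordering matters because the kernel_parameters property reads self._input_shape. Schematically (only names that appear in the diff):

    # Call order enforced by the patched get_strided_kernel_output_shape:
    input_shape = node.get_only_parent().output_shape
    node.layer.set_input_shape(input_shape)   # record the parent shape first...
    k = node.layer.kernel_parameters          # ...so this can fall back to it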

kaffe/tensorflow/transformer.py

Lines changed: 2 additions & 1 deletion
@@ -146,7 +146,8 @@ def map_dropout(self, node):
         return TensorFlowNode('dropout', node.parameters.dropout_ratio)
 
     def map_batch_norm(self, node):
-        scale_offset = len(node.data) == 4
+        scale_offset = len(node.output_shape) == 4
+        # scale_offset = len(node.data) == 4
         kwargs = {} if scale_offset else {'scale_offset': False}
         return MaybeActivated(node, default=False)('batch_normalization', **kwargs)
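The old and new predicates test different things, which is presumably why the original line is kept as a comment: node.data counts the layer's learned blobs (typically four when scale/offset parameters have been fused in), while node.output_shape counts dimensions of the output tensor (four for an NCHW feature map). A sketch with made-up values showing what each comparison actually sees:

    # Made-up values illustrating the two heuristics (not real kaffe objects):
    node_data = ['mean', 'variance', 'scale', 'offset']  # learned blobs (old test)
    output_shape = (1, 64, 56, 56)                       # NCHW tensor (new test)

    old_scale_offset = len(node_data) == 4       # "does BN carry scale/offset blobs?"
    new_scale_offset = len(output_shape) == 4    # "is the output rank-4?"
    print(old_scale_offset, new_scale_offset)    # True True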
152153
