Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 46 additions & 7 deletions hls4ml/model/hls_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -1227,13 +1227,31 @@ def print_tcl(self):

class Pooling1D(Layer):
def initialize(self):
    """Set up the output tensor shape/dims for a 1D pooling layer.

    Supports both 'channels_last' and 'channels_first' data formats; the
    output variable's shape and dim names are ordered accordingly.
    """
    # Dimension ordering depends on the Keras data format.
    channels_last = self.get_attr('data_format') == 'channels_last'
    if channels_last:
        shape = [self.attributes['n_out'], self.attributes['n_filt']]
        dims = ['N_OUTPUTS_{}'.format(self.index), 'N_FILT_{}'.format(self.index)]
    else:
        shape = [self.attributes['n_filt'], self.attributes['n_out']]
        dims = ['N_FILT_{}'.format(self.index), 'N_OUTPUTS_{}'.format(self.index)]

    # depth=1: pooling produces one value per window — TODO confirm intended
    # meaning of 'depth' against add_output_variable's definition.
    self.add_output_variable(shape, dims, cl=channels_last, depth=1)
    # 'MaxPooling1D' -> 'Max', 'AveragePooling1D' -> 'Average'
    self.set_attr('pool_op', self.get_attr('class_name').split('Pooling')[0])
    # Track single-output pooling so templates can select the _1x1 kernel.
    self.is1x1 = (self.attributes['n_out'] == 1)

def function_cpp(self,iFirst=False):
params = self._default_function_params()
params['data_format'] = 'cf' if self.get_attr('data_format') == 'channels_first' else 'cl'
params['1x1'] = ''
if self.is1x1:
params['1x1'] = '_1x1'
header=''
if self.model.config.get_config_value('IOType') == 'io_serial':
if iFirst:
Expand All @@ -1244,15 +1262,36 @@ def function_cpp(self,iFirst=False):

def config_cpp(self):
    """Render the C++ config struct text for this pooling layer.

    Derives the flattened spatial input size and the channel-count macros
    from the input variable's dim names, respecting the data format.
    """
    params = self._default_config_params()
    input_dims = self.get_input_variable().dim_names

    if self.get_attr('data_format') == 'channels_last':
        # Channels are the last dimension; spatial dims come first.
        params['n_in'] = '*'.join(str(k) for k in input_dims[:-1])
        # n_chan is the channel count minus one (loop bound convention used
        # by the streaming kernels); n_chan_in is the full stream count.
        params['n_chan'] = input_dims[1] + '-1'
        params['n_chan_in'] = input_dims[1]
    else:
        # Channels first: spatial dims are everything after the channel dim.
        params['n_in'] = '*'.join(str(k) for k in input_dims[1:])
        params['n_chan'] = input_dims[0] + '-1'
        params['n_chan_in'] = input_dims[0]

    params['n_filt_in'] = 'N_FILT_{}'.format(self.index)
    params['n_filt'] = 'N_FILT_{}-1'.format(self.index)
    params['n_out'] = 'N_OUTPUTS_{}'.format(self.index)

    return self._config_template.format(**params)

def print_tcl(self):
    """Render the per-layer TCL build snippet for this pooling layer."""
    params = self._default_tcl_params()

    # Channel/filter dim names depend on the data format ordering.
    if self.get_attr('data_format') == 'channels_last':
        params['n_chan_in'] = self.get_input_variable().dim_names[1]
        params['n_filt_in'] = self.get_output_variable().dim_names[1]
    else:
        params['n_chan_in'] = self.get_input_variable().dim_names[0]
        params['n_filt_in'] = self.get_output_variable().dim_names[0]

    # Select the specialized kernel when the pool output is a single element.
    params['1x1'] = '_1x1' if self.is1x1 else ''

    return self._tcl_template.format(**params)

class Pooling2D(Layer):
Expand Down
90 changes: 90 additions & 0 deletions hls4ml/templates/vivado/nnet_utils/nnet_pooling.h
Original file line number Diff line number Diff line change
Expand Up @@ -86,12 +86,17 @@ T pad_val(){
// Default compile-time configuration for 1D pooling layers.
// Per-layer generated configs derive from this struct and override
// these members; the values below are placeholder defaults only.
struct pooling1d_config{
// IO size
static const unsigned n_in = 10;
static const unsigned n_filt = 2;
static const unsigned stride = 2;
static const unsigned pool_size = 2;
// Default assumes stride == pool_size and no padding — derived configs
// override n_out with the true output length.
static const unsigned n_out = n_in / pool_size;
static const unsigned pad_left = 0;
static const unsigned pad_right = 0;

// Pooling function
static const Pool_Op pool_op = Max;
// Reuse
static const unsigned reuse = 1;
};

template<class data_T, typename CONFIG_T>
Expand All @@ -105,6 +110,91 @@ void pooling1d(data_T data[CONFIG_T::n_in], data_T res[CONFIG_T::n_out]){
}
}

// Streaming channels-last 1D pooling over per-channel input streams.
// Stream index 0 carries a control token (a zero resets internal state);
// channels occupy stream indices 1..n. Internal state is kept in static
// shift registers so the function is called once per input column.
//
// NOTE(review): several likely defects are flagged inline below — as
// written this function does not appear to produce correct pooled values.
template<class data_T, class res_T, typename CONFIG_T>
void pooling1d_cl(
    hls::stream<data_T> data[CONFIG_T::n_filt_in],
    hls::stream<res_T> res [CONFIG_T::n_filt_in]) {

    // Last input column index before a full pooling window is available.
    const static int lShiftX = CONFIG_T::pool_size-CONFIG_T::pad_left-1;
    // NOTE(review): rowsize is computed but never used.
    const static int rowsize = (CONFIG_T::n_in+CONFIG_T::pad_left+CONFIG_T::pad_right);

    // Per-channel shift registers holding the sliding pooling window.
    static ap_shift_reg<data_T, (CONFIG_T::pool_size)> layer_in_row[CONFIG_T::n_chan];
#pragma HLS ARRAY_RESHAPE variable=layer_in_row complete dim=2

    // NOTE(review): layer_in is read below but never written anywhere in
    // this function — the pooled windows can only contain stale/default
    // data. Presumably the window should be copied out of layer_in_row;
    // verify against the pooling2d_cl implementation.
    static data_T layer_in[CONFIG_T::pool_size*CONFIG_T::n_filt];
#pragma HLS ARRAY_RESHAPE variable=layer_in complete dim=0

    // Current input column position, persists across calls.
    static unsigned pX=0;

    static data_T tmpdata[CONFIG_T::n_chan];
#pragma HLS ARRAY_RESHAPE variable=tmpdata complete

    // Control token: zero requests a state reset for a new sequence.
    data_T iReset = data[0].read();
    for(int i0 = 0; i0 < CONFIG_T::n_chan; i0++) {
        data_T pTmp = data[i0+1].read();
        tmpdata[i0] = pTmp;
    }

    // Output control token value: 0 on the first output of a sequence.
    static res_T pReset = 0;

    if(iReset==0) {
        pX = 0;
        pReset = 0;

        // Pre-load left padding as zeros.
        for(int iX = 0; iX < CONFIG_T::pad_left; iX++) {
            for(int i0 = 0; i0 < CONFIG_T::n_chan_in; i0++) {
                data_T tmp = 0;
                layer_in_row[i0].shift(0,tmp);
            }
        }
    }

    // NOTE(review): this loop reads each channel stream a SECOND time in
    // the same call (tmpdata, filled above, is never used) — consuming two
    // columns of input per invocation. Presumably it should shift
    // tmpdata[i0] instead of reading again.
    for(int i0 = 0; i0 < CONFIG_T::n_chan; i0++) {
#pragma HLS UNROLL
        data_T tmp = data[i0+1].read();
        layer_in_row[i0].shift(0,tmp);
    }

    //Process signal
    unsigned pLoop = 1;
    // On the last real input column, also flush the right padding.
    if(pX == CONFIG_T::n_in-1) pLoop = CONFIG_T::pad_right+1;
    for(int i0 = 0; i0 < pLoop; i0++) {
        // NOTE(review): the inner loops below reuse the name i0, shadowing
        // this outer loop index — confusing, though not incorrect.
        if(i0 > 0) {
            // Shift in a zero column for right padding.
            for(int i0 = 0; i0 < CONFIG_T::n_chan_in; i0++) {
                data_T tmp = 0;
                layer_in_row[i0].shift(0,tmp);
            }
        }
        // Emit outputs when a full, stride-aligned window is present.
        if((pX+1) % CONFIG_T::stride == 0 && pX > lShiftX-1) {
            res_T pId = pReset;
            if(pReset == 0) pReset = 1;
            res[0].write(pId);
            for(unsigned i1 = 0; i1 < CONFIG_T::n_filt; i1++) {

#pragma HLS UNROLL
                data_T pool[CONFIG_T::pool_size];
#pragma HLS ARRAY_RESHAPE variable=pool complete dim=0

                for(unsigned i2 = 0; i2 < CONFIG_T::pool_size; i2++) {
#pragma HLS UNROLL
                    // NOTE(review): the index ignores i2 and multiplies by
                    // n_filt (i1*n_filt+i1), so every element of the window
                    // is the same value. Presumably this should be
                    // layer_in[i1*CONFIG_T::pool_size+i2] (or a read from
                    // layer_in_row) — confirm against pooling2d_cl.
                    pool[i2] = layer_in[i1*CONFIG_T::n_filt+i1];
                }
                res[i1+1].write(pool_op<data_T, CONFIG_T::pool_size, CONFIG_T::pool_op>(pool));
            }
        }
        pX = pX+1;
        // End of row: wrap and pre-load left padding for the next row.
        if(pX == CONFIG_T::n_in+CONFIG_T::pad_right) {
            pX = 0;
            for(int i1 = 0; i1 < CONFIG_T::pad_left; i1++) {
                for(int i0 = 0; i0 < CONFIG_T::n_chan_in; i0++) {
                    data_T tmp = 0;
                    layer_in_row[i0].shift(0,tmp);
                }
            }
        }
    }
}

struct pooling2d_config{
// IO size
static const unsigned in_height = 10;
Expand Down
22 changes: 18 additions & 4 deletions hls4ml/templates/vivado_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,12 +154,17 @@

# C++ config struct emitted for each Pooling1D layer. Note: 'stride' must
# appear exactly once — a duplicate member declaration would make the
# generated C++ fail to compile.
pooling1d_config_template = """struct config{index} : nnet::pooling1d_config {{
static const unsigned n_in = {n_in};
static const unsigned n_filt = {n_filt};
static const unsigned n_chan = {n_chan};
static const unsigned n_filt_in = {n_filt_in};
static const unsigned n_chan_in = {n_chan_in};
static const unsigned stride = {stride};
static const unsigned pool_size = {pool_size};
static const unsigned n_out = {n_out};
static const unsigned pad_left = {pad_left};
static const unsigned pad_right = {pad_right};
static const nnet::Pool_Op pool_op = nnet::{pool_op};
static const unsigned reuse = {reuse};
}};\n"""

pooling2d_config_template = """struct config{index} : nnet::pooling2d_config {{
Expand Down Expand Up @@ -234,7 +239,7 @@
# Function-call templates: one line of generated C++ per layer instance.
# (A stale duplicate assignment to pooling1d_function_template was removed;
# it was immediately overwritten and had the old, format-unaware signature.)
conv2dmerge_function_template = 'nnet::conv_2d_merge_{strategy}_{data_format}<{input_t}, {output_t}, {config}>({input}, {output}, {w}, {b});'
activ_function_template = 'nnet::{activation}{strategy}<{input_t}, {output_t}, {config}>({input}, {output});'
param_activ_function_template = 'nnet::{activation}{strategy}<{input_t}, {output_t}, {config}>({input}, {param}, {output});'
pooling1d_function_template = 'nnet::pooling1d_{data_format}{1x1}<{input_t}, {output_t}, {config}>({input}, {output});'
pooling2d_function_template = 'nnet::pooling2d_{data_format}{1x1}<{input_t}, {output_t}, {config}>({input}, {output});'
merge_function_template = 'nnet::{merge}{strategy}<{input1_t}, {input2_t}, {output_t}, {config}>({input1}, {input2}, {output});'
split_function_template = 'nnet::split{strategy}<{input_t}, {output_t}, {config}>({input}, {output1}, {output2});'
Expand Down Expand Up @@ -306,6 +311,15 @@
source ../common/build.tcl
\n"""

# TCL build snippet emitted for each Pooling1D layer; {1x1} selects the
# specialized single-output kernel variant, {data_format} is 'cl' or 'cf'.
pooling1d_tcl_template = """set arg_0 "-I . -DN_INPUT={n_chan_in} -DN_OUTPUT={n_filt_in}"
set arg_1 "-DCONFIG={config}"
set arg_2 "-DINPUT_T={input_t} -DLAYER_T={output_t}"
set args "$arg_0 $arg_1 $arg_2"
set layer_type pooling1d_{data_format}{1x1}
\n
source ../common/build.tcl
\n"""

pooling2d_tcl_template = """set arg_0 "-I . -DN_INPUT={n_chan_in} -DN_OUTPUT={n_filt_in}"
set arg_1 "-DCONFIG={config}"
set arg_2 "-DINPUT_T={input_t} -DLAYER_T={output_t}"
Expand Down Expand Up @@ -342,7 +356,7 @@
'Conv2D' : conv2d_tcl_template,
'UpSampling2D' : upsampling2d_tcl_template,
'Activation' : activ_tcl_template,
'Pooling1D' : pooling2d_tcl_template,
'Pooling1D' : pooling1d_tcl_template,
'Pooling2D' : pooling2d_tcl_template,
'Merge' : merge_tcl_template,
'Concatenate' : merge_tcl_template,
Expand All @@ -363,7 +377,7 @@ def __init__(self):
self.register_templates('Activation' , activ_function_template, activ_config_template,activ_tcl_template)
self.register_templates('ParametrizedActivation' , param_activ_function_template, activ_config_template,activ_tcl_template)
self.register_templates('PReLU' , param_activ_function_template, activ_config_template,activ_tcl_template)
self.register_templates('Pooling1D' , pooling1d_function_template, pooling1d_config_template,pooling2d_tcl_template)
self.register_templates('Pooling1D' , pooling1d_function_template, pooling1d_config_template,pooling1d_tcl_template)
self.register_templates('Pooling2D' , pooling2d_function_template, pooling2d_config_template,pooling2d_tcl_template)
self.register_templates('Merge' , merge_function_template, merge_config_template,merge_tcl_template)
self.register_templates('Concatenate' , merge_function_template, concat_config_template,merge_tcl_template)
Expand Down
5 changes: 4 additions & 1 deletion hls4ml/writer/vivado_writer.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,10 @@ def write_model_json(self, model):
onexone = '_1x1'
if(layer.get_attr('strategy') != None):
strategy = "_" + layer.get_attr("strategy")
kernel_name = "pooling2d" + strategy + data_format + onexone

# Kernel name depends on the dimension (1D/2D) encoded in the layer's class name
kernel_name = "pooling{}d".format(layer.__class__.__name__[-2]) + strategy + data_format + onexone

input_ports = [{"name":"input", "width": layer.get_input_variable().shape[0] +1}]

if not(tensor_map[layer.get_output_variable().name]['input'] == []):
Expand Down