
Commit 7f19b03

Add UnaryLUT support
Support UnaryLUT w/ vivado & io_parallel
hotfix format
support vivado_stream
fix syn issue
fix syn issue2
Revert unnecessary "fix syn issue1/2":
  Revert "fix syn issue2". This reverts commit af3c347.
  Revert "fix syn issue". This reverts commit 532cb9d.
rm redundant pipeline pragma
unary lut vitis fix
unary lut vitis fix - leftover
1 parent c738b7f commit 7f19b03
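
For context, the idea behind a unary LUT activation: an n-bit fixed-point input can take only 2**n distinct values, so any elementwise function can be precomputed into a 2**n-entry table and evaluated at inference time by reinterpreting the input's bit pattern as a table index. A minimal NumPy sketch of that idea (assumed semantics for illustration, not hls4ml code):

```python
import numpy as np

# Sketch of the unary-LUT idea, not the hls4ml API: tabulate f offline over
# every representable n-bit fixed-point input, then evaluate by indexing the
# table with the input word's raw bit pattern.
n_bits, frac_bits = 8, 4
xs = np.arange(2**n_bits).astype(np.int8) / (1 << frac_bits)  # all inputs, in bit-pattern order
table = np.tanh(xs)                                           # f evaluated once, offline

def unary_lut(x):
    raw = int(round(x * (1 << frac_bits))) & (2**n_bits - 1)  # raw two's-complement word
    return table[raw]                                         # one ROM lookup, no arithmetic
```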

File tree: 8 files changed, +108 −6 lines

hls4ml/backends/fpga/passes/hgq_proxy_model.py
Lines changed: 20 additions & 2 deletions

```diff
@@ -4,7 +4,7 @@
 from hls4ml.backends.template import FunctionCallTemplate
 from hls4ml.model.layers import Layer
 from hls4ml.model.optimizer import OptimizerPass
-from hls4ml.model.optimizer.passes.hgq_proxy_model import FixedPointQuantizer
+from hls4ml.model.optimizer.passes.hgq_proxy_model import FixedPointQuantizer, UnaryLUT
 from hls4ml.model.types import Source
 
 
@@ -40,7 +40,6 @@ def generate_mask_fn(
 template<typename input_t, typename output_t>
 void {name}(input_t *inp, output_t *out) {{
     #pragma HLS INLINE
-    #pragma HLS PIPELINE
 
 {body}
 }}
@@ -84,6 +83,25 @@ def format(self, node):
         return self.template.format(**params)
 
 
+class ProcessUnaryLUTCall(FunctionCallTemplate):
+    def __init__(self):
+        super().__init__(UnaryLUT, include_header=[])
+        self.template = 'nnet::unary_lut<{input_t}, {output_t}, {config}>({input}, {output}, {table});'
+        self.include_header = [
+            'nnet_utils/nnet_activation.h',
+            'nnet_utils/nnet_activation_stream.h',
+        ]
+
+    def format(self, node):
+        params = self._default_function_params(node)
+        node.attributes['result_t'].precision = node.attributes['table_t'].precision
+        params['config'] = f'unary_lut_config{node.index}'
+        params['table'] = node.get_weights('table').name
+
+        return self.template.format(**params)
+
+
 def register_hgq_proxy_model(backend: Backend):
     backend.register_pass('process_fixed_point_quantizer_layer', ProcessFixedPointQuantizerLayer)
     backend.register_template(ProcessFixedPointQuantizerCall)
+    backend.register_template(ProcessUnaryLUTCall)
```
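
For a given node, the call template renders to a single line of generated HLS code. A sketch of that rendering, with illustrative type and variable names and a hypothetical node index of 4 (none of these names are taken from the commit):

```python
template = 'nnet::unary_lut<{input_t}, {output_t}, {config}>({input}, {output}, {table});'
params = {
    'input_t': 'layer3_t',          # illustrative type names,
    'output_t': 'layer4_t',         # not taken from the commit
    'input': 'layer3_out',
    'output': 'layer4_out',
    'config': 'unary_lut_config4',  # f'unary_lut_config{node.index}'
    'table': 'table4',              # the 'table{index}' weight variable
}
print(template.format(**params))
# nnet::unary_lut<layer3_t, layer4_t, unary_lut_config4>(layer3_out, layer4_out, table4);
```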

hls4ml/backends/quartus/passes/core_templates.py
Lines changed: 2 additions & 1 deletion

```diff
@@ -1,6 +1,7 @@
 from hls4ml.backends.backend import get_backend
 from hls4ml.backends.template import FunctionCallTemplate, LayerConfigTemplate
 from hls4ml.model.layers import Activation, BatchNormalization, Dense, HardActivation, ParametrizedActivation, PReLU, Softmax
+from hls4ml.model.optimizer.passes.hgq_proxy_model import UnaryLUT
 
 # Dense templates
 
@@ -152,7 +153,7 @@ def format(self, node):
 
 class ActivationConfigTemplate(LayerConfigTemplate):
     def __init__(self):
-        super().__init__((Activation, ParametrizedActivation, PReLU))
+        super().__init__((Activation, ParametrizedActivation, PReLU, UnaryLUT))
         self.template = activ_config_template
 
     def format(self, node):
```

hls4ml/backends/vivado/passes/core_templates.py
Lines changed: 2 additions & 1 deletion

```diff
@@ -1,6 +1,7 @@
 from hls4ml.backends.backend import get_backend
 from hls4ml.backends.template import FunctionCallTemplate, LayerConfigTemplate
 from hls4ml.model.layers import Activation, BatchNormalization, Dense, HardActivation, ParametrizedActivation, PReLU, Softmax
+from hls4ml.model.optimizer.passes.hgq_proxy_model import UnaryLUT
 
 # Dense templates
 
@@ -144,7 +145,7 @@ def format(self, node):
 
 class ActivationConfigTemplate(LayerConfigTemplate):
     def __init__(self):
-        super().__init__((Activation, ParametrizedActivation, PReLU))
+        super().__init__((Activation, ParametrizedActivation, PReLU, UnaryLUT))
         self.template = activ_config_template
 
     def format(self, node):
```
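
In both backends, UnaryLUT simply joins the layer classes handled by the stock ActivationConfigTemplate, so it reuses the existing activ_config_template. A rough sketch of what that shared template produces for a UnaryLUT node (the field set and values below are approximate, not the exact template from the commit):

```python
# Approximate rendering of the shared activation config template for a
# UnaryLUT node; n_in, table_size, and table_t values are hypothetical.
activ_config_template = """struct {type}_config{index} : nnet::activ_config {{
    static const unsigned n_in = {n_in};
    static const unsigned table_size = {table_size};
    typedef {table_t} table_t;
}};"""

print(activ_config_template.format(type='unary_lut', index=4, n_in=16,
                                    table_size=256, table_t='ap_fixed<8,3>'))
# Yields 'unary_lut_config4', the name ProcessUnaryLUTCall plugs into the call.
```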

hls4ml/converters/keras/hgq_proxy_model.py
Lines changed: 18 additions & 2 deletions

```diff
@@ -1,8 +1,8 @@
-from hls4ml.converters.keras_to_hls import keras_handler, parse_default_keras_layer
+from hls4ml.converters.keras_to_hls import KerasReader, keras_handler, parse_default_keras_layer
 
 
 @keras_handler('FixedPointQuantizer')
-def fixedpoint_quantizer_handler(keras_layer, input_names, input_shapes, data_reader):
+def fixedpoint_quantizer_handler(keras_layer, input_names, input_shapes, data_reader: KerasReader):
     config = parse_default_keras_layer(keras_layer, input_names)
 
     name = config['name']
@@ -19,3 +19,19 @@ def fixedpoint_quantizer_handler(keras_layer, input_names, input_shapes, data_re
 
     layer = config
     return layer, input_shapes[0]
+
+
+@keras_handler('UnaryLUT')
+def unary_lut_keras_handler(keras_layer, input_names, input_shapes, data_reader: KerasReader):
+    config = parse_default_keras_layer(keras_layer, input_names)
+
+    table = data_reader.get_weights_data(config['name'], 'table')
+    k, i, f = keras_layer['config']['kif_out']
+    k, b, i = k, k + i + f, k + i
+    config['table_t'] = f'{"" if k else "u"}fixed<{b},{i}>'
+    config['table'] = table
+    config['table_size'] = len(table)
+    config['activation'] = 'unary_lut'
+
+    layer = config
+    return layer, input_shapes[0]
```
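
The kif_out triple appears to be HGQ's (keep_negative, integer_bits, fractional_bits) description of the table entries, which the handler folds into a fixed-point type string: total width b = k + i + f, integer width k + i (sign bit included), unsigned when k is 0. A worked example of that mapping:

```python
# Worked example of the kif_out -> table_t mapping in the handler above.
def table_type(kif_out):
    k, i, f = kif_out
    k, b, i = k, k + i + f, k + i   # sign flag, total width, integer width (incl. sign)
    return f'{"" if k else "u"}fixed<{b},{i}>'

print(table_type((1, 2, 5)))  # fixed<8,3>: signed, 8 bits total, 3 integer (incl. sign)
print(table_type((0, 0, 8)))  # ufixed<8,0>: unsigned, purely fractional
```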

hls4ml/converters/keras_to_hls.py
Lines changed: 1 addition & 0 deletions

```diff
@@ -205,6 +205,7 @@ def parse_keras_model(model_arch, reader):
         'Softmax',
         'TernaryTanh',
         'HardActivation',
+        'UnaryLUT',
     ]
     # Recurrent layers
     recurrent_layers = ['SimpleRNN', 'LSTM', 'GRU']
```

hls4ml/model/optimizer/passes/hgq_proxy_model.py
Lines changed: 15 additions & 0 deletions

```diff
@@ -23,6 +23,20 @@ def initialize(self):
         self.mask_kbi = self.attributes.get('mask_kbi', None)
 
 
+class UnaryLUT(Layer):
+    def initialize(self):
+        inp = self.get_input_variable()
+        shape = inp.shape
+        dims = inp.dim_names
+        self.add_output_variable(shape, dims)
+        self.set_attr('n_in', inp.size())
+        self.table = self.attributes['table']
+        self.table_size = self.attributes['table_size']
+
+        table_t = to_hls4ml_fixed(self.attributes['table_t'])
+        self.add_weights_variable(name='table', var_name='table{index}', precision=table_t, data=self.table)
+
+
 def to_hls4ml_fixed(fixed: str):
     matched = re_parse_fixed.match(re_purge_prefix.sub('', fixed))
     assert matched is not None, f'Cannot parse {fixed}'
@@ -125,4 +139,5 @@ def transform(self, model, node: FixedPointQuantizer):
 
 def register_hgq_proxy_model():
     register_layer('FixedPointQuantizer', FixedPointQuantizer)
+    register_layer('UnaryLUT', UnaryLUT)
     register_pass('enforce_proxy_model_embedded_config', EnforceProxyModelEmbeddedConfig)
```
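
to_hls4ml_fixed turns the table_t string produced by the Keras handler into an hls4ml precision type; the table is then stored as a weight variable named table{index}, which is exactly what node.get_weights('table').name resolves to in ProcessUnaryLUTCall. A simplified stand-in for what the regex pair does (the real function uses its own re_purge_prefix/re_parse_fixed patterns and returns an hls4ml precision object rather than a tuple):

```python
import re

# Simplified stand-in for to_hls4ml_fixed, for illustration only.
def parse_fixed(fixed: str):
    fixed = re.sub(r'^(ap_|ac_)', '', fixed)           # purge vendor prefix
    m = re.match(r'(u?)fixed<(\d+)\s*,\s*(-?\d+)', fixed)
    assert m is not None, f'Cannot parse {fixed}'
    signed = m.group(1) != 'u'
    width, integer = int(m.group(2)), int(m.group(3))
    return signed, width, integer

print(parse_fixed('ufixed<8,0>'))    # (False, 8, 0)
print(parse_fixed('ap_fixed<8,3>'))  # (True, 8, 3)
```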

hls4ml/templates/vivado/nnet_utils/nnet_activation.h
Lines changed: 22 additions & 0 deletions

```diff
@@ -441,6 +441,28 @@ template <class data_T, class res_T, typename CONFIG_T> void tanh(data_T data[CO
     }
 }
 
+// *************************************************
+// UnaryLUT Activation
+// *************************************************
+template <int table_size, class data_T> inline unsigned get_index_unary_lut(data_T x) {
+    // Slice the top N bits to get an index into the table
+    static constexpr int N = ceillog2(table_size);
+    return (unsigned)(x(x.width - 1, 0));
+}
+
+template <class data_T, class res_T, typename CONFIG_T>
+void unary_lut(data_T data[CONFIG_T::n_in], res_T res[CONFIG_T::n_in],
+               typename CONFIG_T::table_t table[CONFIG_T::table_size]) {
+    #pragma HLS function_instantiate variable=table
+    #pragma HLS ARRAY_PARTITION variable=table
+
+    for (int ii = 0; ii < CONFIG_T::n_in; ii++) {
+        #pragma HLS UNROLL
+        unsigned index = get_index_unary_lut<CONFIG_T::table_size>(data[ii]);
+        res[ii] = (res_T)table[index];
+    }
+}
+
 // *************************************************
 // Hard sigmoid Activation
 // *************************************************
```
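
Note that get_index_unary_lut computes N from table_size, yet the slice x(x.width - 1, 0) takes the whole word; this is consistent only if the proxy model has already quantized the input to exactly ceillog2(table_size) bits, so that "all bits" and "top N bits" coincide. A pure-Python model of the indexing under that assumption:

```python
from math import ceil, log2

# Python model of get_index_unary_lut. The C++ slice takes the full word, so
# correctness relies on the assumption that the input was quantized upstream
# to exactly ceillog2(table_size) bits.
def get_index_unary_lut(raw_bits: int, width: int, table_size: int) -> int:
    N = ceil(log2(table_size))
    assert width == N, 'assumed: input width == ceillog2(table_size)'
    return raw_bits & ((1 << width) - 1)

print(get_index_unary_lut(0b10110011, width=8, table_size=256))  # 179
```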

hls4ml/templates/vivado/nnet_utils/nnet_activation_stream.h
Lines changed: 28 additions & 0 deletions

```diff
@@ -412,6 +412,34 @@ template <class data_T, class res_T, typename CONFIG_T> void tanh(hls::stream<da
     }
 }
 
+// *************************************************
+// UnaryLUT Activation
+// *************************************************
+
+template <class data_T, class res_T, typename CONFIG_T>
+void unary_lut(hls::stream<data_T> &data, hls::stream<res_T> &res, typename CONFIG_T::table_t table[CONFIG_T::table_size]) {
+    #pragma HLS function_instantiate variable=table
+    #pragma HLS ARRAY_PARTITION variable=table complete
+
+UnaryLUTActLoop:
+    for (int i = 0; i < CONFIG_T::n_in / res_T::size; i++) {
+        #pragma HLS PIPELINE II=CONFIG_T::reuse_factor rewind
+
+        data_T in_data = data.read();
+        res_T out_data;
+        PRAGMA_DATA_PACK(out_data)
+
+    UnaryLUTPackLoop:
+        for (int j = 0; j < res_T::size; j++) {
+            #pragma HLS UNROLL
+            unsigned index = get_index_unary_lut<CONFIG_T::table_size>(in_data[j].V);
+            out_data[j] = table[index];
+        }
+
+        res.write(out_data);
+    }
+}
+
 // *************************************************
 // Hard sigmoid Activation
 // *************************************************
```
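
The streaming variant unpacks each data_T word into res_T::size lanes and indexes the table with in_data[j].V, the lane's raw two's-complement bits; this means the table must be laid out in bit-pattern order, with negative inputs landing in its upper half. A behavioural sketch of the loop, assuming n_in divides evenly by the pack width (as the C++ loop bound implies):

```python
# Behavioural sketch of the streaming loop above (assumed semantics, not HLS):
# one stream word per outer iteration, each lane's raw bit pattern directly
# indexing a table stored in bit-pattern (not numeric) order.
def unary_lut_stream(packs, table):
    for pack in packs:                        # one data.read() per iteration
        yield [table[raw] for raw in pack]    # UnaryLUTPackLoop, unrolled in HLS
```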
