1 parent d247057 commit ed3eaa4
hls4ml/converters/pytorch_to_hls.py
@@ -17,13 +17,11 @@ def __init__(self, config):
     def get_weights_data(self, layer_name, var_name):
         data = None
 
-
         # Workaround for naming scheme in nn.Sequential,
         # have to remove the prefix we previously had to add to make sure the tensors are found
         if 'layer_' in layer_name:
             layer_name = layer_name.split('layer_')[-1]
 
-
         # if a layer is reused in the model, torch.FX will append a "_n" for the n-th use
         # have to snap that off to find the tensors
         if layer_name.split('_')[-1].isdigit() and len(layer_name.split('_')) > 1:
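
The hunk is truncated at the second condition, so the body that strips the suffix is not shown. As a rough, standalone sketch of the name normalization the comments describe, the logic might look like the helper below; the function name normalize_layer_name, the suffix-stripping step after the truncated if, and the example name are illustrative assumptions, not the actual hls4ml implementation.

    # Illustrative sketch only: mirrors the normalization described in the diff comments.
    # The handling after the truncated `if` is an assumption.
    def normalize_layer_name(layer_name):
        # nn.Sequential workaround: drop the 'layer_' prefix that was added earlier,
        # so the tensors can be found under their original names
        if 'layer_' in layer_name:
            layer_name = layer_name.split('layer_')[-1]

        # torch.FX appends '_n' for the n-th reuse of a module; strip the numeric
        # suffix so the lookup uses the base layer name (assumed behaviour)
        parts = layer_name.split('_')
        if parts[-1].isdigit() and len(parts) > 1:
            layer_name = '_'.join(parts[:-1])

        return layer_name

    # Example: a reused submodule of an nn.Sequential model might appear as 'layer_0_1',
    # which this sketch normalizes to '0' before the weight tensors are looked up.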