@@ -40,11 +40,7 @@ def load_input_constants(builder, node, graph, err):
def _add_conv_like_op(add_func, get_params_func, params_dict,
                      builder, node, graph, err):

-    if node.inputs[0] not in graph.shape_dict:
-        err.unsupported_op_configuration(builder, node, graph, "Input shape not available")
-
-    rank = len(graph.shape_dict[node.inputs[0]])
-
+    rank = builder._get_rank(node.inputs[0])
    if rank == 4:
        get_params_func(builder, node, graph, err, params_dict)
        add_func(node.inputs, node.outputs, params_dict=params_dict, builder=builder, node=node, graph=graph, err=err)
@@ -330,10 +326,7 @@ def _convert_bn(builder, node, graph, err):
    var = node.input_tensors[node.inputs[4]] if node.inputs[4] in node.input_tensors else \
            np.ones(shape=channels, dtype=np.float32)

-    if node.inputs[0] not in graph.shape_dict:
-        return err.unsupported_op_configuration(builder, node, graph, "Shape of input unknown")
-
-    rank = len(graph.shape_dict[node.inputs[0]])
+    rank = builder._get_rank(node.inputs[0])
    # ONNX converts B x C tensor into B x C x 1 hence
    # Rank 2 BN is mapped to Rank 3 BN
    if rank == 3:
@@ -784,10 +777,7 @@ def _convert_instancenorm(builder, node, graph, err): # type: (NeuralNetworkBui
    scale = node.input_tensors[node.inputs[1]]
    bias = node.input_tensors[node.inputs[2]]

-    if node.inputs[0] not in graph.shape_dict:
-        return err.unsupported_op_configuration(builder, node, graph, "Shape of input unknown")
-
-    rank = len(graph.shape_dict[node.inputs[0]])
+    rank = builder._get_rank(node.inputs[0])
    # ONNX converts B x C tensor into B x C x 1 hence
    # Rank 2 BN is mapped to Rank 3 BN
    if rank == 3:
@@ -875,9 +865,9 @@ def expand_dim(node_name, input_name, output_name, axes):
    if len(activations_list) == 6:
        err.unsupported_feature_warning(node, "Forward and backward pass will use same activations.")

-    inner_activation = activations_list[0]
-    cell_state_update_activation = activations_list[1]
-    output_activation = activations_list[2]
+    inner_activation = activations_list[0].upper()
+    cell_state_update_activation = activations_list[1].upper()
+    output_activation = activations_list[2].upper()

    # Provide max Clip Value if not provided
    clip_threshold = node.attrs.get('clip', 500000.0)
@@ -924,12 +914,11 @@ def expand_dim(node_name, input_name, output_name, axes):
    output_c_5d = output_c + '_5d'

    # if input is not present in the network, load they as constant
-    if node.inputs[0] not in graph.shape_dict:
-        err.unsupported_op_configuration(builder, node, graph, "Input shape not represented within Graph")
+    load_input_constants(builder, node, graph, err)

    # Input is represented as [Seq Len, Batch Size, Input Size]
-    batch_size = graph.shape_dict[node.inputs[0]][1]
    if len(node.inputs) < 6:
+        batch_size = graph.shape_dict[node.inputs[0]][1]
        builder.add_load_constant_nd(
            name=node.name + '_load_initial_h_and_c',
            output_name=input_h,
@@ -944,8 +933,11 @@ def expand_dim(node_name, input_name, output_name, axes):

    # CoreML LSTM expects 5-d tensor
    # Expand dimensions of input to 5-d for compatibility
-    if len(graph.shape_dict[node.inputs[0]]) < 5:
-        total_dims = len(graph.shape_dict[node.inputs[0]])
+    rank = builder._get_rank(node.inputs[0])
+    if rank == -1:
+        return err.unsupported_op_configuration(builder, node, graph, "Rank unknown for input")
+    if rank < 5:
+        total_dims = rank
        add_nodes = 5 - total_dims

        expand_dim(node.name + '_expand_in_0', node.inputs[0], node.inputs[0]+'_expand_out_0', [total_dims])
@@ -1468,7 +1460,7 @@ def _convert_reshape(builder, node, graph, err):
        )
        return

-    len_of_input_shape = len(graph.shape_dict[node.inputs[0]])
+    len_of_input_shape = builder._get_rank(node.inputs[0])
    if len(output_shape) == len_of_input_shape:
        builder.add_rank_preserving_reshape(
            name=node.name,
@@ -1584,7 +1576,9 @@ def _convert_reverse_sequence(builder, node, graph, err):

    if add_transpose:
        output_name_post = '_before_reverse'
-        rank = len(graph.shape_dict[node.inputs[0]])
+        rank = builder._get_rank(node.inputs[0])
+        if rank == -1:
+            return err.unsupported_op_configuration(builder, node, graph, "Rank unknown for input")
        axes = list(range(rank))
        axes[batch_axis], axes[time_axis] = axes[time_axis], axes[batch_axis]
        builder.add_transpose(
@@ -1682,8 +1676,8 @@ def _convert_slice(builder, node, graph, err):
    convert to CoreML Slice Static Layer:
    https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L5082
    '''
-    if graph.onnx_ir_version < 5:
-        return _convert_slice_ir4v9(builder, node, graph, err)
+    if len(node.inputs) == 1:
+        return _convert_slice_ir4v9(builder, node, graph, err)

    data_shape = graph.shape_dict[node.inputs[0]]
    len_of_data = len(data_shape)
@@ -1743,20 +1737,78 @@ def _convert_softmax(builder, node, graph, err):
    convert to CoreML SoftMax ND Layer:
    https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#3547
    '''
+    def add_softmax(output_name, rank=-1, axis=-3):
+        softmax_axis = 3
+        axes = list(range(5 - rank))
+        if axis < 0:
+            axis = rank + axis
+        axis += len(axes)
+        input_name = node.inputs[0]
+        softmax_output_name = output_name + '_expanded'
+
+        builder.add_expand_dims(
+            name=node.name + '_expand_dims',
+            input_name=node.inputs[0],
+            output_name=node.inputs[0]+'_expanded',
+            axes=axes
+        )
+        input_name += '_expanded'
+        rank = 5
+
+        if axis != -3 and axis != rank - softmax_axis:
+            transpose_axes = list(range(rank))
+            transpose_axes[-3], transpose_axes[axis] = transpose_axes[axis], transpose_axes[-3]
+
+            print(transpose_axes)
+            builder.add_transpose(
+                name=node.name + '_transpose',
+                axes=transpose_axes,
+                input_name=input_name,
+                output_name=input_name + '_transposed'
+            )
+            input_name += '_transposed'
+            softmax_output_name += '_transposed'
+
+        builder.add_softmax(
+            name=node.name,
+            input_name=input_name,
+            output_name=softmax_output_name
+        )
+
+        if axis != -3 and axis != rank - softmax_axis:
+            transpose_axes = list(range(rank))
+            transpose_axes[-3], transpose_axes[axis] = transpose_axes[axis], transpose_axes[-3]
+
+            builder.add_transpose(
+                name=node.name + '_transpose_back',
+                axes=transpose_axes,
+                input_name=softmax_output_name,
+                output_name=softmax_output_name + '_transposed_back'
+            )
+            softmax_output_name += '_transposed_back'
+
+        builder.add_squeeze(
+            name=node.name + '_squeeze_dims',
+            input_name=softmax_output_name,
+            output_name=output_name,
+            axes=axes
+        )
+
    axis = node.attrs.get('axis', 1)
-    builder.add_softmax_nd(
-        name=node.name,
-        input_name=node.inputs[0],
-        output_name=node.outputs[0] + ('_softmax' if node.op_type == 'LogSoftmax' else ''),
-        axis=axis
-    )
+    rank = builder._get_rank(node.inputs[0])
+    if rank == -1:
+        return err.unsupported_op_configuration(builder, node, graph, "Rank unknown for input")
+
    if node.op_type == 'LogSoftmax':
+        add_softmax(node.outputs[0] + '_softmax', rank=rank, axis=axis)
        builder.add_unary(
            name=node.name + '_log',
-            input_name=node.outputs[0]+'_softmax',
+            input_name=node.outputs[0] + '_softmax',
            output_name=node.outputs[0],
            mode='log'
        )
+    else:
+        add_softmax(node.outputs[0], rank=rank, axis=axis)

def _convert_split(builder, node, graph, err):
    '''
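The new add_softmax helper in the hunk above pads the input to rank 5, moves the requested axis to position -3 (the position the code treats as the fixed reduction axis of the plain add_softmax layer), applies softmax there, undoes the transpose, and squeezes the padded dimensions away. A minimal NumPy sketch of that expand/transpose/softmax/transpose-back round trip, independent of coremltools and purely illustrative:

import numpy as np

def softmax(x, axis):
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

rng = np.random.default_rng(0)
x = rng.standard_normal((2, 3, 4))   # rank-3 input, softmax requested over axis 1
axis = 1

# Pad to rank 5 by prepending dims (like add_expand_dims with axes=[0, 1]),
# then swap the requested axis into position -3.
expanded = x.reshape((1, 1) + x.shape)
perm = list(range(5))
target = axis + 2                    # account for the two prepended axes
perm[-3], perm[target] = perm[target], perm[-3]
moved = expanded.transpose(perm)

# Softmax at the fixed position, swap back (the swap is its own inverse),
# and drop the padded dims again.
out = softmax(moved, axis=-3).transpose(perm).reshape(x.shape)

assert np.allclose(out, softmax(x, axis=axis))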
@@ -1844,7 +1896,9 @@ def _convert_transpose(builder, node, graph, err):
    axes = node.attrs.get('perm', [])
    # If 'perm' not provided, the reverse the dimensions
    if axes == []:
-        rank = len(graph.shape_dict[node.inputs[0]])
+        rank = builder._get_rank(node.inputs[0])
+        if rank == -1:
+            return err.unsupported_op_configuration(builder, node, graph, "Rank unknown for input")
        axes = list(range(-1, -(rank + 1), -1))

    builder.add_transpose(
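The hunks above all share one refactor: direct graph.shape_dict lookups are replaced by builder._get_rank(...), which, as the added rank == -1 checks imply, reports -1 when the input's rank is unknown, so a converter only fails when the rank genuinely cannot be determined. A self-contained sketch of that contract with stand-in builder/graph/err objects; the stub classes below are hypothetical and only mimic the calls used in this diff, not the real coremltools/onnx-coreml implementations:

class StubGraph:
    def __init__(self, shape_dict):
        self.shape_dict = shape_dict          # tensor name -> shape tuple (entries may be missing)

class StubBuilder:
    def __init__(self, graph):
        self._graph = graph

    def _get_rank(self, name):
        # Assumed contract relied on above: the rank if known, otherwise -1.
        shape = self._graph.shape_dict.get(name)
        return len(shape) if shape is not None else -1

class StubErr:
    def unsupported_op_configuration(self, builder, node, graph, msg):
        raise ValueError(msg)

def rank_or_fail(builder, input_name, node, graph, err):
    # The pattern this commit repeats: ask the builder for the rank and only
    # report an unsupported configuration when the rank is truly unknown.
    rank = builder._get_rank(input_name)
    if rank == -1:
        return err.unsupported_op_configuration(builder, node, graph, "Rank unknown for input")
    return rank

graph = StubGraph({"x": (1, 3, 224, 224)})
print(rank_or_fail(StubBuilder(graph), "x", None, graph, StubErr()))  # -> 4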