 from coremltools.proto import NeuralNetwork_pb2  # type: ignore
 from ._error_utils import ErrorHandling
 
-_SEQUENCE_LAYERS_REGISTRY = set(["LSTM"])
-
 def _compare(a, b, encoding="utf8"):  # type: (Text, Text, Text) -> bool
     if isinstance(a, bytes):
         a = a.decode(encoding)
@@ -348,9 +346,6 @@ def _convert_add(builder, node, graph, err): # type: (NeuralNetworkBuilder, Nod
                              shape_bias=[second_input.shape[0]])
             return
 
-    if 'broadcast' in node.attrs:
-        if node.attrs['broadcast'] == 1:
-            return err.unsupported_op_configuration(builder, node, graph, "Broadcast Add is not supported now")
     builder.add_elementwise(
         name=node.name,
         input_names=node.inputs,
@@ -359,10 +354,6 @@ def _convert_add(builder, node, graph, err): # type: (NeuralNetworkBuilder, Nod
     )
 
 def _convert_mul(builder, node, graph, err):  # type: (NeuralNetworkBuilder, Node, Graph, ErrorHandling) -> None
-    if 'broadcast' in node.attrs:
-        if node.attrs['broadcast'] == 1:
-            return err.unsupported_op_configuration(builder, node, graph, "Broadcast Multiply is not supported now")
-
     builder.add_elementwise(
         name=node.name,
         input_names=node.inputs,
@@ -371,10 +362,6 @@ def _convert_mul(builder, node, graph, err): # type: (NeuralNetworkBuilder, Nod
     )
 
 def _convert_div(builder, node, graph, err):  # type: (NeuralNetworkBuilder, Node, Graph, ErrorHandling) -> None
-    if 'broadcast' in node.attrs:
-        if node.attrs['broadcast'] == 1:
-            return err.unsupported_op_configuration(builder, node, graph, "Broadcast Div is not supported now")
-
     builder.add_unary(name=node.name + '_inverse',  # type: ignore
                       input_name=node.inputs[1],
                       output_name=node.inputs[1] + '_inverse',
@@ -985,6 +972,34 @@ def _convert_custom(builder, node, graph, err): # type: (NeuralNetworkBuilder, N
 
     err.custom_layer_nodes.append(node)
 
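+# Squeeze/Unsqueeze only change tensor rank, so they are converted to a
+# pass-through LINEAR activation (scale 1.0, bias 0.0), i.e. an identity layer.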
+def _convert_identity(builder, node, graph, err):  # type: (NeuralNetworkBuilder, Node, Graph, ErrorHandling) -> None
+    builder.add_activation(
+        name=node.name,
+        non_linearity='LINEAR',
+        input_name=node.inputs[0],
+        output_name=node.outputs[0],
+        params=[1.0, 0.0]
+    )
+
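+# Emit a load_constant layer for each constant tensor feeding this node, mapping
+# its numpy shape onto a rank-3 CoreML (C, H, W) shape. Constants already emitted
+# are tracked in graph.constant_layers_added so each one is only added once.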
+def _convert_const(builder, node, graph, err):  # type: (NeuralNetworkBuilder, Node, Graph, ErrorHandling) -> None
+
+    for name, value in node.input_tensors.items():
+        if name not in graph.constant_layers_added:
+            shape = value.shape
+            coreml_shape = [1, 1, 1]
+            if len(shape) == 3:
+                coreml_shape = list(shape)
+            elif len(shape) == 1:
+                coreml_shape = [shape[0], 1, 1]
+            elif len(shape) == 2:
+                coreml_shape = [1, shape[0], shape[1]]
+            else:
+                return err.unsupported_op_configuration(builder, node, graph, "unable to translate constant array shape to CoreML shape")
+            builder.add_load_constant(name=name,
+                                      output_name=name,
+                                      constant_value=value.flatten(),
+                                      shape=coreml_shape)
+            graph.constant_layers_added[name] = True
 
 
 _ONNX_NODE_REGISTRY = {
@@ -1050,8 +1065,13 @@ def _convert_custom(builder, node, graph, err): # type: (NeuralNetworkBuilder, N
     "ArgMin": _convert_reduce,
     "Clip": _convert_clip,
     "MeanVarianceNormalization": _convert_mvn,
+    "Unsqueeze": _convert_identity,
+    "Squeeze": _convert_identity
 }
 
+_SEQUENCE_LAYERS_REGISTRY = set(["LSTM"])
+
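+# Ops that are allowed to consume graph initializers (constant tensors) directly;
+# their constant inputs are materialized as load_constant layers via _convert_const.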
+_CONST_INPUT_ALLOWED_LAYERS = set(["Add", "Sum", "Mul", "Concat", "Max", "Min", "Div", "Reciprocal"])
 
 def _get_node_converter_fn(builder, node, err):  # type: (NeuralNetworkBuilder, Node, ErrorHandling) -> Callable[[NeuralNetworkBuilder, Node, Graph, ErrorHandling], None]
     """
@@ -1063,6 +1083,12 @@ def _get_node_converter_fn(builder, node, err): # type: (NeuralNetworkBuilder,
     else:
         return err.unsupported_op(node)
 
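+# For op types in _CONST_INPUT_ALLOWED_LAYERS, turn any constant (initializer)
+# inputs of the node into load_constant layers via _convert_const.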
+def _add_const_inputs_if_required(builder, node, graph, err):  # type: (NeuralNetworkBuilder, Node, Graph, ErrorHandling) -> None
+    if node.op_type in _CONST_INPUT_ALLOWED_LAYERS:
+        if len(node.input_tensors) > 0:
+            _convert_const(builder, node, graph, err)
+
+
 def _convert_node(builder, node, graph, err):  # type: (NeuralNetworkBuilder, Node, Graph, ErrorHandling) -> None
     converter_fn = _get_node_converter_fn(builder, node, err)
     return converter_fn(builder, node, graph, err)