src/drivers/inference_engine/mindspore (1 file changed: +13, -0)

@@ -362,6 +362,19 @@ void MindSporeInference::PrepareInputTensor(
   MBLOG_DEBUG << "input_buffer_list: " << portname << ", model port: " << name
               << ", size: " << input_buffer_list->Size()
               << ", bytes:" << input_buffer_list->GetBytes();
+  std::vector<size_t> b_shape;
+  if (!input_buffer_list->At(0)->Get("shape", b_shape) ||
+      input_shape.size() != b_shape.size()) {
+    MBLOG_ERROR << "get input shape failed, tensor shape size:"
+                << input_shape.size()
+                << ", buffer shape size: " << b_shape.size();
+    return;
+  }
+
+  for (size_t index = 0; index < b_shape.size(); ++index) {
+    input_shape[index] = b_shape[index];
+  }
+
   // input batch padding
   if (model_need_padding_) {
     padding_batch_size_ = input_shape[0] - input_buffer_list->Size();
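
For reference, the added block reads the shape recorded on the first input buffer's metadata, checks that its rank matches the model's declared input shape, and then copies it over dimension by dimension so the batch-padding computation that follows uses the actual incoming shape. The sketch below reproduces that validate-then-copy pattern in isolation; GetBufferShapeMeta, OverrideInputShape, and the sample shapes are hypothetical stand-ins for illustration, not ModelBox or MindSpore APIs.

// Minimal standalone sketch of the shape-override logic, assuming a mocked
// metadata lookup in place of input_buffer_list->At(0)->Get("shape", ...).
#include <cstdint>
#include <iostream>
#include <optional>
#include <vector>

// Hypothetical stand-in for reading the "shape" meta from the first buffer.
std::optional<std::vector<size_t>> GetBufferShapeMeta() {
  return std::vector<size_t>{4, 224, 224, 3};  // e.g. a dynamic NHWC batch
}

// Returns false when the buffer carries no shape or its rank differs from the
// model's declared input shape, mirroring the early return in the diff.
bool OverrideInputShape(std::vector<int64_t> &input_shape) {
  auto b_shape = GetBufferShapeMeta();
  if (!b_shape || input_shape.size() != b_shape->size()) {
    std::cerr << "get input shape failed, tensor shape size: "
              << input_shape.size() << "\n";
    return false;
  }
  for (size_t index = 0; index < b_shape->size(); ++index) {
    input_shape[index] = static_cast<int64_t>((*b_shape)[index]);
  }
  return true;
}

int main() {
  std::vector<int64_t> input_shape{1, 224, 224, 3};  // shape from the model
  if (OverrideInputShape(input_shape)) {
    for (auto dim : input_shape) std::cout << dim << " ";
    std::cout << "\n";  // prints: 4 224 224 3
  }
  return 0;
}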