
Commit 6b474c4

Merge remote-tracking branch 'upstream/3.4' into merge-3.4
2 parents: a9c954e + 0be18f5

23 files changed: +202 -82 lines

modules/core/include/opencv2/core/bindings_utils.hpp

Lines changed: 8 additions & 0 deletions

@@ -8,6 +8,8 @@
 #include <opencv2/core/async.hpp>
 #include <opencv2/core/detail/async_promise.hpp>
 
+#include <stdexcept>
+
 namespace cv { namespace utils {
 //! @addtogroup core_utils
 //! @{
@@ -113,6 +115,12 @@ String dumpRange(const Range& argument)
     }
 }
 
+CV_WRAP static inline
+void testRaiseGeneralException()
+{
+    throw std::runtime_error("exception text");
+}
+
 CV_WRAP static inline
 AsyncArray testAsyncArray(InputArray argument)
 {

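The new helper gives the binding generators a deterministic way to check that a C++ exception thrown inside the library surfaces as a catchable error in the host language instead of terminating the process. A minimal sketch of the expected behavior from the C++ side (my own example program, not part of the patch):

#include <opencv2/core/bindings_utils.hpp>
#include <iostream>

int main()
{
    try
    {
        cv::utils::testRaiseGeneralException();
    }
    catch (const std::exception& e)
    {
        std::cout << "caught: " << e.what() << std::endl;  // "caught: exception text"
    }
    return 0;
}
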
modules/dnn/include/opencv2/dnn/shape_utils.hpp

Lines changed: 19 additions & 7 deletions

@@ -205,21 +205,33 @@ static inline std::ostream& operator<<(std::ostream &out, const MatShape& shape)
     return out;
 }
 
-inline int clamp(int ax, int dims)
+/// @brief Converts axis from `[-dims; dims)` (similar to Python's slice notation) to `[0; dims)` range.
+static inline
+int normalize_axis(int axis, int dims)
 {
-    return ax < 0 ? ax + dims : ax;
+    CV_Check(axis, axis >= -dims && axis < dims, "");
+    axis = (axis < 0) ? (dims + axis) : axis;
+    CV_DbgCheck(axis, axis >= 0 && axis < dims, "");
+    return axis;
 }
 
-inline int clamp(int ax, const MatShape& shape)
+static inline
+int normalize_axis(int axis, const MatShape& shape)
 {
-    return clamp(ax, (int)shape.size());
+    return normalize_axis(axis, (int)shape.size());
 }
 
-inline Range clamp(const Range& r, int axisSize)
+static inline
+Range normalize_axis_range(const Range& r, int axisSize)
 {
-    Range clamped(std::max(r.start, 0),
+    if (r == Range::all())
+        return Range(0, axisSize);
+    CV_CheckGE(r.start, 0, "");
+    Range clamped(r.start,
                   r.end > 0 ? std::min(r.end, axisSize) : axisSize + r.end + 1);
-    CV_Assert_N(clamped.start < clamped.end, clamped.end <= axisSize);
+    CV_DbgCheckGE(clamped.start, 0, "");
+    CV_CheckLT(clamped.start, clamped.end, "");
+    CV_CheckLE(clamped.end, axisSize, "");
     return clamped;
 }
 

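The rename is the substantive change of this merge: `clamp` suggested saturation, but the helper actually converts Python-style negative axes, and the new version rejects out-of-range values instead of passing them through. A standalone sketch of the new contract, with a plain `assert` standing in for `CV_Check`:

#include <cassert>

// Mirrors the patched normalize_axis(): negative axes count from the end;
// anything outside [-dims, dims) now fails loudly.
static int normalize_axis(int axis, int dims)
{
    assert(axis >= -dims && axis < dims);  // the old clamp() had no such check
    return axis < 0 ? dims + axis : axis;
}

int main()
{
    assert(normalize_axis(-1, 4) == 3);  // last axis of a 4-D blob
    assert(normalize_axis( 1, 4) == 1);  // positive axes pass through
    // normalize_axis(4, 4) now aborts; the old clamp() silently returned 4.
    return 0;
}
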
modules/dnn/include/opencv2/dnn/version.hpp

Lines changed: 1 addition & 1 deletion

@@ -6,7 +6,7 @@
 #define OPENCV_DNN_VERSION_HPP
 
 /// Use with major OpenCV version only.
-#define OPENCV_DNN_API_VERSION 20201117
+#define OPENCV_DNN_API_VERSION 20210205
 
 #if !defined CV_DOXYGEN && !defined CV_STATIC_ANALYSIS && !defined CV_DNN_DONT_ADD_INLINE_NS
 #define CV__DNN_INLINE_NS __CV_CAT(dnn4_v, OPENCV_DNN_API_VERSION)

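Bumping `OPENCV_DNN_API_VERSION` changes the inline namespace that wraps every dnn symbol, so code compiled against the old ABI fails at link time rather than misbehaving at run time. A self-contained illustration of the mechanism (my own `CONCAT` macros and `stub` function; the real header uses `__CV_CAT`):

// Token-paste the version into an inline namespace name, as dnn's headers do.
#define CONCAT_(a, b) a##b
#define CONCAT(a, b) CONCAT_(a, b)
#define API_VERSION 20210205

namespace cv { namespace dnn {
inline namespace CONCAT(dnn4_v, API_VERSION)  // expands to dnn4_v20210205
{
    int stub() { return 0; }  // mangles as cv::dnn::dnn4_v20210205::stub()
}
}}  // namespace cv::dnn

int main()
{
    return cv::dnn::stub();  // callers never spell the versioned name
}
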
modules/dnn/src/dnn.cpp

Lines changed: 1 addition & 1 deletion

@@ -2972,7 +2972,7 @@ struct Net::Impl : public detail::NetImplBase
             // the concatenation optimization is applied with batch_size > 1.
             // so, for now, we only apply this optimization in the most popular
             // case batch_size == 1.
-            int axis = clamp(concatLayer->axis, output.dims);
+            int axis = normalize_axis(concatLayer->axis, output.dims);
             if( output.total(0, axis) == 1 )
             {
                 size_t i, ninputs = ld.inputBlobsId.size();

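For context, the guard under the renamed call enables the concat optimization only when every extent before the concat axis is 1, i.e. when batch_size == 1. A small sketch of that arithmetic with hypothetical shapes and a hand-rolled `total`:

#include <cassert>
#include <cstddef>
#include <vector>

// Product of the extents in [start, end), like cv::Mat::total(start, end).
static size_t total(const std::vector<int>& shape, int start, int end)
{
    size_t p = 1;
    for (int i = start; i < end; ++i)
        p *= (size_t)shape[i];
    return p;
}

int main()
{
    int axis = 1;  // normalized concat axis of an NCHW blob
    assert(total({1, 64, 32, 32}, 0, axis) == 1);  // batch 1: optimization runs
    assert(total({8, 64, 32, 32}, 0, axis) == 8);  // batch 8: skipped
    return 0;
}
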
modules/dnn/src/layers/concat_layer.cpp

Lines changed: 7 additions & 7 deletions

@@ -79,7 +79,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
     {
         CV_Assert(inputs.size() > 0);
         outputs.resize(1, inputs[0]);
-        int cAxis = clamp(axis, inputs[0]);
+        int cAxis = normalize_axis(axis, inputs[0]);
 
         int axisSum = 0;
         for (size_t i = 0; i < inputs.size(); i++)
@@ -201,7 +201,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
         inps.getUMatVector(inputs);
         outs.getUMatVector(outputs);
 
-        int cAxis = clamp(axis, inputs[0].dims);
+        int cAxis = normalize_axis(axis, inputs[0].dims);
         if (padding)
             return false;
 
@@ -255,7 +255,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
         inputs_arr.getMatVector(inputs);
         outputs_arr.getMatVector(outputs);
 
-        int cAxis = clamp(axis, inputs[0].dims);
+        int cAxis = normalize_axis(axis, inputs[0].dims);
         Mat& outMat = outputs[0];
 
         if (padding)
@@ -296,7 +296,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
         auto context = reinterpret_cast<csl::CSLContext*>(context_);
 
         auto input_wrapper = inputs[0].dynamicCast<CUDABackendWrapper>();
-        auto concat_axis = clamp(axis, input_wrapper->getRank());
+        auto concat_axis = normalize_axis(axis, input_wrapper->getRank());
         return make_cuda_node<cuda4dnn::ConcatOp>(preferableTarget, std::move(context->stream), concat_axis, padding);
     }
 #endif
@@ -305,7 +305,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
     {
 #ifdef HAVE_VULKAN
         vkcom::Tensor in = VkComTensor(input[0]);
-        int cAxis = clamp(axis, in.dimNum());
+        int cAxis = normalize_axis(axis, in.dimNum());
         std::shared_ptr<vkcom::OpBase> op(new vkcom::OpConcat(cAxis));
         return Ptr<BackendNode>(new VkComBackendNode(input, op));
 #endif // HAVE_VULKAN
@@ -341,7 +341,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
         InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
 
         InferenceEngine::Builder::ConcatLayer ieLayer(name);
-        ieLayer.setAxis(clamp(axis, input->getDims().size()));
+        ieLayer.setAxis(normalize_axis(axis, input->getDims().size()));
         ieLayer.setInputPorts(std::vector<InferenceEngine::Port>(inputs.size()));
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
     }
@@ -354,7 +354,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
     {
         InferenceEngine::DataPtr data = ngraphDataNode(inputs[0]);
         const int numDims = data->getDims().size();
-        const int cAxis = clamp(axis, numDims);
+        const int cAxis = normalize_axis(axis, numDims);
         std::vector<size_t> maxDims(numDims, 0);
 
         CV_Assert(inputs.size() == nodes.size());

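All seven hunks are the same mechanical rename across backends; the first one is where output shapes are computed. Its rule, sketched with hypothetical inputs: the output copies `inputs[0]`, and the normalized axis receives the sum of every input's extent there.

#include <cassert>
#include <vector>

int main()
{
    // Two NCHW inputs that agree everywhere except the channel axis.
    std::vector<std::vector<int>> inputs = {{1, 16, 8, 8}, {1, 48, 8, 8}};
    int cAxis = 1;  // normalize_axis(1, 4) and normalize_axis(-3, 4) both yield 1

    std::vector<int> out = inputs[0];
    int axisSum = 0;
    for (const std::vector<int>& shape : inputs)
        axisSum += shape[cAxis];
    out[cAxis] = axisSum;

    assert((out == std::vector<int>{1, 64, 8, 8}));
    return 0;
}
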
modules/dnn/src/layers/flatten_layer.cpp

Lines changed: 6 additions & 6 deletions

@@ -89,8 +89,8 @@ class FlattenLayerImpl CV_FINAL : public FlattenLayer
         }
 
         int numAxes = inputs[0].size();
-        int startAxis = clamp(_startAxis, numAxes);
-        int endAxis = clamp(_endAxis, numAxes);
+        int startAxis = normalize_axis(_startAxis, numAxes);
+        int endAxis = normalize_axis(_endAxis, numAxes);
 
         CV_Assert(startAxis >= 0);
         CV_Assert(endAxis >= startAxis && endAxis < (int)numAxes);
@@ -120,8 +120,8 @@ class FlattenLayerImpl CV_FINAL : public FlattenLayer
         inputs_arr.getMatVector(inputs);
 
         int numAxes = inputs[0].dims;
-        _startAxis = clamp(_startAxis, numAxes);
-        _endAxis = clamp(_endAxis, numAxes);
+        _startAxis = normalize_axis(_startAxis, numAxes);
+        _endAxis = normalize_axis(_endAxis, numAxes);
     }
 
 #ifdef HAVE_OPENCL
@@ -195,8 +195,8 @@ virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inp
         std::vector<size_t> dims = ieInpNode->get_shape();
 
         int numAxes = dims.size();
-        int startAxis = clamp(_startAxis, numAxes);
-        int endAxis = clamp(_endAxis, numAxes);
+        int startAxis = normalize_axis(_startAxis, numAxes);
+        int endAxis = normalize_axis(_endAxis, numAxes);
 
         CV_Assert(startAxis >= 0);
         CV_Assert(endAxis >= startAxis && endAxis < numAxes);

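`shape_utils.hpp` is a public header, so the flatten arithmetic can be checked against the renamed helper directly (assuming an OpenCV build that already contains this merge):

#include <opencv2/dnn/shape_utils.hpp>
#include <cassert>

int main()
{
    using namespace cv::dnn;
    const int numAxes = 4;  // N,C,H,W

    // Typical flatten configuration: keep the batch axis, merge the rest.
    int startAxis = normalize_axis(1, numAxes);
    int endAxis = normalize_axis(-1, numAxes);

    assert(startAxis == 1 && endAxis == 3);  // C,H,W collapse into one axis
    return 0;
}
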
modules/dnn/src/layers/fully_connected_layer.cpp

Lines changed: 4 additions & 4 deletions

@@ -132,7 +132,7 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
             CV_CheckEQ(blobs[0].dims, 2, "");
             numOutput = blobs[0].size[0];
             CV_Assert(!bias || (size_t)numOutput == blobs[1].total());
-            cAxis = clamp(axis, inputs[0]);
+            cAxis = normalize_axis(axis, inputs[0]);
         }
 
         MatShape outShape(cAxis + 1);
@@ -356,7 +356,7 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
             return true;
         }
 
-        int axisCan = clamp(axis, inputs[0].dims);
+        int axisCan = normalize_axis(axis, inputs[0].dims);
         int numOutput = blobs[0].size[0];
         int innerSize = blobs[0].size[1];
         int outerSize = total(shape(inputs[0]), 0, axisCan);
@@ -477,7 +477,7 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
 
         if (!blobs.empty())
        {
-            int axisCan = clamp(axis, input[0].dims);
+            int axisCan = normalize_axis(axis, input[0].dims);
             int outerSize = input[0].total(0, axisCan);
 
             for (size_t i = 0; i < input.size(); i++)
@@ -525,7 +525,7 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
 
         auto input_wrapper = inputs[0].dynamicCast<CUDABackendWrapper>();
 
-        auto flatten_start_axis = clamp(axis, input_wrapper->getRank());
+        auto flatten_start_axis = normalize_axis(axis, input_wrapper->getRank());
 
         auto biasMat_ = bias ? biasMat : Mat();
         return make_cuda_node<cuda4dnn::InnerProductOp>(preferableTarget, std::move(context->stream), std::move(context->cublas_handle), flatten_start_axis, weightsMat, biasMat_);

modules/dnn/src/layers/normalize_bbox_layer.cpp

Lines changed: 6 additions & 6 deletions

@@ -126,8 +126,8 @@ class NormalizeBBoxLayerImpl CV_FINAL : public NormalizeBBoxLayer
 
         const UMat& inp0 = inputs[0];
         UMat& buffer = internals[0];
-        startAxis = clamp(startAxis, inp0.dims);
-        endAxis = clamp(endAxis, inp0.dims);
+        startAxis = normalize_axis(startAxis, inp0.dims);
+        endAxis = normalize_axis(endAxis, inp0.dims);
 
         size_t num = total(shape(inp0.size), 0, startAxis);
         size_t numPlanes = total(shape(inp0.size), startAxis, endAxis + 1);
@@ -211,8 +211,8 @@ class NormalizeBBoxLayerImpl CV_FINAL : public NormalizeBBoxLayer
 
         const Mat& inp0 = inputs[0];
         Mat& buffer = internals[0];
-        startAxis = clamp(startAxis, inp0.dims);
-        endAxis = clamp(endAxis, inp0.dims);
+        startAxis = normalize_axis(startAxis, inp0.dims);
+        endAxis = normalize_axis(endAxis, inp0.dims);
 
         const float* inpData = inp0.ptr<float>();
         float* outData = outputs[0].ptr<float>();
@@ -378,8 +378,8 @@ class NormalizeBBoxLayerImpl CV_FINAL : public NormalizeBBoxLayer
 
         NormalizeConfiguration<float> config;
         config.input_shape.assign(std::begin(input_shape), std::end(input_shape));
-        config.axis_start = clamp(startAxis, input_shape.size());
-        config.axis_end = clamp(endAxis, input_shape.size()) + 1; /* +1 because NormalizeOp follows [start, end) convention */
+        config.axis_start = normalize_axis(startAxis, input_shape.size());
+        config.axis_end = normalize_axis(endAxis, input_shape.size()) + 1; /* +1 because NormalizeOp follows [start, end) convention */
         config.norm = pnorm;
         config.eps = epsilon;
 

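One subtlety the rename preserves: the layer stores `endAxis` inclusively, while the CUDA `NormalizeOp` takes a half-open `[start, end)` range, hence the `+ 1` after normalization. In numbers, for a hypothetical 4-D input:

#include <cassert>

int main()
{
    const int dims = 4;
    int endAxis = -1;                                         // inclusive, counts from the end
    int normalized = endAxis < 0 ? dims + endAxis : endAxis;  // 3, as normalize_axis() returns
    int axis_end = normalized + 1;                            // 4: exclusive bound for [start, end)
    assert(axis_end == dims);
    return 0;
}
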
modules/dnn/src/layers/reshape_layer.cpp

Lines changed: 1 addition & 8 deletions

@@ -66,14 +66,7 @@ static void computeShapeByReshapeMask(const MatShape &srcShape,
     int srcShapeSize = (int)srcShape.size();
     int maskShapeSize = (int)maskShape.size();
 
-    if (srcRange == Range::all())
-        srcRange = Range(0, srcShapeSize);
-    else
-    {
-        int sz = srcRange.size();
-        srcRange.start = clamp(srcRange.start, srcShapeSize);
-        srcRange.end = srcRange.end == INT_MAX ? srcShapeSize : srcRange.start + sz;
-    }
+    srcRange = normalize_axis_range(srcRange, srcShapeSize);
 
     bool explicitMask = !maskShape.empty(); // All mask values are positive.
     for (int i = 0, n = maskShape.size(); i < n && explicitMask; ++i)

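The eight deleted lines were a hand-rolled copy of what `normalize_axis_range` now does in one place. A standalone sketch of that contract, with a minimal `Range` and plain asserts in place of `CV_Check*`:

#include <algorithm>
#include <cassert>
#include <climits>

struct Range
{
    int start, end;
    static Range all() { return {INT_MIN, INT_MAX}; }
    bool operator==(const Range& o) const { return start == o.start && end == o.end; }
};

// Mirrors the patched normalize_axis_range().
static Range normalize_axis_range(const Range& r, int axisSize)
{
    if (r == Range::all())
        return {0, axisSize};
    assert(r.start >= 0);
    Range clamped{r.start, r.end > 0 ? std::min(r.end, axisSize)
                                     : axisSize + r.end + 1};
    assert(clamped.start < clamped.end && clamped.end <= axisSize);
    return clamped;
}

int main()
{
    assert((normalize_axis_range(Range::all(), 4) == Range{0, 4}));  // whole range
    assert((normalize_axis_range({1, INT_MAX}, 4) == Range{1, 4}));  // open end clamps to size
    assert((normalize_axis_range({0, -1}, 4) == Range{0, 4}));       // -1 means "through the last axis"
    return 0;
}
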
modules/dnn/src/layers/scale_layer.cpp

Lines changed: 1 addition & 1 deletion

@@ -305,7 +305,7 @@ class ScaleLayerImpl CV_FINAL : public ScaleLayer
             numChannels = blobs[0].total();
 
         std::vector<size_t> shape(ieInpNode0->get_shape().size(), 1);
-        int cAxis = clamp(axis, shape.size());
+        int cAxis = normalize_axis(axis, shape.size());
         shape[cAxis] = numChannels;
 
         auto node = ieInpNode0;
