Commit 4f08bb5

Merge pull request opencv#19428 from alalek:dnn_drop_misbehaved_clamp

2 parents 09d2ca1 + 83aa711

11 files changed: +57 −52 lines changed

modules/dnn/include/opencv2/dnn/shape_utils.hpp

Lines changed: 19 additions & 7 deletions

@@ -205,21 +205,33 @@ static inline std::ostream& operator<<(std::ostream &out, const MatShape& shape)
     return out;
 }
 
-inline int clamp(int ax, int dims)
+/// @brief Converts axis from `[-dims; dims)` (similar to Python's slice notation) to `[0; dims)` range.
+static inline
+int normalize_axis(int axis, int dims)
 {
-    return ax < 0 ? ax + dims : ax;
+    CV_Check(axis, axis >= -dims && axis < dims, "");
+    axis = (axis < 0) ? (dims + axis) : axis;
+    CV_DbgCheck(axis, axis >= 0 && axis < dims, "");
+    return axis;
 }
 
-inline int clamp(int ax, const MatShape& shape)
+static inline
+int normalize_axis(int axis, const MatShape& shape)
 {
-    return clamp(ax, (int)shape.size());
+    return normalize_axis(axis, (int)shape.size());
 }
 
-inline Range clamp(const Range& r, int axisSize)
+static inline
+Range normalize_axis_range(const Range& r, int axisSize)
 {
-    Range clamped(std::max(r.start, 0),
+    if (r == Range::all())
+        return Range(0, axisSize);
+    CV_CheckGE(r.start, 0, "");
+    Range clamped(r.start,
                   r.end > 0 ? std::min(r.end, axisSize) : axisSize + r.end + 1);
-    CV_Assert_N(clamped.start < clamped.end, clamped.end <= axisSize);
+    CV_DbgCheckGE(clamped.start, 0, "");
+    CV_CheckLT(clamped.start, clamped.end, "");
+    CV_CheckLE(clamped.end, axisSize, "");
     return clamped;
 }
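
The behavioral change is concentrated in these helpers: the old clamp() only remapped negative axes and silently passed out-of-range values through, while normalize_axis() rejects anything outside [-dims; dims). A minimal standalone sketch of the new semantics, with plain assert standing in for the CV_Check* macros (normalize_axis_sketch is a hypothetical name used here for illustration):

#include <cassert>

// Maps an axis in [-dims, dims) to [0, dims); anything else fails loudly,
// whereas the old clamp() would silently return an out-of-range index.
static int normalize_axis_sketch(int axis, int dims)
{
    assert(axis >= -dims && axis < dims);  // the check clamp() never did
    return axis < 0 ? dims + axis : axis;
}

int main()
{
    assert(normalize_axis_sketch(-1, 4) == 3);  // Python-style negative axis
    assert(normalize_axis_sketch(2, 4) == 2);   // in-range axis is unchanged
    // normalize_axis_sketch(4, 4) or normalize_axis_sketch(-5, 4) would abort.
    return 0;
}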

modules/dnn/src/dnn.cpp

Lines changed: 1 addition & 1 deletion

@@ -2598,7 +2598,7 @@ struct Net::Impl : public detail::NetImplBase
             // the concatenation optimization is applied with batch_size > 1.
             // so, for now, we only apply this optimization in the most popular
             // case batch_size == 1.
-            int axis = clamp(concatLayer->axis, output.dims);
+            int axis = normalize_axis(concatLayer->axis, output.dims);
             if( output.total(0, axis) == 1 )
             {
                 size_t i, ninputs = ld.inputBlobsId.size();

modules/dnn/src/layers/concat_layer.cpp

Lines changed: 5 additions & 5 deletions

@@ -72,7 +72,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
     {
         CV_Assert(inputs.size() > 0);
         outputs.resize(1, inputs[0]);
-        int cAxis = clamp(axis, inputs[0]);
+        int cAxis = normalize_axis(axis, inputs[0]);
 
         int axisSum = 0;
         for (size_t i = 0; i < inputs.size(); i++)
@@ -192,7 +192,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
         inps.getUMatVector(inputs);
         outs.getUMatVector(outputs);
 
-        int cAxis = clamp(axis, inputs[0].dims);
+        int cAxis = normalize_axis(axis, inputs[0].dims);
         if (padding)
             return false;
 
@@ -246,7 +246,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
         inputs_arr.getMatVector(inputs);
         outputs_arr.getMatVector(outputs);
 
-        int cAxis = clamp(axis, inputs[0].dims);
+        int cAxis = normalize_axis(axis, inputs[0].dims);
         Mat& outMat = outputs[0];
 
         if (padding)
@@ -306,7 +306,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
         InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
 
         InferenceEngine::Builder::ConcatLayer ieLayer(name);
-        ieLayer.setAxis(clamp(axis, input->getDims().size()));
+        ieLayer.setAxis(normalize_axis(axis, input->getDims().size()));
         ieLayer.setInputPorts(std::vector<InferenceEngine::Port>(inputs.size()));
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
     }
@@ -319,7 +319,7 @@ class ConcatLayerImpl CV_FINAL : public ConcatLayer
     {
         InferenceEngine::DataPtr data = ngraphDataNode(inputs[0]);
         const int numDims = data->getDims().size();
-        const int cAxis = clamp(axis, numDims);
+        const int cAxis = normalize_axis(axis, numDims);
         std::vector<size_t> maxDims(numDims, 0);
 
         CV_Assert(inputs.size() == nodes.size());

modules/dnn/src/layers/flatten_layer.cpp

Lines changed: 6 additions & 6 deletions

@@ -82,8 +82,8 @@ class FlattenLayerImpl CV_FINAL : public FlattenLayer
         }
 
         int numAxes = inputs[0].size();
-        int startAxis = clamp(_startAxis, numAxes);
-        int endAxis = clamp(_endAxis, numAxes);
+        int startAxis = normalize_axis(_startAxis, numAxes);
+        int endAxis = normalize_axis(_endAxis, numAxes);
 
         CV_Assert(startAxis >= 0);
         CV_Assert(endAxis >= startAxis && endAxis < (int)numAxes);
@@ -113,8 +113,8 @@ class FlattenLayerImpl CV_FINAL : public FlattenLayer
         inputs_arr.getMatVector(inputs);
 
         int numAxes = inputs[0].dims;
-        _startAxis = clamp(_startAxis, numAxes);
-        _endAxis = clamp(_endAxis, numAxes);
+        _startAxis = normalize_axis(_startAxis, numAxes);
+        _endAxis = normalize_axis(_endAxis, numAxes);
     }
 
 #ifdef HAVE_OPENCL
@@ -186,8 +186,8 @@ virtual Ptr<BackendNode> initNgraph(const std::vector<Ptr<BackendWrapper> >& inp
         std::vector<size_t> dims = ieInpNode->get_shape();
 
         int numAxes = dims.size();
-        int startAxis = clamp(_startAxis, numAxes);
-        int endAxis = clamp(_endAxis, numAxes);
+        int startAxis = normalize_axis(_startAxis, numAxes);
+        int endAxis = normalize_axis(_endAxis, numAxes);
 
         CV_Assert(startAxis >= 0);
         CV_Assert(endAxis >= startAxis && endAxis < numAxes);

modules/dnn/src/layers/fully_connected_layer.cpp

Lines changed: 3 additions & 3 deletions

@@ -129,7 +129,7 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
             CV_CheckEQ(blobs[0].dims, 2, "");
             numOutput = blobs[0].size[0];
             CV_Assert(!bias || (size_t)numOutput == blobs[1].total());
-            cAxis = clamp(axis, inputs[0]);
+            cAxis = normalize_axis(axis, inputs[0]);
         }
 
         MatShape outShape(cAxis + 1);
@@ -352,7 +352,7 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
             return true;
         }
 
-        int axisCan = clamp(axis, inputs[0].dims);
+        int axisCan = normalize_axis(axis, inputs[0].dims);
         int numOutput = blobs[0].size[0];
         int innerSize = blobs[0].size[1];
         int outerSize = total(shape(inputs[0]), 0, axisCan);
@@ -473,7 +473,7 @@ class FullyConnectedLayerImpl CV_FINAL : public InnerProductLayer
 
         if (!blobs.empty())
         {
-            int axisCan = clamp(axis, input[0].dims);
+            int axisCan = normalize_axis(axis, input[0].dims);
             int outerSize = input[0].total(0, axisCan);
 
             for (size_t i = 0; i < input.size(); i++)

modules/dnn/src/layers/normalize_bbox_layer.cpp

Lines changed: 4 additions & 4 deletions

@@ -118,8 +118,8 @@ class NormalizeBBoxLayerImpl CV_FINAL : public NormalizeBBoxLayer
 
         const UMat& inp0 = inputs[0];
         UMat& buffer = internals[0];
-        startAxis = clamp(startAxis, inp0.dims);
-        endAxis = clamp(endAxis, inp0.dims);
+        startAxis = normalize_axis(startAxis, inp0.dims);
+        endAxis = normalize_axis(endAxis, inp0.dims);
 
         size_t num = total(shape(inp0.size), 0, startAxis);
         size_t numPlanes = total(shape(inp0.size), startAxis, endAxis + 1);
@@ -203,8 +203,8 @@ class NormalizeBBoxLayerImpl CV_FINAL : public NormalizeBBoxLayer
 
         const Mat& inp0 = inputs[0];
         Mat& buffer = internals[0];
-        startAxis = clamp(startAxis, inp0.dims);
-        endAxis = clamp(endAxis, inp0.dims);
+        startAxis = normalize_axis(startAxis, inp0.dims);
+        endAxis = normalize_axis(endAxis, inp0.dims);
 
         const float* inpData = inp0.ptr<float>();
         float* outData = outputs[0].ptr<float>();

modules/dnn/src/layers/reshape_layer.cpp

Lines changed: 1 addition & 8 deletions

@@ -60,14 +60,7 @@ static void computeShapeByReshapeMask(const MatShape &srcShape,
     int srcShapeSize = (int)srcShape.size();
     int maskShapeSize = (int)maskShape.size();
 
-    if (srcRange == Range::all())
-        srcRange = Range(0, srcShapeSize);
-    else
-    {
-        int sz = srcRange.size();
-        srcRange.start = clamp(srcRange.start, srcShapeSize);
-        srcRange.end = srcRange.end == INT_MAX ? srcShapeSize : srcRange.start + sz;
-    }
+    srcRange = normalize_axis_range(srcRange, srcShapeSize);
 
     bool explicitMask = !maskShape.empty(); // All mask values are positive.
    for (int i = 0, n = maskShape.size(); i < n && explicitMask; ++i)
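
This hunk replaces hand-rolled Range handling with the new helper; note that negative starts are now rejected rather than silently remapped. A standalone sketch of what normalize_axis_range() does, again reducing the CV_Check* macros to plain asserts (normalize_axis_range_sketch is a hypothetical name for illustration):

#include <algorithm>
#include <cassert>
#include <opencv2/core.hpp>  // cv::Range

// Range::all() expands to the full axis; a non-positive end counts back
// from the axis size, Python-style; negative starts are rejected instead
// of being clamped to 0 as the old clamp(Range, int) did.
static cv::Range normalize_axis_range_sketch(const cv::Range& r, int axisSize)
{
    if (r == cv::Range::all())
        return cv::Range(0, axisSize);
    assert(r.start >= 0);
    cv::Range clamped(r.start,
                      r.end > 0 ? std::min(r.end, axisSize) : axisSize + r.end + 1);
    assert(clamped.start < clamped.end && clamped.end <= axisSize);
    return clamped;
}

int main()
{
    assert(normalize_axis_range_sketch(cv::Range::all(), 5) == cv::Range(0, 5));
    assert(normalize_axis_range_sketch(cv::Range(1, -1), 5) == cv::Range(1, 5));
    return 0;
}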

modules/dnn/src/layers/scale_layer.cpp

Lines changed: 1 addition & 1 deletion

@@ -240,7 +240,7 @@ class ScaleLayerImpl CV_FINAL : public ScaleLayer
         numChannels = blobs[0].total();
 
         std::vector<size_t> shape(ieInpNode0->get_shape().size(), 1);
-        int cAxis = clamp(axis, shape.size());
+        int cAxis = normalize_axis(axis, shape.size());
         shape[cAxis] = numChannels;
 
         auto node = ieInpNode0;

modules/dnn/src/layers/slice_layer.cpp

Lines changed: 4 additions & 4 deletions

@@ -146,7 +146,7 @@ class SliceLayerImpl : public SliceLayer
             for (int j = 0; j < sliceRanges[i].size(); ++j)
             {
                 if (shapesInitialized || inpShape[j] > 0)
-                    outputs[i][j] = clamp(sliceRanges[i][j], inpShape[j]).size();
+                    outputs[i][j] = normalize_axis_range(sliceRanges[i][j], inpShape[j]).size();
             }
         }
     }
@@ -209,7 +209,7 @@ class SliceLayerImpl : public SliceLayer
             // Clamp.
             for (int j = 0; j < finalSliceRanges[i].size(); ++j)
             {
-                finalSliceRanges[i][j] = clamp(finalSliceRanges[i][j], inpShape[j]);
+                finalSliceRanges[i][j] = normalize_axis_range(finalSliceRanges[i][j], inpShape[j]);
             }
         }
 
@@ -601,7 +601,7 @@ class CropLayerImpl CV_FINAL : public SliceLayerImpl
         CV_Assert(inputs.size() == 2);
 
         MatShape dstShape = inputs[0];
-        int start = clamp(axis, dstShape);
+        int start = normalize_axis(axis, dstShape);
         for (int i = start; i < dstShape.size(); i++)
         {
             dstShape[i] = inputs[1][i];
@@ -620,7 +620,7 @@ class CropLayerImpl CV_FINAL : public SliceLayerImpl
         const Mat &inpSzBlob = inputs[1];
 
         int dims = inpBlob.dims;
-        int start_axis = clamp(axis, dims);
+        int start_axis = normalize_axis(axis, dims);
 
         std::vector<int> offset_final(dims, 0);
         if (offset.size() == 1)

modules/dnn/src/layers/softmax_layer.cpp

Lines changed: 5 additions & 5 deletions

@@ -82,7 +82,7 @@ class SoftMaxLayerImpl CV_FINAL : public SoftmaxLayer
     {
         bool inplace = Layer::getMemoryShapes(inputs, requiredOutputs, outputs, internals);
         MatShape shape = inputs[0];
-        int cAxis = clamp(axisRaw, shape.size());
+        int cAxis = normalize_axis(axisRaw, shape.size());
         shape[cAxis] = 1;
         internals.assign(1, shape);
         return inplace;
@@ -115,7 +115,7 @@ class SoftMaxLayerImpl CV_FINAL : public SoftmaxLayer
 
         UMat& src = inputs[0];
         UMat& dstMat = outputs[0];
-        int axis = clamp(axisRaw, src.dims);
+        int axis = normalize_axis(axisRaw, src.dims);
 
         if (softmaxOp.empty())
         {
@@ -207,7 +207,7 @@ class SoftMaxLayerImpl CV_FINAL : public SoftmaxLayer
         const Mat &src = inputs[0];
         Mat &dst = outputs[0];
 
-        int axis = clamp(axisRaw, src.dims);
+        int axis = normalize_axis(axisRaw, src.dims);
         size_t outerSize = src.total(0, axis), channels = src.size[axis],
                innerSize = src.total(axis + 1);
 
@@ -318,7 +318,7 @@ class SoftMaxLayerImpl CV_FINAL : public SoftmaxLayer
         InferenceEngine::DataPtr input = infEngineDataNode(inputs[0]);
 
         InferenceEngine::Builder::SoftMaxLayer ieLayer(name);
-        ieLayer.setAxis(clamp(axisRaw, input->getDims().size()));
+        ieLayer.setAxis(normalize_axis(axisRaw, input->getDims().size()));
 
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
     }
@@ -329,7 +329,7 @@ class SoftMaxLayerImpl CV_FINAL : public SoftmaxLayer
                           const std::vector<Ptr<BackendNode> >& nodes) CV_OVERRIDE
     {
         auto& ieInpNode = nodes[0].dynamicCast<InfEngineNgraphNode>()->node;
-        int axis = clamp(axisRaw, ieInpNode->get_shape().size());
+        int axis = normalize_axis(axisRaw, ieInpNode->get_shape().size());
         auto softmax = std::make_shared<ngraph::op::v1::Softmax>(ieInpNode, axis);
         if (logSoftMax)
             return Ptr<BackendNode>(new InfEngineNgraphNode(std::make_shared<ngraph::op::v0::Log>(softmax)));
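
Call sites such as softmax and fully-connected then use the normalized axis to split the blob into outer/channel/inner extents, as in the outerSize/channels/innerSize lines above. A minimal sketch of that partitioning with a hypothetical 4-D NCHW blob (not part of the patch):

#include <cassert>
#include <opencv2/core.hpp>

int main()
{
    int sz[] = {2, 3, 4, 5};                // N, C, H, W
    cv::Mat src(4, sz, CV_32F, cv::Scalar(0));
    int axis = 1;                           // e.g. normalize_axis(-3, 4) == 1
    size_t outerSize = src.total(0, axis);  // 2        (N)
    size_t channels  = src.size[axis];      // 3        (C)
    size_t innerSize = src.total(axis + 1); // 4*5 = 20 (H*W)
    assert(outerSize * channels * innerSize == src.total());
    return 0;
}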
