
Commit 083727b

Merge pull request opencv#18551 from alalek:issue_17964

2 parents: 6c218c7 + 718dd9f

4 files changed: +18 / -18 lines

modules/dnn/src/dnn.cpp (3 additions, 1 deletion)

@@ -2460,10 +2460,12 @@ struct Net::Impl : public detail::NetImplBase
             if( nextData )
                 nextActivLayer = nextData->layerInstance.dynamicCast<ActivationLayer>();
 
+            Ptr<PowerLayer> activ_power;
             if( !nextActivLayer.empty() &&
                     (!nextData->type.compare("ReLU") ||
                      !nextData->type.compare("ChannelsPReLU") ||
-                     !nextData->type.compare("Power")) &&
+                     (!nextData->type.compare("Power") && (activ_power = nextActivLayer.dynamicCast<PowerLayer>()) && activ_power->scale == 1.0f)
+                    ) &&
                 currLayer->setActivation(nextActivLayer) )
             {
                 CV_Assert_N(biasLayerData->outputBlobsWrappers.size() == 1, ld.inputBlobsWrappers.size() == 1);
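
The guard above is the core of the fix: a Power activation is now fused into the preceding layer only when its scale is exactly 1. OpenCV's Power layer computes y = (shift + scale * x) ^ power, and per issue opencv#17964 the fused OpenCL path mishandled the scale != 1 case. A minimal standalone sketch of those semantics (the helper below is illustrative, not OpenCV code):

    #include <cmath>
    #include <cstdio>

    // Illustrative stand-in for OpenCV's Power activation semantics:
    //   y = (shift + scale * x) ^ power
    static float powerActivation(float x, float power, float scale, float shift)
    {
        return std::pow(shift + scale * x, power);
    }

    int main()
    {
        const float x = 2.0f;
        // scale == 1: the new guard lets this case fuse.
        std::printf("scale=1: y = %f\n", powerActivation(x, 2.0f, 1.0f, 0.5f));
        // scale != 1: fusion is rejected and the layer runs separately.
        std::printf("scale=2: y = %f\n", powerActivation(x, 2.0f, 2.0f, 0.5f));
        return 0;
    }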

modules/dnn/src/layers/convolution_layer.cpp (10 additions, 0 deletions)

@@ -46,6 +46,8 @@
 #include "../op_inf_engine.hpp"
 #include "../ie_ngraph.hpp"
 
+#include <opencv2/core/utils/logger.hpp>
+
 #include "opencv2/core/hal/hal.hpp"
 #include "opencv2/core/hal/intrin.hpp"
 #include <iostream>

@@ -371,6 +373,14 @@ class ConvolutionLayerImpl CV_FINAL : public BaseConvolutionLayerImpl
         Ptr<PowerLayer> activ_power = activ.dynamicCast<PowerLayer>();
         if (!activ_power.empty())
         {
+            if (activ_power->scale != 1.0f)  // not supported well by implementation, #17964
+            {
+                // FIXIT no way to check number of blobs (like, eltwise input)
+                CV_LOG_INFO(NULL, "DNN/OpenCL: can't configure Power activation (scale != 1.0f)");
+                activ.release();
+                newActiv = false;
+                return false;
+            }
             if (activ_power->scale != 1.f || activ_power->shift != 0.f)
             {
                 const int outCh = blobs[0].size[0];
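
When the unsupported case is hit, the new block logs the reason through OpenCV's logger (hence the added <opencv2/core/utils/logger.hpp> include), drops the activation, and reports failure so the convolution runs without a fused Power. A hedged, self-contained sketch of that bail-out pattern; FusionState and trySetActivation are hypothetical stand-ins for the real ConvolutionLayerImpl members:

    #include <opencv2/core/utils/logger.hpp>
    #include <cstdio>

    // Hypothetical stand-in for the fusion bookkeeping in ConvolutionLayerImpl.
    struct FusionState
    {
        bool newActiv = true;

        // Mirrors the added guard: refuse Power activations with scale != 1.
        bool trySetActivation(float scale)
        {
            if (scale != 1.0f)  // unsupported case, see opencv#17964
            {
                CV_LOG_INFO(NULL, "DNN/OpenCL: can't configure Power activation (scale != 1.0f)");
                newActiv = false;
                return false;  // caller keeps Power as a separate, unfused layer
            }
            return true;
        }
    };

    int main()
    {
        FusionState state;
        if (!state.trySetActivation(2.0f))
            std::printf("fusion rejected; running Power as a separate layer\n");
        return 0;
    }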

modules/dnn/test/test_common.impl.hpp (2 additions, 2 deletions)

@@ -63,10 +63,10 @@ void normAssert(
         double l1 /*= 0.00001*/, double lInf /*= 0.0001*/)
 {
     double normL1 = cvtest::norm(ref, test, cv::NORM_L1) / ref.getMat().total();
-    EXPECT_LE(normL1, l1) << comment;
+    EXPECT_LE(normL1, l1) << comment << " |ref| = " << cvtest::norm(ref, cv::NORM_INF);
 
     double normInf = cvtest::norm(ref, test, cv::NORM_INF);
-    EXPECT_LE(normInf, lInf) << comment;
+    EXPECT_LE(normInf, lInf) << comment << " |ref| = " << cvtest::norm(ref, cv::NORM_INF);
 }
 
 std::vector<cv::Rect2d> matToBoxes(const cv::Mat& m)
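
Both assertions now append the reference tensor's infinity norm to the failure message, so a tolerance failure can be judged relative to the magnitude of the data rather than as a bare absolute error. A hedged usage sketch, assuming the OpenCV DNN test harness (where normAssert and the GTest macros are already in scope); the test name and values are illustrative:

    TEST(DNNTestExample, NormAssertMessage)
    {
        cv::Mat ref = cv::Mat::ones(1, 4, CV_32F) * 100.0f;
        cv::Mat out = ref.clone();
        out.at<float>(0, 0) += 0.5f;  // deliberate mismatch

        // On failure the report now ends with "... |ref| = 100", so the 0.5
        // absolute error can be read against the scale of the reference data.
        normAssert(ref, out, "conv output", /*l1=*/1e-5, /*lInf=*/1e-4);
    }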

modules/dnn/test/test_layers.cpp (3 additions, 15 deletions)

@@ -2219,10 +2219,6 @@ TEST_P(ConvolutionActivationFusion, Accuracy)
     Backend backendId = get<0>(get<2>(GetParam()));
     Target targetId = get<1>(get<2>(GetParam()));
 
-    // bug: https://github.com/opencv/opencv/issues/17964
-    if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
     Net net;
     int convId = net.addLayer(convParams.name, convParams.type, convParams);
     int activId = net.addLayerToPrev(activationParams.name, activationParams.type, activationParams);

@@ -2235,7 +2231,7 @@ TEST_P(ConvolutionActivationFusion, Accuracy)
             expectedFusedLayers.push_back(activId); // all activations are fused
         else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
         {
-            if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Power")
+            if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" /*|| actType == "Power"*/)
                 expectedFusedLayers.push_back(activId);
         }
     }

@@ -2349,10 +2345,6 @@ TEST_P(ConvolutionEltwiseActivationFusion, Accuracy)
     if ((eltwiseOp != "sum" || weightedEltwise) && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
         applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
 
-    // bug: https://github.com/opencv/opencv/issues/17964
-    if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
     Net net;
     int convId = net.addLayer(convParams.name, convParams.type, convParams);
     int eltwiseId = net.addLayer(eltwiseParams.name, eltwiseParams.type, eltwiseParams);

@@ -2369,7 +2361,7 @@ TEST_P(ConvolutionEltwiseActivationFusion, Accuracy)
             expectedFusedLayers.push_back(activId); // activation is fused with eltwise layer
         else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
         {
-            if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "Power")
+            if (actType == "ReLU" || actType == "ChannelsPReLU" /*|| actType == "Power"*/)
             {
                 expectedFusedLayers.push_back(eltwiseId);
                 expectedFusedLayers.push_back(activId);

@@ -2431,10 +2423,6 @@ TEST_P(ConvolutionActivationEltwiseFusion, Accuracy)
     Backend backendId = get<0>(get<4>(GetParam()));
     Target targetId = get<1>(get<4>(GetParam()));
 
-    // bug: https://github.com/opencv/opencv/issues/17964
-    if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
     Net net;
     int convId = net.addLayer(convParams.name, convParams.type, convParams);
     int activId = net.addLayer(activationParams.name, activationParams.type, activationParams);

@@ -2451,7 +2439,7 @@ TEST_P(ConvolutionActivationEltwiseFusion, Accuracy)
             expectedFusedLayers.push_back(activId); // activation fused with convolution
         else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
         {
-            if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Power")
+            if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" /*|| actType == "Power"*/)
                 expectedFusedLayers.push_back(activId); // activation fused with convolution
         }
     }
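
All three fusion tests share one pattern: the hard per-target skip tags for Power are removed (the configurations now run), and instead "Power" is commented out of the OpenCL fusion allow-lists, so the tests expect the activation to stay unfused until opencv#17964 is fully resolved. A hedged sketch of that expectation-building pattern; the surrounding conditions are reconstructed from the diff context above, not verbatim source:

    // Sketch: which layer ids the test expects the backend to fuse.
    std::vector<int> expectedFusedLayers;
    if (backendId == DNN_BACKEND_OPENCV)
    {
        if (targetId == DNN_TARGET_CPU)
            expectedFusedLayers.push_back(activId);  // all activations are fused
        else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
        {
            // "Power" is disabled on OpenCL targets; with the new guard the
            // net runs it as a separate layer, so it must not appear here.
            if (actType == "ReLU" || actType == "ChannelsPReLU" ||
                actType == "ReLU6" || actType == "TanH" /*|| actType == "Power"*/)
                expectedFusedLayers.push_back(activId);
        }
    }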
