@@ -2219,10 +2219,6 @@ TEST_P(ConvolutionActivationFusion, Accuracy)
     Backend backendId = get<0>(get<2>(GetParam()));
     Target targetId = get<1>(get<2>(GetParam()));
 
-    // bug: https://github.com/opencv/opencv/issues/17964
-    if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
     Net net;
     int convId = net.addLayer(convParams.name, convParams.type, convParams);
     int activId = net.addLayerToPrev(activationParams.name, activationParams.type, activationParams);
@@ -2235,7 +2231,7 @@ TEST_P(ConvolutionActivationFusion, Accuracy)
             expectedFusedLayers.push_back(activId); // all activations are fused
         else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
         {
-            if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Power")
+            if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" /*|| actType == "Power"*/)
                 expectedFusedLayers.push_back(activId);
         }
     }
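
These hunks replace the blanket skip of the "Power" cases with running them while no longer expecting the Power activation to be fused on OpenCL targets (the fusion stays disabled pending issue 17964). For reference, a minimal standalone sketch of the conv-plus-activation net this fixture builds is given below; the shapes, weights, and Power parameters are illustrative assumptions rather than values from the test suite, and an OpenCL-capable build is assumed (OpenCV silently falls back to CPU otherwise):

    #include <opencv2/dnn.hpp>
    #include <iostream>

    using namespace cv;
    using namespace cv::dnn;

    int main()
    {
        // Convolution: 1 input channel -> 1 output channel, 3x3 kernel,
        // pad 1 so the output keeps the input's spatial size.
        LayerParams convParams;
        convParams.name = "conv";
        convParams.type = "Convolution";
        convParams.set("kernel_size", 3);
        convParams.set("pad", 1);
        convParams.set("num_output", 1);
        convParams.set("bias_term", false);
        std::vector<int> wshape = {1, 1, 3, 3}; // {outCn, inCn, kh, kw}
        convParams.blobs.push_back(Mat(wshape, CV_32F, Scalar(0.1)));

        // Power activation: y = (shift + scale * x) ^ power.
        LayerParams actParams;
        actParams.name = "act";
        actParams.type = "Power";
        actParams.set("power", 2.0);

        Net net;
        int convId = net.addLayer(convParams.name, convParams.type, convParams);
        net.addLayerToPrev(actParams.name, actParams.type, actParams);
        net.connect(0, 0, convId, 0); // wire the network input into the convolution

        std::vector<int> ishape = {1, 1, 8, 8};
        Mat input(ishape, CV_32F, Scalar(1.0));

        // Reference run on the CPU target...
        net.setInput(input);
        net.setPreferableBackend(DNN_BACKEND_OPENCV);
        net.setPreferableTarget(DNN_TARGET_CPU);
        Mat refOut = net.forward().clone();

        // ...and the OpenCL run that is no longer skipped.
        net.setPreferableTarget(DNN_TARGET_OPENCL);
        net.setInput(input);
        Mat oclOut = net.forward().clone();

        std::cout << "max |cpu - ocl| = " << norm(refOut, oclOut, NORM_INF) << std::endl;
        return 0;
    }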
@@ -2349,10 +2345,6 @@ TEST_P(ConvolutionEltwiseActivationFusion, Accuracy)
     if ((eltwiseOp != "sum" || weightedEltwise) && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
         applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
 
-    // bug: https://github.com/opencv/opencv/issues/17964
-    if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
     Net net;
     int convId = net.addLayer(convParams.name, convParams.type, convParams);
     int eltwiseId = net.addLayer(eltwiseParams.name, eltwiseParams.type, eltwiseParams);
@@ -2369,7 +2361,7 @@ TEST_P(ConvolutionEltwiseActivationFusion, Accuracy)
             expectedFusedLayers.push_back(activId); // activation is fused with eltwise layer
         else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
         {
-            if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "Power")
+            if (actType == "ReLU" || actType == "ChannelsPReLU" /*|| actType == "Power"*/)
             {
                 expectedFusedLayers.push_back(eltwiseId);
                 expectedFusedLayers.push_back(activId);
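
The ConvolutionEltwiseActivationFusion hunks above exercise a residual-style topology: the eltwise layer combines the convolution output with the raw network input before the activation runs. Continuing the sketch from the previous note (reusing its hypothetical net and convId, with the eltwise and a ReLU attached after the convolution in place of the Power layer; the pad-1 convolution keeps both eltwise inputs at the same 1x1x8x8 shape), the wiring would look like:

    // Eltwise "sum" of the convolution output and the raw network input.
    LayerParams eltwiseParams;
    eltwiseParams.name = "eltwise";
    eltwiseParams.type = "Eltwise";
    eltwiseParams.set("operation", "sum");
    int eltwiseId = net.addLayer(eltwiseParams.name, eltwiseParams.type, eltwiseParams);
    net.connect(convId, 0, eltwiseId, 0); // input 0: conv output
    net.connect(0, 0, eltwiseId, 1);      // input 1: network input

    // Activation applied to the eltwise result.
    LayerParams reluParams;
    reluParams.name = "relu";
    reluParams.type = "ReLU";
    net.addLayerToPrev(reluParams.name, reluParams.type, reluParams);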
@@ -2431,10 +2423,6 @@ TEST_P(ConvolutionActivationEltwiseFusion, Accuracy)
     Backend backendId = get<0>(get<4>(GetParam()));
     Target targetId = get<1>(get<4>(GetParam()));
 
-    // bug: https://github.com/opencv/opencv/issues/17964
-    if (actType == "Power" && backendId == DNN_BACKEND_OPENCV && (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16))
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL);
-
     Net net;
     int convId = net.addLayer(convParams.name, convParams.type, convParams);
     int activId = net.addLayer(activationParams.name, activationParams.type, activationParams);
@@ -2451,7 +2439,7 @@ TEST_P(ConvolutionActivationEltwiseFusion, Accuracy)
            expectedFusedLayers.push_back(activId); // activation fused with convolution
        else if (targetId == DNN_TARGET_OPENCL || targetId == DNN_TARGET_OPENCL_FP16)
        {
-           if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" || actType == "Power")
+           if (actType == "ReLU" || actType == "ChannelsPReLU" || actType == "ReLU6" || actType == "TanH" /*|| actType == "Power"*/)
                expectedFusedLayers.push_back(activId); // activation fused with convolution
        }
    }