
Commit 96947c3

jebastin-nadaralalek authored and committed
Added exp layer
Backport of commit 6111935; partial backport of commit dd59761.

1 parent 19f1bac · commit 96947c3

8 files changed (+173, -2 lines)

modules/dnn/include/opencv2/dnn/all_layers.hpp

Lines changed: 8 additions & 0 deletions
@@ -499,6 +499,14 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         static Ptr<PowerLayer> create(const LayerParams &params);
     };
 
+    class CV_EXPORTS ExpLayer : public ActivationLayer
+    {
+    public:
+        float base, scale, shift;
+
+        static Ptr<ExpLayer> create(const LayerParams &params);
+    };
+
     /* Layers used in semantic segmentation */
 
     class CV_EXPORTS CropLayer : public Layer
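
Note (not part of the diff): a minimal sketch of how this new header API can be exercised once the commit lands. The layer name "exp1" is hypothetical; the parameter names and defaults follow the ExpLayer::create() hunk further below.

    #include <opencv2/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    int main()
    {
        LayerParams lp;
        lp.type = "Exp";
        lp.name = "exp1";       // hypothetical layer name
        lp.set("base", -1.0f);  // -1 (the default) selects the natural base e
        lp.set("scale", 1.0f);
        lp.set("shift", 0.0f);
        Ptr<ExpLayer> layer = ExpLayer::create(lp);
        return layer.empty() ? 1 : 0;  // base/scale/shift are mirrored as public fields
    }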

modules/dnn/src/init.cpp

Lines changed: 1 addition & 0 deletions
@@ -110,6 +110,7 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(BNLL, BNLLLayer);
     CV_DNN_REGISTER_LAYER_CLASS(AbsVal, AbsLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Power, PowerLayer);
+    CV_DNN_REGISTER_LAYER_CLASS(Exp, ExpLayer);
     CV_DNN_REGISTER_LAYER_CLASS(BatchNorm, BatchNormLayer);
     CV_DNN_REGISTER_LAYER_CLASS(MaxUnpool, MaxUnpoolLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Dropout, BlankLayer);
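
Note (not part of the diff): this registration is what makes the string type "Exp" resolvable by name, so importers and user code can go through the public layer factory. A minimal sketch, assuming the standard cv::dnn::LayerFactory API:

    #include <opencv2/dnn.hpp>

    int main()
    {
        cv::dnn::LayerParams params;  // defaults: base = -1, scale = 1, shift = 0
        cv::Ptr<cv::dnn::Layer> l =
            cv::dnn::LayerFactory::createLayerInstance("Exp", params);  // dispatches to ExpLayer::create
        return l.empty() ? 1 : 0;
    }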

modules/dnn/src/layers/elementwise_layers.cpp

Lines changed: 113 additions & 0 deletions
@@ -1202,6 +1202,105 @@ struct PowerFunctor : public BaseFunctor
     int64 getFLOPSPerElement() const { return power == 1 ? 2 : 10; }
 };
 
+struct ExpFunctor : public BaseFunctor
+{
+    typedef ExpLayer Layer;
+    float base, scale, shift;
+    float normScale, normShift;
+
+    ExpFunctor(float base_ = -1.f, float scale_ = 1.f, float shift_ = 0.f)
+        : base(base_), scale(scale_), shift(shift_)
+    {
+        // For base > 0 :
+        // y = base^(scale * input + shift)
+        // ln(y) = ln(base)*(scale * input + shift)
+        // y = exp((ln(base)*scale) * input + (ln(base)*shift))
+        // y = exp(normalized_scale * input + normalized_shift)
+        CV_Check(base, base == -1.f || base > 0.f, "Unsupported 'base' value");
+        const float ln_base = (base == -1.f) ? 1.f : log(base);
+        normScale = scale * ln_base;
+        normShift = shift * ln_base;
+    }
+
+    bool supportBackend(int backendId, int targetId)
+    {
+        return backendId == DNN_BACKEND_OPENCV ||
+               backendId == DNN_BACKEND_HALIDE || backendId == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
+    }
+
+    void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
+    {
+        float a = normScale, b = normShift;
+        for( int cn = cn0; cn < cn1; cn++, srcptr += planeSize, dstptr += planeSize )
+        {
+            for( int i = 0; i < len; i++ )
+            {
+                float x = srcptr[i];
+                dstptr[i] = exp(a*x + b);
+            }
+        }
+    }
+
+#ifdef HAVE_OPENCL
+    bool applyOCL(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
+    {
+        std::vector<UMat> inputs;
+        std::vector<UMat> outputs;
+
+        inps.getUMatVector(inputs);
+        outs.getUMatVector(outputs);
+        String buildopt = oclGetTMacro(inputs[0]);
+
+        for (size_t i = 0; i < inputs.size(); i++)
+        {
+            UMat& src = inputs[i];
+            UMat& dst = outputs[i];
+
+            ocl::Kernel kernel("ExpForward", ocl::dnn::activations_oclsrc, buildopt);
+            kernel.set(0, (int)src.total());
+            kernel.set(1, ocl::KernelArg::PtrReadOnly(src));
+            kernel.set(2, ocl::KernelArg::PtrWriteOnly(dst));
+            kernel.set(3, (float)normScale);
+            kernel.set(4, (float)normShift);
+
+            size_t gSize = src.total();
+            CV_Assert(kernel.run(1, &gSize, NULL, false));
+        }
+        return true;
+    }
+#endif
+
+#ifdef HAVE_HALIDE
+    void attachHalide(const Halide::Expr& input, Halide::Func& top)
+    {
+        Halide::Var x("x"), y("y"), c("c"), n("n");
+        top(x, y, c, n) = exp(normScale * input + normShift);
+    }
+#endif  // HAVE_HALIDE
+
+#ifdef HAVE_DNN_IE_NN_BUILDER_2019
+    InferenceEngine::Builder::Layer initInfEngineBuilderAPI()
+    {
+        CV_Error(Error::StsNotImplemented, "");
+    }
+#endif  // HAVE_DNN_IE_NN_BUILDER_2019
+
+#ifdef HAVE_DNN_NGRAPH
+    std::shared_ptr<ngraph::Node> initNgraphAPI(const std::shared_ptr<ngraph::Node>& node)
+    {
+        auto scale_node = std::make_shared<ngraph::op::Constant>(ngraph::element::f32,
+                                                                 ngraph::Shape{1}, &normScale);
+        auto shift_node = std::make_shared<ngraph::op::Constant>(ngraph::element::f32,
+                                                                 ngraph::Shape{1}, &normShift);
+        auto mul = std::make_shared<ngraph::op::v1::Multiply>(scale_node, node, ngraph::op::AutoBroadcastType::NUMPY);
+        auto scale_shift = std::make_shared<ngraph::op::v1::Add>(mul, shift_node, ngraph::op::AutoBroadcastType::NUMPY);
+        return std::make_shared<ngraph::op::v0::Exp>(scale_shift);
+    }
+#endif  // HAVE_DNN_NGRAPH
+
+    int64 getFLOPSPerElement() const { return 3; }
+};
+
 struct ChannelsPReLUFunctor : public BaseFunctor
 {
     typedef ChannelsPReLULayer Layer;
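
Note (not part of the diff): the constructor above folds an arbitrary positive base into a plain exp() via normScale = scale * ln(base) and normShift = shift * ln(base). A standalone numeric check of that identity, using the hypothetical values base = 2, scale = 3, shift = 1:

    #include <cmath>
    #include <cstdio>

    int main()
    {
        const float base = 2.f, scale = 3.f, shift = 1.f, x = 0.5f;
        // same normalization as the ExpFunctor constructor
        const float normScale = scale * std::log(base);
        const float normShift = shift * std::log(base);
        const float direct = std::pow(base, scale * x + shift);    // 2^2.5 ~= 5.6569
        const float viaExp = std::exp(normScale * x + normShift);  // identical value
        std::printf("direct=%f viaExp=%f\n", direct, viaExp);
        return 0;
    }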
@@ -1419,6 +1518,20 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)
     return l;
 }
 
+Ptr<ExpLayer> ExpLayer::create(const LayerParams& params)
+{
+    float base = params.get<float>("base", -1.0f);
+    float scale = params.get<float>("scale", 1.0f);
+    float shift = params.get<float>("shift", 0.0f);
+    Ptr<ExpLayer> l(new ElementWiseLayer<ExpFunctor>(ExpFunctor(base, scale, shift)));
+    l->setParamsFrom(params);
+    l->base = base;
+    l->scale = scale;
+    l->shift = shift;
+
+    return l;
+}
+
 Ptr<Layer> ChannelsPReLULayer::create(const LayerParams& params)
 {
     CV_Assert(params.blobs.size() == 1);
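
Note (not part of the diff): with the factory registration and create() in place, the layer can run in a whole forward pass. A minimal sketch, assuming the standard cv::dnn::Net API; layer name and input shape are arbitrary:

    #include <opencv2/dnn.hpp>
    using namespace cv;
    using namespace cv::dnn;

    int main()
    {
        Net net;
        LayerParams lp;
        lp.set("base", -1.0f);  // natural base e
        lp.set("scale", 1.0f);
        lp.set("shift", 0.0f);
        net.addLayerToPrev("exp1", "Exp", lp);  // resolved via the factory registration

        Mat input(1, 5, CV_32F);
        randu(input, -1.0f, 1.0f);
        net.setInput(input);
        Mat out = net.forward();  // elementwise exp(x) over the input
        return 0;
    }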

modules/dnn/src/opencl/activations.cl

Lines changed: 11 additions & 0 deletions
@@ -140,3 +140,14 @@ __kernel void ELUForward(const int n, __global const T* in, __global T* out)
         out[index] = (src >= 0.f) ? src : exp(src) - 1;
     }
 }
+
+__kernel void ExpForward(const int n, __global const T* in, __global T* out,
+                         const KERNEL_ARG_DTYPE normScale,
+                         const KERNEL_ARG_DTYPE normShift)
+{
+    int index = get_global_id(0);
+    if (index < n)
+    {
+        out[index] = exp(normShift + normScale * in[index]);
+    }
+}

modules/dnn/src/tensorflow/tf_importer.cpp

Lines changed: 1 addition & 1 deletion
@@ -2425,7 +2425,7 @@ void TFImporter::parseNode(const tensorflow::NodeDef& layer_)
         connectToAllBlobs(layer_id, dstNet, parsePin(layer.input(0)), id, num_inputs);
     }
     else if (type == "Abs" || type == "Tanh" || type == "Sigmoid" ||
-             type == "Relu" || type == "Elu" ||
+             type == "Relu" || type == "Elu" || type == "Exp" ||
              type == "Identity" || type == "Relu6")
     {
         CV_CheckGT(num_inputs, 0, "");

modules/dnn/test/test_halide_layers.cpp

Lines changed: 25 additions & 0 deletions
@@ -621,6 +621,31 @@ INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Power, Combine(
     dnnBackendsAndTargetsWithHalide()
 ));
 
+typedef TestWithParam<tuple<Vec3f, tuple<Backend, Target> > > Exp;
+TEST_P(Exp, Accuracy)
+{
+    float base = get<0>(GetParam())[0];
+    float scale = get<0>(GetParam())[1];
+    float shift = get<0>(GetParam())[2];
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
+
+    LayerParams lp;
+    lp.set("base", base);
+    lp.set("scale", scale);
+    lp.set("shift", shift);
+    lp.type = "Exp";
+    lp.name = "testLayer";
+    testInPlaceActivation(lp, backendId, targetId);
+}
+
+INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, Exp, Combine(
+/*base, scale, shift*/ Values(Vec3f(0.9f, -1.0f, 1.1f), Vec3f(0.9f, 1.1f, -1.0f),
+                              Vec3f(-1.0f, 0.9f, 1.1f), Vec3f(-1.0f, 1.1f, 0.9f),
+                              Vec3f(1.1f, 0.9f, -1.0f), Vec3f(1.1f, -1.0f, 0.9f)),
+    dnnBackendsAndTargetsWithHalide()
+));
+
 TEST_P(Test_Halide_layers, ChannelsPReLU)
 {
     LayerParams lp;

modules/dnn/test/test_layers.cpp

Lines changed: 7 additions & 1 deletion
@@ -2107,6 +2107,12 @@ class TestLayerFusion : public DNNTestLayer {
             randu(scales, -1.0f, 1.0f);
             activationParams.blobs.push_back(scales);
         }
+        else if (activationParams.type == "Exp")
+        {
+            activationParams.set("base", -1.0f);
+            activationParams.set("scale", 0.3f);
+            activationParams.set("shift", 0.6f);
+        }
     }
 
     static void makeDefaultTestEltwiseLayer(LayerParams& eltwiseParams, const std::string& op, bool withCoefficients)
@@ -2178,7 +2184,7 @@ class TestLayerFusion : public DNNTestLayer {
     static testing::internal::ParamGenerator<std::string> activationLayersList()
     {
         // TODO: automate list generation
-        return Values("ReLU", "ReLU6", "ChannelsPReLU", "TanH", "Swish", "Mish", "Sigmoid", "ELU", "AbsVal", "BNLL", "Power");
+        return Values("ReLU", "ReLU6", "ChannelsPReLU", "TanH", "Swish", "Mish", "Sigmoid", "ELU", "AbsVal", "BNLL", "Power", "Exp");
     }
 
     static testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsForFusionTests()

modules/dnn/test/test_onnx_importer.cpp

Lines changed: 7 additions & 0 deletions
@@ -307,6 +307,13 @@ TEST_P(Test_ONNX_layers, Power)
     testONNXModels("pow2", npy, 0, 0, false, false);
 }
 
+TEST_P(Test_ONNX_layers, Exp)
+{
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
+    testONNXModels("exp");
+}
+
 TEST_P(Test_ONNX_layers, Concatenation)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
