From eca606277a80edfb487f1c6eead055b6c004f79b Mon Sep 17 00:00:00 2001
From: dkurt
Date: Fri, 5 May 2017 11:33:57 +0300
Subject: [PATCH] Enabled parallel computation of ReLU activation layer

---
 modules/dnn/src/layers/elementwise_layers.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp
index e0f3936b72..d794a596c4 100644
--- a/modules/dnn/src/layers/elementwise_layers.cpp
+++ b/modules/dnn/src/layers/elementwise_layers.cpp
@@ -224,7 +224,7 @@ Ptr<_Layer> _Layer::create() { \
 Ptr ReLULayer::create(const LayerParams& params)
 {
     float negativeSlope = params.get("negative_slope", 0.f);
-    Ptr l(new ElementWiseLayer(false, ReLUFunctor(negativeSlope)));
+    Ptr l(new ElementWiseLayer(true, ReLUFunctor(negativeSlope)));
     l->setParamsFrom(params);
     return l;