
Commit 29b4fd2

Merge pull request opencv#11351 from dkurt:dnn_enable_inf_engine_tests

2 parents f659f80 + bd77d10
6 files changed (+81, -69 lines)

cmake/OpenCVDetectInferenceEngine.cmake

Lines changed: 4 additions & 0 deletions
@@ -15,6 +15,10 @@ macro(ie_fail)
     return()
 endmacro()
 
+if(NOT HAVE_CXX11)
+  ie_fail()
+endif()
+
 if(NOT INF_ENGINE_ROOT_DIR OR NOT EXISTS "${INF_ENGINE_ROOT_DIR}/include/inference_engine.hpp")
     set(ie_root_paths "${INF_ENGINE_ROOT_DIR}")
     if(DEFINED ENV{INTEL_CVSDK_DIR})

modules/dnn/perf/perf_net.cpp

Lines changed: 0 additions & 6 deletions
@@ -95,24 +95,18 @@ PERF_TEST_P_(DNNTestNetwork, AlexNet)
 
 PERF_TEST_P_(DNNTestNetwork, GoogLeNet)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
                "", Mat(cv::Size(224, 224), CV_32FC3));
 }
 
 PERF_TEST_P_(DNNTestNetwork, ResNet_50)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
                "resnet_50.yml", Mat(cv::Size(224, 224), CV_32FC3));
 }
 
 PERF_TEST_P_(DNNTestNetwork, SqueezeNet_v1_1)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
                "squeezenet_v1_1.yml", Mat(cv::Size(227, 227), CV_32FC3));
 }

modules/dnn/src/dnn.cpp

Lines changed: 9 additions & 0 deletions
@@ -1255,6 +1255,15 @@ struct Net::Impl
                 if (weightableLayer->_biases)
                     weightableLayer->_biases = convertFp16(weightableLayer->_biases);
             }
+            else
+            {
+                for (const auto& weights : {"weights", "biases"})
+                {
+                    auto it = ieNode->layer->blobs.find(weights);
+                    if (it != ieNode->layer->blobs.end())
+                        it->second = convertFp16(it->second);
+                }
+            }
         }
 
         ieNode->connect(ld.inputBlobsWrappers, ld.outputBlobsWrappers);
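
Note on this hunk: for Inference Engine layers that are not WeightableLayer, the constant blobs live in a generic name-to-blob map, so the FP16 path now converts the "weights" and "biases" entries when they exist. The snippet below is an illustrative stand-in only, not the dnn.cpp code: it applies the same lookup-and-convert pattern to an ordinary std::map of cv::Mat using cv::convertFp16 from OpenCV core, rather than to the Inference Engine blob type used in the diff.

// Hedged sketch of the "convert named blobs to FP16" pattern on plain cv::Mat.
#include <opencv2/core.hpp>
#include <map>
#include <string>

int main()
{
    std::map<std::string, cv::Mat> blobs;                 // stand-in for layer->blobs
    blobs["weights"] = cv::Mat::ones(3, 3, CV_32F);
    blobs["biases"]  = cv::Mat::zeros(1, 3, CV_32F);

    for (const auto* name : {"weights", "biases"})
    {
        auto it = blobs.find(name);
        if (it != blobs.end())
        {
            cv::Mat fp16;
            cv::convertFp16(it->second, fp16);            // CV_32F -> half-precision storage
            it->second = fp16;
        }
    }
    return 0;
}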

modules/dnn/src/layers/prior_box_layer.cpp

Lines changed: 31 additions & 29 deletions
@@ -295,6 +295,19 @@ class PriorBoxLayerImpl CV_FINAL : public PriorBoxLayer
         return false;
     }
 
+    void finalize(const std::vector<Mat*> &inputs, std::vector<Mat> &outputs) CV_OVERRIDE
+    {
+        CV_Assert(inputs.size() > 1, inputs[0]->dims == 4, inputs[1]->dims == 4);
+        int layerWidth = inputs[0]->size[3];
+        int layerHeight = inputs[0]->size[2];
+
+        int imageWidth = inputs[1]->size[3];
+        int imageHeight = inputs[1]->size[2];
+
+        _stepY = _stepY == 0 ? (static_cast<float>(imageHeight) / layerHeight) : _stepY;
+        _stepX = _stepX == 0 ? (static_cast<float>(imageWidth) / layerWidth) : _stepX;
+    }
+
 #ifdef HAVE_OPENCL
     bool forward_ocl(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
     {
@@ -310,16 +323,6 @@ class PriorBoxLayerImpl CV_FINAL : public PriorBoxLayer
         int _imageWidth = inputs[1].size[3];
         int _imageHeight = inputs[1].size[2];
 
-        float stepX, stepY;
-        if (_stepX == 0 || _stepY == 0)
-        {
-            stepX = static_cast<float>(_imageWidth) / _layerWidth;
-            stepY = static_cast<float>(_imageHeight) / _layerHeight;
-        } else {
-            stepX = _stepX;
-            stepY = _stepY;
-        }
-
         if (umat_offsetsX.empty())
         {
             Mat offsetsX(1, _offsetsX.size(), CV_32FC1, &_offsetsX[0]);
@@ -339,8 +342,8 @@ class PriorBoxLayerImpl CV_FINAL : public PriorBoxLayer
 
         ocl::Kernel kernel("prior_box", ocl::dnn::prior_box_oclsrc);
         kernel.set(0, (int)nthreads);
-        kernel.set(1, (float)stepX);
-        kernel.set(2, (float)stepY);
+        kernel.set(1, (float)_stepX);
+        kernel.set(2, (float)_stepY);
         kernel.set(3, ocl::KernelArg::PtrReadOnly(umat_offsetsX));
         kernel.set(4, ocl::KernelArg::PtrReadOnly(umat_offsetsY));
         kernel.set(5, (int)_offsetsX.size());
@@ -410,15 +413,6 @@ class PriorBoxLayerImpl CV_FINAL : public PriorBoxLayer
         int _imageWidth = inputs[1]->size[3];
         int _imageHeight = inputs[1]->size[2];
 
-        float stepX, stepY;
-        if (_stepX == 0 || _stepY == 0) {
-            stepX = static_cast<float>(_imageWidth) / _layerWidth;
-            stepY = static_cast<float>(_imageHeight) / _layerHeight;
-        } else {
-            stepX = _stepX;
-            stepY = _stepY;
-        }
-
         float* outputPtr = outputs[0].ptr<float>();
         float _boxWidth, _boxHeight;
         for (size_t h = 0; h < _layerHeight; ++h)
@@ -431,8 +425,8 @@ class PriorBoxLayerImpl CV_FINAL : public PriorBoxLayer
                 _boxHeight = _boxHeights[i];
                 for (int j = 0; j < _offsetsX.size(); ++j)
                 {
-                    float center_x = (w + _offsetsX[j]) * stepX;
-                    float center_y = (h + _offsetsY[j]) * stepY;
+                    float center_x = (w + _offsetsX[j]) * _stepX;
+                    float center_y = (h + _offsetsY[j]) * _stepY;
                     outputPtr = addPrior(center_x, center_y, _boxWidth, _boxHeight, _imageWidth,
                                          _imageHeight, _bboxesNormalized, outputPtr);
                 }
@@ -495,20 +489,28 @@ class PriorBoxLayerImpl CV_FINAL : public PriorBoxLayer
             ieLayer->params["aspect_ratio"] += format(",%f", _aspectRatios[i]);
         }
 
-        ieLayer->params["flip"] = _flip ? "1" : "0";
+        ieLayer->params["flip"] = "0";  // We already flipped aspect ratios.
         ieLayer->params["clip"] = _clip ? "1" : "0";
 
         CV_Assert(!_variance.empty());
         ieLayer->params["variance"] = format("%f", _variance[0]);
         for (int i = 1; i < _variance.size(); ++i)
             ieLayer->params["variance"] += format(",%f", _variance[i]);
 
-        ieLayer->params["step"] = _stepX == _stepY ? format("%f", _stepX) : "0";
-        ieLayer->params["step_h"] = _stepY;
-        ieLayer->params["step_w"] = _stepX;
-
+        if (_stepX == _stepY)
+        {
+            ieLayer->params["step"] = format("%f", _stepX);
+            ieLayer->params["step_h"] = "0.0";
+            ieLayer->params["step_w"] = "0.0";
+        }
+        else
+        {
+            ieLayer->params["step"] = "0.0";
+            ieLayer->params["step_h"] = format("%f", _stepY);
+            ieLayer->params["step_w"] = format("%f", _stepX);
+        }
         CV_Assert(_offsetsX.size() == 1, _offsetsY.size() == 1, _offsetsX[0] == _offsetsY[0]);
-        ieLayer->params["offset"] = format("%f", _offsetsX[0]);;
+        ieLayer->params["offset"] = format("%f", _offsetsX[0]);
 
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
 #endif  // HAVE_INF_ENGINE
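
Note on this hunk: the new finalize() override resolves the default steps once per shape change. Whenever step_x or step_y is 0, it falls back to image size divided by feature-map size, and the OpenCL and CPU forward paths then read _stepX / _stepY directly instead of recomputing them. The standalone sketch below shows that fallback rule and the resulting box centers; the 19x19 feature map and 300x300 image are hypothetical sizes, not values from the diff.

// Minimal sketch, not the OpenCV implementation, of the PriorBox step fallback.
#include <cstdio>

int main()
{
    int layerWidth = 19, layerHeight = 19;    // feature-map size (assumed)
    int imageWidth = 300, imageHeight = 300;  // input image size (assumed)
    float stepX = 0.f, stepY = 0.f;           // 0 means "derive from the sizes"
    float offset = 0.5f;

    // Same rule as finalize(): image size / layer size when the step is 0.
    stepX = (stepX == 0.f) ? static_cast<float>(imageWidth) / layerWidth : stepX;
    stepY = (stepY == 0.f) ? static_cast<float>(imageHeight) / layerHeight : stepY;

    // Box centers for the first few cells of the top feature-map row.
    for (int w = 0; w < 3; ++w)
        std::printf("center_x[%d] = %.2f, center_y[0] = %.2f\n",
                    w, (w + offset) * stepX, (0 + offset) * stepY);
    return 0;
}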

modules/dnn/src/op_inf_engine.cpp

Lines changed: 10 additions & 2 deletions
@@ -233,8 +233,16 @@ InferenceEngine::StatusCode
 InfEngineBackendNet::getLayerByName(const char *layerName, InferenceEngine::CNNLayerPtr &out,
                                     InferenceEngine::ResponseDesc *resp) noexcept
 {
-    CV_Error(Error::StsNotImplemented, "");
-    return InferenceEngine::StatusCode::OK;
+    for (auto& l : layers)
+    {
+        if (l->name == layerName)
+        {
+            out = l;
+            return InferenceEngine::StatusCode::OK;
+        }
+    }
+    CV_Error(Error::StsObjectNotFound, cv::format("Cannot find a layer %s", layerName));
+    return InferenceEngine::StatusCode::NOT_FOUND;
 }
 
 void InfEngineBackendNet::setTargetDevice(InferenceEngine::TargetDevice device) noexcept
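
Note on this hunk: getLayerByName() now performs a linear scan over the network's layer list and signals a miss explicitly (StsObjectNotFound plus StatusCode::NOT_FOUND) instead of always failing with StsNotImplemented. A rough, self-contained analogue of that lookup is sketched below; the Layer struct and bool return value are stand-ins, not the InferenceEngine types or status codes.

// Hedged sketch of a find-layer-by-name scan over shared pointers.
#include <cstdio>
#include <memory>
#include <string>
#include <vector>

struct Layer { std::string name; };   // placeholder for InferenceEngine::CNNLayer

bool getLayerByName(const std::vector<std::shared_ptr<Layer> >& layers,
                    const std::string& layerName, std::shared_ptr<Layer>& out)
{
    for (const auto& l : layers)
    {
        if (l->name == layerName)
        {
            out = l;
            return true;              // corresponds to StatusCode::OK
        }
    }
    return false;                     // corresponds to StatusCode::NOT_FOUND
}

int main()
{
    std::vector<std::shared_ptr<Layer> > layers;
    layers.push_back(std::make_shared<Layer>(Layer{"conv1"}));
    layers.push_back(std::make_shared<Layer>(Layer{"prob"}));

    std::shared_ptr<Layer> found;
    std::printf("%s\n", getLayerByName(layers, "prob", found) ? "found" : "missing");
    return 0;
}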

modules/dnn/test/test_backends.cpp

Lines changed: 27 additions & 32 deletions
@@ -23,9 +23,9 @@ class DNNTestNetwork : public TestWithParam <tuple<DNNBackend, DNNTarget> >
     }
 
     void processNet(const std::string& weights, const std::string& proto,
-                    Size inpSize, const std::string& outputLayer,
+                    Size inpSize, const std::string& outputLayer = "",
                     const std::string& halideScheduler = "",
-                    double l1 = 1e-5, double lInf = 1e-4)
+                    double l1 = 0.0, double lInf = 0.0)
     {
         // Create a common input blob.
         int blobSize[] = {1, 3, inpSize.height, inpSize.width};
@@ -36,9 +36,9 @@ class DNNTestNetwork : public TestWithParam <tuple<DNNBackend, DNNTarget> >
     }
 
     void processNet(std::string weights, std::string proto,
-                    Mat inp, const std::string& outputLayer,
+                    Mat inp, const std::string& outputLayer = "",
                     std::string halideScheduler = "",
-                    double l1 = 1e-5, double lInf = 1e-4)
+                    double l1 = 0.0, double lInf = 0.0)
     {
         if (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL)
         {
@@ -49,6 +49,16 @@ class DNNTestNetwork : public TestWithParam <tuple<DNNBackend, DNNTarget> >
                 throw SkipTestException("OpenCL is not available/disabled in OpenCV");
             }
         }
+        if (target == DNN_TARGET_OPENCL_FP16)
+        {
+            l1 = l1 == 0.0 ? 4e-3 : l1;
+            lInf = lInf == 0.0 ? 2e-2 : lInf;
+        }
+        else
+        {
+            l1 = l1 == 0.0 ? 1e-5 : l1;
+            lInf = lInf == 0.0 ? 1e-4 : lInf;
+        }
         weights = findDataFile(weights, false);
         if (!proto.empty())
             proto = findDataFile(proto, false);
@@ -71,31 +81,28 @@ class DNNTestNetwork : public TestWithParam <tuple<DNNBackend, DNNTarget> >
         Mat out = net.forward(outputLayer).clone();
 
         if (outputLayer == "detection_out")
-            normAssertDetections(outDefault, out, "First run", 0.2);
+            normAssertDetections(outDefault, out, "First run", 0.2, l1, lInf);
         else
             normAssert(outDefault, out, "First run", l1, lInf);
 
         // Test 2: change input.
-        inp *= 0.1f;
+        float* inpData = (float*)inp.data;
+        for (int i = 0; i < inp.size[0] * inp.size[1]; ++i)
+        {
+            Mat slice(inp.size[2], inp.size[3], CV_32F, inpData);
+            cv::flip(slice, slice, 1);
+            inpData += slice.total();
+        }
         netDefault.setInput(inp);
         net.setInput(inp);
         outDefault = netDefault.forward(outputLayer).clone();
         out = net.forward(outputLayer).clone();
 
         if (outputLayer == "detection_out")
-            checkDetections(outDefault, out, "Second run", l1, lInf);
+            normAssertDetections(outDefault, out, "Second run", 0.2, l1, lInf);
         else
             normAssert(outDefault, out, "Second run", l1, lInf);
     }
-
-    void checkDetections(const Mat& out, const Mat& ref, const std::string& msg,
-                         float l1, float lInf, int top = 5)
-    {
-        top = std::min(std::min(top, out.size[2]), out.size[3]);
-        std::vector<cv::Range> range(4, cv::Range::all());
-        range[2] = cv::Range(0, top);
-        normAssert(out(range), ref(range));
-    }
 };
 
 TEST_P(DNNTestNetwork, AlexNet)
@@ -110,8 +117,6 @@ TEST_P(DNNTestNetwork, AlexNet)
 
 TEST_P(DNNTestNetwork, ResNet_50)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
                Size(224, 224), "prob",
                target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_resnet_50.yml" :
@@ -120,8 +125,6 @@ TEST_P(DNNTestNetwork, ResNet_50)
 
 TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
                Size(227, 227), "prob",
                target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_squeezenet_v1_1.yml" :
@@ -130,8 +133,6 @@ TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
 
 TEST_P(DNNTestNetwork, GoogLeNet)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
     processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
                Size(224, 224), "prob");
 }
@@ -180,7 +181,7 @@ TEST_P(DNNTestNetwork, SSD_VGG16)
 {
     if (backend == DNN_BACKEND_DEFAULT && target == DNN_TARGET_OPENCL ||
         backend == DNN_BACKEND_HALIDE && target == DNN_TARGET_CPU ||
-        backend == DNN_BACKEND_INFERENCE_ENGINE)
+        backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU)
         throw SkipTestException("");
     processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel",
                "dnn/ssd_vgg16.prototxt", Size(300, 300), "detection_out");
@@ -189,30 +190,24 @@ TEST_P(DNNTestNetwork, SSD_VGG16)
 TEST_P(DNNTestNetwork, OpenPose_pose_coco)
 {
     if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
-    double l1 = target == DNN_TARGET_OPENCL_FP16 ? 3e-5 : 1e-5;
-    double lInf = target == DNN_TARGET_OPENCL_FP16 ? 3e-3 : 1e-4;
     processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt",
-               Size(368, 368), "", "", l1, lInf);
+               Size(368, 368));
 }
 
 TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
 {
     if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
-    double l1 = target == DNN_TARGET_OPENCL_FP16 ? 4e-5 : 1e-5;
-    double lInf = target == DNN_TARGET_OPENCL_FP16 ? 7e-3 : 1e-4;
     processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt",
-               Size(368, 368), "", "", l1, lInf);
+               Size(368, 368));
 }
 
 TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
 {
     if (backend == DNN_BACKEND_HALIDE) throw SkipTestException("");
-    double l1 = target == DNN_TARGET_OPENCL_FP16 ? 5e-5 : 1e-5;
-    double lInf = target == DNN_TARGET_OPENCL_FP16 ? 5e-3 : 1e-4;
     // The same .caffemodel but modified .prototxt
     // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp
     processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi_faster_4_stages.prototxt",
-               Size(368, 368), "", "", l1, lInf);
+               Size(368, 368));
 }
 
 TEST_P(DNNTestNetwork, OpenFace)
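
Note on this hunk: callers of processNet() now pass l1 = lInf = 0.0 to mean "use the per-target defaults" (4e-3 / 2e-2 for DNN_TARGET_OPENCL_FP16, 1e-5 / 1e-4 otherwise), which is why the per-test OpenPose tolerances could be dropped; the second inference also flips each input channel horizontally instead of scaling the blob by 0.1. The helper below is a hedged sketch of that tolerance-resolution rule only; the enum and function name are illustrative, not part of the OpenCV test code.

// Sketch of the default-tolerance rule, with the values taken from the diff.
#include <cstdio>

enum Target { TARGET_CPU, TARGET_OPENCL, TARGET_OPENCL_FP16 };

void resolveTolerances(Target target, double& l1, double& lInf)
{
    if (target == TARGET_OPENCL_FP16)
    {
        l1   = (l1   == 0.0) ? 4e-3 : l1;   // looser bounds for FP16 arithmetic
        lInf = (lInf == 0.0) ? 2e-2 : lInf;
    }
    else
    {
        l1   = (l1   == 0.0) ? 1e-5 : l1;   // FP32 backends
        lInf = (lInf == 0.0) ? 1e-4 : lInf;
    }
}

int main()
{
    double l1 = 0.0, lInf = 0.0;            // 0.0 means "pick the default"
    resolveTolerances(TARGET_OPENCL_FP16, l1, lInf);
    std::printf("l1=%g lInf=%g\n", l1, lInf);
    return 0;
}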

0 commit comments

Comments
 (0)