/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2008-2013, Itseez Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of Itseez Inc. may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the copyright holders or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"
#include <cstdio>

#include "cascadedetect.hpp"
#include "opencv2/objdetect/objdetect_c.h"
#include "opencl_kernels_objdetect.hpp"

#if defined ANDROID && defined RENDERSCRIPT
#include "rsobjdetect.hpp"
#endif

namespace cv
{

template<typename _Tp> void copyVectorToUMat(const std::vector<_Tp>& v, UMat& um)
{
    if(v.empty())
        um.release();
    else
        Mat(1, (int)(v.size()*sizeof(v[0])), CV_8U, (void*)&v[0]).copyTo(um);
}

void groupRectangles(std::vector<Rect>& rectList, int groupThreshold, double eps,
                     std::vector<int>* weights, std::vector<double>* levelWeights)
{
    if( groupThreshold <= 0 || rectList.empty() )
    {
        if( weights )
        {
            size_t i, sz = rectList.size();
            weights->resize(sz);
            for( i = 0; i < sz; i++ )
                (*weights)[i] = 1;
        }
        return;
    }

    std::vector<int> labels;
    int nclasses = partition(rectList, labels, SimilarRects(eps));

    std::vector<Rect> rrects(nclasses);
    std::vector<int> rweights(nclasses, 0);
    std::vector<int> rejectLevels(nclasses, 0);
    std::vector<double> rejectWeights(nclasses, DBL_MIN);
    int i, j, nlabels = (int)labels.size();
    for( i = 0; i < nlabels; i++ )
    {
        int cls = labels[i];
        rrects[cls].x += rectList[i].x;
        rrects[cls].y += rectList[i].y;
        rrects[cls].width += rectList[i].width;
        rrects[cls].height += rectList[i].height;
        rweights[cls]++;
    }

    bool useDefaultWeights = false;

    if ( levelWeights && weights && !weights->empty() && !levelWeights->empty() )
    {
        for( i = 0; i < nlabels; i++ )
        {
            int cls = labels[i];
            if( (*weights)[i] > rejectLevels[cls] )
            {
                rejectLevels[cls] = (*weights)[i];
                rejectWeights[cls] = (*levelWeights)[i];
            }
            else if( ( (*weights)[i] == rejectLevels[cls] ) && ( (*levelWeights)[i] > rejectWeights[cls] ) )
                rejectWeights[cls] = (*levelWeights)[i];
        }
    }
    else
        useDefaultWeights = true;

    for( i = 0; i < nclasses; i++ )
    {
        Rect r = rrects[i];
        float s = 1.f/rweights[i];
        rrects[i] = Rect(saturate_cast<int>(r.x*s),
                         saturate_cast<int>(r.y*s),
                         saturate_cast<int>(r.width*s),
                         saturate_cast<int>(r.height*s));
    }

    rectList.clear();
    if( weights )
        weights->clear();
    if( levelWeights )
        levelWeights->clear();

    for( i = 0; i < nclasses; i++ )
    {
        Rect r1 = rrects[i];
        int n1 = rweights[i];
        double w1 = rejectWeights[i];
        int l1 = rejectLevels[i];

        // filter out rectangles which don't have enough similar rectangles
        if( n1 <= groupThreshold )
            continue;
        // filter out small face rectangles inside large rectangles
        for( j = 0; j < nclasses; j++ )
        {
            int n2 = rweights[j];

            if( j == i || n2 <= groupThreshold )
                continue;
            Rect r2 = rrects[j];

            int dx = saturate_cast<int>( r2.width * eps );
            int dy = saturate_cast<int>( r2.height * eps );

            if( i != j &&
                r1.x >= r2.x - dx &&
                r1.y >= r2.y - dy &&
                r1.x + r1.width <= r2.x + r2.width + dx &&
                r1.y + r1.height <= r2.y + r2.height + dy &&
                (n2 > std::max(3, n1) || n1 < 3) )
                break;
        }

        if( j == nclasses )
        {
            rectList.push_back(r1);
            if( weights )
                weights->push_back(useDefaultWeights ? n1 : l1);
            if( levelWeights )
                levelWeights->push_back(w1);
        }
    }
}

class MeanshiftGrouping
{
public:
    MeanshiftGrouping(const Point3d& densKer, const std::vector<Point3d>& posV,
                      const std::vector<double>& wV, double eps, int maxIter = 20)
    {
        densityKernel = densKer;
        weightsV = wV;
        positionsV = posV;
        positionsCount = (int)posV.size();
        meanshiftV.resize(positionsCount);
        distanceV.resize(positionsCount);
        iterMax = maxIter;
        modeEps = eps;

        for (unsigned i = 0; i<positionsV.size(); i++)
        {
            meanshiftV[i] = getNewValue(positionsV[i]);
            distanceV[i] = moveToMode(meanshiftV[i]);
            meanshiftV[i] -= positionsV[i];
        }
    }

    void getModes(std::vector<Point3d>& modesV, std::vector<double>& resWeightsV, const double eps)
    {
        for (size_t i=0; i<distanceV.size(); i++)
        {
            bool is_found = false;
            for(size_t j=0; j<modesV.size(); j++)
            {
                if ( getDistance(distanceV[i], modesV[j]) < eps)
                {
                    is_found=true;
                    break;
                }
            }
            if (!is_found)
            {
                modesV.push_back(distanceV[i]);
            }
        }

        resWeightsV.resize(modesV.size());

        for (size_t i=0; i<modesV.size(); i++)
        {
            resWeightsV[i] = getResultWeight(modesV[i]);
        }
    }

protected:
    std::vector<Point3d> positionsV;
    std::vector<double> weightsV;

    Point3d densityKernel;
    int positionsCount;

    std::vector<Point3d> meanshiftV;
    std::vector<Point3d> distanceV;
    int iterMax;
    double modeEps;

    Point3d getNewValue(const Point3d& inPt) const
    {
        Point3d resPoint(.0);
        Point3d ratPoint(.0);
        for (size_t i=0; i<positionsV.size(); i++)
        {
            Point3d aPt = positionsV[i];
            Point3d bPt = inPt;
            Point3d sPt = densityKernel;

            sPt.x *= std::exp(aPt.z);
            sPt.y *= std::exp(aPt.z);

            aPt.x /= sPt.x;
            aPt.y /= sPt.y;
            aPt.z /= sPt.z;

            bPt.x /= sPt.x;
            bPt.y /= sPt.y;
            bPt.z /= sPt.z;

            double w = (weightsV[i])*std::exp(-((aPt-bPt).dot(aPt-bPt))/2)/std::sqrt(sPt.dot(Point3d(1,1,1)));

            resPoint += w*aPt;

            ratPoint.x += w/sPt.x;
            ratPoint.y += w/sPt.y;
            ratPoint.z += w/sPt.z;
        }
        resPoint.x /= ratPoint.x;
        resPoint.y /= ratPoint.y;
        resPoint.z /= ratPoint.z;
        return resPoint;
    }

    double getResultWeight(const Point3d& inPt) const
    {
        double sumW = 0;
        for (size_t i=0; i<positionsV.size(); i++)
        {
            Point3d aPt = positionsV[i];
            Point3d sPt = densityKernel;

            sPt.x *= std::exp(aPt.z);
            sPt.y *= std::exp(aPt.z);

            aPt -= inPt;

            aPt.x /= sPt.x;
            aPt.y /= sPt.y;
            aPt.z /= sPt.z;

            sumW += (weightsV[i])*std::exp(-(aPt.dot(aPt))/2)/std::sqrt(sPt.dot(Point3d(1,1,1)));
        }
        return sumW;
    }

    Point3d moveToMode(Point3d aPt) const
    {
        Point3d bPt;
        for (int i = 0; i<iterMax; i++)
        {
            bPt = aPt;
            aPt = getNewValue(bPt);
            if ( getDistance(aPt, bPt) <= modeEps )
            {
                break;
            }
        }
        return aPt;
    }

    double getDistance(Point3d p1, Point3d p2) const
    {
        Point3d ns = densityKernel;
        ns.x *= std::exp(p2.z);
        ns.y *= std::exp(p2.z);
        p2 -= p1;
        p2.x /= ns.x;
        p2.y /= ns.y;
        p2.z /= ns.z;
        return p2.dot(p2);
    }
};

// new grouping function using meanshift
static void groupRectangles_meanshift(std::vector<Rect>& rectList, double detectThreshold, std::vector<double>* foundWeights,
                                      std::vector<double>& scales, Size winDetSize)
{
    int detectionCount = (int)rectList.size();
    std::vector<Point3d> hits(detectionCount), resultHits;
    std::vector<double> hitWeights(detectionCount), resultWeights;
    Point2d hitCenter;

    for (int i=0; i < detectionCount; i++)
    {
        hitWeights[i] = (*foundWeights)[i];
        hitCenter = (rectList[i].tl() + rectList[i].br())*(0.5); // center of rectangles
        hits[i] = Point3d(hitCenter.x, hitCenter.y, std::log(scales[i]));
    }

    rectList.clear();
    if (foundWeights)
        foundWeights->clear();

    double logZ = std::log(1.3);
    Point3d smoothing(8, 16, logZ);

    MeanshiftGrouping msGrouping(smoothing, hits, hitWeights, 1e-5, 100);

    msGrouping.getModes(resultHits, resultWeights, 1);

    for (unsigned i=0; i < resultHits.size(); ++i)
    {
        double scale = std::exp(resultHits[i].z);
        hitCenter.x = resultHits[i].x;
        hitCenter.y = resultHits[i].y;
        Size s( int(winDetSize.width * scale), int(winDetSize.height * scale) );
        Rect resultRect( int(hitCenter.x-s.width/2), int(hitCenter.y-s.height/2),
                         int(s.width), int(s.height) );

        if (resultWeights[i] > detectThreshold)
        {
            rectList.push_back(resultRect);
            foundWeights->push_back(resultWeights[i]);
        }
    }
}

void groupRectangles(std::vector<Rect>& rectList, int groupThreshold, double eps)
{
    groupRectangles(rectList, groupThreshold, eps, 0, 0);
}

void groupRectangles(std::vector<Rect>& rectList, std::vector<int>& weights, int groupThreshold, double eps)
{
    groupRectangles(rectList, groupThreshold, eps, &weights, 0);
}

// used by the cascade detection algorithm for ROC-curve calculation
void groupRectangles(std::vector<Rect>& rectList, std::vector<int>& rejectLevels,
                     std::vector<double>& levelWeights, int groupThreshold, double eps)
{
    groupRectangles(rectList, groupThreshold, eps, &rejectLevels, &levelWeights);
}

// can be used for the HOG detection algorithm only
void groupRectangles_meanshift(std::vector<Rect>& rectList, std::vector<double>& foundWeights,
                               std::vector<double>& foundScales, double detectThreshold, Size winDetSize)
{
    groupRectangles_meanshift(rectList, detectThreshold, &foundWeights, foundScales, winDetSize);
}
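/*
  Illustrative only (not part of the library): a minimal sketch of how the grouping
  entry points above are typically called on raw per-window detections. The rectangles
  and weights below are made-up sample data; any detector that produces overlapping
  windows can feed this API the same way.

      std::vector<cv::Rect> detections;
      detections.push_back(cv::Rect(100, 100, 50, 50));   // two hits on one object
      detections.push_back(cv::Rect(102,  98, 52, 50));
      detections.push_back(cv::Rect(300, 200, 60, 60));    // an isolated hit

      std::vector<int> counts;
      // Keep only clusters with more than 1 similar rectangle; eps=0.2 is the relative
      // position/size tolerance used by SimilarRects when clustering.
      cv::groupRectangles(detections, counts, 1, 0.2);

      // 'detections' now holds one averaged rectangle per surviving cluster and
      // counts[i] holds how many raw rectangles were merged into detections[i];
      // the isolated hit is dropped because its cluster size (1) <= groupThreshold.
*/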

FeatureEvaluator::~FeatureEvaluator() {}

bool FeatureEvaluator::read(const FileNode&, Size _origWinSize)
{
    origWinSize = _origWinSize;
    localSize = lbufSize = Size(0, 0);
    if (scaleData.empty())
        scaleData = makePtr<std::vector<ScaleData> >();
    else
        scaleData->clear();
    return true;
}

Ptr<FeatureEvaluator> FeatureEvaluator::clone() const { return Ptr<FeatureEvaluator>(); }
int FeatureEvaluator::getFeatureType() const { return -1; }
bool FeatureEvaluator::setWindow(Point, int) { return true; }

void FeatureEvaluator::getUMats(std::vector<UMat>& bufs)
{
    if (!(sbufFlag & USBUF_VALID))
    {
        sbuf.copyTo(usbuf);
        sbufFlag |= USBUF_VALID;
    }

    bufs.clear();
    bufs.push_back(uscaleData);
    bufs.push_back(usbuf);
    bufs.push_back(ufbuf);
}

void FeatureEvaluator::getMats()
{
    if (!(sbufFlag & SBUF_VALID))
    {
        usbuf.copyTo(sbuf);
        sbufFlag |= SBUF_VALID;
    }
}

float FeatureEvaluator::calcOrd(int) const { return 0.; }
int FeatureEvaluator::calcCat(int) const { return 0; }

bool FeatureEvaluator::updateScaleData( Size imgsz, const std::vector<float>& _scales )
{
    if( scaleData.empty() )
        scaleData = makePtr<std::vector<ScaleData> >();

    size_t i, nscales = _scales.size();
    bool recalcOptFeatures = nscales != scaleData->size();
    scaleData->resize(nscales);

    int layer_dy = 0;
    Point layer_ofs(0,0);
    Size prevBufSize = sbufSize;
    sbufSize.width = std::max(sbufSize.width, (int)alignSize(cvRound(imgsz.width/_scales[0]) + 31, 32));
    recalcOptFeatures = recalcOptFeatures || sbufSize.width != prevBufSize.width;

    for( i = 0; i < nscales; i++ )
    {
        FeatureEvaluator::ScaleData& s = scaleData->at(i);
        if( !recalcOptFeatures && fabs(s.scale - _scales[i]) > FLT_EPSILON*100*_scales[i] )
            recalcOptFeatures = true;
        float sc = _scales[i];
        Size sz;
        sz.width = cvRound(imgsz.width/sc);
        sz.height = cvRound(imgsz.height/sc);
        s.ystep = sc >= 2 ? 1 : 2;
        s.scale = sc;
        s.szi = Size(sz.width+1, sz.height+1);

        if( i == 0 )
        {
            layer_dy = s.szi.height;
        }

        if( layer_ofs.x + s.szi.width > sbufSize.width )
        {
            layer_ofs = Point(0, layer_ofs.y + layer_dy);
            layer_dy = s.szi.height;
        }
        s.layer_ofs = layer_ofs.y*sbufSize.width + layer_ofs.x;
        layer_ofs.x += s.szi.width;
    }

    layer_ofs.y += layer_dy;
    sbufSize.height = std::max(sbufSize.height, layer_ofs.y);
    recalcOptFeatures = recalcOptFeatures || sbufSize.height != prevBufSize.height;
    return recalcOptFeatures;
}

#if defined ANDROID && defined RENDERSCRIPT
void haarIntegral(Mat in, int width, int height, int* out, int* outSq) {
    int sum = 0, sumSq = 0, val = 0, idx = 0;
    uchar *src = in.data;
    size_t step = in.step;

    memset(out, 0, width*sizeof(out[0]));
    memset(outSq, 0, width*sizeof(outSq[0]));
    outSq += width;
    out += width;

    for (int y = 1; y < height; y++, out += width, outSq += width, src += step) {
        sum = sumSq = out[0] = outSq[0] = 0;
        for (int x = 1; x < width; x++) {
            val = src[x];
            sum += val;
            sumSq += val * val;
            out[x] = out[x - width] + sum;
            outSq[x] = outSq[x - width] + sumSq;
        }
    }
}
#endif

bool FeatureEvaluator::setImage( InputArray _image, const std::vector<float>& _scales )
{
    Size imgsz = _image.size();
    bool recalcOptFeatures = updateScaleData(imgsz, _scales);

    size_t i, nscales = scaleData->size();
    if (nscales == 0)
    {
        return false;
    }
    Size sz0 = scaleData->at(0).szi;
    sz0 = Size(std::max(rbuf.cols, (int)alignSize(sz0.width, 16)), std::max(rbuf.rows, sz0.height));

    if (recalcOptFeatures)
    {
        computeOptFeatures();
        copyVectorToUMat(*scaleData, uscaleData);
    }

    if (_image.isUMat() && localSize.area() > 0)
    {
        usbuf.create(sbufSize.height*nchannels, sbufSize.width, CV_32S);
        urbuf.create(sz0, CV_8U);

        for (i = 0; i < nscales; i++)
        {
            const ScaleData& s = scaleData->at(i);
            UMat dst(urbuf, Rect(0, 0, s.szi.width - 1, s.szi.height - 1));
            resize(_image, dst, dst.size(), 1. / s.scale, 1. / s.scale, INTER_LINEAR);
            computeChannels((int)i, dst);
        }
        sbufFlag = USBUF_VALID;
    }
    else
    {
        Mat image = _image.getMat();
        sbuf.create(sbufSize.height*nchannels, sbufSize.width, CV_32S);
        rbuf.create(sz0, CV_8U);

#if defined ANDROID && defined RENDERSCRIPT
        integralImages = (int **) malloc(sizeof(int *)*nscales);
        integralImagesSq = (int **) malloc(sizeof(int *)*nscales);
#endif

        for (i = 0; i < nscales; i++)
        {
#if defined ANDROID && defined RENDERSCRIPT
            const ScaleData& s = scaleData->at(i);
            Mat dst(s.szi.height - 1, s.szi.width - 1, CV_8U);
            resize(image, dst, dst.size(), 1. / s.scale, 1. / s.scale, INTER_LINEAR);
            const Size sz = s.getWorkingSize(origWinSize);
            int* intImg = (int *)malloc(sizeof(int)*s.szi.area());
            int* intImgSq = (int *)malloc(sizeof(int)*s.szi.area());
            haarIntegral(dst, sz.width, sz.height, intImg, intImgSq);
            integralImages[i] = intImg;
            integralImagesSq[i] = intImgSq;
#else
            const ScaleData& s = scaleData->at(i);
            Mat dst(s.szi.height - 1, s.szi.width - 1, CV_8U, rbuf.ptr());
            resize(image, dst, dst.size(), 1. / s.scale, 1. / s.scale, INTER_LINEAR);
            computeChannels((int)i, dst);
#endif
        }
        sbufFlag = SBUF_VALID;
    }

    return true;
}

//----------------------------------------------  HaarEvaluator ---------------------------------------

bool HaarEvaluator::Feature :: read( const FileNode& node )
{
    FileNode rnode = node[CC_RECTS];
    FileNodeIterator it = rnode.begin(), it_end = rnode.end();

    int ri;
    for( ri = 0; ri < RECT_NUM; ri++ )
    {
        rect[ri].r = Rect();
        rect[ri].weight = 0.f;
    }

    for(ri = 0; it != it_end; ++it, ri++)
    {
        FileNodeIterator it2 = (*it).begin();
        it2 >> rect[ri].r.x >> rect[ri].r.y >>
            rect[ri].r.width >> rect[ri].r.height >> rect[ri].weight;
    }

    tilted = (int)node[CC_TILTED] != 0;
    return true;
}

HaarEvaluator::HaarEvaluator()
{
    optfeaturesPtr = 0;
    pwin = 0;
    localSize = Size(4, 2);
    lbufSize = Size(0, 0);
    nchannels = 0;
    tofs = 0;
}

HaarEvaluator::~HaarEvaluator()
{
}

bool HaarEvaluator::read(const FileNode& node, Size _origWinSize)
{
    if (!FeatureEvaluator::read(node, _origWinSize))
        return false;
    size_t i, n = node.size();
    CV_Assert(n > 0);
    if(features.empty())
        features = makePtr<std::vector<Feature> >();
    if(optfeatures.empty())
        optfeatures = makePtr<std::vector<OptFeature> >();
    if (optfeatures_lbuf.empty())
        optfeatures_lbuf = makePtr<std::vector<OptFeature> >();
    features->resize(n);
    FileNodeIterator it = node.begin();
    hasTiltedFeatures = false;
    std::vector<Feature>& ff = *features;
    sbufSize = Size();
    ufbuf.release();

    for(i = 0; i < n; i++, ++it)
    {
        if(!ff[i].read(*it))
            return false;
        if( ff[i].tilted )
            hasTiltedFeatures = true;
    }
    nchannels = hasTiltedFeatures ? 3 : 2;
    normrect = Rect(1, 1, origWinSize.width - 2, origWinSize.height - 2);

    localSize = lbufSize = Size(0, 0);
    if (ocl::haveOpenCL())
    {
        if (ocl::Device::getDefault().isAMD() || ocl::Device::getDefault().isIntel())
        {
            localSize = Size(8, 8);
            lbufSize = Size(origWinSize.width + localSize.width,
                            origWinSize.height + localSize.height);
            if (lbufSize.area() > 1024)
                lbufSize = Size(0, 0);
        }
    }

    return true;
}

Ptr<FeatureEvaluator> HaarEvaluator::clone() const
{
    Ptr<HaarEvaluator> ret = makePtr<HaarEvaluator>();
    *ret = *this;
    return ret;
}


void HaarEvaluator::computeChannels(int scaleIdx, InputArray img)
{
    const ScaleData& s = scaleData->at(scaleIdx);
    sqofs = hasTiltedFeatures ? sbufSize.area() * 2 : sbufSize.area();

    if (img.isUMat())
    {
        int sx = s.layer_ofs % sbufSize.width;
        int sy = s.layer_ofs / sbufSize.width;
        int sqy = sy + (sqofs / sbufSize.width);
        UMat sum(usbuf, Rect(sx, sy, s.szi.width, s.szi.height));
        UMat sqsum(usbuf, Rect(sx, sqy, s.szi.width, s.szi.height));
        sqsum.flags = (sqsum.flags & ~UMat::DEPTH_MASK) | CV_32S;

        if (hasTiltedFeatures)
        {
            int sty = sy + (tofs / sbufSize.width);
            UMat tilted(usbuf, Rect(sx, sty, s.szi.width, s.szi.height));
            integral(img, sum, sqsum, tilted, CV_32S, CV_32S);
        }
        else
        {
            UMatData* u = sqsum.u;
            integral(img, sum, sqsum, noArray(), CV_32S, CV_32S);
            CV_Assert(sqsum.u == u && sqsum.size() == s.szi && sqsum.type() == CV_32S);
        }
    }
    else
    {
        Mat sum(s.szi, CV_32S, sbuf.ptr<int>() + s.layer_ofs, sbuf.step);
        Mat sqsum(s.szi, CV_32S, sum.ptr<int>() + sqofs, sbuf.step);

        if (hasTiltedFeatures)
        {
            Mat tilted(s.szi, CV_32S, sum.ptr<int>() + tofs, sbuf.step);
            integral(img, sum, sqsum, tilted, CV_32S, CV_32S);
        }
        else
            integral(img, sum, sqsum, noArray(), CV_32S, CV_32S);
    }
}

void HaarEvaluator::computeOptFeatures()
{
    if (hasTiltedFeatures)
        tofs = sbufSize.area();

    int sstep = sbufSize.width;
    CV_SUM_OFS( nofs[0], nofs[1], nofs[2], nofs[3], 0, normrect, sstep );

    size_t fi, nfeatures = features->size();
    const std::vector<Feature>& ff = *features;
    optfeatures->resize(nfeatures);
    optfeaturesPtr = &(*optfeatures)[0];
    for( fi = 0; fi < nfeatures; fi++ )
        optfeaturesPtr[fi].setOffsets( ff[fi], sstep, tofs );
    optfeatures_lbuf->resize(nfeatures);

    for( fi = 0; fi < nfeatures; fi++ )
        optfeatures_lbuf->at(fi).setOffsets(ff[fi], lbufSize.width > 0 ? lbufSize.width : sstep, tofs);

    copyVectorToUMat(*optfeatures_lbuf, ufbuf);
}

bool HaarEvaluator::setWindow( Point pt, int scaleIdx )
{
    const ScaleData& s = getScaleData(scaleIdx);

    if( pt.x < 0 || pt.y < 0 ||
        pt.x + origWinSize.width >= s.szi.width ||
        pt.y + origWinSize.height >= s.szi.height )
        return false;

    pwin = &sbuf.at<int>(pt) + s.layer_ofs;
    const int* pq = (const int*)(pwin + sqofs);
    int valsum = CALC_SUM_OFS(nofs, pwin);
    unsigned valsqsum = (unsigned)(CALC_SUM_OFS(nofs, pq));

    double area = normrect.area();
    double nf = area * valsqsum - (double)valsum * valsum;
    if( nf > 0. )
    {
        nf = std::sqrt(nf);
        varianceNormFactor = (float)(1./nf);
        return area*varianceNormFactor < 1e-1;
    }
    else
    {
        varianceNormFactor = 1.f;
        return false;
    }
}


void HaarEvaluator::OptFeature::setOffsets( const Feature& _f, int step, int _tofs )
{
    weight[0] = _f.rect[0].weight;
    weight[1] = _f.rect[1].weight;
    weight[2] = _f.rect[2].weight;

    if( _f.tilted )
    {
        CV_TILTED_OFS( ofs[0][0], ofs[0][1], ofs[0][2], ofs[0][3], _tofs, _f.rect[0].r, step );
        CV_TILTED_OFS( ofs[1][0], ofs[1][1], ofs[1][2], ofs[1][3], _tofs, _f.rect[1].r, step );
        CV_TILTED_OFS( ofs[2][0], ofs[2][1], ofs[2][2], ofs[2][3], _tofs, _f.rect[2].r, step );
    }
    else
    {
        CV_SUM_OFS( ofs[0][0], ofs[0][1], ofs[0][2], ofs[0][3], 0, _f.rect[0].r, step );
        CV_SUM_OFS( ofs[1][0], ofs[1][1], ofs[1][2], ofs[1][3], 0, _f.rect[1].r, step );
        CV_SUM_OFS( ofs[2][0], ofs[2][1], ofs[2][2], ofs[2][3], 0, _f.rect[2].r, step );
    }
}

Rect HaarEvaluator::getNormRect() const
{
    return normrect;
}

int HaarEvaluator::getSquaresOffset() const
{
    return sqofs;
}

//----------------------------------------------  LBPEvaluator -------------------------------------

bool LBPEvaluator::Feature :: read(const FileNode& node )
{
    FileNode rnode = node[CC_RECT];
    FileNodeIterator it = rnode.begin();
    it >> rect.x >> rect.y >> rect.width >> rect.height;
    return true;
}

LBPEvaluator::LBPEvaluator()
{
    features = makePtr<std::vector<Feature> >();
    optfeatures = makePtr<std::vector<OptFeature> >();
    scaleData = makePtr<std::vector<ScaleData> >();
}

LBPEvaluator::~LBPEvaluator()
{
}

bool LBPEvaluator::read( const FileNode& node, Size _origWinSize )
{
#if defined RENDERSCRIPT
    CV_Error(Error::StsNotImplemented, "Renderscript cannot be used with LBP in 3.0");
#endif

    if (!FeatureEvaluator::read(node, _origWinSize))
        return false;
    if(features.empty())
        features = makePtr<std::vector<Feature> >();
    if(optfeatures.empty())
        optfeatures = makePtr<std::vector<OptFeature> >();
    if (optfeatures_lbuf.empty())
        optfeatures_lbuf = makePtr<std::vector<OptFeature> >();

    features->resize(node.size());
    optfeaturesPtr = 0;
    FileNodeIterator it = node.begin(), it_end = node.end();
    std::vector<Feature>& ff = *features;
    for(int i = 0; it != it_end; ++it, i++)
    {
        if(!ff[i].read(*it))
            return false;
    }
    nchannels = 1;
    localSize = lbufSize = Size(0, 0);
    if (ocl::haveOpenCL())
        localSize = Size(8, 8);

    return true;
}

Ptr<FeatureEvaluator> LBPEvaluator::clone() const
{
    Ptr<LBPEvaluator> ret = makePtr<LBPEvaluator>();
    *ret = *this;
    return ret;
}

void LBPEvaluator::computeChannels(int scaleIdx, InputArray _img)
{
    const ScaleData& s = scaleData->at(scaleIdx);

    if (_img.isUMat())
    {
        int sx = s.layer_ofs % sbufSize.width;
        int sy = s.layer_ofs / sbufSize.width;
        UMat sum(usbuf, Rect(sx, sy, s.szi.width, s.szi.height));
        integral(_img, sum, noArray(), noArray(), CV_32S);
    }
    else
    {
        Mat sum(s.szi, CV_32S, sbuf.ptr<int>() + s.layer_ofs, sbuf.step);
        integral(_img, sum, noArray(), noArray(), CV_32S);
    }
}

void LBPEvaluator::computeOptFeatures()
{
    int sstep = sbufSize.width;

    size_t fi, nfeatures = features->size();
    const std::vector<Feature>& ff = *features;
    optfeatures->resize(nfeatures);
    optfeaturesPtr = &(*optfeatures)[0];
    for( fi = 0; fi < nfeatures; fi++ )
        optfeaturesPtr[fi].setOffsets( ff[fi], sstep );
    copyVectorToUMat(*optfeatures, ufbuf);
}


void LBPEvaluator::OptFeature::setOffsets( const Feature& _f, int step )
{
    Rect tr = _f.rect;
    int w0 = tr.width;
    int h0 = tr.height;

    CV_SUM_OFS( ofs[0], ofs[1], ofs[4], ofs[5], 0, tr, step );
    tr.x += 2*w0;
    CV_SUM_OFS( ofs[2], ofs[3], ofs[6], ofs[7], 0, tr, step );
    tr.y += 2*h0;
    CV_SUM_OFS( ofs[10], ofs[11], ofs[14], ofs[15], 0, tr, step );
    tr.x -= 2*w0;
    CV_SUM_OFS( ofs[8], ofs[9], ofs[12], ofs[13], 0, tr, step );
}


bool LBPEvaluator::setWindow( Point pt, int scaleIdx )
{
    CV_Assert(0 <= scaleIdx && scaleIdx < (int)scaleData->size());
    const ScaleData& s = scaleData->at(scaleIdx);

    if( pt.x < 0 || pt.y < 0 ||
        pt.x + origWinSize.width >= s.szi.width ||
        pt.y + origWinSize.height >= s.szi.height )
        return false;

    pwin = &sbuf.at<int>(pt) + s.layer_ofs;
    return true;
}


Ptr<FeatureEvaluator> FeatureEvaluator::create( int featureType )
{
    return featureType == HAAR ? Ptr<FeatureEvaluator>(new HaarEvaluator) :
        featureType == LBP ? Ptr<FeatureEvaluator>(new LBPEvaluator) :
        Ptr<FeatureEvaluator>();
}

//----------------------------------------  Classifier Cascade --------------------------------------------

CascadeClassifierImpl::CascadeClassifierImpl()
{
}

CascadeClassifierImpl::~CascadeClassifierImpl()
{
}

bool CascadeClassifierImpl::empty() const
{
    return !oldCascade && data.stages.empty();
}

bool CascadeClassifierImpl::load(const String& filename)
{
    oldCascade.release();
    data = Data();
    featureEvaluator.release();

    FileStorage fs(filename, FileStorage::READ);
    if( !fs.isOpened() )
        return false;

    if( read_(fs.getFirstTopLevelNode()) )
        return true;

    fs.release();

    oldCascade.reset((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
    return !oldCascade.empty();
}

void CascadeClassifierImpl::read(const FileNode& node)
{
    read_(node);
}

int CascadeClassifierImpl::runAt( Ptr<FeatureEvaluator>& evaluator, Point pt, int scaleIdx, double& weight )
{
    assert( !oldCascade &&
           (data.featureType == FeatureEvaluator::HAAR ||
            data.featureType == FeatureEvaluator::LBP ||
            data.featureType == FeatureEvaluator::HOG) );

    if( !evaluator->setWindow(pt, scaleIdx) )
        return -1;
    if( data.maxNodesPerTree == 1 )
    {
        if( data.featureType == FeatureEvaluator::HAAR )
            return predictOrderedStump<HaarEvaluator>( *this, evaluator, weight );
        else if( data.featureType == FeatureEvaluator::LBP )
            return predictCategoricalStump<LBPEvaluator>( *this, evaluator, weight );
        else
            return -2;
    }
    else
    {
        if( data.featureType == FeatureEvaluator::HAAR )
            return predictOrdered<HaarEvaluator>( *this, evaluator, weight );
        else if( data.featureType == FeatureEvaluator::LBP )
            return predictCategorical<LBPEvaluator>( *this, evaluator, weight );
        else
            return -2;
    }
}

void CascadeClassifierImpl::setMaskGenerator(const Ptr<MaskGenerator>& _maskGenerator)
{
    maskGenerator = _maskGenerator;
}

Ptr<CascadeClassifierImpl::MaskGenerator> CascadeClassifierImpl::getMaskGenerator()
{
    return maskGenerator;
}
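/*
  Illustrative only (not part of the library): a minimal sketch of a user-defined
  mask generator. When one is set, detectMultiScaleNoGrouping() below calls
  generateMask() on the grayscale input image before the sliding-window pass;
  the trivial all-ones mask here is just a placeholder for real logic.

      class WholeImageMaskGenerator : public cv::BaseCascadeClassifier::MaskGenerator
      {
      public:
          cv::Mat generateMask(const cv::Mat& src)
          {
              return cv::Mat::ones(src.size(), CV_8U);
          }
          void initializeMask(const cv::Mat&) {}
      };

      // cascade.setMaskGenerator(cv::makePtr<WholeImageMaskGenerator>());
*/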
Ptr<BaseCascadeClassifier::MaskGenerator> createFaceDetectionMaskGenerator()
{
#ifdef HAVE_TEGRA_OPTIMIZATION
    if (tegra::useTegra())
        return tegra::getCascadeClassifierMaskGenerator();
#endif
    return Ptr<BaseCascadeClassifier::MaskGenerator>();
}

class CascadeClassifierInvoker : public ParallelLoopBody
{
public:
    CascadeClassifierInvoker( CascadeClassifierImpl& _cc, int _nscales, int _nstripes,
                              const FeatureEvaluator::ScaleData* _scaleData,
                              const int* _stripeSizes, std::vector<Rect>& _vec,
                              std::vector<int>& _levels, std::vector<double>& _weights,
                              bool outputLevels, const Mat& _mask, Mutex* _mtx)
    {
        classifier = &_cc;
        nscales = _nscales;
        nstripes = _nstripes;
        scaleData = _scaleData;
        stripeSizes = _stripeSizes;
        rectangles = &_vec;
        rejectLevels = outputLevels ? &_levels : 0;
        levelWeights = outputLevels ? &_weights : 0;
        mask = _mask;
        mtx = _mtx;
    }

    void operator()(const Range& range) const
    {
        Ptr<FeatureEvaluator> evaluator = classifier->featureEvaluator->clone();
        double gypWeight = 0.;
        Size origWinSize = classifier->data.origWinSize;

        for( int scaleIdx = 0; scaleIdx < nscales; scaleIdx++ )
        {
            const FeatureEvaluator::ScaleData& s = scaleData[scaleIdx];
            float scalingFactor = s.scale;
            int yStep = s.ystep;
            int stripeSize = stripeSizes[scaleIdx];
            int y0 = range.start*stripeSize;
            Size szw = s.getWorkingSize(origWinSize);
            int y1 = std::min(range.end*stripeSize, szw.height);
            Size winSize(cvRound(origWinSize.width * scalingFactor),
                         cvRound(origWinSize.height * scalingFactor));

            for( int y = y0; y < y1; y += yStep )
            {
                for( int x = 0; x < szw.width; x += yStep )
                {
                    int result = classifier->runAt(evaluator, Point(x, y), scaleIdx, gypWeight);
                    if( rejectLevels )
                    {
                        if( result == 1 )
                            result = -(int)classifier->data.stages.size();
                        if( classifier->data.stages.size() + result == 0 )
                        {
                            mtx->lock();
                            rectangles->push_back(Rect(cvRound(x*scalingFactor),
                                                       cvRound(y*scalingFactor),
                                                       winSize.width, winSize.height));
                            rejectLevels->push_back(-result);
                            levelWeights->push_back(gypWeight);
                            mtx->unlock();
                        }
                    }
                    else if( result > 0 )
                    {
                        mtx->lock();
                        rectangles->push_back(Rect(cvRound(x*scalingFactor),
                                                   cvRound(y*scalingFactor),
                                                   winSize.width, winSize.height));
                        mtx->unlock();
                    }
                    if( result == 0 )
                        x += yStep;
                }
            }
        }
    }

    CascadeClassifierImpl* classifier;
    std::vector<Rect>* rectangles;
    int nscales, nstripes;
    const FeatureEvaluator::ScaleData* scaleData;
    const int* stripeSizes;
    std::vector<int> *rejectLevels;
    std::vector<double> *levelWeights;
    std::vector<float> scales;
    Mat mask;
    Mutex* mtx;
};


struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } };
struct getNeighbors { int operator ()(const CvAvgComp& e) const { return e.neighbors; } };


bool CascadeClassifierImpl::ocl_detectMultiScaleNoGrouping( const std::vector<float>& scales,
                                                            std::vector<Rect>& candidates )
{
    int featureType = getFeatureType();
    std::vector<UMat> bufs;
    featureEvaluator->getUMats(bufs);
    Size localsz = featureEvaluator->getLocalSize();
    if( localsz.area() == 0 )
        return false;
    Size lbufSize = featureEvaluator->getLocalBufSize();
    size_t localsize[] = { localsz.width, localsz.height };
    const int grp_per_CU = 12;
    size_t globalsize[] = { grp_per_CU*ocl::Device::getDefault().maxComputeUnits()*localsize[0], localsize[1] };
    bool ok = false;

    ufacepos.create(1, MAX_FACES*3+1, CV_32S);
    UMat ufacepos_count(ufacepos, Rect(0, 0, 1, 1));
    ufacepos_count.setTo(Scalar::all(0));

    if( ustages.empty() )
    {
        copyVectorToUMat(data.stages, ustages);
        if (!data.stumps.empty())
            copyVectorToUMat(data.stumps, unodes);
        else
            copyVectorToUMat(data.nodes, unodes);
        copyVectorToUMat(data.leaves, uleaves);
        if( !data.subsets.empty() )
            copyVectorToUMat(data.subsets, usubsets);
    }

    int nstages = (int)data.stages.size();
    int splitstage_ocl = 1;

    if( featureType == FeatureEvaluator::HAAR )
    {
        Ptr<HaarEvaluator> haar = featureEvaluator.dynamicCast<HaarEvaluator>();
        if( haar.empty() )
            return false;

        if( haarKernel.empty() )
        {
            String opts;
            if (lbufSize.area())
                opts = format("-D LOCAL_SIZE_X=%d -D LOCAL_SIZE_Y=%d -D SUM_BUF_SIZE=%d -D SUM_BUF_STEP=%d -D NODE_COUNT=%d -D SPLIT_STAGE=%d -D N_STAGES=%d -D MAX_FACES=%d -D HAAR",
                              localsz.width, localsz.height, lbufSize.area(), lbufSize.width, data.maxNodesPerTree, splitstage_ocl, nstages, MAX_FACES);
            else
                opts = format("-D LOCAL_SIZE_X=%d -D LOCAL_SIZE_Y=%d -D NODE_COUNT=%d -D SPLIT_STAGE=%d -D N_STAGES=%d -D MAX_FACES=%d -D HAAR",
                              localsz.width, localsz.height, data.maxNodesPerTree, splitstage_ocl, nstages, MAX_FACES);
            haarKernel.create("runHaarClassifier", ocl::objdetect::cascadedetect_oclsrc, opts);
            if( haarKernel.empty() )
                return false;
        }

        Rect normrect = haar->getNormRect();
        int sqofs = haar->getSquaresOffset();

        haarKernel.args((int)scales.size(),
                        ocl::KernelArg::PtrReadOnly(bufs[0]), // scaleData
                        ocl::KernelArg::ReadOnlyNoSize(bufs[1]), // sum
                        ocl::KernelArg::PtrReadOnly(bufs[2]), // optfeatures

                        // cascade classifier
                        ocl::KernelArg::PtrReadOnly(ustages),
                        ocl::KernelArg::PtrReadOnly(unodes),
                        ocl::KernelArg::PtrReadOnly(uleaves),

                        ocl::KernelArg::PtrWriteOnly(ufacepos), // positions
                        normrect, sqofs, data.origWinSize);
        ok = haarKernel.run(2, globalsize, localsize, true);
    }
    else if( featureType == FeatureEvaluator::LBP )
    {
        if (data.maxNodesPerTree > 1)
            return false;

        Ptr<LBPEvaluator> lbp = featureEvaluator.dynamicCast<LBPEvaluator>();
        if( lbp.empty() )
            return false;

        if( lbpKernel.empty() )
        {
            String opts;
            if (lbufSize.area())
                opts = format("-D LOCAL_SIZE_X=%d -D LOCAL_SIZE_Y=%d -D SUM_BUF_SIZE=%d -D SUM_BUF_STEP=%d -D SPLIT_STAGE=%d -D N_STAGES=%d -D MAX_FACES=%d -D LBP",
                              localsz.width, localsz.height, lbufSize.area(), lbufSize.width, splitstage_ocl, nstages, MAX_FACES);
            else
                opts = format("-D LOCAL_SIZE_X=%d -D LOCAL_SIZE_Y=%d -D SPLIT_STAGE=%d -D N_STAGES=%d -D MAX_FACES=%d -D LBP",
                              localsz.width, localsz.height, splitstage_ocl, nstages, MAX_FACES);
            lbpKernel.create("runLBPClassifierStumpSimple", ocl::objdetect::cascadedetect_oclsrc, opts);
            if( lbpKernel.empty() )
                return false;
        }

        int subsetSize = (data.ncategories + 31)/32;
        lbpKernel.args((int)scales.size(),
                       ocl::KernelArg::PtrReadOnly(bufs[0]), // scaleData
                       ocl::KernelArg::ReadOnlyNoSize(bufs[1]), // sum
                       ocl::KernelArg::PtrReadOnly(bufs[2]), // optfeatures

                       // cascade classifier
                       ocl::KernelArg::PtrReadOnly(ustages),
                       ocl::KernelArg::PtrReadOnly(unodes),
                       ocl::KernelArg::PtrReadOnly(usubsets),
                       subsetSize,

                       ocl::KernelArg::PtrWriteOnly(ufacepos), // positions
                       data.origWinSize);

        ok = lbpKernel.run(2, globalsize, localsize, true);
    }

    if( ok )
    {
        Mat facepos = ufacepos.getMat(ACCESS_READ);
        const int* fptr = facepos.ptr<int>();
        int nfaces = fptr[0];
        nfaces = std::min(nfaces, (int)MAX_FACES);

        for( int i = 0; i < nfaces; i++ )
        {
            const FeatureEvaluator::ScaleData& s = featureEvaluator->getScaleData(fptr[i*3 + 1]);
            candidates.push_back(Rect(cvRound(fptr[i*3 + 2]*s.scale),
                                      cvRound(fptr[i*3 + 3]*s.scale),
                                      cvRound(data.origWinSize.width*s.scale),
                                      cvRound(data.origWinSize.height*s.scale)));
        }
    }
    return ok;
}

bool CascadeClassifierImpl::isOldFormatCascade() const
{
    return !oldCascade.empty();
}

int CascadeClassifierImpl::getFeatureType() const
{
    return featureEvaluator->getFeatureType();
}

Size CascadeClassifierImpl::getOriginalWindowSize() const
{
    return data.origWinSize;
}

void* CascadeClassifierImpl::getOldCascade()
{
    return oldCascade;
}

static void detectMultiScaleOldFormat( const Mat& image, Ptr<CvHaarClassifierCascade> oldCascade,
                                       std::vector<Rect>& objects,
                                       std::vector<int>& rejectLevels,
                                       std::vector<double>& levelWeights,
                                       std::vector<CvAvgComp>& vecAvgComp,
                                       double scaleFactor, int minNeighbors,
                                       int flags, Size minObjectSize, Size maxObjectSize,
                                       bool outputRejectLevels = false )
{
    MemStorage storage(cvCreateMemStorage(0));
    CvMat _image = image;
    CvSeq* _objects = cvHaarDetectObjectsForROC( &_image, oldCascade, storage, rejectLevels, levelWeights, scaleFactor,
                                                 minNeighbors, flags, minObjectSize, maxObjectSize, outputRejectLevels );
    Seq<CvAvgComp>(_objects).copyTo(vecAvgComp);
    objects.resize(vecAvgComp.size());
    std::transform(vecAvgComp.begin(), vecAvgComp.end(), objects.begin(), getRect());
}


void CascadeClassifierImpl::detectMultiScaleNoGrouping( InputArray _image, std::vector<Rect>& candidates,
                                                        std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
                                                        double scaleFactor, Size minObjectSize, Size maxObjectSize,
                                                        bool outputRejectLevels )
{
    Size imgsz = _image.size();

    Mat grayImage;
    _InputArray gray;

    candidates.clear();
    rejectLevels.clear();
    levelWeights.clear();

    if( maxObjectSize.height == 0 || maxObjectSize.width == 0 )
        maxObjectSize = imgsz;

#ifdef HAVE_OPENCL
    bool use_ocl = tryOpenCL && ocl::useOpenCL() &&
        featureEvaluator->getLocalSize().area() > 0 &&
        ocl::Device::getDefault().type() != ocl::Device::TYPE_CPU &&
        (data.minNodesPerTree == data.maxNodesPerTree) &&
        !isOldFormatCascade() &&
        maskGenerator.empty() &&
        !outputRejectLevels;
#endif

    /*if( use_ocl )
    {
        if (_image.channels() > 1)
            cvtColor(_image, ugrayImage, COLOR_BGR2GRAY);
        else if (_image.isUMat())
            ugrayImage = _image.getUMat();
        else
            _image.copyTo(ugrayImage);
        gray = ugrayImage;
    }
    else*/
    {
        if (_image.channels() > 1)
            cvtColor(_image, grayImage, COLOR_BGR2GRAY);
        else if (_image.isMat())
            grayImage = _image.getMat();
        else
            _image.copyTo(grayImage);
        gray = grayImage;
    }

    std::vector<float> scales;
    scales.reserve(1024);

    for( double factor = 1; ; factor *= scaleFactor )
    {
        Size originalWindowSize = getOriginalWindowSize();

        Size windowSize( cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) );
        if( windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height ||
            windowSize.width > imgsz.width || windowSize.height > imgsz.height )
            break;
        if( windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height )
            continue;
        scales.push_back((float)factor);
    }

    if( scales.size() == 0 || !featureEvaluator->setImage(gray, scales) )
        return;

    // OpenCL code
    CV_OCL_RUN(use_ocl, ocl_detectMultiScaleNoGrouping( scales, candidates ))

    tryOpenCL = false;

    featureEvaluator->getMats();
    {
        Mat currentMask;
        if (maskGenerator)
            currentMask = maskGenerator->generateMask(gray.getMat());

        size_t i, nscales = scales.size();
        cv::AutoBuffer<int> stripeSizeBuf(nscales);
        int* stripeSizes = stripeSizeBuf;
        const FeatureEvaluator::ScaleData* s = &featureEvaluator->getScaleData(0);
        Size szw = s->getWorkingSize(data.origWinSize);
        int nstripes = cvCeil(szw.width/32.);
        for( i = 0; i < nscales; i++ )
        {
            szw = s[i].getWorkingSize(data.origWinSize);
            stripeSizes[i] = std::max((szw.height/s[i].ystep + nstripes-1)/nstripes, 1)*s[i].ystep;
        }
#if defined ANDROID && defined RENDERSCRIPT
        rs_parallel_detect(candidates, nscales);
#else
        CascadeClassifierInvoker invoker(*this, (int)nscales, nstripes, s, stripeSizes,
                                         candidates, rejectLevels, levelWeights,
                                         outputRejectLevels, currentMask, &mtx);
        parallel_for_(Range(0, nstripes), invoker);
#endif
    }
}

#if defined ANDROID && defined RENDERSCRIPT
void CascadeClassifierImpl::rs_parallel_detect(std::vector<Rect>& candidates, int nscales) {
    HaarEvaluator& heval = (HaarEvaluator&)*featureEvaluator;
    const FeatureEvaluator::ScaleData* s = &featureEvaluator->getScaleData(0);
    Size origWinSize = data.origWinSize;
    const int origWidth = origWinSize.width;
    const int origHeight = origWinSize.height;
    const int stepSize = heval.sbuf.step.p[0];

    if (!loadedHaarVars) setHaarVars();

    unsigned char* inData = heval.sbuf.data;
    const char* fin;
    for( int scaleIdx = 0; scaleIdx < nscales; scaleIdx++ )
    {
        const FeatureEvaluator::ScaleData& sd = s[scaleIdx];
        const float scalingFactor = sd.scale;
        const Size sz = sd.getWorkingSize(origWinSize);
        const int winWidth = origWinSize.width * scalingFactor;
        const int winHeight = origWinSize.height * scalingFactor;
        const int layerOfs = sd.layer_ofs;
        const int area = sz.width*sz.height;

        bool *outData = (bool *)malloc(sizeof(bool)*area);
        int* arr = heval.integralImages[scaleIdx];
        int* arrSq = heval.integralImagesSq[scaleIdx];

        innerloops(sz.height, sz.width, arr, arrSq, sd.ystep, outData);

        for (int y = 0; y < sz.height; y += sd.ystep) {
            for (int x = 0; x < sz.width; x += sd.ystep) {
                if (*(outData + x + y*sz.width)) {
                    candidates.push_back(Rect(cvRound(x*scalingFactor),
                                              cvRound(y*scalingFactor),
                                              winWidth, winHeight));
                }
            }
        }

        free(outData);
    }
    cleanUpInnerLoops();
    for (int i = 0; i < nscales; i++)
    {
        free(heval.integralImages[i]);
        free(heval.integralImagesSq[i]);
    }
    free(heval.integralImages);
    free(heval.integralImagesSq);
}

void CascadeClassifierImpl::setHaarVars() {
    HaarVars hf;
    Size origWinSize = data.origWinSize;
    const int origWidth = origWinSize.width;
    const int origHeight = origWinSize.height;
    HaarEvaluator& heval = (HaarEvaluator&)*featureEvaluator;
    int _nofs[4] = {(heval.nofs)[0], (heval.nofs)[1], (heval.nofs)[2], (heval.nofs)[3]};
    memcpy(haarVars.nofs, _nofs, sizeof(_nofs));
    hf.sqofs = heval.sqofs;
    hf.normRectArea = heval.normrect.area();

    int nOptFeatures = (*heval.optfeatures).size();
    HaarFeature *haf = (HaarFeature *)malloc(sizeof(HaarFeature)*nOptFeatures);
    const std::vector<HaarEvaluator::Feature>& ff = *heval.features;
    for (int i = 0; i < nOptFeatures; i++ ){
        HaarFeature f;
        for (int j = 0; j < 3; j++) {
            f.x[j] = ff[i].rect[j].r.x;
            f.y[j] = ff[i].rect[j].r.y;
            f.width[j] = ff[i].rect[j].r.width;
            f.height[j] = ff[i].rect[j].r.height;
        }
        f.weight0 = ff[i].rect[0].weight;
        f.weight1 = ff[i].rect[1].weight;
        f.weight2 = ff[i].rect[2].weight;
        haf[i] = f;
    }
    hf.haarFeatures = &haf[0];

    int nstages = (int) data.stages.size();
    HaarStage *stageArr = (HaarStage *)malloc(sizeof(HaarStage)*nstages);
    for (int j = 0; j < nstages; j++ ){
        HaarStage st;
        st.first = data.stages[j].first;
        st.ntrees = data.stages[j].ntrees;
        st.threshold = data.stages[j].threshold;
        stageArr[j] = st;
    }
    hf.stages = &stageArr[0];
    hf.stagesSize = nstages;
    hf.nStumps = (int) data.stumps.size();

    int nstumps = data.stumps.size();
    HaarStump *stumpArr = (HaarStump *)malloc(sizeof(HaarStump)*nstumps);
    for (int j = 0; j < nstumps; j++){
        HaarStump st;
        st.featureIdx = data.stumps[j].featureIdx;
        st.threshold = data.stumps[j].threshold;
        st.left = data.stumps[j].left;
        st.right = data.stumps[j].right;
        stumpArr[j] = st;
    }
    hf.stumps = &stumpArr[0];
    hf.nFeatures = (int) heval.optfeatures->size();

    HaarRect nr;
    nr.x = heval.normrect.x;
    nr.y = heval.normrect.y;
    nr.width = heval.normrect.width;
    nr.height = heval.normrect.height;
    hf.nrect = nr;

    haarVars = hf;

    loadedHaarVars = true;
    initInnerLoop(haarVars, origWidth, origHeight);
}
#endif


void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
                                              std::vector<int>& rejectLevels,
                                              std::vector<double>& levelWeights,
                                              double scaleFactor, int minNeighbors,
                                              int flags, Size minObjectSize, Size maxObjectSize,
                                              bool outputRejectLevels )
{
    CV_Assert( scaleFactor > 1 && _image.depth() == CV_8U );

    if( empty() )
        return;

    if( isOldFormatCascade() )
    {
        Mat image = _image.getMat();
        std::vector<CvAvgComp> fakeVecAvgComp;
        detectMultiScaleOldFormat( image, oldCascade, objects, rejectLevels, levelWeights, fakeVecAvgComp, scaleFactor,
                                   minNeighbors, flags, minObjectSize, maxObjectSize, outputRejectLevels );
    }
    else
    {
        detectMultiScaleNoGrouping( _image, objects, rejectLevels, levelWeights, scaleFactor, minObjectSize, maxObjectSize,
                                    outputRejectLevels );
        const double GROUP_EPS = 0.2;
        if( outputRejectLevels )
        {
            groupRectangles( objects, rejectLevels, levelWeights, minNeighbors, GROUP_EPS );
        }
        else
        {
            groupRectangles( objects, minNeighbors, GROUP_EPS );
        }
    }
}

void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
                                              double scaleFactor, int minNeighbors,
                                              int flags, Size minObjectSize, Size maxObjectSize)
{
    std::vector<int> fakeLevels;
    std::vector<double> fakeWeights;
    detectMultiScale( _image, objects, fakeLevels, fakeWeights, scaleFactor,
                      minNeighbors, flags, minObjectSize, maxObjectSize );
}

void CascadeClassifierImpl::detectMultiScale( InputArray _image, std::vector<Rect>& objects,
                                              std::vector<int>& numDetections, double scaleFactor,
                                              int minNeighbors, int flags, Size minObjectSize,
                                              Size maxObjectSize )
{
    Mat image = _image.getMat();
    CV_Assert( scaleFactor > 1 && image.depth() == CV_8U );

    if( empty() )
        return;

    std::vector<int> fakeLevels;
    std::vector<double> fakeWeights;
    if( isOldFormatCascade() )
    {
        std::vector<CvAvgComp> vecAvgComp;
        detectMultiScaleOldFormat( image, oldCascade, objects, fakeLevels, fakeWeights, vecAvgComp, scaleFactor,
                                   minNeighbors, flags, minObjectSize, maxObjectSize );
        numDetections.resize(vecAvgComp.size());
        std::transform(vecAvgComp.begin(), vecAvgComp.end(), numDetections.begin(), getNeighbors());
    }
    else
    {
        detectMultiScaleNoGrouping( image, objects, fakeLevels, fakeWeights, scaleFactor, minObjectSize, maxObjectSize );
        const double GROUP_EPS = 0.2;
        groupRectangles( objects, numDetections, minNeighbors, GROUP_EPS );
    }
}


CascadeClassifierImpl::Data::Data()
{
    stageType = featureType = ncategories = maxNodesPerTree = 0;
}

bool CascadeClassifierImpl::Data::read(const FileNode &root)
{
    static const float THRESHOLD_EPS = 1e-5f;

    // load stage params
    String stageTypeStr = (String)root[CC_STAGE_TYPE];
    if( stageTypeStr == CC_BOOST )
        stageType = BOOST;
    else
        return false;

    String featureTypeStr = (String)root[CC_FEATURE_TYPE];
    if( featureTypeStr == CC_HAAR )
        featureType = FeatureEvaluator::HAAR;
    else if( featureTypeStr == CC_LBP )
        featureType = FeatureEvaluator::LBP;
    else if( featureTypeStr == CC_HOG )
    {
        featureType = FeatureEvaluator::HOG;
        CV_Error(Error::StsNotImplemented, "HOG cascade is not supported in 3.0");
    }
    else
        return false;

    origWinSize.width = (int)root[CC_WIDTH];
    origWinSize.height = (int)root[CC_HEIGHT];
    CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );

    // load feature params
    FileNode fn = root[CC_FEATURE_PARAMS];
    if( fn.empty() )
        return false;

    ncategories = fn[CC_MAX_CAT_COUNT];
    int subsetSize = (ncategories + 31)/32,
        nodeStep = 3 + ( ncategories > 0 ? subsetSize : 1 );

    // load stages
    fn = root[CC_STAGES];
    if( fn.empty() )
        return false;

    stages.reserve(fn.size());
    classifiers.clear();
    nodes.clear();
    stumps.clear();

    FileNodeIterator it = fn.begin(), it_end = fn.end();
    minNodesPerTree = INT_MAX;
    maxNodesPerTree = 0;

    for( int si = 0; it != it_end; si++, ++it )
    {
        FileNode fns = *it;
        Stage stage;
        stage.threshold = (float)fns[CC_STAGE_THRESHOLD] - THRESHOLD_EPS;
        fns = fns[CC_WEAK_CLASSIFIERS];
        if(fns.empty())
            return false;
        stage.ntrees = (int)fns.size();
        stage.first = (int)classifiers.size();
        stages.push_back(stage);
        classifiers.reserve(stages[si].first + stages[si].ntrees);

        FileNodeIterator it1 = fns.begin(), it1_end = fns.end();
        for( ; it1 != it1_end; ++it1 ) // weak trees
        {
            FileNode fnw = *it1;
            FileNode internalNodes = fnw[CC_INTERNAL_NODES];
            FileNode leafValues = fnw[CC_LEAF_VALUES];
            if( internalNodes.empty() || leafValues.empty() )
                return false;

            DTree tree;
            tree.nodeCount = (int)internalNodes.size()/nodeStep;
            minNodesPerTree = std::min(minNodesPerTree, tree.nodeCount);
            maxNodesPerTree = std::max(maxNodesPerTree, tree.nodeCount);

            classifiers.push_back(tree);

            nodes.reserve(nodes.size() + tree.nodeCount);
            leaves.reserve(leaves.size() + leafValues.size());
            if( subsetSize > 0 )
                subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);

            FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end();

            for( ; internalNodesIter != internalNodesEnd; ) // nodes
            {
                DTreeNode node;
                node.left = (int)*internalNodesIter; ++internalNodesIter;
                node.right = (int)*internalNodesIter; ++internalNodesIter;
                node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
                if( subsetSize > 0 )
                {
                    for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
                        subsets.push_back((int)*internalNodesIter);
                    node.threshold = 0.f;
                }
                else
                {
                    node.threshold = (float)*internalNodesIter; ++internalNodesIter;
                }
                nodes.push_back(node);
            }

            internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();

            for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
                leaves.push_back((float)*internalNodesIter);
        }
    }

    if( maxNodesPerTree == 1 )
    {
        int nodeOfs = 0, leafOfs = 0;
        size_t nstages = stages.size();
        for( size_t stageIdx = 0; stageIdx < nstages; stageIdx++ )
        {
            const Stage& stage = stages[stageIdx];

            int ntrees = stage.ntrees;
            for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs += 2 )
            {
                const DTreeNode& node = nodes[nodeOfs];
                stumps.push_back(Stump(node.featureIdx, node.threshold,
                                       leaves[leafOfs], leaves[leafOfs+1]));
            }
        }
    }

    return true;
}


bool CascadeClassifierImpl::read_(const FileNode& root)
{
    tryOpenCL = true;
    haarKernel = ocl::Kernel();
    lbpKernel = ocl::Kernel();
    ustages.release();
    unodes.release();
    uleaves.release();
    if( !data.read(root) )
        return false;

    // load features
    featureEvaluator = FeatureEvaluator::create(data.featureType);
    FileNode fn = root[CC_FEATURES];
    if( fn.empty() )
        return false;

    return featureEvaluator->read(fn, data.origWinSize);
}

template<> void DefaultDeleter<CvHaarClassifierCascade>::operator ()(CvHaarClassifierCascade* obj) const
{ cvReleaseHaarClassifierCascade(&obj); }


BaseCascadeClassifier::~BaseCascadeClassifier()
{
}

CascadeClassifier::CascadeClassifier() {
#if defined(RENDERSCRIPT) && !defined(ANDROID)
    CV_Error(Error::StsNotImplemented, "Renderscript cannot be used on non-Android devices");
#endif
}

CascadeClassifier::CascadeClassifier(const String& filename)
{
#if defined(RENDERSCRIPT) && !defined(ANDROID)
    CV_Error(Error::StsNotImplemented, "Renderscript cannot be used on non-Android devices");
#endif
    load(filename);
}

CascadeClassifier::~CascadeClassifier()
{
}

bool CascadeClassifier::empty() const
{
    return cc.empty() || cc->empty();
}

bool CascadeClassifier::load( const String& filename )
{
    cc = makePtr<CascadeClassifierImpl>();
    if(!cc->load(filename))
        cc.release();
    return !empty();
}

bool CascadeClassifier::read(const FileNode &root)
{
    Ptr<CascadeClassifierImpl> ccimpl = makePtr<CascadeClassifierImpl>();
    bool ok = ccimpl->read_(root);
    if( ok )
        cc = ccimpl.staticCast<BaseCascadeClassifier>();
    else
        cc.release();
    return ok;
}

void clipObjects(Size sz, std::vector<Rect>& objects,
                 std::vector<int>* a, std::vector<double>* b)
{
    size_t i, j = 0, n = objects.size();
    Rect win0 = Rect(0, 0, sz.width, sz.height);
    if(a)
    {
        CV_Assert(a->size() == n);
    }
    if(b)
    {
        CV_Assert(b->size() == n);
    }

    for( i = 0; i < n; i++ )
    {
        Rect r = win0 & objects[i];
        if( r.area() > 0 )
        {
            objects[j] = r;
            if( i > j )
            {
                if(a) a->at(j) = a->at(i);
                if(b) b->at(j) = b->at(i);
            }
            j++;
        }
    }

    if( j < n )
    {
        objects.resize(j);
        if(a) a->resize(j);
        if(b) b->resize(j);
    }
}

void CascadeClassifier::detectMultiScale( InputArray image,
                                          CV_OUT std::vector<Rect>& objects,
                                          double scaleFactor,
                                          int minNeighbors, int flags,
                                          Size minSize,
                                          Size maxSize )
{
    CV_Assert(!empty());
    cc->detectMultiScale(image, objects, scaleFactor, minNeighbors, flags, minSize, maxSize);
    clipObjects(image.size(), objects, 0, 0);
}

void CascadeClassifier::detectMultiScale( InputArray image,
                                          CV_OUT std::vector<Rect>& objects,
                                          CV_OUT std::vector<int>& numDetections,
                                          double scaleFactor,
                                          int minNeighbors, int flags,
                                          Size minSize, Size maxSize )
{
    CV_Assert(!empty());
    cc->detectMultiScale(image, objects, numDetections,
                         scaleFactor, minNeighbors, flags, minSize, maxSize);
    clipObjects(image.size(), objects, &numDetections, 0);
}

void CascadeClassifier::detectMultiScale( InputArray image,
                                          CV_OUT std::vector<Rect>& objects,
                                          CV_OUT std::vector<int>& rejectLevels,
                                          CV_OUT std::vector<double>& levelWeights,
                                          double scaleFactor,
                                          int minNeighbors, int flags,
                                          Size minSize, Size maxSize,
                                          bool outputRejectLevels )
{
    CV_Assert(!empty());
    cc->detectMultiScale(image, objects, rejectLevels, levelWeights,
                         scaleFactor, minNeighbors, flags,
                         minSize, maxSize, outputRejectLevels);
    clipObjects(image.size(), objects, &rejectLevels, &levelWeights);
}
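/*
  Illustrative only (not part of the library): a minimal sketch of the typical
  detection call sequence for the wrappers above. The cascade file name and the
  parameter values are placeholders.

      cv::CascadeClassifier face_cascade;
      if (face_cascade.load("haarcascade_frontalface_alt.xml"))
      {
          cv::Mat img;  // 8-bit image; multi-channel input is converted to gray internally
          std::vector<cv::Rect> faces;
          std::vector<int> numDetections;
          face_cascade.detectMultiScale(img, faces, numDetections,
                                        1.1,               // scaleFactor: pyramid step, must be > 1
                                        3,                 // minNeighbors: used as groupThreshold in groupRectangles
                                        0,                 // flags: not used by the new-format path
                                        cv::Size(30, 30)); // minSize; maxSize defaults to the whole image
          // faces[i] is a grouped detection; numDetections[i] is the number of raw
          // windows that were merged into it.
      }
*/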

bool CascadeClassifier::isOldFormatCascade() const
{
    CV_Assert(!empty());
    return cc->isOldFormatCascade();
}

Size CascadeClassifier::getOriginalWindowSize() const
{
    CV_Assert(!empty());
    return cc->getOriginalWindowSize();
}

int CascadeClassifier::getFeatureType() const
{
    CV_Assert(!empty());
    return cc->getFeatureType();
}

void* CascadeClassifier::getOldCascade()
{
    CV_Assert(!empty());
    return cc->getOldCascade();
}

void CascadeClassifier::setMaskGenerator(const Ptr<BaseCascadeClassifier::MaskGenerator>& maskGenerator)
{
    CV_Assert(!empty());
    cc->setMaskGenerator(maskGenerator);
}

Ptr<BaseCascadeClassifier::MaskGenerator> CascadeClassifier::getMaskGenerator()
{
    CV_Assert(!empty());
    return cc->getMaskGenerator();
}

} // namespace cv