#pragma once

#include "opencv2/core/ocl.hpp"
#include "HaarStructs.h"

namespace cv
{

// Clips detection rectangles to the image area `sz`, keeping the optional
// per-object vectors in sync with `objects`.
// NOTE(review): the exact semantics of `a`/`b` (presumably reject levels and
// level weights) are defined in the implementation file — confirm there.
void clipObjects(Size sz, std::vector<Rect>& objects,
                 std::vector<int>* a, std::vector<double>* b);

// Abstract interface for per-window feature computation used by the cascade
// classifier. A concrete evaluator prepares channel buffers for a set of
// scales via setImage(), then serves feature values for individual windows
// selected with setWindow().
class FeatureEvaluator
{
public:
    // Feature family identifiers (see getFeatureType() / create()).
    enum
    {
        HAAR = 0,
        LBP = 1,
        HOG = 2
    };

    // Per-scale bookkeeping: scale factor, scaled image size, buffer layout.
    struct ScaleData
    {
        ScaleData() { scale = 0.f; layer_ofs = ystep = 0; }
        // Range of valid window origins at this scale: the scaled image size
        // minus the detection window size, clamped at zero.
        Size getWorkingSize(Size winSize) const
        {
            return Size(std::max(szi.width - winSize.width, 0),
                        std::max(szi.height - winSize.height, 0));
        }

        float scale;          // scale factor of this pyramid layer
        Size szi;             // image size at this scale
        int layer_ofs, ystep; // layer offset in the shared buffer; scan step — confirm in impl
    };

    virtual ~FeatureEvaluator();

    // Reads the feature definitions from a cascade file node.
    virtual bool read(const FileNode& node, Size origWinSize);
    virtual Ptr<FeatureEvaluator> clone() const;
    virtual int getFeatureType() const;
    int getNumChannels() const { return nchannels; }

    // Prepares internal buffers for `img` at the given scales.
    virtual bool setImage(InputArray img, const std::vector<float>& scales);
    // Selects the window with origin `p` in pyramid layer `scaleIdx`.
    virtual bool setWindow(Point p, int scaleIdx);
    const ScaleData& getScaleData(int scaleIdx) const
    {
        CV_Assert( 0 <= scaleIdx && scaleIdx < (int)scaleData->size());
        return scaleData->at(scaleIdx);
    }
    virtual void getUMats(std::vector<UMat>& bufs);
    virtual void getMats();

    Size getLocalSize() const { return localSize; }
    Size getLocalBufSize() const { return lbufSize; }

    // Feature value of the currently selected window:
    // ordered (numeric) features — e.g. Haar; categorical — e.g. LBP.
    virtual float calcOrd(int featureIdx) const;
    virtual int calcCat(int featureIdx) const;

    // Factory for the HAAR/LBP/HOG evaluators (see enum above).
    static Ptr<FeatureEvaluator> create(int type);

protected:
    enum { SBUF_VALID=1, USBUF_VALID=2 };
    int sbufFlag; // flags marking which of sbuf/usbuf hold valid data

    bool updateScaleData( Size imgsz, const std::vector<float>& _scales );
    virtual void computeChannels( int, InputArray ) {}
    virtual void computeOptFeatures() {}

    Size origWinSize, sbufSize, localSize, lbufSize;
    int nchannels;
    Mat sbuf, rbuf;                       // CPU-side buffers
    UMat urbuf, usbuf, ufbuf, uscaleData; // OpenCL-side buffers

    Ptr<std::vector<ScaleData> > scaleData;

#if defined ANDROID && defined RENDERSCRIPT
    // RenderScript path: raw per-scale integral images and squared integrals.
    int** integralImages;
    int** integralImagesSq;
    friend class CascadeClassifierImpl;
#endif
};


// Concrete cascade classifier: loads a trained cascade (new XML/YAML format
// or the legacy CvHaarClassifierCascade) and runs multi-scale detection.
class CascadeClassifierImpl : public BaseCascadeClassifier
{
public:
    CascadeClassifierImpl();
    virtual ~CascadeClassifierImpl();

    bool empty() const;
    bool load( const String& filename );
    void read( const FileNode& node );
    bool read_( const FileNode& node );

    // Basic detection: rectangles only.
    void detectMultiScale( InputArray image,
                           CV_OUT std::vector<Rect>& objects,
                           double scaleFactor = 1.1,
                           int minNeighbors = 3, int flags = 0,
                           Size minSize = Size(),
                           Size maxSize = Size() );

    // Also reports how many raw detections were grouped into each rectangle.
    void detectMultiScale( InputArray image,
                           CV_OUT std::vector<Rect>& objects,
                           CV_OUT std::vector<int>& numDetections,
                           double scaleFactor=1.1,
                           int minNeighbors=3, int flags=0,
                           Size minSize=Size(),
                           Size maxSize=Size() );

    // Also reports per-object cascade reject levels / level weights when
    // outputRejectLevels is true.
    void detectMultiScale( InputArray image,
                           CV_OUT std::vector<Rect>& objects,
                           CV_OUT std::vector<int>& rejectLevels,
                           CV_OUT std::vector<double>& levelWeights,
                           double scaleFactor = 1.1,
                           int minNeighbors = 3, int flags = 0,
                           Size minSize = Size(),
                           Size maxSize = Size(),
                           bool outputRejectLevels = false );


    bool isOldFormatCascade() const;
    Size getOriginalWindowSize() const;
    int getFeatureType() const;
    void* getOldCascade();

    void setMaskGenerator(const Ptr<MaskGenerator>& maskGenerator);
    Ptr<MaskGenerator> getMaskGenerator();

protected:
    enum { SUM_ALIGN = 64 }; // row alignment (bytes) for integral-sum buffers

    bool detectSingleScale( InputArray image, Size processingRectSize,
                            int yStep, double factor, std::vector<Rect>& candidates,
                            std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
                            Size sumSize0, bool outputRejectLevels = false );
    bool ocl_detectMultiScaleNoGrouping( const std::vector<float>& scales,
                                         std::vector<Rect>& candidates );

    // Runs detection over all scales; candidates are grouped by the caller.
    void detectMultiScaleNoGrouping( InputArray image, std::vector<Rect>& candidates,
                                     std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
                                     double scaleFactor, Size minObjectSize, Size maxObjectSize,
                                     bool outputRejectLevels = false );
#if defined ANDROID && defined RENDERSCRIPT
    void setHaarVars();
    void rs_parallel_detect(std::vector<Rect>& candidates, int nscales);
#endif

    enum { MAX_FACES = 10000 };
    enum { BOOST = 0 };
    // Aliases of the public CASCADE_* flags accepted by detectMultiScale.
    enum { DO_CANNY_PRUNING = CASCADE_DO_CANNY_PRUNING,
           SCALE_IMAGE = CASCADE_SCALE_IMAGE,
           FIND_BIGGEST_OBJECT = CASCADE_FIND_BIGGEST_OBJECT,
           DO_ROUGH_SEARCH = CASCADE_DO_ROUGH_SEARCH
         };

    friend class CascadeClassifierInvoker;
    friend class SparseCascadeClassifierInvoker;

    // Cascade walkers (defined below) need access to `data`.
    template<class FEval>
    friend int predictOrdered( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

    template<class FEval>
    friend int predictCategorical( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

    template<class FEval>
    friend int predictOrderedStump( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

    template<class FEval>
    friend int predictCategoricalStump( CascadeClassifierImpl& cascade, Ptr<FeatureEvaluator> &featureEvaluator, double& weight);

    // Evaluates the cascade on a single window; returns >0 on acceptance,
    // <=0 (negated stage index) on rejection — see the predict* templates.
    int runAt( Ptr<FeatureEvaluator>& feval, Point pt, int scaleIdx, double& weight );

    // Flattened representation of the trained cascade, as parsed from file.
    class Data
    {
    public:
        // Internal node of a weak-classifier decision tree.
        struct DTreeNode
        {
            int featureIdx;
            float threshold; // for ordered features only
            int left;
            int right;
        };

        // Weak classifier: number of internal nodes (nodes/leaves are stored
        // in the flat `nodes`/`leaves` arrays).
        struct DTree
        {
            int nodeCount;
        };

        // Boosting stage: a [first, first+ntrees) slice of weak classifiers
        // and the stage acceptance threshold.
        struct Stage
        {
            int first;
            int ntrees;
            float threshold;
        };

        // Degenerate one-split tree (depth-1 weak classifier).
        struct Stump
        {
            Stump() { }
            Stump(int _featureIdx, float _threshold, float _left, float _right)
                : featureIdx(_featureIdx), threshold(_threshold), left(_left), right(_right) {}

            int featureIdx;
            float threshold;
            float left;
            float right;
        };

        Data();

        bool read(const FileNode &node);

        int stageType;
        int featureType;
        int ncategories;                     // categories per categorical feature (0 for ordered)
        int minNodesPerTree, maxNodesPerTree;
        Size origWinSize;

        std::vector<Stage> stages;
        std::vector<DTree> classifiers;
        std::vector<DTreeNode> nodes;
        std::vector<float> leaves;
        std::vector<int> subsets;            // category bitmasks for categorical splits
        std::vector<Stump> stumps;           // filled instead of nodes/leaves for stump-only cascades
    };

    Data data;
    Ptr<FeatureEvaluator> featureEvaluator;
    Ptr<CvHaarClassifierCascade> oldCascade; // legacy-format cascade, if loaded

    Ptr<MaskGenerator> maskGenerator;
    UMat ugrayImage;
    // OpenCL-side copies of the cascade tables and the face-position buffer.
    UMat ufacepos, ustages, unodes, uleaves, usubsets;
    ocl::Kernel haarKernel, lbpKernel;
    bool tryOpenCL;

    Mutex mtx;
#if defined ANDROID && defined RENDERSCRIPT
    HaarVars haarVars;      // RenderScript kernel parameters (see HaarStructs.h)
    bool loadedHaarVars;
#endif
};

// String keys used when parsing cascade XML/YAML files.
#define CC_CASCADE_PARAMS "cascadeParams"
#define CC_STAGE_TYPE     "stageType"
#define CC_FEATURE_TYPE   "featureType"
#define CC_HEIGHT         "height"
#define CC_WIDTH          "width"

#define CC_STAGE_NUM    "stageNum"
#define CC_STAGES       "stages"
#define CC_STAGE_PARAMS "stageParams"

#define CC_BOOST            "BOOST"
#define CC_MAX_DEPTH        "maxDepth"
#define CC_WEAK_COUNT       "maxWeakCount"
#define CC_STAGE_THRESHOLD  "stageThreshold"
#define CC_WEAK_CLASSIFIERS "weakClassifiers"
#define CC_INTERNAL_NODES   "internalNodes"
#define CC_LEAF_VALUES      "leafValues"

#define CC_FEATURES       "features"
#define CC_FEATURE_PARAMS "featureParams"
#define CC_MAX_CAT_COUNT  "maxCatCount"

#define CC_HAAR   "HAAR"
#define CC_RECTS  "rects"
#define CC_TILTED "tilted"

#define CC_LBP  "LBP"
#define CC_RECT "rect"

#define CC_HOG  "HOG"

// Computes the four corner pointers of `rect` in integral image `sum` with
// row stride `step`; the rectangle sum is then p0 - p1 - p2 + p3.
#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step )                    \
    /* (x, y) */                                                          \
    (p0) = sum + (rect).x + (step) * (rect).y,                            \
    /* (x + w, y) */                                                      \
    (p1) = sum + (rect).x + (rect).width + (step) * (rect).y,             \
    /* (x, y + h) */                                                      \
    (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height),          \
    /* (x + w, y + h) */                                                  \
    (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)

283#define CV_TILTED_PTRS( p0, p1, p2, p3, tilted, rect, step ) \ 284 /* (x, y) */ \ 285 (p0) = tilted + (rect).x + (step) * (rect).y, \ 286 /* (x - h, y + h) */ \ 287 (p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height), \ 288 /* (x + w, y + w) */ \ 289 (p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width), \ 290 /* (x + w - h, y + w + h) */ \ 291 (p3) = tilted + (rect).x + (rect).width - (rect).height \ 292 + (step) * ((rect).y + (rect).width + (rect).height) 293 294#define CALC_SUM_(p0, p1, p2, p3, offset) \ 295 ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset]) 296 297#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset) 298 299#define CV_SUM_OFS( p0, p1, p2, p3, sum, rect, step ) \ 300/* (x, y) */ \ 301(p0) = sum + (rect).x + (step) * (rect).y, \ 302/* (x + w, y) */ \ 303(p1) = sum + (rect).x + (rect).width + (step) * (rect).y, \ 304/* (x + w, y) */ \ 305(p2) = sum + (rect).x + (step) * ((rect).y + (rect).height), \ 306/* (x + w, y + h) */ \ 307(p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height) 308 309#define CV_TILTED_OFS( p0, p1, p2, p3, tilted, rect, step ) \ 310/* (x, y) */ \ 311(p0) = tilted + (rect).x + (step) * (rect).y, \ 312/* (x - h, y + h) */ \ 313(p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height), \ 314/* (x + w, y + w) */ \ 315(p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width), \ 316/* (x + w - h, y + w + h) */ \ 317(p3) = tilted + (rect).x + (rect).width - (rect).height \ 318+ (step) * ((rect).y + (rect).width + (rect).height) 319 320#define CALC_SUM_(p0, p1, p2, p3, offset) \ 321((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset]) 322 323#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset) 324 325#define CALC_SUM_OFS_(p0, p1, p2, p3, ptr) \ 326((ptr)[p0] - (ptr)[p1] - (ptr)[p2] + (ptr)[p3]) 327 328#define CALC_SUM_OFS(rect, ptr) 
CALC_SUM_OFS_((rect)[0], (rect)[1], (rect)[2], (rect)[3], ptr) 329 330//---------------------------------------------- HaarEvaluator --------------------------------------- 331class HaarEvaluator : public FeatureEvaluator 332{ 333public: 334 struct Feature 335 { 336 Feature(); 337 bool read( const FileNode& node ); 338 339 bool tilted; 340 341 enum { RECT_NUM = 3 }; 342 struct 343 { 344 Rect r; 345 float weight; 346 } rect[RECT_NUM]; 347 }; 348 349 struct OptFeature 350 { 351 OptFeature(); 352 353 enum { RECT_NUM = Feature::RECT_NUM }; 354 float calc( const int* pwin ) const; 355 void setOffsets( const Feature& _f, int step, int tofs ); 356 357 int ofs[RECT_NUM][4]; 358 float weight[4]; 359 }; 360 361 HaarEvaluator(); 362 virtual ~HaarEvaluator(); 363 364 virtual bool read( const FileNode& node, Size origWinSize); 365 virtual Ptr<FeatureEvaluator> clone() const; 366 virtual int getFeatureType() const { return FeatureEvaluator::HAAR; } 367 368 virtual bool setWindow(Point p, int scaleIdx); 369 Rect getNormRect() const; 370 int getSquaresOffset() const; 371 372 float operator()(int featureIdx) const 373 { return optfeaturesPtr[featureIdx].calc(pwin) * varianceNormFactor; } 374 virtual float calcOrd(int featureIdx) const 375 { return (*this)(featureIdx); } 376 377protected: 378 virtual void computeChannels( int i, InputArray img ); 379 virtual void computeOptFeatures(); 380 381 friend class CascadeClassifierImpl; 382 383 Ptr<std::vector<Feature> > features; 384 Ptr<std::vector<OptFeature> > optfeatures; 385 Ptr<std::vector<OptFeature> > optfeatures_lbuf; 386 bool hasTiltedFeatures; 387 388 int tofs, sqofs; 389 Vec4i nofs; 390 Rect normrect; 391 const int* pwin; 392 OptFeature* optfeaturesPtr; // optimization 393 float varianceNormFactor; 394}; 395 396inline HaarEvaluator::Feature :: Feature() 397{ 398 tilted = false; 399 rect[0].r = rect[1].r = rect[2].r = Rect(); 400 rect[0].weight = rect[1].weight = rect[2].weight = 0; 401} 402 403inline HaarEvaluator::OptFeature :: 
OptFeature() 404{ 405 weight[0] = weight[1] = weight[2] = 0.f; 406 407 ofs[0][0] = ofs[0][1] = ofs[0][2] = ofs[0][3] = 408 ofs[1][0] = ofs[1][1] = ofs[1][2] = ofs[1][3] = 409 ofs[2][0] = ofs[2][1] = ofs[2][2] = ofs[2][3] = 0; 410} 411 412inline float HaarEvaluator::OptFeature :: calc( const int* ptr ) const 413{ 414 float ret = weight[0] * CALC_SUM_OFS(ofs[0], ptr) + 415 weight[1] * CALC_SUM_OFS(ofs[1], ptr); 416 417 if( weight[2] != 0.0f ) 418 ret += weight[2] * CALC_SUM_OFS(ofs[2], ptr); 419 420 return ret; 421} 422 423//---------------------------------------------- LBPEvaluator ------------------------------------- 424 425class LBPEvaluator : public FeatureEvaluator 426{ 427public: 428 struct Feature 429 { 430 Feature(); 431 Feature( int x, int y, int _block_w, int _block_h ) : 432 rect(x, y, _block_w, _block_h) {} 433 434 bool read(const FileNode& node ); 435 436 Rect rect; // weight and height for block 437 }; 438 439 struct OptFeature 440 { 441 OptFeature(); 442 443 int calc( const int* pwin ) const; 444 void setOffsets( const Feature& _f, int step ); 445 int ofs[16]; 446 }; 447 448 LBPEvaluator(); 449 virtual ~LBPEvaluator(); 450 451 virtual bool read( const FileNode& node, Size origWinSize ); 452 virtual Ptr<FeatureEvaluator> clone() const; 453 virtual int getFeatureType() const { return FeatureEvaluator::LBP; } 454 455 virtual bool setWindow(Point p, int scaleIdx); 456 457 int operator()(int featureIdx) const 458 { return optfeaturesPtr[featureIdx].calc(pwin); } 459 virtual int calcCat(int featureIdx) const 460 { return (*this)(featureIdx); } 461protected: 462 virtual void computeChannels( int i, InputArray img ); 463 virtual void computeOptFeatures(); 464 465 Ptr<std::vector<Feature> > features; 466 Ptr<std::vector<OptFeature> > optfeatures; 467 Ptr<std::vector<OptFeature> > optfeatures_lbuf; 468 OptFeature* optfeaturesPtr; // optimization 469 470 const int* pwin; 471}; 472 473 474inline LBPEvaluator::Feature :: Feature() 475{ 476 rect = Rect(); 477} 478 
inline LBPEvaluator::OptFeature :: OptFeature()
{
    // Zero all 16 precomputed corner offsets (4x4 lattice over the 3x3 grid).
    for( int i = 0; i < 16; i++ )
        ofs[i] = 0;
}

// Computes the 8-bit LBP code for the window pointed to by `p`.
// ofs[0..15] are integral-image offsets laid out as a 4x4 corner lattice of a
// 3x3 cell grid; each CALC_SUM_OFS_ call below sums one cell. The eight
// surrounding cells are compared with the center cell and the comparison bits
// are packed into one byte (trailing // numbers give the neighbour index).
inline int LBPEvaluator::OptFeature :: calc( const int* p ) const
{
    int cval = CALC_SUM_OFS_( ofs[5], ofs[6], ofs[9], ofs[10], p ); // center cell

    return (CALC_SUM_OFS_( ofs[0], ofs[1], ofs[4], ofs[5], p ) >= cval ? 128 : 0) |   // 0
           (CALC_SUM_OFS_( ofs[1], ofs[2], ofs[5], ofs[6], p ) >= cval ? 64 : 0) |    // 1
           (CALC_SUM_OFS_( ofs[2], ofs[3], ofs[6], ofs[7], p ) >= cval ? 32 : 0) |    // 2
           (CALC_SUM_OFS_( ofs[6], ofs[7], ofs[10], ofs[11], p ) >= cval ? 16 : 0) |  // 5
           (CALC_SUM_OFS_( ofs[10], ofs[11], ofs[14], ofs[15], p ) >= cval ? 8 : 0)|  // 8
           (CALC_SUM_OFS_( ofs[9], ofs[10], ofs[13], ofs[14], p ) >= cval ? 4 : 0)|   // 7
           (CALC_SUM_OFS_( ofs[8], ofs[9], ofs[12], ofs[13], p ) >= cval ? 2 : 0)|    // 6
           (CALC_SUM_OFS_( ofs[4], ofs[5], ofs[8], ofs[9], p ) >= cval ? 1 : 0);
}


//---------------------------------------------- predictor functions -------------------------------------

// Runs the boosted cascade of full decision trees with *ordered* (numeric)
// features on the window currently selected in the evaluator.
// Returns 1 if the window passes every stage, otherwise -si where si is the
// index of the rejecting stage (note: -0 == 0 for stage 0). `sum` receives
// the accumulated score of the last evaluated stage.
template<class FEval>
inline int predictOrdered( CascadeClassifierImpl& cascade,
                           Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
    int nstages = (int)cascade.data.stages.size();
    int nodeOfs = 0, leafOfs = 0;  // running offsets into the flat nodes/leaves arrays
    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
    float* cascadeLeaves = &cascade.data.leaves[0];
    CascadeClassifierImpl::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
    CascadeClassifierImpl::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
    CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

    for( int si = 0; si < nstages; si++ )
    {
        CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
        int wi, ntrees = stage.ntrees;
        sum = 0;

        for( wi = 0; wi < ntrees; wi++ )
        {
            CascadeClassifierImpl::Data::DTree& weak = cascadeWeaks[stage.first + wi];
            int idx = 0, root = nodeOfs;

            // Descend the tree: positive idx is an internal-node index,
            // non-positive idx encodes a leaf.
            do
            {
                CascadeClassifierImpl::Data::DTreeNode& node = cascadeNodes[root + idx];
                double val = featureEvaluator(node.featureIdx);
                idx = val < node.threshold ? node.left : node.right;
            }
            while( idx > 0 );
            sum += cascadeLeaves[leafOfs - idx]; // -idx selects this tree's leaf
            nodeOfs += weak.nodeCount;           // a tree has nodeCount nodes
            leafOfs += weak.nodeCount + 1;       // ...and nodeCount + 1 leaves
        }
        if( sum < stage.threshold )
            return -si;
    }
    return 1;
}

// Same cascade walk as predictOrdered, but for *categorical* features (LBP):
// the branch is chosen by testing the feature's category bit in the node's
// subset bitmask rather than by thresholding.
template<class FEval>
inline int predictCategorical( CascadeClassifierImpl& cascade,
                               Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
    int nstages = (int)cascade.data.stages.size();
    int nodeOfs = 0, leafOfs = 0;
    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
    // Each node owns a bitmask of subsetSize 32-bit words (one bit per category).
    size_t subsetSize = (cascade.data.ncategories + 31)/32;
    int* cascadeSubsets = &cascade.data.subsets[0];
    float* cascadeLeaves = &cascade.data.leaves[0];
    CascadeClassifierImpl::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
    CascadeClassifierImpl::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
    CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

    for(int si = 0; si < nstages; si++ )
    {
        CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
        int wi, ntrees = stage.ntrees;
        sum = 0;

        for( wi = 0; wi < ntrees; wi++ )
        {
            CascadeClassifierImpl::Data::DTree& weak = cascadeWeaks[stage.first + wi];
            int idx = 0, root = nodeOfs;
            do
            {
                CascadeClassifierImpl::Data::DTreeNode& node = cascadeNodes[root + idx];
                int c = featureEvaluator(node.featureIdx);
                const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
                // Go left when category bit c is set in the node's mask.
                idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
            }
            while( idx > 0 );
            sum += cascadeLeaves[leafOfs - idx];
            nodeOfs += weak.nodeCount;
            leafOfs += weak.nodeCount + 1;
        }
        if( sum < stage.threshold )
            return -si;
    }
    return 1;
}

// Fast path for stump-only cascades (depth-1 trees) with ordered features.
// Same return convention as predictOrdered.
template<class FEval>
inline int predictOrderedStump( CascadeClassifierImpl& cascade,
                                Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
    CV_Assert(!cascade.data.stumps.empty());
    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
    const CascadeClassifierImpl::Data::Stump* cascadeStumps = &cascade.data.stumps[0];
    const CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

    int nstages = (int)cascade.data.stages.size();
    double tmp = 0;

    for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
    {
        const CascadeClassifierImpl::Data::Stage& stage = cascadeStages[stageIdx];
        tmp = 0;

        int ntrees = stage.ntrees;
        for( int i = 0; i < ntrees; i++ )
        {
            const CascadeClassifierImpl::Data::Stump& stump = cascadeStumps[i];
            double value = featureEvaluator(stump.featureIdx);
            tmp += value < stump.threshold ? stump.left : stump.right;
        }

        if( tmp < stage.threshold )
        {
            sum = (double)tmp;
            return -stageIdx;
        }
        cascadeStumps += ntrees; // advance to the next stage's stumps
    }

    sum = (double)tmp;
    return 1;
}

// Fast path for stump-only cascades with categorical features (LBP):
// each stump picks left/right by testing the category bit in its subset mask.
// Same return convention as predictOrdered.
template<class FEval>
inline int predictCategoricalStump( CascadeClassifierImpl& cascade,
                                    Ptr<FeatureEvaluator> &_featureEvaluator, double& sum )
{
    CV_Assert(!cascade.data.stumps.empty());
    int nstages = (int)cascade.data.stages.size();
    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
    // One bitmask of subsetSize 32-bit words per stump.
    size_t subsetSize = (cascade.data.ncategories + 31)/32;
    const int* cascadeSubsets = &cascade.data.subsets[0];
    const CascadeClassifierImpl::Data::Stump* cascadeStumps = &cascade.data.stumps[0];
    const CascadeClassifierImpl::Data::Stage* cascadeStages = &cascade.data.stages[0];

    double tmp = 0;
    for( int si = 0; si < nstages; si++ )
    {
        const CascadeClassifierImpl::Data::Stage& stage = cascadeStages[si];
        int wi, ntrees = stage.ntrees;
        tmp = 0;

        for( wi = 0; wi < ntrees; wi++ )
        {
            const CascadeClassifierImpl::Data::Stump& stump = cascadeStumps[wi];
            int c = featureEvaluator(stump.featureIdx);
            const int* subset = &cascadeSubsets[wi*subsetSize];
            tmp += (subset[c>>5] & (1 << (c & 31))) ? stump.left : stump.right;
        }

        if( tmp < stage.threshold )
        {
            sum = tmp;
            return -si;
        }

        // Advance to the next stage's stumps and their subset masks.
        cascadeStumps += ntrees;
        cascadeSubsets += ntrees*subsetSize;
    }

    sum = (double)tmp;
    return 1;
}
} // namespace cv