/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                        Intel License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of Intel Corporation may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"

namespace cv
{
// class for grouping object candidates, detected by Cascade Classifier, HOG etc.
// instance of the class is to be passed to cv::partition (see cxoperations.hpp)
class CV_EXPORTS SimilarRects
{
public:
    SimilarRects(double _eps) : eps(_eps) {}
    inline bool operator()(const Rect& r1, const Rect& r2) const
    {
        double delta = eps*(std::min(r1.width, r2.width) + std::min(r1.height, r2.height))*0.5;
        return std::abs(r1.x - r2.x) <= delta &&
            std::abs(r1.y - r2.y) <= delta &&
            std::abs(r1.x + r1.width - r2.x - r2.width) <= delta &&
            std::abs(r1.y + r1.height - r2.y - r2.height) <= delta;
    }
    double eps;
};
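// Usage sketch (illustrative, not part of the library): cv::partition clusters
// rectangles whose positions and sizes agree within the relative tolerance eps.
//
//     vector<Rect> candidates;   // raw detections from some detector
//     vector<int> labels;
//     int nclasses = partition(candidates, labels, SimilarRects(0.2));
//     // labels[i] in [0, nclasses) is the equivalence class of candidates[i]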
void groupRectangles(vector<Rect>& rectList, int groupThreshold, double eps)
{
    if( groupThreshold <= 0 )
        return;

    vector<int> labels;
    int nclasses = partition(rectList, labels, SimilarRects(eps));
    vector<Rect> rrects(nclasses);
    vector<int> rweights(nclasses, 0);
    int i, nlabels = (int)labels.size();
    // accumulate the rectangles of every equivalence class
    for( i = 0; i < nlabels; i++ )
    {
        int cls = labels[i];
        rrects[cls].x += rectList[i].x;
        rrects[cls].y += rectList[i].y;
        rrects[cls].width += rectList[i].width;
        rrects[cls].height += rectList[i].height;
        rweights[cls]++;
    }
    rectList.clear();
    // keep only classes with more than groupThreshold members; output their averages
    for( i = 0; i < nclasses; i++ )
    {
        Rect r = rrects[i];
        if( rweights[i] <= groupThreshold )
            continue;
        float s = 1.f/rweights[i];
        rectList.push_back(Rect(saturate_cast<int>(r.x*s),
                                saturate_cast<int>(r.y*s),
                                saturate_cast<int>(r.width*s),
                                saturate_cast<int>(r.height*s)));
    }
}
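// Example (illustrative values): with groupThreshold = 2 a cluster must contain
// at least 3 mutually similar rectangles to survive, so isolated false positives
// are dropped and each surviving cluster is replaced by its average rectangle:
//
//     vector<Rect> detections;              // filled by repeated multi-scale scanning
//     groupRectangles(detections, 2, 0.2);  // detections now holds the averaged boxes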
//-----------------------------------------------------------------------------------------------------------------

#define CC_CASCADE_PARAMS "cascadeParams"
#define CC_STAGE_TYPE     "stageType"
#define CC_FEATURE_TYPE   "featureType"
#define CC_HEIGHT         "height"
#define CC_WIDTH          "width"

#define CC_STAGE_NUM    "stageNum"
#define CC_STAGES       "stages"
#define CC_STAGE_PARAMS "stageParams"

#define CC_BOOST            "BOOST"
#define CC_MAX_DEPTH        "maxDepth"
#define CC_WEAK_COUNT       "maxWeakCount"
#define CC_STAGE_THRESHOLD  "stageThreshold"
#define CC_WEAK_CLASSIFIERS "weakClassifiers"
#define CC_INTERNAL_NODES   "internalNodes"
#define CC_LEAF_VALUES      "leafValues"

#define CC_FEATURES       "features"
#define CC_FEATURE_PARAMS "featureParams"
#define CC_MAX_CAT_COUNT  "maxCatCount"

#define CC_HAAR   "HAAR"
#define CC_RECTS  "rects"
#define CC_TILTED "tilted"

#define CC_LBP  "LBP"
#define CC_RECT "rect"
#define CV_SUM_PTRS( p0, p1, p2, p3, sum, rect, step )                    \
    /* (x, y) */                                                          \
    (p0) = sum + (rect).x + (step) * (rect).y,                            \
    /* (x + w, y) */                                                      \
    (p1) = sum + (rect).x + (rect).width + (step) * (rect).y,             \
    /* (x, y + h) */                                                      \
    (p2) = sum + (rect).x + (step) * ((rect).y + (rect).height),          \
    /* (x + w, y + h) */                                                  \
    (p3) = sum + (rect).x + (rect).width + (step) * ((rect).y + (rect).height)

#define CV_TILTED_PTRS( p0, p1, p2, p3, tilted, rect, step )                        \
    /* (x, y) */                                                                    \
    (p0) = tilted + (rect).x + (step) * (rect).y,                                   \
    /* (x - h, y + h) */                                                            \
    (p1) = tilted + (rect).x - (rect).height + (step) * ((rect).y + (rect).height), \
    /* (x + w, y + w) */                                                            \
    (p2) = tilted + (rect).x + (rect).width + (step) * ((rect).y + (rect).width),   \
    /* (x + w - h, y + w + h) */                                                    \
    (p3) = tilted + (rect).x + (rect).width - (rect).height                         \
           + (step) * ((rect).y + (rect).width + (rect).height)

#define CALC_SUM_(p0, p1, p2, p3, offset) \
    ((p0)[offset] - (p1)[offset] - (p2)[offset] + (p3)[offset])

#define CALC_SUM(rect,offset) CALC_SUM_((rect)[0], (rect)[1], (rect)[2], (rect)[3], offset)
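// The macros above implement the classic integral-image trick: the sum over any
// upright rectangle costs four lookups, sum(r) = S(x,y) - S(x+w,y) - S(x,y+h) + S(x+w,y+h),
// which is exactly CALC_SUM_'s p0 - p1 - p2 + p3. A minimal sketch (illustrative
// image and rectangle):
//
//     Mat img = imread("sample.png", 0);    // any 8-bit single-channel image
//     Mat isum;
//     integral(img, isum, CV_32S);
//     const int* ptr = (const int*)isum.data;
//     size_t step = isum.step/sizeof(int);
//     const int *p0, *p1, *p2, *p3;
//     Rect r(10, 10, 8, 8);
//     CV_SUM_PTRS( p0, p1, p2, p3, ptr, r, step );
//     int boxSum = CALC_SUM_(p0, p1, p2, p3, 0);  // sum of img over r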
FeatureEvaluator::~FeatureEvaluator() {}
bool FeatureEvaluator::read(const FileNode&) { return true; }
int FeatureEvaluator::getFeatureType() const { return -1; }
bool FeatureEvaluator::setImage(const Mat&, Size) { return true; }
int FeatureEvaluator::setWindow(Point) { return -1; } // base stub: no valid window offset
class HaarEvaluator : public FeatureEvaluator
{
public:
    struct Feature
    {
        Feature();

        float calc( int offset ) const;
        void updatePtrs( const Mat& sum );
        bool read( const FileNode& node );

        bool tilted;
        enum { RECT_NUM = 3 };
        struct { Rect r; float weight; } rect[RECT_NUM];
        const int* p[RECT_NUM][4];
    };

    HaarEvaluator();
    virtual ~HaarEvaluator();

    virtual bool read( const FileNode& node );
    virtual bool setImage(const Mat& image, Size _origWinSize );
    virtual int setWindow( Point pt );
    virtual int getFeatureType() const { return FeatureEvaluator::HAAR; }

    double operator()(int featureIdx, int pOffset) const
    { return features[featureIdx].calc(pOffset) * varianceNormFactor; }
protected:
    Size origWinSize;
    vector<Feature> features;
    bool hasTiltedFeatures;

    Mat sum0, sqsum0, tilted0;
    Mat sum, sqsum, tilted;

    Rect normrect;
    const int *p[4];
    const double *pq[4];
    double varianceNormFactor;
};
//---------------------------------------------- Haar Features ------------------------------------------------

inline HaarEvaluator::Feature :: Feature()
{
    tilted = false;
    rect[0].r = rect[1].r = rect[2].r = Rect();
    rect[0].weight = rect[1].weight = rect[2].weight = 0;
    p[0][0] = p[0][1] = p[0][2] = p[0][3] =
        p[1][0] = p[1][1] = p[1][2] = p[1][3] =
        p[2][0] = p[2][1] = p[2][2] = p[2][3] = 0;
}

inline float HaarEvaluator::Feature :: calc( int offset ) const
{
    float ret = rect[0].weight * CALC_SUM(p[0], offset) + rect[1].weight * CALC_SUM(p[1], offset);
    if( rect[2].weight != 0.0f )
        ret += rect[2].weight * CALC_SUM(p[2], offset);
    return ret;
}
inline void HaarEvaluator::Feature :: updatePtrs( const Mat& sum )
{
    const int* ptr = (const int*)sum.data;
    size_t step = sum.step/sizeof(ptr[0]);
    if (tilted)
    {
        CV_TILTED_PTRS( p[0][0], p[0][1], p[0][2], p[0][3], ptr, rect[0].r, step );
        CV_TILTED_PTRS( p[1][0], p[1][1], p[1][2], p[1][3], ptr, rect[1].r, step );
        if (rect[2].weight)
            CV_TILTED_PTRS( p[2][0], p[2][1], p[2][2], p[2][3], ptr, rect[2].r, step );
    }
    else
    {
        CV_SUM_PTRS( p[0][0], p[0][1], p[0][2], p[0][3], ptr, rect[0].r, step );
        CV_SUM_PTRS( p[1][0], p[1][1], p[1][2], p[1][3], ptr, rect[1].r, step );
        if (rect[2].weight)
            CV_SUM_PTRS( p[2][0], p[2][1], p[2][2], p[2][3], ptr, rect[2].r, step );
    }
}
bool HaarEvaluator::Feature :: read( const FileNode& node )
{
    FileNode rnode = node[CC_RECTS];
    FileNodeIterator it = rnode.begin(), it_end = rnode.end();

    int ri;
    for( ri = 0; ri < RECT_NUM; ri++ )
    {
        rect[ri].r = Rect();
        rect[ri].weight = 0.f;
    }
    for(ri = 0; it != it_end; ++it, ri++)
    {
        FileNodeIterator it2 = (*it).begin();
        it2 >> rect[ri].r.x >> rect[ri].r.y >>
            rect[ri].r.width >> rect[ri].r.height >> rect[ri].weight;
    }
    tilted = (int)node[CC_TILTED] != 0;
    return true;
}
HaarEvaluator::HaarEvaluator() {}
HaarEvaluator::~HaarEvaluator() {}

bool HaarEvaluator::read(const FileNode& node)
{
    features.resize(node.size());
    FileNodeIterator it = node.begin(), it_end = node.end();
    hasTiltedFeatures = false;

    for(int i = 0; it != it_end; ++it, i++)
    {
        if(!features[i].read(*it))
            return false;
        if( features[i].tilted )
            hasTiltedFeatures = true;
    }
    return true;
}
bool HaarEvaluator::setImage( const Mat& image, Size _origWinSize )
{
    int rn = image.rows+1, cn = image.cols+1;
    origWinSize = _origWinSize;
    normrect = Rect(1, 1, origWinSize.width-2, origWinSize.height-2);

    if (image.cols < origWinSize.width || image.rows < origWinSize.height)
        return false;

    if( sum0.rows < rn || sum0.cols < cn )
    {
        sum0.create(rn, cn, CV_32S);
        sqsum0.create(rn, cn, CV_64F);
        if (hasTiltedFeatures)
            tilted0.create( rn, cn, CV_32S);
    }
    sum = Mat(rn, cn, CV_32S, sum0.data);
    sqsum = Mat(rn, cn, CV_64F, sqsum0.data); // CV_64F: must match sqsum0, or integral() reallocates
    if( hasTiltedFeatures )
    {
        tilted = Mat(rn, cn, CV_32S, tilted0.data);
        integral(image, sum, sqsum, tilted);
    }
    else
        integral(image, sum, sqsum);

    const int* sdata = (const int*)sum.data;
    const double* sqdata = (const double*)sqsum.data;
    size_t sumStep = sum.step/sizeof(sdata[0]);
    size_t sqsumStep = sqsum.step/sizeof(sqdata[0]);

    CV_SUM_PTRS( p[0], p[1], p[2], p[3], sdata, normrect, sumStep );
    CV_SUM_PTRS( pq[0], pq[1], pq[2], pq[3], sqdata, normrect, sqsumStep );

    size_t fi, nfeatures = features.size();
    for( fi = 0; fi < nfeatures; fi++ )
        features[fi].updatePtrs( !features[fi].tilted ? sum : tilted );
    return true;
}
int HaarEvaluator::setWindow( Point pt )
{
    if( pt.x < 0 || pt.y < 0 ||
        pt.x + origWinSize.width >= sum.cols-2 ||
        pt.y + origWinSize.height >= sum.rows-2 )
        return -1;

    size_t pOffset = pt.y * (sum.step/sizeof(int)) + pt.x;
    size_t pqOffset = pt.y * (sqsum.step/sizeof(double)) + pt.x;
    int valsum = CALC_SUM(p, pOffset);
    double valsqsum = CALC_SUM(pq, pqOffset);

    varianceNormFactor = (double)normrect.area() * valsqsum - (double)valsum * valsum;
    if( varianceNormFactor > 0. )
        varianceNormFactor = sqrt(varianceNormFactor);
    else
        varianceNormFactor = 1.;
    varianceNormFactor = 1./varianceNormFactor;

    return (int)pOffset;
}
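// Note on the normalization above: with N = normrect.area(), valsum = sum(x) and
// valsqsum = sum(x^2) over the window, N*valsqsum - valsum^2 equals N^2 * variance,
// so varianceNormFactor becomes 1/(N*stddev). Multiplying each feature sum by it
// (see operator() above) makes stage responses roughly invariant to global
// brightness and contrast changes of the window.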
//---------------------------------------------- LBP Features -------------------------------------------------

class LBPEvaluator : public FeatureEvaluator
{
public:
    struct Feature
    {
        Feature();
        Feature( int x, int y, int _block_w, int _block_h ) :
            rect(x, y, _block_w, _block_h) {}

        int calc( int offset ) const;
        void updatePtrs( const Mat& sum );
        bool read(const FileNode& node );

        Rect rect; // width and height of one block of the 3x3 grid
        const int* p[16]; // fast lookup pointers into the integral image
    };

    LBPEvaluator();
    virtual ~LBPEvaluator();

    virtual bool read( const FileNode& node );
    virtual bool setImage(const Mat& image, Size _origWinSize);
    virtual int setWindow( Point pt );
    virtual int getFeatureType() const { return FeatureEvaluator::LBP; }

    int operator()(int featureIdx, int pOffset) const
    { return features[featureIdx].calc(pOffset); }
    virtual int calcCat(int featureIdx, int pOffset) const
    { return (*this)(featureIdx, pOffset); }
protected:
    Size origWinSize;
    vector<Feature> features;
    Mat sum0, sum;
};
inline LBPEvaluator::Feature :: Feature()
{
    rect = Rect();
    for( int i = 0; i < 16; i++ )
        p[i] = 0;
}

inline int LBPEvaluator::Feature :: calc( int offset ) const
{
    int cval = CALC_SUM_( p[5], p[6], p[9], p[10], offset );

    return (CALC_SUM_( p[0], p[1], p[4], p[5], offset ) >= cval ? 128 : 0) |   // 0
           (CALC_SUM_( p[1], p[2], p[5], p[6], offset ) >= cval ? 64 : 0) |    // 1
           (CALC_SUM_( p[2], p[3], p[6], p[7], offset ) >= cval ? 32 : 0) |    // 2
           (CALC_SUM_( p[6], p[7], p[10], p[11], offset ) >= cval ? 16 : 0) |  // 5
           (CALC_SUM_( p[10], p[11], p[14], p[15], offset ) >= cval ? 8 : 0) | // 8
           (CALC_SUM_( p[9], p[10], p[13], p[14], offset ) >= cval ? 4 : 0) |  // 7
           (CALC_SUM_( p[8], p[9], p[12], p[13], offset ) >= cval ? 2 : 0) |   // 6
           (CALC_SUM_( p[4], p[5], p[8], p[9], offset ) >= cval ? 1 : 0);      // 3
}
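// The 16 pointers sample a 4x4 lattice of integral-image corners covering a 3x3
// grid of equal blocks, numbered in raster order (the trailing // digits above):
//
//     0 1 2
//     3 4 5     block 4 is the center whose sum is cval
//     6 7 8
//
// Each neighbor's sum is compared with cval and the eight results are packed into
// one byte clockwise from the top-left: bits 7..0 correspond to blocks 0,1,2,5,8,7,6,3.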
inline void LBPEvaluator::Feature :: updatePtrs( const Mat& sum )
{
    const int* ptr = (const int*)sum.data;
    size_t step = sum.step/sizeof(ptr[0]);
    Rect tr = rect; // walk the four corner blocks of the 3x3 grid
    CV_SUM_PTRS( p[0], p[1], p[4], p[5], ptr, tr, step );
    tr.x += 2*rect.width;
    CV_SUM_PTRS( p[2], p[3], p[6], p[7], ptr, tr, step );
    tr.y += 2*rect.height;
    CV_SUM_PTRS( p[10], p[11], p[14], p[15], ptr, tr, step );
    tr.x -= 2*rect.width;
    CV_SUM_PTRS( p[8], p[9], p[12], p[13], ptr, tr, step );
}
bool LBPEvaluator::Feature :: read(const FileNode& node )
{
    FileNode rnode = node[CC_RECT];
    FileNodeIterator it = rnode.begin();
    it >> rect.x >> rect.y >> rect.width >> rect.height;
    return true;
}
//--------------------------------------- LBPEvaluator --------------------------------------------

LBPEvaluator::LBPEvaluator() {}
LBPEvaluator::~LBPEvaluator() {}

bool LBPEvaluator::read( const FileNode& node )
{
    features.resize(node.size());
    FileNodeIterator it = node.begin(), it_end = node.end();
    for(int i = 0; it != it_end; ++it, i++)
    {
        if(!features[i].read(*it))
            return false;
    }
    return true;
}
bool LBPEvaluator::setImage( const Mat& image, Size _origWinSize )
{
    int rn = image.rows+1, cn = image.cols+1;
    origWinSize = _origWinSize;

    if( image.cols < origWinSize.width || image.rows < origWinSize.height )
        return false;

    if( sum0.rows < rn || sum0.cols < cn )
        sum0.create(rn, cn, CV_32S);
    sum = Mat(rn, cn, CV_32S, sum0.data);
    integral(image, sum);

    size_t fi, nfeatures = features.size();
    for( fi = 0; fi < nfeatures; fi++ )
        features[fi].updatePtrs( sum );
    return true;
}
int LBPEvaluator::setWindow( Point pt )
{
    if( pt.x < 0 || pt.y < 0 ||
        pt.x + origWinSize.width >= sum.cols-2 ||
        pt.y + origWinSize.height >= sum.rows-2 )
        return -1;
    return pt.y * ((int)sum.step/sizeof(int)) + pt.x;
}
Ptr<FeatureEvaluator> FeatureEvaluator::create(int featureType)
{
    return featureType == HAAR ? Ptr<FeatureEvaluator>(new HaarEvaluator) :
        featureType == LBP ? Ptr<FeatureEvaluator>(new LBPEvaluator) : Ptr<FeatureEvaluator>();
}
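// Usage sketch (illustrative): the factory returns a null Ptr for unknown feature
// types, which callers should check before dereferencing.
//
//     Ptr<FeatureEvaluator> fe = FeatureEvaluator::create(FeatureEvaluator::LBP);
//     if( !fe.empty() )
//         fe->read(featuresNode);   // featuresNode: the "features" FileNode (hypothetical)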
/////////////////////////////////// Classifier Cascade ////////////////////////////////////////////////

CascadeClassifier::CascadeClassifier() {}
CascadeClassifier::CascadeClassifier(const String& filename) { load(filename); }
CascadeClassifier::~CascadeClassifier() {}

bool CascadeClassifier::empty() const
{
    return oldCascade.empty() && stages.empty();
}

bool CascadeClassifier::load(const String& filename)
{
    oldCascade.release();

    FileStorage fs(filename, FileStorage::READ);
    if( !fs.isOpened() )
        return false;
    if( read(fs.getFirstTopLevelNode()) )
        return true;
    fs.release();

    // fall back: try loading it as an old-style (pre-2.0) Haar cascade
    oldCascade = Ptr<CvHaarClassifierCascade>((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
    return !oldCascade.empty();
}
template<class FEval>
inline int predictOrdered( CascadeClassifier& cascade, int pOffset )
{
    int si, nstages = (int)cascade.stages.size();
    int nodeOfs = 0, leafOfs = 0;
    FEval& feval = (FEval&)*cascade.feval;

    for( si = 0; si < nstages; si++ )
    {
        CascadeClassifier::Stage& stage = cascade.stages[si];
        int wi, ntrees = stage.ntrees;
        double sum = 0;

        for( wi = 0; wi < ntrees; wi++ )
        {
            CascadeClassifier::DTree& weak = cascade.classifiers[stage.first + wi];
            int idx = 0, root = nodeOfs;
            do
            {
                CascadeClassifier::DTreeNode& node = cascade.nodes[root + idx];
                double val = feval(node.featureIdx, pOffset);
                idx = val < node.threshold ? node.left : node.right;
            }
            while( idx > 0 ); // a non-positive idx encodes the leaf index as -idx
            sum += cascade.leaves[leafOfs - idx];
            nodeOfs += weak.nodeCount;
            leafOfs += weak.nodeCount + 1;
        }
        if( sum < stage.threshold )
            return -si;
    }
    return 1;
}
template<class FEval>
inline int predictCategorical( CascadeClassifier& cascade, int pOffset )
{
    int si, nstages = (int)cascade.stages.size();
    int nodeOfs = 0, leafOfs = 0;
    FEval& feval = (FEval&)*cascade.feval;
    size_t subsetSize = (cascade.ncategories + 31)/32;

    for( si = 0; si < nstages; si++ )
    {
        CascadeClassifier::Stage& stage = cascade.stages[si];
        int wi, ntrees = stage.ntrees;
        double sum = 0;

        for( wi = 0; wi < ntrees; wi++ )
        {
            CascadeClassifier::DTree& weak = cascade.classifiers[stage.first + wi];
            int idx = 0, root = nodeOfs;
            do
            {
                CascadeClassifier::DTreeNode& node = cascade.nodes[root + idx];
                int c = feval(node.featureIdx, pOffset);
                const int* subset = &cascade.subsets[(root + idx)*subsetSize];
                idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
            }
            while( idx > 0 );
            sum += cascade.leaves[leafOfs - idx];
            nodeOfs += weak.nodeCount;
            leafOfs += weak.nodeCount + 1;
        }
        if( sum < stage.threshold )
            return -si;
    }
    return 1;
}
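// In the categorical case each internal node stores a bitset of ncategories bits
// packed into subsetSize 32-bit words. For LBP features ncategories is 256, so
// subsetSize is 8, and the test (subset[c>>5] & (1 << (c & 31))) sends the window
// down the left branch exactly when the computed LBP code c belongs to the node's
// category subset.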
int CascadeClassifier::runAt( Point pt )
{
    CV_Assert( oldCascade.empty() );
    /*if( !oldCascade.empty() )
        return cvRunHaarClassifierCascade(oldCascade, pt, 0);*/

    assert( featureType == FeatureEvaluator::HAAR ||
            featureType == FeatureEvaluator::LBP );
    int offset = feval->setWindow(pt);
    return offset < 0 ? -1 :
        featureType == FeatureEvaluator::HAAR ?
            predictOrdered<HaarEvaluator>(*this, offset) :
            predictCategorical<LBPEvaluator>(*this, offset);
}
bool CascadeClassifier::setImage(const Mat& image)
{
    /*if( !oldCascade.empty() )
    {
        Mat sum(image.rows+1, image.cols+1, CV_32S);
        Mat tilted(image.rows+1, image.cols+1, CV_32S);
        Mat sqsum(image.rows+1, image.cols+1, CV_64F);
        integral(image, sum, sqsum, tilted);
        CvMat _sum = sum, _sqsum = sqsum, _tilted = tilted;
        cvSetImagesForHaarClassifierCascade( oldCascade, &_sum, &_sqsum, &_tilted, 1. );
        return true;
    }*/
    return empty() ? false : feval->setImage(image, origWinSize);
}
struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } };

void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
                                          double scaleFactor, int minNeighbors,
                                          int flags, Size minSize )
{
    CV_Assert( scaleFactor > 1 && image.depth() == CV_8U );

    objects.clear();

    if( !oldCascade.empty() )
    {
        // delegate to the C implementation for old-style cascades
        MemStorage storage(cvCreateMemStorage(0));
        CvMat _image = image;
        CvSeq* _objects = cvHaarDetectObjects( &_image, oldCascade, storage, scaleFactor,
                                               minNeighbors, flags, minSize );
        vector<CvAvgComp> vecAvgComp;
        Seq<CvAvgComp>(_objects).copyTo(vecAvgComp);
        objects.resize(vecAvgComp.size());
        std::transform(vecAvgComp.begin(), vecAvgComp.end(), objects.begin(), getRect());
        return;
    }

    Mat img = image, imgbuf(image.rows+1, image.cols+1, CV_8U);

    if( img.channels() > 1 )
    {
        Mat temp;
        cvtColor(img, temp, CV_BGR2GRAY);
        img = temp;
    }

    for( double factor = 1; ; factor *= scaleFactor )
    {
        Size winSize( cvRound(origWinSize.width*factor), cvRound(origWinSize.height*factor) );
        Size sz( cvRound( img.cols/factor ), cvRound( img.rows/factor ) );
        Size sz1( sz.width - origWinSize.width, sz.height - origWinSize.height );

        if( sz1.width <= 0 || sz1.height <= 0 )
            break;
        if( winSize.width < minSize.width || winSize.height < minSize.height )
            continue;

        Mat img1( sz, CV_8U, imgbuf.data );
        resize( img, img1, sz, 0, 0, CV_INTER_LINEAR );
        if( !feval->setImage( img1, origWinSize ) )
            break;
        int yStep = factor > 2. ? 1 : 2;

        for( int y = 0; y < sz1.height; y += yStep )
            for( int x = 0; x < sz1.width; x += yStep )
            {
                int r = runAt(Point(x,y));
                if( r > 0 )
                    objects.push_back(Rect(cvRound(x*factor), cvRound(y*factor),
                                           winSize.width, winSize.height));
                if( r == 0 )
                    x += yStep; // rejected at the very first stage: skip the neighbor too
            }
    }

    groupRectangles( objects, minNeighbors, 0.2 );
}
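// Typical use (illustrative file name and parameters; gray is an 8-bit
// single-channel Mat):
//
//     CascadeClassifier cascade;
//     if( !cascade.load("haarcascade_frontalface_alt.xml") )
//         return;                        // failed to load the cascade
//     vector<Rect> faces;
//     cascade.detectMultiScale(gray, faces, 1.1, 3, 0, Size(30, 30));
//     // faces now holds one averaged rectangle per detected object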
bool CascadeClassifier::read(const FileNode& root)
{
    // load stage params
    String stageTypeStr = (String)root[CC_STAGE_TYPE];
    if( stageTypeStr == CC_BOOST )
        stageType = BOOST;
    else
        return false;

    String featureTypeStr = (String)root[CC_FEATURE_TYPE];
    if( featureTypeStr == CC_HAAR )
        featureType = FeatureEvaluator::HAAR;
    else if( featureTypeStr == CC_LBP )
        featureType = FeatureEvaluator::LBP;
    else
        return false;

    origWinSize.width = (int)root[CC_WIDTH];
    origWinSize.height = (int)root[CC_HEIGHT];
    CV_Assert( origWinSize.height > 0 && origWinSize.width > 0 );

    // load feature params
    FileNode fn = root[CC_FEATURE_PARAMS];
    if( fn.empty() )
        return false;

    ncategories = fn[CC_MAX_CAT_COUNT];
    int subsetSize = (ncategories + 31)/32,
        nodeStep = 3 + ( ncategories>0 ? subsetSize : 1 );

    // load stages
    fn = root[CC_STAGES];
    if( fn.empty() )
        return false;

    stages.reserve(fn.size());
    classifiers.clear();
    nodes.clear();

    FileNodeIterator it = fn.begin(), it_end = fn.end();

    for( int si = 0; it != it_end; si++, ++it )
    {
        FileNode fns = *it;
        Stage stage;
        stage.threshold = fns[CC_STAGE_THRESHOLD];
        fns = fns[CC_WEAK_CLASSIFIERS];
        if( fns.empty() )
            return false;
        stage.ntrees = (int)fns.size();
        stage.first = (int)classifiers.size();
        stages.push_back(stage);
        classifiers.reserve(stages[si].first + stages[si].ntrees);

        FileNodeIterator it1 = fns.begin(), it1_end = fns.end();
        for( ; it1 != it1_end; ++it1 ) // weak trees
        {
            FileNode fnw = *it1;
            FileNode internalNodes = fnw[CC_INTERNAL_NODES];
            FileNode leafValues = fnw[CC_LEAF_VALUES];
            if( internalNodes.empty() || leafValues.empty() )
                return false;
            DTree tree;
            tree.nodeCount = (int)internalNodes.size()/nodeStep;
            classifiers.push_back(tree);

            nodes.reserve(nodes.size() + tree.nodeCount);
            leaves.reserve(leaves.size() + leafValues.size());
            if( subsetSize > 0 )
                subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);

            FileNodeIterator it2 = internalNodes.begin(), it2_end = internalNodes.end();
            for( ; it2 != it2_end; ) // nodes
            {
                DTreeNode node;
                node.left = (int)*it2; ++it2;
                node.right = (int)*it2; ++it2;
                node.featureIdx = (int)*it2; ++it2;
                if( subsetSize > 0 )
                {
                    // categorical split: read the bitset instead of a threshold
                    for( int j = 0; j < subsetSize; j++, ++it2 )
                        subsets.push_back((int)*it2);
                    node.threshold = 0.f;
                }
                else
                {
                    node.threshold = (float)*it2; ++it2;
                }
                nodes.push_back(node);
            }

            it2 = leafValues.begin(), it2_end = leafValues.end();
            for( ; it2 != it2_end; ++it2 ) // leaves
                leaves.push_back((float)*it2);
        }
    }

    // load features
    feval = FeatureEvaluator::create(featureType);
    fn = root[CC_FEATURES];
    if( fn.empty() )
        return false;
    return feval->read(fn);
}

} // namespace cv
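// Illustrative sketch of the storage layout read() expects (abridged and
// hypothetical; field names follow the CC_* keys defined above):
//
//     <cascade>
//       <stageType>BOOST</stageType>
//       <featureType>HAAR</featureType>            <!-- or LBP -->
//       <width>24</width> <height>24</height>
//       <featureParams> <maxCatCount>0</maxCatCount> </featureParams>
//       <stages>
//         <_> <stageThreshold>...</stageThreshold>
//             <weakClassifiers>
//               <_> <internalNodes>left right featureIdx threshold-or-subset</internalNodes>
//                   <leafValues>...</leafValues> </_>
//             </weakClassifiers> </_>
//       </stages>
//       <features> <!-- "rects"/"tilted" entries for HAAR, "rect" for LBP --> </features>
//     </cascade>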