1 /*M///////////////////////////////////////////////////////////////////////////////////////
2 //
3 //  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4 //
5 //  By downloading, copying, installing or using the software you agree to this license.
6 //  If you do not agree to this license, do not download, install,
7 //  copy or use the software.
8 //
9 //
10 //                        Intel License Agreement
11 //                For Open Source Computer Vision Library
12 //
13 // Copyright (C) 2000, Intel Corporation, all rights reserved.
14 // Third party copyrights are property of their respective owners.
15 //
16 // Redistribution and use in source and binary forms, with or without modification,
17 // are permitted provided that the following conditions are met:
18 //
19 //   * Redistribution's of source code must retain the above copyright notice,
20 //     this list of conditions and the following disclaimer.
21 //
22 //   * Redistribution's in binary form must reproduce the above copyright notice,
23 //     this list of conditions and the following disclaimer in the documentation
24 //     and/or other materials provided with the distribution.
25 //
26 //   * The name of Intel Corporation may not be used to endorse or promote products
27 //     derived from this software without specific prior written permission.
28 //
29 // This software is provided by the copyright holders and contributors "as is" and
30 // any express or implied warranties, including, but not limited to, the implied
31 // warranties of merchantability and fitness for a particular purpose are disclaimed.
32 // In no event shall the Intel Corporation or contributors be liable for any direct,
33 // indirect, incidental, special, exemplary, or consequential damages
34 // (including, but not limited to, procurement of substitute goods or services;
35 // loss of use, data, or profits; or business interruption) however caused
36 // and on any theory of liability, whether in contract, strict liability,
37 // or tort (including negligence or otherwise) arising in any way out of
38 // the use of this software, even if advised of the possibility of such damage.
39 //
40 //M*/
41
42 /* Haar features calculation */
43
44 #include "_cv.h"
45 #include <stdio.h>
46
47 /*#if CV_SSE2
48 #   if CV_SSE4 || defined __SSE4__
49 #       include <smmintrin.h>
50 #   else
51 #       define _mm_blendv_pd(a, b, m) _mm_xor_pd(a, _mm_and_pd(_mm_xor_pd(b, a), m))
52 #       define _mm_blendv_ps(a, b, m) _mm_xor_ps(a, _mm_and_ps(_mm_xor_ps(b, a), m))
53 #   endif
54 #if defined CV_ICC
55 #   define CV_HAAR_USE_SSE 1
56 #endif
57 #endif*/
58
59 /* these settings affect the quality of detection: change with care */
60 #define CV_ADJUST_FEATURES 1
61 #define CV_ADJUST_WEIGHTS  0
62
63 typedef int sumtype;
64 typedef double sqsumtype;
65
66 typedef struct CvHidHaarFeature
67 {
68     struct
69     {
70         sumtype *p0, *p1, *p2, *p3;
71         float weight;
72     }
73     rect[CV_HAAR_FEATURE_MAX];
74 }
75 CvHidHaarFeature;
76
77
78 typedef struct CvHidHaarTreeNode
79 {
80     CvHidHaarFeature feature;
81     float threshold;
82     int left;
83     int right;
84 }
85 CvHidHaarTreeNode;
86
87
88 typedef struct CvHidHaarClassifier
89 {
90     int count;
91     //CvHaarFeature* orig_feature;
92     CvHidHaarTreeNode* node;
93     float* alpha;
94 }
95 CvHidHaarClassifier;
96
97
98 typedef struct CvHidHaarStageClassifier
99 {
100     int  count;
101     float threshold;
102     CvHidHaarClassifier* classifier;
103     int two_rects;
104
105     struct CvHidHaarStageClassifier* next;
106     struct CvHidHaarStageClassifier* child;
107     struct CvHidHaarStageClassifier* parent;
108 }
109 CvHidHaarStageClassifier;
110
111
112 struct CvHidHaarClassifierCascade
113 {
114     int  count;
115     int  is_stump_based;
116     int  has_tilted_features;
117     int  is_tree;
118     double inv_window_area;
119     CvMat sum, sqsum, tilted;
120     CvHidHaarStageClassifier* stage_classifier;
121     sqsumtype *pq0, *pq1, *pq2, *pq3;
122     sumtype *p0, *p1, *p2, *p3;
123
124     void** ipp_stages;
125 };
126
127
128 const int icv_object_win_border = 1;
129 const float icv_stage_threshold_bias = 0.0001f;
130
131 static CvHaarClassifierCascade*
132 icvCreateHaarClassifierCascade( int stage_count )
133 {
134     CvHaarClassifierCascade* cascade = 0;
135
136     int block_size = sizeof(*cascade) + stage_count*sizeof(*cascade->stage_classifier);
137
138     if( stage_count <= 0 )
139         CV_Error( CV_StsOutOfRange, "Number of stages should be positive" );
140
141     cascade = (CvHaarClassifierCascade*)cvAlloc( block_size );
142     memset( cascade, 0, block_size );
143
144     cascade->stage_classifier = (CvHaarStageClassifier*)(cascade + 1);
145     cascade->flags = CV_HAAR_MAGIC_VAL;
146     cascade->count = stage_count;
147
148     return cascade;
149 }
150
151 static void
152 icvReleaseHidHaarClassifierCascade( CvHidHaarClassifierCascade** _cascade )
153 {
154     if( _cascade && *_cascade )
155     {
156 #ifdef HAVE_IPP
157         CvHidHaarClassifierCascade* cascade = *_cascade;
158         if( cascade->ipp_stages )
159         {
160             int i;
161             for( i = 0; i < cascade->count; i++ )
162             {
163                 if( cascade->ipp_stages[i] )
164                     ippiHaarClassifierFree_32f( (IppiHaarClassifier_32f*)cascade->ipp_stages[i] );
165             }
166         }
167         cvFree( &cascade->ipp_stages );
168 #endif
169         cvFree( _cascade );
170     }
171 }
172
173 /* create more efficient internal representation of haar classifier cascade */
174 static CvHidHaarClassifierCascade*
175 icvCreateHidHaarClassifierCascade( CvHaarClassifierCascade* cascade )
176 {
177     CvRect* ipp_features = 0;
178     float *ipp_weights = 0, *ipp_thresholds = 0, *ipp_val1 = 0, *ipp_val2 = 0;
179     int* ipp_counts = 0;
180
181     CvHidHaarClassifierCascade* out = 0;
182
183     int i, j, k, l;
184     int datasize;
185     int total_classifiers = 0;
186     int total_nodes = 0;
187     char errorstr[100];
188     CvHidHaarClassifier* haar_classifier_ptr;
189     CvHidHaarTreeNode* haar_node_ptr;
190     CvSize orig_window_size;
191     int has_tilted_features = 0;
192     int max_count = 0;
193
194     if( !CV_IS_HAAR_CLASSIFIER(cascade) )
195         CV_Error( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );
196
197     if( cascade->hid_cascade )
198         CV_Error( CV_StsError, "hid_cascade has been already created" );
199
200     if( !cascade->stage_classifier )
201         CV_Error( CV_StsNullPtr, "Null stage classifier pointer" );
202
203     if( cascade->count <= 0 )
204         CV_Error( CV_StsOutOfRange, "Non-positive number of cascade stages" );
205
206     orig_window_size = cascade->orig_window_size;
207
208     /* check input structure correctness and calculate total memory size needed for
209        internal representation of the classifier cascade */
210     for( i = 0; i < cascade->count; i++ )
211     {
212         CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
213
214         if( !stage_classifier->classifier ||
215             stage_classifier->count <= 0 )
216         {
217             sprintf( errorstr, "header of the stage classifier #%d is invalid "
218                      "(has null pointers or non-positive classifier count)", i );
219             CV_Error( CV_StsError, errorstr );
220         }
221
222         max_count = MAX( max_count, stage_classifier->count );
223         total_classifiers += stage_classifier->count;
224
225         for( j = 0; j < stage_classifier->count; j++ )
226         {
227             CvHaarClassifier* classifier = stage_classifier->classifier + j;
228
229             total_nodes += classifier->count;
230             for( l = 0; l < classifier->count; l++ )
231             {
232                 for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
233                 {
234                     if( classifier->haar_feature[l].rect[k].r.width )
235                     {
236                         CvRect r = classifier->haar_feature[l].rect[k].r;
237                         int tilted = classifier->haar_feature[l].tilted;
238                         has_tilted_features |= tilted != 0;
239                         if( r.width < 0 || r.height < 0 || r.y < 0 ||
240                             r.x + r.width > orig_window_size.width
241                             ||
242                             (!tilted &&
243                             (r.x < 0 || r.y + r.height > orig_window_size.height))
244                             ||
245                             (tilted && (r.x - r.height < 0 ||
246                             r.y + r.width + r.height > orig_window_size.height)))
247                         {
248                             sprintf( errorstr, "rectangle #%d of the classifier #%d of "
249                                      "the stage classifier #%d is not inside "
250                                      "the reference (original) cascade window", k, j, i );
251                             CV_Error( CV_StsError, errorstr );
252                         }
253                     }
254                 }
255             }
256         }
257     }
258
259     // this is an upper bound for the whole hidden cascade size
260     datasize = sizeof(CvHidHaarClassifierCascade) +
261                sizeof(CvHidHaarStageClassifier)*cascade->count +
262                sizeof(CvHidHaarClassifier) * total_classifiers +
263                sizeof(CvHidHaarTreeNode) * total_nodes +
264                sizeof(void*)*(total_nodes + total_classifiers);
265
266     out = (CvHidHaarClassifierCascade*)cvAlloc( datasize );
267     memset( out, 0, sizeof(*out) );
268
269     /* init header */
270     out->count = cascade->count;
271     out->stage_classifier = (CvHidHaarStageClassifier*)(out + 1);
272     haar_classifier_ptr = (CvHidHaarClassifier*)(out->stage_classifier + cascade->count);
273     haar_node_ptr = (CvHidHaarTreeNode*)(haar_classifier_ptr + total_classifiers);
274
275     out->is_stump_based = 1;
276     out->has_tilted_features = has_tilted_features;
277     out->is_tree = 0;
278
279     /* initialize internal representation */
280     for( i = 0; i < cascade->count; i++ )
281     {
282         CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
283         CvHidHaarStageClassifier* hid_stage_classifier = out->stage_classifier + i;
284
285         hid_stage_classifier->count = stage_classifier->count;
286         hid_stage_classifier->threshold = stage_classifier->threshold - icv_stage_threshold_bias;
287         hid_stage_classifier->classifier = haar_classifier_ptr;
288         hid_stage_classifier->two_rects = 1;
289         haar_classifier_ptr += stage_classifier->count;
290
291         hid_stage_classifier->parent = (stage_classifier->parent == -1)
292             ? NULL : out->stage_classifier + stage_classifier->parent;
293         hid_stage_classifier->next = (stage_classifier->next == -1)
294             ? NULL : out->stage_classifier + stage_classifier->next;
295         hid_stage_classifier->child = (stage_classifier->child == -1)
296             ? NULL : out->stage_classifier + stage_classifier->child;
297
298         out->is_tree |= hid_stage_classifier->next != NULL;
299
300         for( j = 0; j < stage_classifier->count; j++ )
301         {
302             CvHaarClassifier* classifier = stage_classifier->classifier + j;
303             CvHidHaarClassifier* hid_classifier = hid_stage_classifier->classifier + j;
304             int node_count = classifier->count;
305             float* alpha_ptr = (float*)(haar_node_ptr + node_count);
306
307             hid_classifier->count = node_count;
308             hid_classifier->node = haar_node_ptr;
309             hid_classifier->alpha = alpha_ptr;
310
311             for( l = 0; l < node_count; l++ )
312             {
313                 CvHidHaarTreeNode* node = hid_classifier->node + l;
314                 CvHaarFeature* feature = classifier->haar_feature + l;
315                 memset( node, -1, sizeof(*node) );
316                 node->threshold = classifier->threshold[l];
317                 node->left = classifier->left[l];
318                 node->right = classifier->right[l];
319
320                 if( fabs(feature->rect[2].weight) < DBL_EPSILON ||
321                     feature->rect[2].r.width == 0 ||
322                     feature->rect[2].r.height == 0 )
323                     memset( &(node->feature.rect[2]), 0, sizeof(node->feature.rect[2]) );
324                 else
325                     hid_stage_classifier->two_rects = 0;
326             }
327
328             memcpy( alpha_ptr, classifier->alpha, (node_count+1)*sizeof(alpha_ptr[0]));
329             haar_node_ptr =
330                 (CvHidHaarTreeNode*)cvAlignPtr(alpha_ptr+node_count+1, sizeof(void*));
331
332             out->is_stump_based &= node_count == 1;
333         }
334     }
335
336 #ifdef HAVE_IPP
337     int can_use_ipp = !out->has_tilted_features && !out->is_tree && out->is_stump_based;
338
339     if( can_use_ipp )
340     {
341         int ipp_datasize = cascade->count*sizeof(out->ipp_stages[0]);
342         float ipp_weight_scale=(float)(1./((orig_window_size.width-icv_object_win_border*2)*
343             (orig_window_size.height-icv_object_win_border*2)));
344
345         out->ipp_stages = (void**)cvAlloc( ipp_datasize );
346         memset( out->ipp_stages, 0, ipp_datasize );
347
348         ipp_features = (CvRect*)cvAlloc( max_count*3*sizeof(ipp_features[0]) );
349         ipp_weights = (float*)cvAlloc( max_count*3*sizeof(ipp_weights[0]) );
350         ipp_thresholds = (float*)cvAlloc( max_count*sizeof(ipp_thresholds[0]) );
351         ipp_val1 = (float*)cvAlloc( max_count*sizeof(ipp_val1[0]) );
352         ipp_val2 = (float*)cvAlloc( max_count*sizeof(ipp_val2[0]) );
353         ipp_counts = (int*)cvAlloc( max_count*sizeof(ipp_counts[0]) );
354
355         for( i = 0; i < cascade->count; i++ )
356         {
357             CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
358             for( j = 0, k = 0; j < stage_classifier->count; j++ )
359             {
360                 CvHaarClassifier* classifier = stage_classifier->classifier + j;
361                 int rect_count = 2 + (classifier->haar_feature->rect[2].r.width != 0);
362
363                 ipp_thresholds[j] = classifier->threshold[0];
364                 ipp_val1[j] = classifier->alpha[0];
365                 ipp_val2[j] = classifier->alpha[1];
366                 ipp_counts[j] = rect_count;
367
368                 for( l = 0; l < rect_count; l++, k++ )
369                 {
370                     ipp_features[k] = classifier->haar_feature->rect[l].r;
371                     //ipp_features[k].y = orig_window_size.height - ipp_features[k].y - ipp_features[k].height;
372                     ipp_weights[k] = classifier->haar_feature->rect[l].weight*ipp_weight_scale;
373                 }
374             }
375
376             if( ippiHaarClassifierInitAlloc_32f( (IppiHaarClassifier_32f**)&out->ipp_stages[i],
377                 (const IppiRect*)ipp_features, ipp_weights, ipp_thresholds,
378                 ipp_val1, ipp_val2, ipp_counts, stage_classifier->count ) < 0 )
379                 break;
380         }
381
382         if( i < cascade->count )
383         {
384             for( j = 0; j < i; j++ )
385                 if( out->ipp_stages[i] )
386                     ippiHaarClassifierFree_32f( (IppiHaarClassifier_32f*)out->ipp_stages[i] );
387             cvFree( &out->ipp_stages );
388         }
389     }
390 #endif
391
392     cascade->hid_cascade = out;
393     assert( (char*)haar_node_ptr - (char*)out <= datasize );
394
395     cvFree( &ipp_features );
396     cvFree( &ipp_weights );
397     cvFree( &ipp_thresholds );
398     cvFree( &ipp_val1 );
399     cvFree( &ipp_val2 );
400     cvFree( &ipp_counts );
401
402     return out;
403 }
404
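/* Note on the layout built above: the hidden cascade lives in one contiguous
   allocation, in the order implied by the datasize computation: the
   CvHidHaarClassifierCascade header, then the array of
   CvHidHaarStageClassifier, then for each classifier its CvHidHaarTreeNode
   array immediately followed by node_count+1 alpha values (re-aligned to
   pointer size before the next classifier). This is the "more efficient
   internal representation" mentioned in the comment above the function:
   everything the per-window evaluation touches sits in a single block rather
   than in scattered allocations. */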
405
406 #define sum_elem_ptr(sum,row,col)  \
407     ((sumtype*)CV_MAT_ELEM_PTR_FAST((sum),(row),(col),sizeof(sumtype)))
408
409 #define sqsum_elem_ptr(sqsum,row,col)  \
410     ((sqsumtype*)CV_MAT_ELEM_PTR_FAST((sqsum),(row),(col),sizeof(sqsumtype)))
411
412 #define calc_sum(rect,offset) \
413     ((rect).p0[offset] - (rect).p1[offset] - (rect).p2[offset] + (rect).p3[offset])
414
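/* calc_sum() recovers the pixel sum inside a feature rectangle from the
   integral image in four reads: p0..p3 are precomputed pointers to the
   rectangle's top-left, top-right, bottom-left and bottom-right corners, so
   for a window at the given offset the sum is p0 - p1 - p2 + p3
   (inclusion-exclusion on the integral image). */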
415
416 CV_IMPL void
417 cvSetImagesForHaarClassifierCascade( CvHaarClassifierCascade* _cascade,
418                                      const CvArr* _sum,
419                                      const CvArr* _sqsum,
420                                      const CvArr* _tilted_sum,
421                                      double scale )
422 {
423     CvMat sum_stub, *sum = (CvMat*)_sum;
424     CvMat sqsum_stub, *sqsum = (CvMat*)_sqsum;
425     CvMat tilted_stub, *tilted = (CvMat*)_tilted_sum;
426     CvHidHaarClassifierCascade* cascade;
427     int coi0 = 0, coi1 = 0;
428     int i;
429     CvRect equRect;
430     double weight_scale;
431
432     if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
433         CV_Error( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );
434
435     if( scale <= 0 )
436         CV_Error( CV_StsOutOfRange, "Scale must be positive" );
437
438     sum = cvGetMat( sum, &sum_stub, &coi0 );
439     sqsum = cvGetMat( sqsum, &sqsum_stub, &coi1 );
440
441     if( coi0 || coi1 )
442         CV_Error( CV_BadCOI, "COI is not supported" );
443
444     if( !CV_ARE_SIZES_EQ( sum, sqsum ))
445         CV_Error( CV_StsUnmatchedSizes, "All integral images must have the same size" );
446
447     if( CV_MAT_TYPE(sqsum->type) != CV_64FC1 ||
448         CV_MAT_TYPE(sum->type) != CV_32SC1 )
449         CV_Error( CV_StsUnsupportedFormat,
450         "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );
451
452     if( !_cascade->hid_cascade )
453         icvCreateHidHaarClassifierCascade(_cascade);
454
455     cascade = _cascade->hid_cascade;
456
457     if( cascade->has_tilted_features )
458     {
459         tilted = cvGetMat( tilted, &tilted_stub, &coi1 );
460
461         if( CV_MAT_TYPE(tilted->type) != CV_32SC1 )
462             CV_Error( CV_StsUnsupportedFormat,
463             "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );
464
465         if( sum->step != tilted->step )
466             CV_Error( CV_StsUnmatchedSizes,
467             "Sum and tilted_sum must have the same stride (step, widthStep)" );
468
469         if( !CV_ARE_SIZES_EQ( sum, tilted ))
470             CV_Error( CV_StsUnmatchedSizes, "All integral images must have the same size" );
471         cascade->tilted = *tilted;
472     }
473
474     _cascade->scale = scale;
475     _cascade->real_window_size.width = cvRound( _cascade->orig_window_size.width * scale );
476     _cascade->real_window_size.height = cvRound( _cascade->orig_window_size.height * scale );
477
478     cascade->sum = *sum;
479     cascade->sqsum = *sqsum;
480
481     equRect.x = equRect.y = cvRound(scale);
482     equRect.width = cvRound((_cascade->orig_window_size.width-2)*scale);
483     equRect.height = cvRound((_cascade->orig_window_size.height-2)*scale);
484     weight_scale = 1./(equRect.width*equRect.height);
485     cascade->inv_window_area = weight_scale;
486
487     cascade->p0 = sum_elem_ptr(*sum, equRect.y, equRect.x);
488     cascade->p1 = sum_elem_ptr(*sum, equRect.y, equRect.x + equRect.width );
489     cascade->p2 = sum_elem_ptr(*sum, equRect.y + equRect.height, equRect.x );
490     cascade->p3 = sum_elem_ptr(*sum, equRect.y + equRect.height,
491                                      equRect.x + equRect.width );
492
493     cascade->pq0 = sqsum_elem_ptr(*sqsum, equRect.y, equRect.x);
494     cascade->pq1 = sqsum_elem_ptr(*sqsum, equRect.y, equRect.x + equRect.width );
495     cascade->pq2 = sqsum_elem_ptr(*sqsum, equRect.y + equRect.height, equRect.x );
496     cascade->pq3 = sqsum_elem_ptr(*sqsum, equRect.y + equRect.height,
497                                           equRect.x + equRect.width );
498
499     /* init pointers in haar features according to real window size and
500        given image pointers */
501     for( i = 0; i < _cascade->count; i++ )
502     {
503         int j, k, l;
504         for( j = 0; j < cascade->stage_classifier[i].count; j++ )
505         {
506             for( l = 0; l < cascade->stage_classifier[i].classifier[j].count; l++ )
507             {
508                 CvHaarFeature* feature =
509                     &_cascade->stage_classifier[i].classifier[j].haar_feature[l];
510                 /* CvHidHaarClassifier* classifier =
511                     cascade->stage_classifier[i].classifier + j; */
512                 CvHidHaarFeature* hidfeature =
513                     &cascade->stage_classifier[i].classifier[j].node[l].feature;
514                 double sum0 = 0, area0 = 0;
515                 CvRect r[3];
516
517                 int base_w = -1, base_h = -1;
518                 int new_base_w = 0, new_base_h = 0;
519                 int kx, ky;
520                 int flagx = 0, flagy = 0;
521                 int x0 = 0, y0 = 0;
522                 int nr;
523
524                 /* align blocks */
525                 for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
526                 {
527                     if( !hidfeature->rect[k].p0 )
528                         break;
529                     r[k] = feature->rect[k].r;
530                     base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].width-1) );
531                     base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].x - r[0].x-1) );
532                     base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].height-1) );
533                     base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].y - r[0].y-1) );
534                 }
535
536                 nr = k;
537
538                 base_w += 1;
539                 base_h += 1;
540                 kx = r[0].width / base_w;
541                 ky = r[0].height / base_h;
542
543                 if( kx <= 0 )
544                 {
545                     flagx = 1;
546                     new_base_w = cvRound( r[0].width * scale ) / kx;
547                     x0 = cvRound( r[0].x * scale );
548                 }
549
550                 if( ky <= 0 )
551                 {
552                     flagy = 1;
553                     new_base_h = cvRound( r[0].height * scale ) / ky;
554                     y0 = cvRound( r[0].y * scale );
555                 }
556
557                 for( k = 0; k < nr; k++ )
558                 {
559                     CvRect tr;
560                     double correction_ratio;
561
562                     if( flagx )
563                     {
564                         tr.x = (r[k].x - r[0].x) * new_base_w / base_w + x0;
565                         tr.width = r[k].width * new_base_w / base_w;
566                     }
567                     else
568                     {
569                         tr.x = cvRound( r[k].x * scale );
570                         tr.width = cvRound( r[k].width * scale );
571                     }
572
573                     if( flagy )
574                     {
575                         tr.y = (r[k].y - r[0].y) * new_base_h / base_h + y0;
576                         tr.height = r[k].height * new_base_h / base_h;
577                     }
578                     else
579                     {
580                         tr.y = cvRound( r[k].y * scale );
581                         tr.height = cvRound( r[k].height * scale );
582                     }
583
584 #if CV_ADJUST_WEIGHTS
585                     {
586                     // RAINER START
587                     const float orig_feature_size =  (float)(feature->rect[k].r.width)*feature->rect[k].r.height;
588                     const float orig_norm_size = (float)(_cascade->orig_window_size.width)*(_cascade->orig_window_size.height);
589                     const float feature_size = float(tr.width*tr.height);
590                     //const float normSize    = float(equRect.width*equRect.height);
591                     float target_ratio = orig_feature_size / orig_norm_size;
592                     //float isRatio = featureSize / normSize;
593                     //correctionRatio = targetRatio / isRatio / normSize;
594                     correction_ratio = target_ratio / feature_size;
595                     // RAINER END
596                     }
597 #else
598                     correction_ratio = weight_scale * (!feature->tilted ? 1 : 0.5);
599 #endif
600
601                     if( !feature->tilted )
602                     {
603                         hidfeature->rect[k].p0 = sum_elem_ptr(*sum, tr.y, tr.x);
604                         hidfeature->rect[k].p1 = sum_elem_ptr(*sum, tr.y, tr.x + tr.width);
605                         hidfeature->rect[k].p2 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x);
606                         hidfeature->rect[k].p3 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x + tr.width);
607                     }
608                     else
609                     {
610                         hidfeature->rect[k].p2 = sum_elem_ptr(*tilted, tr.y + tr.width, tr.x + tr.width);
611                         hidfeature->rect[k].p3 = sum_elem_ptr(*tilted, tr.y + tr.width + tr.height,
612                                                               tr.x + tr.width - tr.height);
613                         hidfeature->rect[k].p0 = sum_elem_ptr(*tilted, tr.y, tr.x);
614                         hidfeature->rect[k].p1 = sum_elem_ptr(*tilted, tr.y + tr.height, tr.x - tr.height);
615                     }
616
617                     hidfeature->rect[k].weight = (float)(feature->rect[k].weight * correction_ratio);
618
619                     if( k == 0 )
620                         area0 = tr.width * tr.height;
621                     else
622                         sum0 += hidfeature->rect[k].weight * tr.width * tr.height;
623                 }
624
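                /* Recompute the covering rectangle's weight so that the weighted areas
                   of all rectangles still sum to zero after the integer rounding above,
                   i.e. the rescaled feature keeps a zero response on a constant image. */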
625                 hidfeature->rect[0].weight = (float)(-sum0/area0);
626             } /* l */
627         } /* j */
628     }
629 }
630
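/* Illustrative sketch (not part of the library) of the low-level scanning
   sequence that cvHaarDetectObjects() effectively performs for one scale;
   variable names here are hypothetical, and the 0 passed for tilted assumes
   a cascade without tilted features:

       CvMat* sum   = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
       CvMat* sqsum = cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 );
       cvIntegral( img, sum, sqsum, 0 );
       cvSetImagesForHaarClassifierCascade( cascade, sum, sqsum, 0, scale );
       for( int y = 0; y <= sum->rows - 1 - win_height; y += ystep )
           for( int x = 0; x <= sum->cols - 1 - win_width; x += ystep )
               if( cvRunHaarClassifierCascade( cascade, cvPoint(x, y), 0 ) > 0 )
                   ;  // (x, y) is a candidate detection at this scale
*/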
631
632 CV_INLINE
633 double icvEvalHidHaarClassifier( CvHidHaarClassifier* classifier,
634                                  double variance_norm_factor,
635                                  size_t p_offset )
636 {
637     int idx = 0;
638     do
639     {
640         CvHidHaarTreeNode* node = classifier->node + idx;
641         double t = node->threshold * variance_norm_factor;
642
643         double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
644         sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
645
646         if( node->feature.rect[2].p0 )
647             sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
648
649         idx = sum < t ? node->left : node->right;
650     }
651     while( idx > 0 );
652     return classifier->alpha[-idx];
653 }
654
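/* Indexing convention used above: node->left / node->right values greater
   than zero index the next node within the same weak classifier (CART tree),
   while values <= 0 encode a leaf, whose response is read from
   classifier->alpha[-idx]. For a one-node stump this reduces to picking
   alpha[0] or alpha[1], which is what the stump-based fast paths below exploit. */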
655
656 CV_IMPL int
657 cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
658                             CvPoint pt, int start_stage )
659 {
660     int result = -1;
661
662     int p_offset, pq_offset;
663     int i, j;
664     double mean, variance_norm_factor;
665     CvHidHaarClassifierCascade* cascade;
666
667     if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
668         CV_Error( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid cascade pointer" );
669
670     cascade = _cascade->hid_cascade;
671     if( !cascade )
672         CV_Error( CV_StsNullPtr, "Hidden cascade has not been created.\n"
673             "Use cvSetImagesForHaarClassifierCascade" );
674
675     if( pt.x < 0 || pt.y < 0 ||
676         pt.x + _cascade->real_window_size.width >= cascade->sum.width-2 ||
677         pt.y + _cascade->real_window_size.height >= cascade->sum.height-2 )
678         return -1;
679
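    /* Window normalization: with S = box sum and SQ = box sum of squares over
       the (slightly shrunk) detection window, mean = S * inv_window_area and
       variance = SQ * inv_window_area - mean^2. Node thresholds below are
       multiplied by sqrt(variance), so feature responses are compared in units
       of the window's contrast; a non-positive variance estimate falls back to 1. */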
680     p_offset = pt.y * (cascade->sum.step/sizeof(sumtype)) + pt.x;
681     pq_offset = pt.y * (cascade->sqsum.step/sizeof(sqsumtype)) + pt.x;
682     mean = calc_sum(*cascade,p_offset)*cascade->inv_window_area;
683     variance_norm_factor = cascade->pq0[pq_offset] - cascade->pq1[pq_offset] -
684                            cascade->pq2[pq_offset] + cascade->pq3[pq_offset];
685     variance_norm_factor = variance_norm_factor*cascade->inv_window_area - mean*mean;
686     if( variance_norm_factor >= 0. )
687         variance_norm_factor = sqrt(variance_norm_factor);
688     else
689         variance_norm_factor = 1.;
690
691     if( cascade->is_tree )
692     {
693         CvHidHaarStageClassifier* ptr;
694         assert( start_stage == 0 );
695
696         result = 1;
697         ptr = cascade->stage_classifier;
698
699         while( ptr )
700         {
701             double stage_sum = 0;
702
703             for( j = 0; j < ptr->count; j++ )
704             {
705                 stage_sum += icvEvalHidHaarClassifier( ptr->classifier + j,
706                     variance_norm_factor, p_offset );
707             }
708
709             if( stage_sum >= ptr->threshold )
710             {
711                 ptr = ptr->child;
712             }
713             else
714             {
715                 while( ptr && ptr->next == NULL ) ptr = ptr->parent;
716                 if( ptr == NULL )
717                     return 0;
718                 ptr = ptr->next;
719             }
720         }
721     }
722     else if( cascade->is_stump_based )
723     {
724         for( i = start_stage; i < cascade->count; i++ )
725         {
726 #ifndef CV_HAAR_USE_SSE
727             double stage_sum = 0;
728 #else
729             __m128d stage_sum = _mm_setzero_pd();
730 #endif
731
732             if( cascade->stage_classifier[i].two_rects )
733             {
734                 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
735                 {
736                     CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
737                     CvHidHaarTreeNode* node = classifier->node;
738 #ifndef CV_HAAR_USE_SSE
739                     double t = node->threshold*variance_norm_factor;
740                     double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
741                     sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
742                     stage_sum += classifier->alpha[sum >= t];
743 #else
744                     // ayasin - NHM perf optim. Avoid use of costly flaky jcc
745                     __m128d t = _mm_set_sd(node->threshold*variance_norm_factor);
746                     __m128d a = _mm_set_sd(classifier->alpha[0]);
747                     __m128d b = _mm_set_sd(classifier->alpha[1]);
748                     __m128d sum = _mm_set_sd(calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight +
749                                              calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight);
750                     t = _mm_cmpgt_sd(t, sum);
751                     stage_sum = _mm_add_sd(stage_sum, _mm_blendv_pd(b, a, t));
752 #endif
753                 }
754             }
755             else
756             {
757                 for( j = 0; j < cascade->stage_classifier[i].count; j++ )
758                 {
759                     CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
760                     CvHidHaarTreeNode* node = classifier->node;
761 #ifndef CV_HAAR_USE_SSE
762                     double t = node->threshold*variance_norm_factor;
763                     double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
764                     sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
765                     if( node->feature.rect[2].p0 )
766                         sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
767                     
768                     stage_sum += classifier->alpha[sum >= t];
769 #else
770                     // ayasin - NHM perf optim. Avoid use of costly flaky jcc
771                     __m128d t = _mm_set_sd(node->threshold*variance_norm_factor);
772                     __m128d a = _mm_set_sd(classifier->alpha[0]);
773                     __m128d b = _mm_set_sd(classifier->alpha[1]);
774                     double _sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
775                     _sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
776                     if( node->feature.rect[2].p0 )
777                         _sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
778                     __m128d sum = _mm_set_sd(_sum);
779                     
780                     t = _mm_cmpgt_sd(t, sum);
781                     stage_sum = _mm_add_sd(stage_sum, _mm_blendv_pd(b, a, t));
782 #endif
783                 }
784             }
785
786 #ifndef CV_HAAR_USE_SSE
787             if( stage_sum < cascade->stage_classifier[i].threshold )
788 #else
789             __m128d i_threshold = _mm_set_sd(cascade->stage_classifier[i].threshold);
790             if( _mm_comilt_sd(stage_sum, i_threshold) )
791 #endif
792                 return -i;
793         }
794     }
795     else
796     {
797         for( i = start_stage; i < cascade->count; i++ )
798         {
799             double stage_sum = 0;
800
801             for( j = 0; j < cascade->stage_classifier[i].count; j++ )
802             {
803                 stage_sum += icvEvalHidHaarClassifier(
804                     cascade->stage_classifier[i].classifier + j,
805                     variance_norm_factor, p_offset );
806             }
807
808             if( stage_sum < cascade->stage_classifier[i].threshold )
809                 return -i;
810         }
811     }
812
813     return 1;
814 }
815
816
817 namespace cv
818 {
819
820 struct HaarDetectObjects_ScaleImage_Invoker
821 {
822     HaarDetectObjects_ScaleImage_Invoker( const CvHaarClassifierCascade* _cascade,
823                                           int _stripSize, double _factor,
824                                           const Mat& _sum1, const Mat& _sqsum1, Mat& _norm1,
825                                           Mat& _mask1, Rect _equRect, ConcurrentRectVector& _vec )
826     {
827         cascade = _cascade;
828         stripSize = _stripSize;
829         factor = _factor;
830         sum1 = _sum1;
831         sqsum1 = _sqsum1;
832         norm1 = _norm1;
833         mask1 = _mask1;
834         equRect = _equRect;
835         vec = &_vec;
836     }
837     
838     void operator()( const BlockedRange& range ) const
839     {
840         Size winSize0 = cascade->orig_window_size;
841         Size winSize(cvRound(winSize0.width*factor), cvRound(winSize0.height*factor));
842         int y1 = range.begin()*stripSize, y2 = min(range.end()*stripSize, sum1.rows - 1 - winSize0.height);
843         Size ssz(sum1.cols - 1 - winSize0.width, y2 - y1);
844         int x, y, ystep = factor > 2 ? 1 : 2;
845         
846     #ifdef HAVE_IPP
847         if( cascade->hid_cascade->ipp_stages )
848         {
849             ippiRectStdDev_32f_C1R(sum1.ptr<float>(y1), sum1.step,
850                                    sqsum1.ptr<double>(y1), sqsum1.step,
851                                    norm1.ptr<float>(y1), norm1.step,
852                                    ippiSize(ssz.width, ssz.height), equRect );
853             
854             int positive = (ssz.width/ystep)*((ssz.height + ystep-1)/ystep);
855             
856             if( ystep == 1 )
857                 mask1 = Scalar::all(1);
858             else
859                 for( y = y1; y < y2; y++ )
860                 {
861                     uchar* mask1row = mask1.ptr(y);
862                     memset( mask1row, 0, ssz.width );
863                     
864                     if( y % ystep == 0 )
865                         for( x = 0; x < ssz.width; x += ystep )
866                             mask1row[x] = (uchar)1;
867                 }
868             
869             for( int j = 0; j < cascade->count; j++ )
870             {
871                 if( ippiApplyHaarClassifier_32f_C1R(
872                             sum1.ptr<float>(y1), sum1.step,
873                             norm1.ptr<float>(y1), norm1.step,
874                             mask1.ptr<uchar>(y1), mask1.step,
875                             ippiSize(ssz.width, ssz.height), &positive,
876                             cascade->hid_cascade->stage_classifier[j].threshold,
877                             (IppiHaarClassifier_32f*)cascade->hid_cascade->ipp_stages[j]) < 0 )
878                     positive = 0;
879                 if( positive <= 0 )
880                     break;
881             }
882             
883             if( positive > 0 )
884                 for( y = y1; y < y2; y += ystep )
885                 {
886                     uchar* mask1row = mask1.ptr(y);
887                     for( x = 0; x < ssz.width; x += ystep )
888                         if( mask1row[x] != 0 )
889                         {
890                             vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
891                                                 winSize.width, winSize.height));
892                             if( --positive == 0 )
893                                 break;
894                         }
895                     if( positive == 0 )
896                         break;
897                 }
898         }
899         else
900 #endif
901             for( y = y1; y < y2; y += ystep )
902                 for( x = 0; x < ssz.width; x += ystep )
903                 {
904                     if( cvRunHaarClassifierCascade( cascade, cvPoint(x,y), 0 ) > 0 )
905                         vec->push_back(Rect(cvRound(x*factor), cvRound(y*factor),
906                                             winSize.width, winSize.height)); 
907                 }
908     }
909     
910     const CvHaarClassifierCascade* cascade;
911     int stripSize;
912     double factor;
913     Mat sum1, sqsum1, norm1, mask1;
914     Rect equRect;
915     ConcurrentRectVector* vec;
916 };
917     
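/* The invoker above is driven by cv::parallel_for over row strips of the
   downscaled image: each BlockedRange element covers stripSize rows, so with
   TBB enabled different strips are scanned concurrently and hits are pushed
   into the shared ConcurrentRectVector; without TBB a single strip reduces it
   to a plain row-by-row scan. */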
918
919 struct HaarDetectObjects_ScaleCascade_Invoker
920 {
921     HaarDetectObjects_ScaleCascade_Invoker( const CvHaarClassifierCascade* _cascade,
922                                             Size _winsize, const Range& _xrange, double _ystep,
923                                             size_t _sumstep, const int** _p, const int** _pq,
924                                             ConcurrentRectVector& _vec )
925     {
926         cascade = _cascade;
927         winsize = _winsize;
928         xrange = _xrange;
929         ystep = _ystep;
930         sumstep = _sumstep;
931         p = _p; pq = _pq;
932         vec = &_vec;
933     }
934     
935     void operator()( const BlockedRange& range ) const
936     {
937         int iy, startY = range.begin(), endY = range.end();
938         const int *p0 = p[0], *p1 = p[1], *p2 = p[2], *p3 = p[3];
939         const int *pq0 = pq[0], *pq1 = pq[1], *pq2 = pq[2], *pq3 = pq[3];
940         bool doCannyPruning = p0 != 0;
941         int sstep = sumstep/sizeof(p0[0]);
942         
943         for( iy = startY; iy < endY; iy++ )
944         {
945             int ix, y = cvRound(iy*ystep), ixstep = 1;
946             for( ix = xrange.start; ix < xrange.end; ix += ixstep )
947             {
948                 int x = cvRound(ix*ystep); // it should really be ystep, not ixstep
949                 
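                // Canny pruning: p[] indexes the integral of the Canny edge map and pq[]
                // the plain sum image over the central part of the window; a window with
                // too few edge pixels (or almost no intensity) is rejected cheaply and
                // the horizontal step is doubled.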
950                 if( doCannyPruning )
951                 {
952                     int offset = y*sstep + x;
953                     int s = p0[offset] - p1[offset] - p2[offset] + p3[offset];
954                     int sq = pq0[offset] - pq1[offset] - pq2[offset] + pq3[offset];
955                     if( s < 100 || sq < 20 )
956                     {
957                         ixstep = 2;
958                         continue;
959                     }
960                 }
961                 
962                 int result = cvRunHaarClassifierCascade( cascade, cvPoint(x, y), 0 );
963                 if( result > 0 )
964                     vec->push_back(Rect(x, y, winsize.width, winsize.height));
965                 ixstep = result != 0 ? 1 : 2;
966             }
967         }
968     }
969     
970     const CvHaarClassifierCascade* cascade;
971     double ystep;
972     size_t sumstep;
973     Size winsize;
974     Range xrange;
975     const int** p;
976     const int** pq;
977     ConcurrentRectVector* vec;
978 };
979     
980     
981 }
982     
983
984 CV_IMPL CvSeq*
985 cvHaarDetectObjects( const CvArr* _img,
986                      CvHaarClassifierCascade* cascade,
987                      CvMemStorage* storage, double scaleFactor,
988                      int minNeighbors, int flags, CvSize minSize )
989 {
990     const double GROUP_EPS = 0.2;
991     CvMat stub, *img = (CvMat*)_img;
992     cv::Ptr<CvMat> temp, sum, tilted, sqsum, normImg, sumcanny, imgSmall;
993     CvSeq* result_seq = 0;
994     cv::Ptr<CvMemStorage> temp_storage;
995
996     cv::ConcurrentRectVector allCandidates;
997     std::vector<cv::Rect> rectList;
998     std::vector<int> rweights;
999     double factor;
1000     int coi;
1001     bool doCannyPruning = (flags & CV_HAAR_DO_CANNY_PRUNING) != 0;
1002     bool findBiggestObject = (flags & CV_HAAR_FIND_BIGGEST_OBJECT) != 0;
1003     bool roughSearch = (flags & CV_HAAR_DO_ROUGH_SEARCH) != 0;
1004
1005     if( !CV_IS_HAAR_CLASSIFIER(cascade) )
1006         CV_Error( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier cascade" );
1007
1008     if( !storage )
1009         CV_Error( CV_StsNullPtr, "Null storage pointer" );
1010
1011     img = cvGetMat( img, &stub, &coi );
1012     if( coi )
1013         CV_Error( CV_BadCOI, "COI is not supported" );
1014
1015     if( CV_MAT_DEPTH(img->type) != CV_8U )
1016         CV_Error( CV_StsUnsupportedFormat, "Only 8-bit images are supported" );
1017     
1018     if( scaleFactor <= 1 )
1019         CV_Error( CV_StsOutOfRange, "scale factor must be > 1" );
1020
1021     if( findBiggestObject )
1022         flags &= ~CV_HAAR_SCALE_IMAGE;
1023
1024     temp = cvCreateMat( img->rows, img->cols, CV_8UC1 );
1025     sum = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1026     sqsum = cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 );
1027
1028     if( !cascade->hid_cascade )
1029         icvCreateHidHaarClassifierCascade(cascade);
1030
1031     if( cascade->hid_cascade->has_tilted_features )
1032         tilted = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1033
1034     result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), storage );
1035
1036     if( CV_MAT_CN(img->type) > 1 )
1037     {
1038         cvCvtColor( img, temp, CV_BGR2GRAY );
1039         img = temp;
1040     }
1041
1042     if( findBiggestObject )
1043         flags &= ~(CV_HAAR_SCALE_IMAGE|CV_HAAR_DO_CANNY_PRUNING);
1044
1045     if( flags & CV_HAAR_SCALE_IMAGE )
1046     {
1047         CvSize winSize0 = cascade->orig_window_size;
1048 #ifdef HAVE_IPP
1049         int use_ipp = cascade->hid_cascade->ipp_stages != 0;
1050
1051         if( use_ipp )
1052             normImg = cvCreateMat( img->rows, img->cols, CV_32FC1 );
1053 #endif
1054         imgSmall = cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 );
1055
1056         for( factor = 1; ; factor *= scaleFactor )
1057         {
1058             CvSize winSize = { cvRound(winSize0.width*factor),
1059                                 cvRound(winSize0.height*factor) };
1060             CvSize sz = { cvRound( img->cols/factor ), cvRound( img->rows/factor ) };
1061             CvSize sz1 = { sz.width - winSize0.width, sz.height - winSize0.height };
1062
1063             CvRect equRect = { icv_object_win_border, icv_object_win_border,
1064                 winSize0.width - icv_object_win_border*2,
1065                 winSize0.height - icv_object_win_border*2 };
1066
1067             CvMat img1, sum1, sqsum1, norm1, tilted1, mask1;
1068             CvMat* _tilted = 0;
1069
1070             if( sz1.width <= 0 || sz1.height <= 0 )
1071                 break;
1072             if( winSize.width < minSize.width || winSize.height < minSize.height )
1073                 continue;
1074
1075             img1 = cvMat( sz.height, sz.width, CV_8UC1, imgSmall->data.ptr );
1076             sum1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, sum->data.ptr );
1077             sqsum1 = cvMat( sz.height+1, sz.width+1, CV_64FC1, sqsum->data.ptr );
1078             if( tilted )
1079             {
1080                 tilted1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, tilted->data.ptr );
1081                 _tilted = &tilted1;
1082             }
1083             norm1 = cvMat( sz1.height, sz1.width, CV_32FC1, normImg ? normImg->data.ptr : 0 );
1084             mask1 = cvMat( sz1.height, sz1.width, CV_8UC1, temp->data.ptr );
1085
1086             cvResize( img, &img1, CV_INTER_LINEAR );
1087             cvIntegral( &img1, &sum1, &sqsum1, _tilted );
1088
1089             int ystep = factor > 2 ? 1 : 2;
1090         #ifdef HAVE_TBB
1091             const int LOCS_PER_THREAD = 1000;
1092             int stripCount = ((sz1.width/ystep)*(sz1.height + ystep-1)/ystep + LOCS_PER_THREAD/2)/LOCS_PER_THREAD;
1093             stripCount = std::min(std::max(stripCount, 1), 100);
1094         #else
1095             const int stripCount = 1;
1096         #endif
1097             
1098 #ifdef HAVE_IPP
1099             if( use_ipp )
1100             {
1101                 cv::Mat fsum(sum1.rows, sum1.cols, CV_32F, sum1.data.ptr, sum1.step);
1102                 cv::Mat(sum1).convertTo(fsum, CV_32F, 1, -(1<<24));
1103             }
1104             else
1105 #endif
1106                 cvSetImagesForHaarClassifierCascade( cascade, &sum1, &sqsum1, _tilted, 1. );            
1107             
1108             cv::Mat _norm1(&norm1), _mask1(&mask1);
1109             cv::parallel_for(cv::BlockedRange(0, stripCount),
1110                          cv::HaarDetectObjects_ScaleImage_Invoker(cascade,
1111                                 (((sz1.height + stripCount - 1)/stripCount + ystep-1)/ystep)*ystep,
1112                                 factor, cv::Mat(&sum1), cv::Mat(&sqsum1), _norm1, _mask1,
1113                                 cv::Rect(equRect), allCandidates));
1114         }
1115     }
1116     else
1117     {
1118         int n_factors = 0;
1119         cv::Rect scanROI;
1120
1121         cvIntegral( img, sum, sqsum, tilted );
1122
1123         if( doCannyPruning )
1124         {
1125             sumcanny = cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 );
1126             cvCanny( img, temp, 0, 50, 3 );
1127             cvIntegral( temp, sumcanny );
1128         }
1129
1130         for( n_factors = 0, factor = 1;
1131              factor*cascade->orig_window_size.width < img->cols - 10 &&
1132              factor*cascade->orig_window_size.height < img->rows - 10;
1133              n_factors++, factor *= scaleFactor )
1134             ;
1135
1136         if( findBiggestObject )
1137         {
1138             scaleFactor = 1./scaleFactor;
1139             factor *= scaleFactor;
1140         }
1141         else
1142             factor = 1;
1143
1144         for( ; n_factors-- > 0; factor *= scaleFactor )
1145         {
1146             const double ystep = std::max( 2., factor );
1147             CvSize winSize = { cvRound( cascade->orig_window_size.width * factor ),
1148                                 cvRound( cascade->orig_window_size.height * factor )};
1149             CvRect equRect = { 0, 0, 0, 0 };
1150             int *p[4] = {0,0,0,0};
1151             int *pq[4] = {0,0,0,0};
1152             int startX = 0, startY = 0;
1153             int endX = cvRound((img->cols - winSize.width) / ystep);
1154             int endY = cvRound((img->rows - winSize.height) / ystep);
1155
1156             if( winSize.width < minSize.width || winSize.height < minSize.height )
1157             {
1158                 if( findBiggestObject )
1159                     break;
1160                 continue;
1161             }
1162
1163             cvSetImagesForHaarClassifierCascade( cascade, sum, sqsum, tilted, factor );
1164             cvZero( temp );
1165
1166             if( doCannyPruning )
1167             {
1168                 equRect.x = cvRound(winSize.width*0.15);
1169                 equRect.y = cvRound(winSize.height*0.15);
1170                 equRect.width = cvRound(winSize.width*0.7);
1171                 equRect.height = cvRound(winSize.height*0.7);
1172
1173                 p[0] = (int*)(sumcanny->data.ptr + equRect.y*sumcanny->step) + equRect.x;
1174                 p[1] = (int*)(sumcanny->data.ptr + equRect.y*sumcanny->step)
1175                             + equRect.x + equRect.width;
1176                 p[2] = (int*)(sumcanny->data.ptr + (equRect.y + equRect.height)*sumcanny->step) + equRect.x;
1177                 p[3] = (int*)(sumcanny->data.ptr + (equRect.y + equRect.height)*sumcanny->step)
1178                             + equRect.x + equRect.width;
1179
1180                 pq[0] = (int*)(sum->data.ptr + equRect.y*sum->step) + equRect.x;
1181                 pq[1] = (int*)(sum->data.ptr + equRect.y*sum->step)
1182                             + equRect.x + equRect.width;
1183                 pq[2] = (int*)(sum->data.ptr + (equRect.y + equRect.height)*sum->step) + equRect.x;
1184                 pq[3] = (int*)(sum->data.ptr + (equRect.y + equRect.height)*sum->step)
1185                             + equRect.x + equRect.width;
1186             }
1187
1188             if( scanROI.area() > 0 )
1189             {
1190                 // adjust the scan range (startX/endX, startY/endY) to the current ROI
1191                 startY = cvRound(scanROI.y / ystep);
1192                 endY = cvRound((scanROI.y + scanROI.height - winSize.height) / ystep);
1193
1194                 startX = cvRound(scanROI.x / ystep);
1195                 endX = cvRound((scanROI.x + scanROI.width - winSize.width) / ystep);
1196             }
1197
1198             cv::parallel_for(cv::BlockedRange(startY, endY),
1199                 cv::HaarDetectObjects_ScaleCascade_Invoker(cascade, winSize, cv::Range(startX, endX),
1200                                                            ystep, sum->step, (const int**)p,
1201                                                            (const int**)pq, allCandidates ));
1202
1203             if( findBiggestObject && !allCandidates.empty() && scanROI.area() == 0 )
1204             {
1205                 rectList.resize(allCandidates.size());
1206                 std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());
1207                 
1208                 groupRectangles(rectList, std::max(minNeighbors, 1), GROUP_EPS);
1209                 
1210                 if( !rectList.empty() )
1211                 {
1212                     size_t i, sz = rectList.size();
1213                     cv::Rect maxRect;
1214                     
1215                     for( i = 0; i < sz; i++ )
1216                     {
1217                         if( rectList[i].area() > maxRect.area() )
1218                             maxRect = rectList[i];
1219                     }
1220                     
1221                     allCandidates.push_back(maxRect);
1222                     
1223                     scanROI = maxRect;
1224                     int dx = cvRound(maxRect.width*GROUP_EPS);
1225                     int dy = cvRound(maxRect.height*GROUP_EPS);
1226                     scanROI.x = std::max(scanROI.x - dx, 0);
1227                     scanROI.y = std::max(scanROI.y - dy, 0);
1228                     scanROI.width = std::min(scanROI.width + dx*2, img->cols-1-scanROI.x);
1229                     scanROI.height = std::min(scanROI.height + dy*2, img->rows-1-scanROI.y);
1230                 
1231                     double minScale = roughSearch ? 0.6 : 0.4;
1232                     minSize.width = cvRound(maxRect.width*minScale);
1233                     minSize.height = cvRound(maxRect.height*minScale);
1234                 }
1235             }
1236         }
1237     }
1238
1239     rectList.resize(allCandidates.size());
1240     if(!allCandidates.empty())
1241         std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());
1242     
1243     if( minNeighbors != 0 || findBiggestObject )
1244         groupRectangles(rectList, rweights, std::max(minNeighbors, 1), GROUP_EPS);
1245         
1246     if( findBiggestObject && rectList.size() )
1247     {
1248         CvAvgComp result_comp = {{0,0,0,0},0};
1249         
1250         for( size_t i = 0; i < rectList.size(); i++ )
1251         {
1252             cv::Rect r = rectList[i];
1253             if( r.area() > cv::Rect(result_comp.rect).area() )
1254             {
1255                 result_comp.rect = r;
1256                 result_comp.neighbors = rweights[i];
1257             }
1258         }
1259         cvSeqPush( result_seq, &result_comp );
1260     }
1261     else
1262     {
1263         for( size_t i = 0; i < rectList.size(); i++ )
1264         {
1265             CvAvgComp c;
1266             c.rect = rectList[i];
1267             c.neighbors = rweights[i];
1268             cvSeqPush( result_seq, &c );
1269         }
1270     }
1271
1272     return result_seq;
1273 }
1274
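/* Illustrative usage sketch (not part of the library); the cascade file name
   and variable names are hypothetical:

       CvHaarClassifierCascade* cascade = (CvHaarClassifierCascade*)
           cvLoad( "haarcascade_frontalface_alt.xml", 0, 0, 0 );
       CvMemStorage* storage = cvCreateMemStorage(0);
       CvSeq* faces = cvHaarDetectObjects( gray, cascade, storage, 1.1, 3,
                                           CV_HAAR_DO_CANNY_PRUNING, cvSize(30, 30) );
       for( int i = 0; i < (faces ? faces->total : 0); i++ )
       {
           CvAvgComp comp = *(CvAvgComp*)cvGetSeqElem( faces, i );
           // comp.rect is the detection, comp.neighbors its neighbor count
       }
       cvReleaseMemStorage( &storage );
*/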
1275
1276 static CvHaarClassifierCascade*
1277 icvLoadCascadeCART( const char** input_cascade, int n, CvSize orig_window_size )
1278 {
1279     int i;
1280     CvHaarClassifierCascade* cascade = icvCreateHaarClassifierCascade(n);
1281     cascade->orig_window_size = orig_window_size;
1282
1283     for( i = 0; i < n; i++ )
1284     {
1285         int j, count, l;
1286         float threshold = 0;
1287         const char* stage = input_cascade[i];
1288         int dl = 0;
1289
1290         /* tree links */
1291         int parent = -1;
1292         int next = -1;
1293
1294         sscanf( stage, "%d%n", &count, &dl );
1295         stage += dl;
1296
1297         assert( count > 0 );
1298         cascade->stage_classifier[i].count = count;
1299         cascade->stage_classifier[i].classifier =
1300             (CvHaarClassifier*)cvAlloc( count*sizeof(cascade->stage_classifier[i].classifier[0]));
1301
1302         for( j = 0; j < count; j++ )
1303         {
1304             CvHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
1305             int k, rects = 0;
1306             char str[100];
1307
1308             sscanf( stage, "%d%n", &classifier->count, &dl );
1309             stage += dl;
1310
1311             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1312                 classifier->count * ( sizeof( *classifier->haar_feature ) +
1313                                       sizeof( *classifier->threshold ) +
1314                                       sizeof( *classifier->left ) +
1315                                       sizeof( *classifier->right ) ) +
1316                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
1317             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1318             classifier->left = (int*) (classifier->threshold + classifier->count);
1319             classifier->right = (int*) (classifier->left + classifier->count);
1320             classifier->alpha = (float*) (classifier->right + classifier->count);
1321
1322             for( l = 0; l < classifier->count; l++ )
1323             {
1324                 sscanf( stage, "%d%n", &rects, &dl );
1325                 stage += dl;
1326
1327                 assert( rects >= 2 && rects <= CV_HAAR_FEATURE_MAX );
1328
1329                 for( k = 0; k < rects; k++ )
1330                 {
1331                     CvRect r;
1332                     int band = 0;
1333                     sscanf( stage, "%d%d%d%d%d%f%n",
1334                             &r.x, &r.y, &r.width, &r.height, &band,
1335                             &(classifier->haar_feature[l].rect[k].weight), &dl );
1336                     stage += dl;
1337                     classifier->haar_feature[l].rect[k].r = r;
1338                 }
1339                 sscanf( stage, "%s%n", str, &dl );
1340                 stage += dl;
1341
1342                 classifier->haar_feature[l].tilted = strncmp( str, "tilted", 6 ) == 0;
1343
1344                 for( k = rects; k < CV_HAAR_FEATURE_MAX; k++ )
1345                 {
1346                     memset( classifier->haar_feature[l].rect + k, 0,
1347                             sizeof(classifier->haar_feature[l].rect[k]) );
1348                 }
1349
1350                 sscanf( stage, "%f%d%d%n", &(classifier->threshold[l]),
1351                                        &(classifier->left[l]),
1352                                        &(classifier->right[l]), &dl );
1353                 stage += dl;
1354             }
1355             for( l = 0; l <= classifier->count; l++ )
1356             {
1357                 sscanf( stage, "%f%n", &(classifier->alpha[l]), &dl );
1358                 stage += dl;
1359             }
1360         }
1361
1362         sscanf( stage, "%f%n", &threshold, &dl );
1363         stage += dl;
1364
1365         cascade->stage_classifier[i].threshold = threshold;
1366
1367         /* load tree links */
1368         if( sscanf( stage, "%d%d%n", &parent, &next, &dl ) != 2 )
1369         {
1370             parent = i - 1;
1371             next = -1;
1372         }
1373         stage += dl;
1374
1375         cascade->stage_classifier[i].parent = parent;
1376         cascade->stage_classifier[i].next = next;
1377         cascade->stage_classifier[i].child = -1;
1378
1379         if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
1380         {
1381             cascade->stage_classifier[parent].child = i;
1382         }
1383     }
1384
1385     return cascade;
1386 }
1387
1388 #ifndef _MAX_PATH
1389 #define _MAX_PATH 1024
1390 #endif
1391
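/* Loads a cascade either from a directory holding per-stage text files
   (<directory>/<i>/AdaBoostCARTHaarClassifier.txt) or, when no such files exist,
   from a single XML/YAML file via cvLoad().  A minimal usage sketch (the file name
   below is only an example):

       CvHaarClassifierCascade* cascade =
           cvLoadHaarClassifierCascade( "haarcascade_frontalface_alt.xml", cvSize( 24, 24 ) );
       ...
       cvReleaseHaarClassifierCascade( &cascade );
*/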
1392 CV_IMPL CvHaarClassifierCascade*
1393 cvLoadHaarClassifierCascade( const char* directory, CvSize orig_window_size )
1394 {
1395     const char** input_cascade = 0;
1396     CvHaarClassifierCascade *cascade = 0;
1397
1398     int i, n;
1399     const char* slash;
1400     char name[_MAX_PATH];
1401     int size = 0;
1402     char* ptr = 0;
1403
1404     if( !directory )
1405         CV_Error( CV_StsNullPtr, "Null path is passed" );
1406
1407     n = (int)strlen(directory);
1408     slash = n > 0 && (directory[n-1] == '\\' || directory[n-1] == '/') ? "" : "/";
1409
1410     /* try to read the classifier from directory */
1411     for( n = 0; ; n++ )
1412     {
1413         sprintf( name, "%s%s%d/AdaBoostCARTHaarClassifier.txt", directory, slash, n );
1414         FILE* f = fopen( name, "rb" );
1415         if( !f )
1416             break;
1417         fseek( f, 0, SEEK_END );
1418         size += ftell( f ) + 1;
1419         fclose(f);
1420     }
1421
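    /* nothing was found: if the path does not end with a slash, assume it names a single
       XML/YAML cascade file and hand it over to the generic loader */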
1422     if( n == 0 && slash[0] )
1423         return (CvHaarClassifierCascade*)cvLoad( directory );
1424
1425     if( n == 0 )
1426         CV_Error( CV_StsBadArg, "Invalid path" );
1427
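    /* one block holds the n+1 stage pointers followed by the concatenated,
       '\0'-terminated stage texts */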
1428     size += (n+1)*sizeof(char*);
1429     input_cascade = (const char**)cvAlloc( size );
1430     ptr = (char*)(input_cascade + n + 1);
1431
1432     for( i = 0; i < n; i++ )
1433     {
1434         sprintf( name, "%s%s%d/AdaBoostCARTHaarClassifier.txt", directory, slash, i );
1435         FILE* f = fopen( name, "rb" );
1436         if( !f )
1437             CV_Error( CV_StsError, "" );
1438         fseek( f, 0, SEEK_END );
1439         size = ftell( f );
1440         fseek( f, 0, SEEK_SET );
1441         if( fread( ptr, 1, size, f ) != (size_t)size )
                 CV_Error( CV_StsError, "Could not read the classifier data" );
1442         fclose(f);
1443         input_cascade[i] = ptr;
1444         ptr += size;
1445         *ptr++ = '\0';
1446     }
1447
1448     input_cascade[n] = 0;
1449     cascade = icvLoadCascadeCART( input_cascade, n, orig_window_size );
1450
1451     if( input_cascade )
1452         cvFree( &input_cascade );
1453
1454     return cascade;
1455 }
1456
1457
1458 CV_IMPL void
1459 cvReleaseHaarClassifierCascade( CvHaarClassifierCascade** _cascade )
1460 {
1461     if( _cascade && *_cascade )
1462     {
1463         int i, j;
1464         CvHaarClassifierCascade* cascade = *_cascade;
1465
1466         for( i = 0; i < cascade->count; i++ )
1467         {
1468             for( j = 0; j < cascade->stage_classifier[i].count; j++ )
1469                 cvFree( &cascade->stage_classifier[i].classifier[j].haar_feature );
1470             cvFree( &cascade->stage_classifier[i].classifier );
1471         }
1472         icvReleaseHidHaarClassifierCascade( &cascade->hid_cascade );
1473         cvFree( _cascade );
1474     }
1475 }
1476
1477
1478 /****************************************************************************************\
1479 *                                  Persistence functions                                 *
1480 \****************************************************************************************/
1481
1482 /* field names */
1483
1484 #define ICV_HAAR_SIZE_NAME              "size"
1485 #define ICV_HAAR_STAGES_NAME            "stages"
1486 #define ICV_HAAR_TREES_NAME             "trees"
1487 #define ICV_HAAR_FEATURE_NAME           "feature"
1488 #define ICV_HAAR_RECTS_NAME             "rects"
1489 #define ICV_HAAR_TILTED_NAME            "tilted"
1490 #define ICV_HAAR_THRESHOLD_NAME         "threshold"
1491 #define ICV_HAAR_LEFT_NODE_NAME         "left_node"
1492 #define ICV_HAAR_LEFT_VAL_NAME          "left_val"
1493 #define ICV_HAAR_RIGHT_NODE_NAME        "right_node"
1494 #define ICV_HAAR_RIGHT_VAL_NAME         "right_val"
1495 #define ICV_HAAR_STAGE_THRESHOLD_NAME   "stage_threshold"
1496 #define ICV_HAAR_PARENT_NAME            "parent"
1497 #define ICV_HAAR_NEXT_NAME              "next"
1498
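/* On disk the cascade is a map with a "size" sequence (window width, height) and a
   "stages" sequence.  Each stage is a map with a "trees" sequence, a "stage_threshold"
   and the "parent"/"next" stage links.  Each tree is a sequence of split nodes; a node
   carries a "feature" map ("rects" + "tilted"), a "threshold" and either a child index
   ("left_node"/"right_node") or a leaf value ("left_val"/"right_val"). */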
1499 static int
1500 icvIsHaarClassifier( const void* struct_ptr )
1501 {
1502     return CV_IS_HAAR_CLASSIFIER( struct_ptr );
1503 }
1504
1505 static void*
1506 icvReadHaarClassifier( CvFileStorage* fs, CvFileNode* node )
1507 {
1508     CvHaarClassifierCascade* cascade = NULL;
1509
1510     char buf[256];
1511     CvFileNode* seq_fn = NULL; /* sequence */
1512     CvFileNode* fn = NULL;
1513     CvFileNode* stages_fn = NULL;
1514     CvSeqReader stages_reader;
1515     int n;
1516     int i, j, k, l;
1517     int parent, next;
1518
1519     stages_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_STAGES_NAME );
1520     if( !stages_fn || !CV_NODE_IS_SEQ( stages_fn->tag) )
1521         CV_Error( CV_StsError, "Invalid stages node" );
1522
1523     n = stages_fn->data.seq->total;
1524     cascade = icvCreateHaarClassifierCascade(n);
1525
1526     /* read size */
1527     seq_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_SIZE_NAME );
1528     if( !seq_fn || !CV_NODE_IS_SEQ( seq_fn->tag ) || seq_fn->data.seq->total != 2 )
1529         CV_Error( CV_StsError, "size node is not a valid sequence." );
1530     fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 0 );
1531     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1532         CV_Error( CV_StsError, "Invalid size node: width must be positive integer" );
1533     cascade->orig_window_size.width = fn->data.i;
1534     fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 1 );
1535     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1536         CV_Error( CV_StsError, "Invalid size node: height must be positive integer" );
1537     cascade->orig_window_size.height = fn->data.i;
1538
1539     cvStartReadSeq( stages_fn->data.seq, &stages_reader );
1540     for( i = 0; i < n; ++i )
1541     {
1542         CvFileNode* stage_fn;
1543         CvFileNode* trees_fn;
1544         CvSeqReader trees_reader;
1545
1546         stage_fn = (CvFileNode*) stages_reader.ptr;
1547         if( !CV_NODE_IS_MAP( stage_fn->tag ) )
1548         {
1549             sprintf( buf, "Invalid stage %d", i );
1550             CV_Error( CV_StsError, buf );
1551         }
1552
1553         trees_fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_TREES_NAME );
1554         if( !trees_fn || !CV_NODE_IS_SEQ( trees_fn->tag )
1555             || trees_fn->data.seq->total <= 0 )
1556         {
1557             sprintf( buf, "Trees node is not a valid sequence. (stage %d)", i );
1558             CV_Error( CV_StsError, buf );
1559         }
1560
1561         cascade->stage_classifier[i].classifier =
1562             (CvHaarClassifier*) cvAlloc( trees_fn->data.seq->total
1563                 * sizeof( cascade->stage_classifier[i].classifier[0] ) );
1564         for( j = 0; j < trees_fn->data.seq->total; ++j )
1565         {
1566             cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
1567         }
1568         cascade->stage_classifier[i].count = trees_fn->data.seq->total;
1569
1570         cvStartReadSeq( trees_fn->data.seq, &trees_reader );
1571         for( j = 0; j < trees_fn->data.seq->total; ++j )
1572         {
1573             CvFileNode* tree_fn;
1574             CvSeqReader tree_reader;
1575             CvHaarClassifier* classifier;
1576             int last_idx;
1577
1578             classifier = &cascade->stage_classifier[i].classifier[j];
1579             tree_fn = (CvFileNode*) trees_reader.ptr;
1580             if( !CV_NODE_IS_SEQ( tree_fn->tag ) || tree_fn->data.seq->total <= 0 )
1581             {
1582                 sprintf( buf, "Tree node is not a valid sequence."
1583                          " (stage %d, tree %d)", i, j );
1584                 CV_Error( CV_StsError, buf );
1585             }
1586
1587             classifier->count = tree_fn->data.seq->total;
1588             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1589                 classifier->count * ( sizeof( *classifier->haar_feature ) +
1590                                       sizeof( *classifier->threshold ) +
1591                                       sizeof( *classifier->left ) +
1592                                       sizeof( *classifier->right ) ) +
1593                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
1594             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1595             classifier->left = (int*) (classifier->threshold + classifier->count);
1596             classifier->right = (int*) (classifier->left + classifier->count);
1597             classifier->alpha = (float*) (classifier->right + classifier->count);
1598
1599             cvStartReadSeq( tree_fn->data.seq, &tree_reader );
1600             for( k = 0, last_idx = 0; k < tree_fn->data.seq->total; ++k )
1601             {
1602                 CvFileNode* node_fn;
1603                 CvFileNode* feature_fn;
1604                 CvFileNode* rects_fn;
1605                 CvSeqReader rects_reader;
1606
1607                 node_fn = (CvFileNode*) tree_reader.ptr;
1608                 if( !CV_NODE_IS_MAP( node_fn->tag ) )
1609                 {
1610                     sprintf( buf, "Tree node %d is not a valid map. (stage %d, tree %d)",
1611                              k, i, j );
1612                     CV_Error( CV_StsError, buf );
1613                 }
1614                 feature_fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_FEATURE_NAME );
1615                 if( !feature_fn || !CV_NODE_IS_MAP( feature_fn->tag ) )
1616                 {
1617                     sprintf( buf, "Feature node is not a valid map. "
1618                              "(stage %d, tree %d, node %d)", i, j, k );
1619                     CV_Error( CV_StsError, buf );
1620                 }
1621                 rects_fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_RECTS_NAME );
1622                 if( !rects_fn || !CV_NODE_IS_SEQ( rects_fn->tag )
1623                     || rects_fn->data.seq->total < 1
1624                     || rects_fn->data.seq->total > CV_HAAR_FEATURE_MAX )
1625                 {
1626                     sprintf( buf, "Rects node is not a valid sequence. "
1627                              "(stage %d, tree %d, node %d)", i, j, k );
1628                     CV_Error( CV_StsError, buf );
1629                 }
1630                 cvStartReadSeq( rects_fn->data.seq, &rects_reader );
1631                 for( l = 0; l < rects_fn->data.seq->total; ++l )
1632                 {
1633                     CvFileNode* rect_fn;
1634                     CvRect r;
1635
1636                     rect_fn = (CvFileNode*) rects_reader.ptr;
1637                     if( !CV_NODE_IS_SEQ( rect_fn->tag ) || rect_fn->data.seq->total != 5 )
1638                     {
1639                         sprintf( buf, "Rect %d is not a valid sequence. "
1640                                  "(stage %d, tree %d, node %d)", l, i, j, k );
1641                         CV_Error( CV_StsError, buf );
1642                     }
1643
1644                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 0 );
1645                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1646                     {
1647                         sprintf( buf, "x coordinate must be non-negative integer. "
1648                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1649                         CV_Error( CV_StsError, buf );
1650                     }
1651                     r.x = fn->data.i;
1652                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 1 );
1653                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1654                     {
1655                         sprintf( buf, "y coordinate must be non-negative integer. "
1656                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1657                         CV_Error( CV_StsError, buf );
1658                     }
1659                     r.y = fn->data.i;
1660                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 2 );
1661                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1662                         || r.x + fn->data.i > cascade->orig_window_size.width )
1663                     {
1664                         sprintf( buf, "width must be positive integer and "
1665                                  "(x + width) must not exceed window width. "
1666                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1667                         CV_Error( CV_StsError, buf );
1668                     }
1669                     r.width = fn->data.i;
1670                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 3 );
1671                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1672                         || r.y + fn->data.i > cascade->orig_window_size.height )
1673                     {
1674                         sprintf( buf, "height must be positive integer and "
1675                                  "(y + height) must not exceed window height. "
1676                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1677                         CV_Error( CV_StsError, buf );
1678                     }
1679                     r.height = fn->data.i;
1680                     fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 4 );
1681                     if( !CV_NODE_IS_REAL( fn->tag ) )
1682                     {
1683                         sprintf( buf, "weight must be real number. "
1684                                  "(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1685                         CV_Error( CV_StsError, buf );
1686                     }
1687
1688                     classifier->haar_feature[k].rect[l].weight = (float) fn->data.f;
1689                     classifier->haar_feature[k].rect[l].r = r;
1690
1691                     CV_NEXT_SEQ_ELEM( sizeof( *rect_fn ), rects_reader );
1692                 } /* for each rect */
1693                 for( l = rects_fn->data.seq->total; l < CV_HAAR_FEATURE_MAX; ++l )
1694                 {
1695                     classifier->haar_feature[k].rect[l].weight = 0;
1696                     classifier->haar_feature[k].rect[l].r = cvRect( 0, 0, 0, 0 );
1697                 }
1698
1699                 fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_TILTED_NAME);
1700                 if( !fn || !CV_NODE_IS_INT( fn->tag ) )
1701                 {
1702                     sprintf( buf, "tilted must be 0 or 1. "
1703                              "(stage %d, tree %d, node %d)", i, j, k );
1704                     CV_Error( CV_StsError, buf );
1705                 }
1706                 classifier->haar_feature[k].tilted = ( fn->data.i != 0 );
1707                 fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_THRESHOLD_NAME);
1708                 if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1709                 {
1710                     sprintf( buf, "threshold must be real number. "
1711                              "(stage %d, tree %d, node %d)", i, j, k );
1712                     CV_Error( CV_StsError, buf );
1713                 }
1714                 classifier->threshold[k] = (float) fn->data.f;
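                /* a split stores either a child node index ("left_node"/"right_node",
                   always positive here) or a leaf value ("left_val"/"right_val") that is
                   appended to alpha[] and referenced through a non-positive index */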
1715                 fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_NODE_NAME);
1716                 if( fn )
1717                 {
1718                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1719                         || fn->data.i >= tree_fn->data.seq->total )
1720                     {
1721                         sprintf( buf, "left node must be valid node number. "
1722                                  "(stage %d, tree %d, node %d)", i, j, k );
1723                         CV_Error( CV_StsError, buf );
1724                     }
1725                     /* left node */
1726                     classifier->left[k] = fn->data.i;
1727                 }
1728                 else
1729                 {
1730                     fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_VAL_NAME );
1731                     if( !fn )
1732                     {
1733                         sprintf( buf, "left node or left value must be specified. "
1734                                  "(stage %d, tree %d, node %d)", i, j, k );
1735                         CV_Error( CV_StsError, buf );
1736                     }
1737                     if( !CV_NODE_IS_REAL( fn->tag ) )
1738                     {
1739                         sprintf( buf, "left value must be real number. "
1740                                  "(stage %d, tree %d, node %d)", i, j, k );
1741                         CV_Error( CV_StsError, buf );
1742                     }
1743                     /* left value */
1744                     if( last_idx >= classifier->count + 1 )
1745                     {
1746                         sprintf( buf, "Tree structure is broken: too many values. "
1747                                  "(stage %d, tree %d, node %d)", i, j, k );
1748                         CV_Error( CV_StsError, buf );
1749                     }
1750                     classifier->left[k] = -last_idx;
1751                     classifier->alpha[last_idx++] = (float) fn->data.f;
1752                 }
1753                 fn = cvGetFileNodeByName( fs, node_fn,ICV_HAAR_RIGHT_NODE_NAME);
1754                 if( fn )
1755                 {
1756                     if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1757                         || fn->data.i >= tree_fn->data.seq->total )
1758                     {
1759                         sprintf( buf, "right node must be valid node number. "
1760                                  "(stage %d, tree %d, node %d)", i, j, k );
1761                         CV_Error( CV_StsError, buf );
1762                     }
1763                     /* right node */
1764                     classifier->right[k] = fn->data.i;
1765                 }
1766                 else
1767                 {
1768                     fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_RIGHT_VAL_NAME );
1769                     if( !fn )
1770                     {
1771                         sprintf( buf, "right node or right value must be specified. "
1772                                  "(stage %d, tree %d, node %d)", i, j, k );
1773                         CV_Error( CV_StsError, buf );
1774                     }
1775                     if( !CV_NODE_IS_REAL( fn->tag ) )
1776                     {
1777                         sprintf( buf, "right value must be real number. "
1778                                  "(stage %d, tree %d, node %d)", i, j, k );
1779                         CV_Error( CV_StsError, buf );
1780                     }
1781                     /* right value */
1782                     if( last_idx >= classifier->count + 1 )
1783                     {
1784                         sprintf( buf, "Tree structure is broken: too many values. "
1785                                  "(stage %d, tree %d, node %d)", i, j, k );
1786                         CV_Error( CV_StsError, buf );
1787                     }
1788                     classifier->right[k] = -last_idx;
1789                     classifier->alpha[last_idx++] = (float) fn->data.f;
1790                 }
1791
1792                 CV_NEXT_SEQ_ELEM( sizeof( *node_fn ), tree_reader );
1793             } /* for each node */
1794             if( last_idx != classifier->count + 1 )
1795             {
1796                 sprintf( buf, "Tree structure is broken: too few values. "
1797                          "(stage %d, tree %d)", i, j );
1798                 CV_Error( CV_StsError, buf );
1799             }
1800
1801             CV_NEXT_SEQ_ELEM( sizeof( *tree_fn ), trees_reader );
1802         } /* for each tree */
1803
1804         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_STAGE_THRESHOLD_NAME);
1805         if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1806         {
1807             sprintf( buf, "stage threshold must be real number. (stage %d)", i );
1808             CV_Error( CV_StsError, buf );
1809         }
1810         cascade->stage_classifier[i].threshold = (float) fn->data.f;
1811
1812         parent = i - 1;
1813         next = -1;
1814
1815         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_PARENT_NAME );
1816         if( !fn || !CV_NODE_IS_INT( fn->tag )
1817             || fn->data.i < -1 || fn->data.i >= cascade->count )
1818         {
1819             sprintf( buf, "parent must be integer number. (stage %d)", i );
1820             CV_Error( CV_StsError, buf );
1821         }
1822         parent = fn->data.i;
1823         fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_NEXT_NAME );
1824         if( !fn || !CV_NODE_IS_INT( fn->tag )
1825             || fn->data.i < -1 || fn->data.i >= cascade->count )
1826         {
1827             sprintf( buf, "next must be integer number. (stage %d)", i );
1828             CV_Error( CV_StsError, buf );
1829         }
1830         next = fn->data.i;
1831
1832         cascade->stage_classifier[i].parent = parent;
1833         cascade->stage_classifier[i].next = next;
1834         cascade->stage_classifier[i].child = -1;
1835
1836         if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
1837         {
1838             cascade->stage_classifier[parent].child = i;
1839         }
1840
1841         CV_NEXT_SEQ_ELEM( sizeof( *stage_fn ), stages_reader );
1842     } /* for each stage */
1843
1844     return cascade;
1845 }
1846
1847 static void
1848 icvWriteHaarClassifier( CvFileStorage* fs, const char* name, const void* struct_ptr,
1849                         CvAttrList attributes )
1850 {
1851     int i, j, k, l;
1852     char buf[256];
1853     const CvHaarClassifierCascade* cascade = (const CvHaarClassifierCascade*) struct_ptr;
1854
1855     /* TODO: parameters check */
1856
1857     cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_HAAR, attributes );
1858
1859     cvStartWriteStruct( fs, ICV_HAAR_SIZE_NAME, CV_NODE_SEQ | CV_NODE_FLOW );
1860     cvWriteInt( fs, NULL, cascade->orig_window_size.width );
1861     cvWriteInt( fs, NULL, cascade->orig_window_size.height );
1862     cvEndWriteStruct( fs ); /* size */
1863
1864     cvStartWriteStruct( fs, ICV_HAAR_STAGES_NAME, CV_NODE_SEQ );
1865     for( i = 0; i < cascade->count; ++i )
1866     {
1867         cvStartWriteStruct( fs, NULL, CV_NODE_MAP );
1868         sprintf( buf, "stage %d", i );
1869         cvWriteComment( fs, buf, 1 );
1870
1871         cvStartWriteStruct( fs, ICV_HAAR_TREES_NAME, CV_NODE_SEQ );
1872
1873         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
1874         {
1875             CvHaarClassifier* tree = &cascade->stage_classifier[i].classifier[j];
1876
1877             cvStartWriteStruct( fs, NULL, CV_NODE_SEQ );
1878             sprintf( buf, "tree %d", j );
1879             cvWriteComment( fs, buf, 1 );
1880
1881             for( k = 0; k < tree->count; ++k )
1882             {
1883                 CvHaarFeature* feature = &tree->haar_feature[k];
1884
1885                 cvStartWriteStruct( fs, NULL, CV_NODE_MAP );
1886                 if( k )
1887                 {
1888                     sprintf( buf, "node %d", k );
1889                 }
1890                 else
1891                 {
1892                     sprintf( buf, "root node" );
1893                 }
1894                 cvWriteComment( fs, buf, 1 );
1895
1896                 cvStartWriteStruct( fs, ICV_HAAR_FEATURE_NAME, CV_NODE_MAP );
1897
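                /* unused rect slots are zero-filled when the cascade is loaded,
                   so stop at the first rectangle with zero width */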
1898                 cvStartWriteStruct( fs, ICV_HAAR_RECTS_NAME, CV_NODE_SEQ );
1899                 for( l = 0; l < CV_HAAR_FEATURE_MAX && feature->rect[l].r.width != 0; ++l )
1900                 {
1901                     cvStartWriteStruct( fs, NULL, CV_NODE_SEQ | CV_NODE_FLOW );
1902                     cvWriteInt(  fs, NULL, feature->rect[l].r.x );
1903                     cvWriteInt(  fs, NULL, feature->rect[l].r.y );
1904                     cvWriteInt(  fs, NULL, feature->rect[l].r.width );
1905                     cvWriteInt(  fs, NULL, feature->rect[l].r.height );
1906                     cvWriteReal( fs, NULL, feature->rect[l].weight );
1907                     cvEndWriteStruct( fs ); /* rect */
1908                 }
1909                 cvEndWriteStruct( fs ); /* rects */
1910                 cvWriteInt( fs, ICV_HAAR_TILTED_NAME, feature->tilted );
1911                 cvEndWriteStruct( fs ); /* feature */
1912
1913                 cvWriteReal( fs, ICV_HAAR_THRESHOLD_NAME, tree->threshold[k]);
1914
1915                 if( tree->left[k] > 0 )
1916                 {
1917                     cvWriteInt( fs, ICV_HAAR_LEFT_NODE_NAME, tree->left[k] );
1918                 }
1919                 else
1920                 {
1921                     cvWriteReal( fs, ICV_HAAR_LEFT_VAL_NAME,
1922                         tree->alpha[-tree->left[k]] );
1923                 }
1924
1925                 if( tree->right[k] > 0 )
1926                 {
1927                     cvWriteInt( fs, ICV_HAAR_RIGHT_NODE_NAME, tree->right[k] );
1928                 }
1929                 else
1930                 {
1931                     cvWriteReal( fs, ICV_HAAR_RIGHT_VAL_NAME,
1932                         tree->alpha[-tree->right[k]] );
1933                 }
1934
1935                 cvEndWriteStruct( fs ); /* split */
1936             }
1937
1938             cvEndWriteStruct( fs ); /* tree */
1939         }
1940
1941         cvEndWriteStruct( fs ); /* trees */
1942
1943         cvWriteReal( fs, ICV_HAAR_STAGE_THRESHOLD_NAME, cascade->stage_classifier[i].threshold);
1944         cvWriteInt( fs, ICV_HAAR_PARENT_NAME, cascade->stage_classifier[i].parent );
1945         cvWriteInt( fs, ICV_HAAR_NEXT_NAME, cascade->stage_classifier[i].next );
1946
1947         cvEndWriteStruct( fs ); /* stage */
1948     } /* for each stage */
1949
1950     cvEndWriteStruct( fs ); /* stages */
1951     cvEndWriteStruct( fs ); /* root */
1952 }
1953
1954 static void*
1955 icvCloneHaarClassifier( const void* struct_ptr )
1956 {
1957     CvHaarClassifierCascade* cascade = NULL;
1958
1959     int i, j, k, n;
1960     const CvHaarClassifierCascade* cascade_src =
1961         (const CvHaarClassifierCascade*) struct_ptr;
1962
1963     n = cascade_src->count;
1964     cascade = icvCreateHaarClassifierCascade(n);
1965     cascade->orig_window_size = cascade_src->orig_window_size;
1966
1967     for( i = 0; i < n; ++i )
1968     {
1969         cascade->stage_classifier[i].parent = cascade_src->stage_classifier[i].parent;
1970         cascade->stage_classifier[i].next = cascade_src->stage_classifier[i].next;
1971         cascade->stage_classifier[i].child = cascade_src->stage_classifier[i].child;
1972         cascade->stage_classifier[i].threshold = cascade_src->stage_classifier[i].threshold;
1973
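        /* keep count at 0 while allocating so the cascade stays consistent
           (and safe to release) if the allocation fails part-way */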
1974         cascade->stage_classifier[i].count = 0;
1975         cascade->stage_classifier[i].classifier =
1976             (CvHaarClassifier*) cvAlloc( cascade_src->stage_classifier[i].count
1977                 * sizeof( cascade->stage_classifier[i].classifier[0] ) );
1978
1979         cascade->stage_classifier[i].count = cascade_src->stage_classifier[i].count;
1980
1981         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
1982             cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
1983
1984         for( j = 0; j < cascade->stage_classifier[i].count; ++j )
1985         {
1986             const CvHaarClassifier* classifier_src =
1987                 &cascade_src->stage_classifier[i].classifier[j];
1988             CvHaarClassifier* classifier =
1989                 &cascade->stage_classifier[i].classifier[j];
1990
1991             classifier->count = classifier_src->count;
1992             classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1993                 classifier->count * ( sizeof( *classifier->haar_feature ) +
1994                                       sizeof( *classifier->threshold ) +
1995                                       sizeof( *classifier->left ) +
1996                                       sizeof( *classifier->right ) ) +
1997                 (classifier->count + 1) * sizeof( *classifier->alpha ) );
1998             classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1999             classifier->left = (int*) (classifier->threshold + classifier->count);
2000             classifier->right = (int*) (classifier->left + classifier->count);
2001             classifier->alpha = (float*) (classifier->right + classifier->count);
2002             for( k = 0; k < classifier->count; ++k )
2003             {
2004                 classifier->haar_feature[k] = classifier_src->haar_feature[k];
2005                 classifier->threshold[k] = classifier_src->threshold[k];
2006                 classifier->left[k] = classifier_src->left[k];
2007                 classifier->right[k] = classifier_src->right[k];
2008                 classifier->alpha[k] = classifier_src->alpha[k];
2009             }
2010             classifier->alpha[classifier->count] =
2011                 classifier_src->alpha[classifier->count];
2012         }
2013     }
2014
2015     return cascade;
2016 }
2017
2018
2019 CvType haar_type( CV_TYPE_NAME_HAAR, icvIsHaarClassifier,
2020                   (CvReleaseFunc)cvReleaseHaarClassifierCascade,
2021                   icvReadHaarClassifier, icvWriteHaarClassifier,
2022                   icvCloneHaarClassifier );
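
/* Registering the type lets the generic persistence API (cvSave, cvLoad, cvClone)
   recognize CV_TYPE_NAME_HAAR nodes and dispatch to the handlers above.
   A minimal sketch (the file name is only an example):

       cvSave( "my_cascade.xml", cascade );
       CvHaarClassifierCascade* copy =
           (CvHaarClassifierCascade*)cvLoad( "my_cascade.xml" );
       cvReleaseHaarClassifierCascade( &copy );
*/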
2023
2024 #if 0
2025 namespace cv
2026 {
2027
2028 HaarClassifierCascade::HaarClassifierCascade() {}
2029 HaarClassifierCascade::HaarClassifierCascade(const String& filename)
2030 { load(filename); }
2031     
2032 bool HaarClassifierCascade::load(const String& filename)
2033 {
2034     cascade = Ptr<CvHaarClassifierCascade>((CvHaarClassifierCascade*)cvLoad(filename.c_str(), 0, 0, 0));
2035     return (CvHaarClassifierCascade*)cascade != 0;
2036 }
2037
2038 void HaarClassifierCascade::detectMultiScale( const Mat& image,
2039                        Vector<Rect>& objects, double scaleFactor,
2040                        int minNeighbors, int flags,
2041                        Size minSize )
2042 {
2043     MemStorage storage(cvCreateMemStorage(0));
2044     CvMat _image = image;
2045     CvSeq* _objects = cvHaarDetectObjects( &_image, cascade, storage, scaleFactor,
2046                                            minNeighbors, flags, minSize );
2047     Seq<Rect>(_objects).copyTo(objects);
2048 }
2049
2050 int HaarClassifierCascade::runAt(Point pt, int startStage, int) const
2051 {
2052     return cvRunHaarClassifierCascade(cascade, pt, startStage);
2053 }
2054
2055 void HaarClassifierCascade::setImages( const Mat& sum, const Mat& sqsum,
2056                                        const Mat& tilted, double scale )
2057 {
2058     CvMat _sum = sum, _sqsum = sqsum, _tilted = tilted;
2059     cvSetImagesForHaarClassifierCascade( cascade, &_sum, &_sqsum, &_tilted, scale );
2060 }
2061
2062 }
2063 #endif
2064
2065 /* End of file. */