Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/objdetect/src/haar.cpp
16337 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// Intel License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2000, Intel Corporation, all rights reserved.
14
// Third party copyrights are property of their respective owners.
15
//
16
// Redistribution and use in source and binary forms, with or without modification,
17
// are permitted provided that the following conditions are met:
18
//
19
// * Redistribution's of source code must retain the above copyright notice,
20
// this list of conditions and the following disclaimer.
21
//
22
// * Redistribution's in binary form must reproduce the above copyright notice,
23
// this list of conditions and the following disclaimer in the documentation
24
// and/or other materials provided with the distribution.
25
//
26
// * The name of Intel Corporation may not be used to endorse or promote products
27
// derived from this software without specific prior written permission.
28
//
29
// This software is provided by the copyright holders and contributors "as is" and
30
// any express or implied warranties, including, but not limited to, the implied
31
// warranties of merchantability and fitness for a particular purpose are disclaimed.
32
// In no event shall the Intel Corporation or contributors be liable for any direct,
33
// indirect, incidental, special, exemplary, or consequential damages
34
// (including, but not limited to, procurement of substitute goods or services;
35
// loss of use, data, or profits; or business interruption) however caused
36
// and on any theory of liability, whether in contract, strict liability,
37
// or tort (including negligence or otherwise) arising in any way out of
38
// the use of this software, even if advised of the possibility of such damage.
39
//
40
//M*/
41
42
/* Haar features calculation */
43
44
#include "precomp.hpp"
45
#include "opencv2/imgproc/imgproc_c.h"
46
#include "opencv2/objdetect/objdetect_c.h"
47
#include <stdio.h>
48
#include "haar.hpp"
49
#if CV_HAAR_FEATURE_MAX_LOCAL != CV_HAAR_FEATURE_MAX
50
#error CV_HAAR_FEATURE_MAX definition changed. Adjust CV_HAAR_FEATURE_MAX_LOCAL value please.
51
#endif
52
53
#if CV_SSE2
54
# if 1 /*!CV_SSE4_1 && !CV_SSE4_2*/
55
# define _mm_blendv_pd(a, b, m) _mm_xor_pd(a, _mm_and_pd(_mm_xor_pd(b, a), m))
56
# define _mm_blendv_ps(a, b, m) _mm_xor_ps(a, _mm_and_ps(_mm_xor_ps(b, a), m))
57
# endif
58
#endif
59
60
#if CV_HAAR_USE_AVX
61
# if defined _MSC_VER
62
# pragma warning( disable : 4752 )
63
# endif
64
#else
65
# if CV_SSE2
66
# define CV_HAAR_USE_SSE 1
67
# endif
68
#endif
69
70
/* these settings affect the quality of detection: change with care */
71
#define CV_ADJUST_FEATURES 1
72
#define CV_ADJUST_WEIGHTS 0
73
74
/* Hidden (internal, optimized) representation of one stage of the cascade. */
typedef struct CvHidHaarStageClassifier
{
    int count;                       /* number of weak classifiers in this stage */
    float threshold;                 /* stage rejection threshold (bias already subtracted) */
    CvHidHaarClassifier* classifier; /* pointer to this stage's `count` weak classifiers */
    int two_rects;                   /* 1 iff every feature in the stage uses only two rectangles */

    /* tree-cascade links; NULL when the cascade is a plain linear chain */
    struct CvHidHaarStageClassifier* next;
    struct CvHidHaarStageClassifier* child;
    struct CvHidHaarStageClassifier* parent;
} CvHidHaarStageClassifier;
85
86
87
/* Hidden representation of the whole cascade, laid out in one contiguous
   allocation together with its stages, classifiers and tree nodes. */
typedef struct CvHidHaarClassifierCascade
{
    int count;                 /* number of stages */
    int has_tilted_features;   /* nonzero if any feature needs the tilted integral image */
    double inv_window_area;    /* 1 / (normalization-window width * height) */
    CvMat sum, sqsum, tilted;  /* headers of the integral images currently attached */
    CvHidHaarStageClassifier* stage_classifier;
    sqsumtype *pq0, *pq1, *pq2, *pq3;  /* 4 corner taps into sqsum for variance computation */
    sumtype *p0, *p1, *p2, *p3;        /* 4 corner taps into sum for mean computation */

    bool is_tree;       /* true when some stage has a non-NULL `next` (tree cascade) */
    bool isStumpBased;  /* true when every weak classifier has exactly one node */
} CvHidHaarClassifierCascade;
100
101
102
/* Border (in pixels at original scale) kept around the detection window. */
const int icv_object_win_border = 1;
/* Subtracted from each stage threshold when building the hidden cascade,
   slightly favoring acceptance to compensate for rounding. */
const float icv_stage_threshold_bias = 0.0001f;
104
105
/* Allocate an empty CvHaarClassifierCascade with room for `stage_count`
 * stage classifiers stored immediately after the header in a single block.
 * The returned memory is zero-initialized (except flags/count) and owned by
 * the caller. Raises CV_StsOutOfRange when stage_count <= 0. */
static CvHaarClassifierCascade*
icvCreateHaarClassifierCascade( int stage_count )
{
    CvHaarClassifierCascade* cascade = 0;

    /* Validate BEFORE using stage_count in the size computation so a
       negative or zero count can never flow into the allocation size.
       (The original computed block_size first.) */
    if( stage_count <= 0 )
        CV_Error( CV_StsOutOfRange, "Number of stages should be positive" );

    /* size_t avoids signed overflow of the size arithmetic for large counts */
    size_t block_size = sizeof(*cascade) + stage_count*sizeof(*cascade->stage_classifier);

    cascade = (CvHaarClassifierCascade*)cvAlloc( block_size );
    memset( cascade, 0, block_size );

    /* the stage array lives directly behind the header in the same block */
    cascade->stage_classifier = (CvHaarStageClassifier*)(cascade + 1);
    cascade->flags = CV_HAAR_MAGIC_VAL;
    cascade->count = stage_count;

    return cascade;
}
124
125
static void
126
icvReleaseHidHaarClassifierCascade( CvHidHaarClassifierCascade** _cascade )
127
{
128
if( _cascade && *_cascade )
129
{
130
cvFree( _cascade );
131
}
132
}
133
134
/* Create the more efficient internal (hidden) representation of a haar
 * classifier cascade.
 *
 * Two passes over `cascade`:
 *   1. validate every stage/classifier/feature rectangle against the original
 *      window size, and count classifiers and tree nodes to size one single
 *      allocation for the whole hidden structure;
 *   2. fill the hidden stages, classifiers, nodes and alpha tables inside
 *      that allocation, wiring parent/next/child stage links.
 *
 * On success stores the result in cascade->hid_cascade and returns it.
 * Raises CV_Error on any structural problem; does not free `cascade`. */
static CvHidHaarClassifierCascade*
icvCreateHidHaarClassifierCascade( CvHaarClassifierCascade* cascade )
{
    CvHidHaarClassifierCascade* out = 0;

    int i, j, k, l;
    int datasize;
    int total_classifiers = 0;
    int total_nodes = 0;
    char errorstr[1000];
    CvHidHaarClassifier* haar_classifier_ptr;
    CvHidHaarTreeNode* haar_node_ptr;
    cv::Size orig_window_size;
    bool has_tilted_features = false;
    int max_count = 0;

    if( !CV_IS_HAAR_CLASSIFIER(cascade) )
        CV_Error( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );

    if( cascade->hid_cascade )
        CV_Error( CV_StsError, "hid_cascade has been already created" );

    if( !cascade->stage_classifier )
        CV_Error( CV_StsNullPtr, "" );

    if( cascade->count <= 0 )
        CV_Error( CV_StsOutOfRange, "Negative number of cascade stages" );

    orig_window_size = cascade->orig_window_size;

    /* check input structure correctness and calculate total memory size needed for
       internal representation of the classifier cascade */
    for( i = 0; i < cascade->count; i++ )
    {
        CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;

        if( !stage_classifier->classifier ||
            stage_classifier->count <= 0 )
        {
            sprintf( errorstr, "header of the stage classifier #%d is invalid "
                     "(has null pointers or non-positive classfier count)", i );
            CV_Error( CV_StsError, errorstr );
        }

        max_count = MAX( max_count, stage_classifier->count );
        total_classifiers += stage_classifier->count;

        for( j = 0; j < stage_classifier->count; j++ )
        {
            CvHaarClassifier* classifier = stage_classifier->classifier + j;

            total_nodes += classifier->count;
            for( l = 0; l < classifier->count; l++ )
            {
                for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
                {
                    /* rect with zero width marks an unused feature slot */
                    if( classifier->haar_feature[l].rect[k].r.width )
                    {
                        CvRect r = classifier->haar_feature[l].rect[k].r;
                        int tilted = classifier->haar_feature[l].tilted;
                        has_tilted_features = has_tilted_features | (tilted != 0);
                        /* tilted rects occupy a rotated footprint, so their
                           bounds check differs from upright rects */
                        if( r.width < 0 || r.height < 0 || r.y < 0 ||
                            r.x + r.width > orig_window_size.width
                            ||
                            (!tilted &&
                            (r.x < 0 || r.y + r.height > orig_window_size.height))
                            ||
                            (tilted && (r.x - r.height < 0 ||
                            r.y + r.width + r.height > orig_window_size.height)))
                        {
                            sprintf( errorstr, "rectangle #%d of the classifier #%d of "
                                     "the stage classifier #%d is not inside "
                                     "the reference (original) cascade window", k, j, i );
                            CV_Error( CV_StsNullPtr, errorstr );
                        }
                    }
                }
            }
        }
    }

    // this is an upper boundary for the whole hidden cascade size
    datasize = sizeof(CvHidHaarClassifierCascade) +
               sizeof(CvHidHaarStageClassifier)*cascade->count +
               sizeof(CvHidHaarClassifier) * total_classifiers +
               sizeof(CvHidHaarTreeNode) * total_nodes +
               sizeof(void*)*(total_nodes + total_classifiers);

    out = (CvHidHaarClassifierCascade*)cvAlloc( datasize );
    memset( out, 0, sizeof(*out) );

    /* init header; stages, classifiers and nodes are carved sequentially
       out of the single allocation right behind the header */
    out->count = cascade->count;
    out->stage_classifier = (CvHidHaarStageClassifier*)(out + 1);
    haar_classifier_ptr = (CvHidHaarClassifier*)(out->stage_classifier + cascade->count);
    haar_node_ptr = (CvHidHaarTreeNode*)(haar_classifier_ptr + total_classifiers);

    out->isStumpBased = true;
    out->has_tilted_features = has_tilted_features;
    out->is_tree = false;

    /* initialize internal representation */
    for( i = 0; i < cascade->count; i++ )
    {
        CvHaarStageClassifier* stage_classifier = cascade->stage_classifier + i;
        CvHidHaarStageClassifier* hid_stage_classifier = out->stage_classifier + i;

        hid_stage_classifier->count = stage_classifier->count;
        /* bias slightly lowers the threshold to absorb rounding error */
        hid_stage_classifier->threshold = stage_classifier->threshold - icv_stage_threshold_bias;
        hid_stage_classifier->classifier = haar_classifier_ptr;
        hid_stage_classifier->two_rects = 1;
        haar_classifier_ptr += stage_classifier->count;

        /* -1 indices in the flat representation mean "no link" */
        hid_stage_classifier->parent = (stage_classifier->parent == -1)
            ? NULL : out->stage_classifier + stage_classifier->parent;
        hid_stage_classifier->next = (stage_classifier->next == -1)
            ? NULL : out->stage_classifier + stage_classifier->next;
        hid_stage_classifier->child = (stage_classifier->child == -1)
            ? NULL : out->stage_classifier + stage_classifier->child;

        out->is_tree = out->is_tree || (hid_stage_classifier->next != NULL);

        for( j = 0; j < stage_classifier->count; j++ )
        {
            CvHaarClassifier* classifier = stage_classifier->classifier + j;
            CvHidHaarClassifier* hid_classifier = hid_stage_classifier->classifier + j;
            int node_count = classifier->count;
            /* alpha table is stored immediately after this classifier's nodes */
            float* alpha_ptr = (float*)(haar_node_ptr + node_count);

            hid_classifier->count = node_count;
            hid_classifier->node = haar_node_ptr;
            hid_classifier->alpha = alpha_ptr;

            for( l = 0; l < node_count; l++ )
            {
                CvHidHaarTreeNode* node = hid_classifier->node + l;
                CvHaarFeature* feature = classifier->haar_feature + l;
                memset( node, -1, sizeof(*node) );
                node->threshold = classifier->threshold[l];
                node->left = classifier->left[l];
                node->right = classifier->right[l];

                /* degenerate third rect -> treat feature as two-rect; any real
                   third rect demotes the whole stage's two_rects flag */
                if( fabs(feature->rect[2].weight) < DBL_EPSILON ||
                    feature->rect[2].r.width == 0 ||
                    feature->rect[2].r.height == 0 )
                    memset( &(node->feature.rect[2]), 0, sizeof(node->feature.rect[2]) );
                else
                    hid_stage_classifier->two_rects = 0;
            }

            /* node_count+1 alphas: one per leaf of the decision tree */
            memcpy( alpha_ptr, classifier->alpha, (node_count+1)*sizeof(alpha_ptr[0]));
            haar_node_ptr =
                (CvHidHaarTreeNode*)cvAlignPtr(alpha_ptr+node_count+1, sizeof(void*));

            out->isStumpBased = out->isStumpBased && (node_count == 1);
        }
    }

    cascade->hid_cascade = out;
    /* sanity: sequential carving must not run past the single allocation */
    assert( (char*)haar_node_ptr - (char*)out <= datasize );

    return out;
}
298
299
300
/* Pointer to the (row,col) element of the 32-bit integral image. */
#define sum_elem_ptr(sum,row,col)  \
    ((sumtype*)CV_MAT_ELEM_PTR_FAST((sum),(row),(col),sizeof(sumtype)))

/* Pointer to the (row,col) element of the 64-bit squared-integral image. */
#define sqsum_elem_ptr(sqsum,row,col)  \
    ((sqsumtype*)CV_MAT_ELEM_PTR_FAST((sqsum),(row),(col),sizeof(sqsumtype)))

/* Rectangle sum via the 4 precomputed integral-image corner taps:
   p0 - p1 - p2 + p3, all offset by the window's linear offset. */
#define calc_sum(rect,offset) \
    ((rect).p0[offset] - (rect).p1[offset] - (rect).p2[offset] + (rect).p3[offset])
308
309
/* Attach integral images (sum, squared sum, optional tilted sum) to the
 * cascade and precompute, for the given `scale`, the absolute pointers of
 * every feature rectangle's four integral-image corner taps plus each
 * rectangle's normalized weight.
 *
 * Builds the hidden cascade on first use. Raises CV_Error on type/size
 * mismatches of the integral images or an invalid cascade/scale. */
CV_IMPL void
cvSetImagesForHaarClassifierCascade( CvHaarClassifierCascade* _cascade,
                                     const CvArr* _sum,
                                     const CvArr* _sqsum,
                                     const CvArr* _tilted_sum,
                                     double scale )
{
    CvMat sum_stub, *sum = (CvMat*)_sum;
    CvMat sqsum_stub, *sqsum = (CvMat*)_sqsum;
    CvMat tilted_stub, *tilted = (CvMat*)_tilted_sum;
    CvHidHaarClassifierCascade* cascade;
    int coi0 = 0, coi1 = 0;
    int i;
    cv::Rect equRect;
    double weight_scale;

    if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
        CV_Error( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier pointer" );

    if( scale <= 0 )
        CV_Error( CV_StsOutOfRange, "Scale must be positive" );

    sum = cvGetMat( sum, &sum_stub, &coi0 );
    sqsum = cvGetMat( sqsum, &sqsum_stub, &coi1 );

    if( coi0 || coi1 )
        CV_Error( CV_BadCOI, "COI is not supported" );

    if( !CV_ARE_SIZES_EQ( sum, sqsum ))
        CV_Error( CV_StsUnmatchedSizes, "All integral images must have the same size" );

    if( CV_MAT_TYPE(sqsum->type) != CV_64FC1 ||
        CV_MAT_TYPE(sum->type) != CV_32SC1 )
        CV_Error( CV_StsUnsupportedFormat,
        "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );

    /* lazily build the optimized representation on first use */
    if( !_cascade->hid_cascade )
        icvCreateHidHaarClassifierCascade(_cascade);

    cascade = _cascade->hid_cascade;

    if( cascade->has_tilted_features )
    {
        tilted = cvGetMat( tilted, &tilted_stub, &coi1 );

        if( CV_MAT_TYPE(tilted->type) != CV_32SC1 )
            CV_Error( CV_StsUnsupportedFormat,
            "Only (32s, 64f, 32s) combination of (sum,sqsum,tilted_sum) formats is allowed" );

        /* equal strides let sum_elem_ptr be reused on the tilted image */
        if( sum->step != tilted->step )
            CV_Error( CV_StsUnmatchedSizes,
            "Sum and tilted_sum must have the same stride (step, widthStep)" );

        if( !CV_ARE_SIZES_EQ( sum, tilted ))
            CV_Error( CV_StsUnmatchedSizes, "All integral images must have the same size" );
        cascade->tilted = *tilted;
    }

    _cascade->scale = scale;
    _cascade->real_window_size.width = cvRound( _cascade->orig_window_size.width * scale );
    _cascade->real_window_size.height = cvRound( _cascade->orig_window_size.height * scale );

    cascade->sum = *sum;
    cascade->sqsum = *sqsum;

    /* normalization window: scaled original window shrunk by a 1-pixel border */
    equRect.x = equRect.y = cvRound(scale);
    equRect.width = cvRound((_cascade->orig_window_size.width-2)*scale);
    equRect.height = cvRound((_cascade->orig_window_size.height-2)*scale);
    weight_scale = 1./(equRect.width*equRect.height);
    cascade->inv_window_area = weight_scale;

    /* corner taps for the mean over the normalization window */
    cascade->p0 = sum_elem_ptr(*sum, equRect.y, equRect.x);
    cascade->p1 = sum_elem_ptr(*sum, equRect.y, equRect.x + equRect.width );
    cascade->p2 = sum_elem_ptr(*sum, equRect.y + equRect.height, equRect.x );
    cascade->p3 = sum_elem_ptr(*sum, equRect.y + equRect.height,
                               equRect.x + equRect.width );

    /* corner taps for the variance over the normalization window */
    cascade->pq0 = sqsum_elem_ptr(*sqsum, equRect.y, equRect.x);
    cascade->pq1 = sqsum_elem_ptr(*sqsum, equRect.y, equRect.x + equRect.width );
    cascade->pq2 = sqsum_elem_ptr(*sqsum, equRect.y + equRect.height, equRect.x );
    cascade->pq3 = sqsum_elem_ptr(*sqsum, equRect.y + equRect.height,
                                  equRect.x + equRect.width );

    /* init pointers in haar features according to real window size and
       given image pointers */
    for( i = 0; i < _cascade->count; i++ )
    {
        int j, k, l;
        for( j = 0; j < cascade->stage_classifier[i].count; j++ )
        {
            for( l = 0; l < cascade->stage_classifier[i].classifier[j].count; l++ )
            {
                CvHaarFeature* feature =
                    &_cascade->stage_classifier[i].classifier[j].haar_feature[l];
                /* CvHidHaarClassifier* classifier =
                    cascade->stage_classifier[i].classifier + j; */
                CvHidHaarFeature* hidfeature =
                    &cascade->stage_classifier[i].classifier[j].node[l].feature;
                double sum0 = 0, area0 = 0;
                cv::Rect r[3];

                int base_w = -1, base_h = -1;
                int new_base_w = 0, new_base_h = 0;
                int kx, ky;
                int flagx = 0, flagy = 0;
                int x0 = 0, y0 = 0;
                int nr;

                /* align blocks: derive the common grid cell (base_w x base_h)
                   that all of the feature's rectangles sit on; the unsigned
                   CV_IMIN trick lets the initial -1 act as "no minimum yet" */
                for( k = 0; k < CV_HAAR_FEATURE_MAX; k++ )
                {
                    if( !hidfeature->rect[k].p0 )
                        break;
                    r[k] = feature->rect[k].r;
                    base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].width-1) );
                    base_w = (int)CV_IMIN( (unsigned)base_w, (unsigned)(r[k].x - r[0].x-1) );
                    base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].height-1) );
                    base_h = (int)CV_IMIN( (unsigned)base_h, (unsigned)(r[k].y - r[0].y-1) );
                }

                nr = k;  /* number of rects actually used by this feature */

                base_w += 1;
                base_h += 1;
                kx = r[0].width / base_w;
                ky = r[0].height / base_h;

                /* NOTE(review): this branch divides by a non-positive kx —
                   looks suspicious (division by zero when kx == 0); confirm
                   against upstream OpenCV before touching */
                if( kx <= 0 )
                {
                    flagx = 1;
                    new_base_w = cvRound( r[0].width * scale ) / kx;
                    x0 = cvRound( r[0].x * scale );
                }

                if( ky <= 0 )
                {
                    flagy = 1;
                    new_base_h = cvRound( r[0].height * scale ) / ky;
                    y0 = cvRound( r[0].y * scale );
                }

                for( k = 0; k < nr; k++ )
                {
                    cv::Rect tr;
                    double correction_ratio;

                    /* scale each rect; flagx/flagy select grid-aligned scaling
                       relative to rect 0 instead of independent rounding */
                    if( flagx )
                    {
                        tr.x = (r[k].x - r[0].x) * new_base_w / base_w + x0;
                        tr.width = r[k].width * new_base_w / base_w;
                    }
                    else
                    {
                        tr.x = cvRound( r[k].x * scale );
                        tr.width = cvRound( r[k].width * scale );
                    }

                    if( flagy )
                    {
                        tr.y = (r[k].y - r[0].y) * new_base_h / base_h + y0;
                        tr.height = r[k].height * new_base_h / base_h;
                    }
                    else
                    {
                        tr.y = cvRound( r[k].y * scale );
                        tr.height = cvRound( r[k].height * scale );
                    }

#if CV_ADJUST_WEIGHTS
                    {
                        // RAINER START
                        const float orig_feature_size = (float)(feature->rect[k].r.width)*feature->rect[k].r.height;
                        const float orig_norm_size = (float)(_cascade->orig_window_size.width)*(_cascade->orig_window_size.height);
                        const float feature_size = float(tr.width*tr.height);
                        //const float normSize = float(equRect.width*equRect.height);
                        float target_ratio = orig_feature_size / orig_norm_size;
                        //float isRatio = featureSize / normSize;
                        //correctionRatio = targetRatio / isRatio / normSize;
                        correction_ratio = target_ratio / feature_size;
                        // RAINER END
                    }
#else
                    /* tilted rects cover twice the area per tap, hence 0.5 */
                    correction_ratio = weight_scale * (!feature->tilted ? 1 : 0.5);
#endif

                    if( !feature->tilted )
                    {
                        hidfeature->rect[k].p0 = sum_elem_ptr(*sum, tr.y, tr.x);
                        hidfeature->rect[k].p1 = sum_elem_ptr(*sum, tr.y, tr.x + tr.width);
                        hidfeature->rect[k].p2 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x);
                        hidfeature->rect[k].p3 = sum_elem_ptr(*sum, tr.y + tr.height, tr.x + tr.width);
                    }
                    else
                    {
                        /* 45-degree rotated rectangle: corner taps follow the
                           rotated-integral-image geometry */
                        hidfeature->rect[k].p2 = sum_elem_ptr(*tilted, tr.y + tr.width, tr.x + tr.width);
                        hidfeature->rect[k].p3 = sum_elem_ptr(*tilted, tr.y + tr.width + tr.height,
                                                              tr.x + tr.width - tr.height);
                        hidfeature->rect[k].p0 = sum_elem_ptr(*tilted, tr.y, tr.x);
                        hidfeature->rect[k].p1 = sum_elem_ptr(*tilted, tr.y + tr.height, tr.x - tr.height);
                    }

                    hidfeature->rect[k].weight = (float)(feature->rect[k].weight * correction_ratio);

                    if( k == 0 )
                        area0 = tr.width * tr.height;
                    else
                        sum0 += hidfeature->rect[k].weight * tr.width * tr.height;
                }
                CV_Assert(area0 > 0);
                /* rebalance rect 0 so the weighted rect areas sum to zero */
                hidfeature->rect[0].weight = (float)(-sum0/area0);
            } /* l */
        } /* j */
    }
}
523
524
525
/* Evaluate one weak classifier (a small decision tree) on the window at
 * `p_offset`. At each node the feature response — the weighted sum of two
 * or three integral-image rectangle sums — is compared against the node
 * threshold scaled by the window's variance normalization factor; the
 * comparison selects the left or right child. A non-positive child index
 * encodes a leaf; its negation indexes the alpha table, whose value is
 * returned. */
CV_INLINE
double icvEvalHidHaarClassifier( CvHidHaarClassifier* classifier,
                                 double variance_norm_factor,
                                 size_t p_offset )
{
    int idx = 0;
    for(;;)
    {
        const CvHidHaarTreeNode* node = classifier->node + idx;
        const double node_threshold = node->threshold * variance_norm_factor;

        double response =
            calc_sum(node->feature.rect[0], p_offset) * node->feature.rect[0].weight +
            calc_sum(node->feature.rect[1], p_offset) * node->feature.rect[1].weight;
        /* a null p0 marks the optional third rectangle as unused */
        if( node->feature.rect[2].p0 )
            response += calc_sum(node->feature.rect[2], p_offset) * node->feature.rect[2].weight;

        idx = (response < node_threshold) ? node->left : node->right;
        if( idx <= 0 )
            break;
    }
    return classifier->alpha[-idx];
}
578
579
580
581
/* Run the cascade on the window whose top-left corner is `pt`, starting from
 * `start_stage`, accumulating the last evaluated stage sum into `stage_sum`.
 *
 * Returns  1  - all stages passed (object candidate),
 *          0  - rejected by a tree cascade with no remaining branch,
 *         -i  - rejected at stage i (i >= start_stage),
 *         -1  - window (partially) outside the integral image.
 *
 * Three evaluation paths: tree cascades, stump-based cascades (with optional
 * AVX / legacy-SSE fast paths selected at preprocessing time), and generic
 * multi-node cascades. */
static int
cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
                               CvPoint pt, double& stage_sum, int start_stage )
{
#if CV_HAAR_USE_AVX
    bool haveAVX = CV_CPU_HAS_SUPPORT_AVX;
#else
#  ifdef CV_HAAR_USE_SSE
    bool haveSSE2 = cv::checkHardwareSupport(CV_CPU_SSE2);
#  endif
#endif

    int p_offset, pq_offset;
    int i, j;
    double mean, variance_norm_factor;
    CvHidHaarClassifierCascade* cascade;

    if( !CV_IS_HAAR_CLASSIFIER(_cascade) )
        CV_Error( !_cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid cascade pointer" );

    cascade = _cascade->hid_cascade;
    if( !cascade )
        CV_Error( CV_StsNullPtr, "Hidden cascade has not been created.\n"
            "Use cvSetImagesForHaarClassifierCascade" );

    /* window must fit entirely inside the attached integral images */
    if( pt.x < 0 || pt.y < 0 ||
        pt.x + _cascade->real_window_size.width >= cascade->sum.width ||
        pt.y + _cascade->real_window_size.height >= cascade->sum.height )
        return -1;

    /* linear offsets of the window origin in the sum / sqsum images */
    p_offset = pt.y * (cascade->sum.step/sizeof(sumtype)) + pt.x;
    pq_offset = pt.y * (cascade->sqsum.step/sizeof(sqsumtype)) + pt.x;
    /* per-window lighting normalization: stddev over the window, or 1 when
       the computed variance is negative due to rounding */
    mean = calc_sum(*cascade,p_offset)*cascade->inv_window_area;
    variance_norm_factor = cascade->pq0[pq_offset] - cascade->pq1[pq_offset] -
                           cascade->pq2[pq_offset] + cascade->pq3[pq_offset];
    variance_norm_factor = variance_norm_factor*cascade->inv_window_area - mean*mean;
    if( variance_norm_factor >= 0. )
        variance_norm_factor = std::sqrt(variance_norm_factor);
    else
        variance_norm_factor = 1.;

    if( cascade->is_tree )
    {
        /* tree cascade: passing a stage descends to `child`; failing climbs
           to the nearest ancestor with an untried `next` sibling */
        CvHidHaarStageClassifier* ptr = cascade->stage_classifier;
        assert( start_stage == 0 );

        while( ptr )
        {
            stage_sum = 0.0;
            j = 0;

#if CV_HAAR_USE_AVX
            if(haveAVX)
            {
                /* process 8 classifiers at a time; scalar loop handles the tail */
                for( ; j <= ptr->count - 8; j += 8 )
                {
                    stage_sum += cv_haar_avx::icvEvalHidHaarClassifierAVX(
                        ptr->classifier + j,
                        variance_norm_factor, p_offset );
                }
            }
#endif
            for( ; j < ptr->count; j++ )
            {
                stage_sum += icvEvalHidHaarClassifier( ptr->classifier + j, variance_norm_factor, p_offset );
            }

            if( stage_sum >= ptr->threshold )
            {
                ptr = ptr->child;
            }
            else
            {
                while( ptr && ptr->next == NULL ) ptr = ptr->parent;
                if( ptr == NULL )
                    return 0;
                ptr = ptr->next;
            }
        }
    }
    else if( cascade->isStumpBased )
    {
#if CV_HAAR_USE_AVX
        if(haveAVX)
        {
            for( i = start_stage; i < cascade->count; i++ )
            {
                stage_sum = 0.0;
                j = 0;
                if( cascade->stage_classifier[i].two_rects )
                {
                    for( ; j <= cascade->stage_classifier[i].count - 8; j += 8 )
                    {
                        stage_sum += cv_haar_avx::icvEvalHidHaarStumpClassifierTwoRectAVX(
                            cascade->stage_classifier[i].classifier + j,
                            variance_norm_factor, p_offset);
                    }

                    /* scalar tail: stump = single node, alpha picked by
                       branch-free comparison result */
                    for( ; j < cascade->stage_classifier[i].count; j++ )
                    {
                        CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
                        CvHidHaarTreeNode* node = classifier->node;

                        double t = node->threshold*variance_norm_factor;
                        double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
                        sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
                        stage_sum += classifier->alpha[sum >= t];
                    }
                }
                else
                {
                    for( ; j <= (cascade->stage_classifier[i].count)-8; j+=8 )
                    {
                        stage_sum += cv_haar_avx::icvEvalHidHaarStumpClassifierAVX(
                            cascade->stage_classifier[i].classifier + j,
                            variance_norm_factor, p_offset);
                    }

                    for( ; j < cascade->stage_classifier[i].count; j++ )
                    {
                        CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
                        CvHidHaarTreeNode* node = classifier->node;

                        double t = node->threshold*variance_norm_factor;
                        double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
                        sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
                        if( node->feature.rect[2].p0 )
                            sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
                        stage_sum += classifier->alpha[sum >= t];
                    }
                }
                if( stage_sum < cascade->stage_classifier[i].threshold )
                    return -i;
            }
        }
        else
#elif defined CV_HAAR_USE_SSE //old SSE optimization
        if(haveSSE2)
        {
            for( i = start_stage; i < cascade->count; i++ )
            {
                __m128d vstage_sum = _mm_setzero_pd();
                if( cascade->stage_classifier[i].two_rects )
                {
                    for( j = 0; j < cascade->stage_classifier[i].count; j++ )
                    {
                        CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
                        CvHidHaarTreeNode* node = classifier->node;

                        // ayasin - NHM perf optim. Avoid use of costly flaky jcc
                        __m128d t = _mm_set_sd(node->threshold*variance_norm_factor);
                        __m128d a = _mm_set_sd(classifier->alpha[0]);
                        __m128d b = _mm_set_sd(classifier->alpha[1]);
                        __m128d sum = _mm_set_sd(calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight +
                                                 calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight);
                        t = _mm_cmpgt_sd(t, sum);
                        vstage_sum = _mm_add_sd(vstage_sum, _mm_blendv_pd(b, a, t));
                    }
                }
                else
                {
                    for( j = 0; j < cascade->stage_classifier[i].count; j++ )
                    {
                        CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
                        CvHidHaarTreeNode* node = classifier->node;
                        // ayasin - NHM perf optim. Avoid use of costly flaky jcc
                        __m128d t = _mm_set_sd(node->threshold*variance_norm_factor);
                        __m128d a = _mm_set_sd(classifier->alpha[0]);
                        __m128d b = _mm_set_sd(classifier->alpha[1]);
                        double _sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
                        _sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
                        if( node->feature.rect[2].p0 )
                            _sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
                        __m128d sum = _mm_set_sd(_sum);

                        t = _mm_cmpgt_sd(t, sum);
                        vstage_sum = _mm_add_sd(vstage_sum, _mm_blendv_pd(b, a, t));
                    }
                }
                __m128d i_threshold = _mm_set1_pd(cascade->stage_classifier[i].threshold);
                if( _mm_comilt_sd(vstage_sum, i_threshold) )
                    return -i;
            }
        }
        else
#endif // AVX or SSE
        {
            /* plain scalar stump path */
            for( i = start_stage; i < cascade->count; i++ )
            {
                stage_sum = 0.0;
                if( cascade->stage_classifier[i].two_rects )
                {
                    for( j = 0; j < cascade->stage_classifier[i].count; j++ )
                    {
                        CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
                        CvHidHaarTreeNode* node = classifier->node;
                        double t = node->threshold*variance_norm_factor;
                        double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
                        sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
                        stage_sum += classifier->alpha[sum >= t];
                    }
                }
                else
                {
                    for( j = 0; j < cascade->stage_classifier[i].count; j++ )
                    {
                        CvHidHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
                        CvHidHaarTreeNode* node = classifier->node;
                        double t = node->threshold*variance_norm_factor;
                        double sum = calc_sum(node->feature.rect[0],p_offset) * node->feature.rect[0].weight;
                        sum += calc_sum(node->feature.rect[1],p_offset) * node->feature.rect[1].weight;
                        if( node->feature.rect[2].p0 )
                            sum += calc_sum(node->feature.rect[2],p_offset) * node->feature.rect[2].weight;
                        stage_sum += classifier->alpha[sum >= t];
                    }
                }
                if( stage_sum < cascade->stage_classifier[i].threshold )
                    return -i;
            }
        }
    }
    else
    {
        /* generic path: weak classifiers may be multi-node trees */
        for( i = start_stage; i < cascade->count; i++ )
        {
            stage_sum = 0.0;
            int k = 0;

#if CV_HAAR_USE_AVX
            if(haveAVX)
            {
                for( ; k < cascade->stage_classifier[i].count - 8; k += 8 )
                {
                    stage_sum += cv_haar_avx::icvEvalHidHaarClassifierAVX(
                        cascade->stage_classifier[i].classifier + k,
                        variance_norm_factor, p_offset );
                }
            }
#endif
            for(; k < cascade->stage_classifier[i].count; k++ )
            {

                stage_sum += icvEvalHidHaarClassifier(
                    cascade->stage_classifier[i].classifier + k,
                    variance_norm_factor, p_offset );
            }

            if( stage_sum < cascade->stage_classifier[i].threshold )
                return -i;
        }
    }
    return 1;
}
834
835
836
/* Public entry point: run the cascade at `pt` and report only the
 * pass/reject result, discarding the accumulated stage sum. */
CV_IMPL int
cvRunHaarClassifierCascade( const CvHaarClassifierCascade* _cascade,
                            CvPoint pt, int start_stage )
{
    CV_INSTRUMENT_REGION();

    double unused_stage_sum;
    return cvRunHaarClassifierCascadeSum( _cascade, pt, unused_stage_sum, start_stage );
}
845
846
namespace cv
847
{
848
849
/* Detections are buffered per worker and flushed to the shared output
   vectors in batches of this size to limit mutex contention. */
const size_t PARALLEL_LOOP_BATCH_SIZE = 100;
850
851
/* Parallel body that scans one horizontal strip of a (single-scale) scaled
 * image with the cascade. Each worker collects hits into local vectors and
 * flushes them to the shared output vectors under `mtx` in batches of
 * PARALLEL_LOOP_BATCH_SIZE. When `_outputLevels` is set, per-window reject
 * levels and stage-sum weights are recorded as well. */
class HaarDetectObjects_ScaleImage_Invoker : public ParallelLoopBody
{
public:
    HaarDetectObjects_ScaleImage_Invoker( const CvHaarClassifierCascade* _cascade,
                                          int _stripSize, double _factor,
                                          const Mat& _sum1, const Mat& _sqsum1, Mat* _norm1,
                                          Mat* _mask1, Rect _equRect, std::vector<Rect>& _vec,
                                          std::vector<int>& _levels, std::vector<double>& _weights,
                                          bool _outputLevels, Mutex *_mtx )
    {
        cascade = _cascade;
        stripSize = _stripSize;
        factor = _factor;
        sum1 = _sum1;
        sqsum1 = _sqsum1;
        norm1 = _norm1;
        mask1 = _mask1;
        equRect = _equRect;
        vec = &_vec;
        /* level/weight outputs are optional; NULL disables them */
        rejectLevels = _outputLevels ? &_levels : 0;
        levelWeights = _outputLevels ? &_weights : 0;
        mtx = _mtx;
    }

    void operator()(const Range& range) const CV_OVERRIDE
    {
        CV_INSTRUMENT_REGION();

        Size winSize0 = cascade->orig_window_size;
        /* detection rectangles are reported at the original image scale */
        Size winSize(cvRound(winSize0.width*factor), cvRound(winSize0.height*factor));
        int y1 = range.start*stripSize, y2 = std::min(range.end*stripSize, sum1.rows - 1 - winSize0.height);

        if (y2 <= y1 || sum1.cols <= 1 + winSize0.width)
            return;

        Size ssz(sum1.cols - 1 - winSize0.width, y2 - y1);
        /* finer (x,y) stepping only at large scale factors */
        int x, y, ystep = factor > 2 ? 1 : 2;

        std::vector<Rect> vecLocal;
        std::vector<int> rejectLevelsLocal;
        std::vector<double> levelWeightsLocal;

        for( y = y1; y < y2; y += ystep )
            for( x = 0; x < ssz.width; x += ystep )
            {
                /* NOTE(review): gypWeight is only assigned inside the callee
                   when stages are evaluated — presumably unset for result==-1;
                   confirm before relying on the recorded weight */
                double gypWeight;
                int result = cvRunHaarClassifierCascadeSum( cascade, cvPoint(x,y), gypWeight, 0 );
                if( rejectLevels )
                {
                    /* full pass is mapped to "rejected past the last stage" so
                       count+result encodes how far the window got */
                    if( result == 1 )
                        result = -1*cascade->count;
                    if( cascade->count + result < 4 )
                    {
                        vecLocal.push_back(Rect(cvRound(x*factor), cvRound(y*factor),
                                           winSize.width, winSize.height));
                        rejectLevelsLocal.push_back(-result);
                        levelWeightsLocal.push_back(gypWeight);

                        if (vecLocal.size() >= PARALLEL_LOOP_BATCH_SIZE)
                        {
                            mtx->lock();
                            vec->insert(vec->end(), vecLocal.begin(), vecLocal.end());
                            rejectLevels->insert(rejectLevels->end(), rejectLevelsLocal.begin(), rejectLevelsLocal.end());
                            levelWeights->insert(levelWeights->end(), levelWeightsLocal.begin(), levelWeightsLocal.end());
                            mtx->unlock();

                            vecLocal.clear();
                            rejectLevelsLocal.clear();
                            levelWeightsLocal.clear();
                        }
                    }
                }
                else
                {
                    if( result > 0 )
                    {
                        vecLocal.push_back(Rect(cvRound(x*factor), cvRound(y*factor),
                                           winSize.width, winSize.height));

                        if (vecLocal.size() >= PARALLEL_LOOP_BATCH_SIZE)
                        {
                            mtx->lock();
                            vec->insert(vec->end(), vecLocal.begin(), vecLocal.end());
                            mtx->unlock();

                            vecLocal.clear();
                        }
                    }
                }
            }

        /* flush whatever remains in the local buffers */
        if (rejectLevelsLocal.size())
        {
            mtx->lock();
            vec->insert(vec->end(), vecLocal.begin(), vecLocal.end());
            rejectLevels->insert(rejectLevels->end(), rejectLevelsLocal.begin(), rejectLevelsLocal.end());
            levelWeights->insert(levelWeights->end(), levelWeightsLocal.begin(), levelWeightsLocal.end());
            mtx->unlock();
        }
        else
            if (vecLocal.size())
            {
                mtx->lock();
                vec->insert(vec->end(), vecLocal.begin(), vecLocal.end());
                mtx->unlock();
            }
    }

    const CvHaarClassifierCascade* cascade;  /* cascade under evaluation (not owned) */
    int stripSize;                           /* rows per parallel work unit */
    double factor;                           /* current image downscale factor */
    Mat sum1, sqsum1, *norm1, *mask1;        /* integral images for this scale */
    Rect equRect;                            /* normalization window */
    std::vector<Rect>* vec;                  /* shared output: detected rectangles */
    std::vector<int>* rejectLevels;          /* shared output: reject levels (optional) */
    std::vector<double>* levelWeights;       /* shared output: stage sums (optional) */
    Mutex* mtx;                              /* guards all shared output vectors */
};
969
970
971
class HaarDetectObjects_ScaleCascade_Invoker : public ParallelLoopBody
972
{
973
public:
974
HaarDetectObjects_ScaleCascade_Invoker( const CvHaarClassifierCascade* _cascade,
975
Size _winsize, const Range& _xrange, double _ystep,
976
size_t _sumstep, const int** _p, const int** _pq,
977
std::vector<Rect>& _vec, Mutex* _mtx )
978
{
979
cascade = _cascade;
980
winsize = _winsize;
981
xrange = _xrange;
982
ystep = _ystep;
983
sumstep = _sumstep;
984
p = _p; pq = _pq;
985
vec = &_vec;
986
mtx = _mtx;
987
}
988
989
void operator()(const Range& range) const CV_OVERRIDE
990
{
991
CV_INSTRUMENT_REGION();
992
993
int iy, startY = range.start, endY = range.end;
994
const int *p0 = p[0], *p1 = p[1], *p2 = p[2], *p3 = p[3];
995
const int *pq0 = pq[0], *pq1 = pq[1], *pq2 = pq[2], *pq3 = pq[3];
996
bool doCannyPruning = p0 != 0;
997
int sstep = (int)(sumstep/sizeof(p0[0]));
998
999
std::vector<Rect> vecLocal;
1000
1001
for( iy = startY; iy < endY; iy++ )
1002
{
1003
int ix, y = cvRound(iy*ystep), ixstep = 1;
1004
for( ix = xrange.start; ix < xrange.end; ix += ixstep )
1005
{
1006
int x = cvRound(ix*ystep); // it should really be ystep, not ixstep
1007
1008
if( doCannyPruning )
1009
{
1010
int offset = y*sstep + x;
1011
int s = p0[offset] - p1[offset] - p2[offset] + p3[offset];
1012
int sq = pq0[offset] - pq1[offset] - pq2[offset] + pq3[offset];
1013
if( s < 100 || sq < 20 )
1014
{
1015
ixstep = 2;
1016
continue;
1017
}
1018
}
1019
1020
int result = cvRunHaarClassifierCascade( cascade, cvPoint(x, y), 0 );
1021
if( result > 0 )
1022
{
1023
vecLocal.push_back(Rect(x, y, winsize.width, winsize.height));
1024
1025
if (vecLocal.size() >= PARALLEL_LOOP_BATCH_SIZE)
1026
{
1027
mtx->lock();
1028
vec->insert(vec->end(), vecLocal.begin(), vecLocal.end());
1029
mtx->unlock();
1030
1031
vecLocal.clear();
1032
}
1033
}
1034
ixstep = result != 0 ? 1 : 2;
1035
}
1036
}
1037
1038
if (vecLocal.size())
1039
{
1040
mtx->lock();
1041
vec->insert(vec->end(), vecLocal.begin(), vecLocal.end());
1042
mtx->unlock();
1043
}
1044
}
1045
1046
const CvHaarClassifierCascade* cascade;
1047
double ystep;
1048
size_t sumstep;
1049
Size winsize;
1050
Range xrange;
1051
const int** p;
1052
const int** pq;
1053
std::vector<Rect>* vec;
1054
Mutex* mtx;
1055
};
1056
1057
1058
}
1059
1060
1061
/* Detects objects with a Haar cascade, optionally reporting per-detection
   rejection levels and stage weights (for ROC evaluation).

   Two scanning strategies are used:
   - CV_HAAR_SCALE_IMAGE: the image is repeatedly downscaled and scanned with
     the cascade at its original window size (ScaleImage invoker);
   - otherwise: the cascade itself is rescaled and slid over the full-size
     integral images (ScaleCascade invoker), optionally with Canny pruning
     and the find-biggest-object ROI-narrowing heuristic.

   Raw candidates are grouped with groupRectangles() and returned as a CvSeq
   of CvAvgComp allocated from 'storage'. */
CvSeq*
cvHaarDetectObjectsForROC( const CvArr* _img,
                     CvHaarClassifierCascade* cascade, CvMemStorage* storage,
                     std::vector<int>& rejectLevels, std::vector<double>& levelWeights,
                     double scaleFactor, int minNeighbors, int flags,
                     CvSize minSize, CvSize maxSize, bool outputRejectLevels )
{
    CV_INSTRUMENT_REGION();

    const double GROUP_EPS = 0.2;
    CvMat stub, *img = (CvMat*)_img;
    cv::Ptr<CvMat> temp, sum, tilted, sqsum, normImg, sumcanny, imgSmall;
    CvSeq* result_seq = 0;
    cv::Ptr<CvMemStorage> temp_storage;

    std::vector<cv::Rect> allCandidates;
    std::vector<cv::Rect> rectList;
    std::vector<int> rweights;
    double factor;
    int coi;
    bool doCannyPruning = (flags & CV_HAAR_DO_CANNY_PRUNING) != 0;
    bool findBiggestObject = (flags & CV_HAAR_FIND_BIGGEST_OBJECT) != 0;
    bool roughSearch = (flags & CV_HAAR_DO_ROUGH_SEARCH) != 0;
    cv::Mutex mtx;

    /* --- argument validation --- */
    if( !CV_IS_HAAR_CLASSIFIER(cascade) )
        CV_Error( !cascade ? CV_StsNullPtr : CV_StsBadArg, "Invalid classifier cascade" );

    if( !storage )
        CV_Error( CV_StsNullPtr, "Null storage pointer" );

    img = cvGetMat( img, &stub, &coi );
    if( coi )
        CV_Error( CV_BadCOI, "COI is not supported" );

    if( CV_MAT_DEPTH(img->type) != CV_8U )
        CV_Error( CV_StsUnsupportedFormat, "Only 8-bit images are supported" );

    if( scaleFactor <= 1 )
        CV_Error( CV_StsOutOfRange, "scale factor must be > 1" );

    // Biggest-object search is incompatible with the scale-image path.
    if( findBiggestObject )
        flags &= ~CV_HAAR_SCALE_IMAGE;

    if( maxSize.height == 0 || maxSize.width == 0 )
    {
        maxSize.height = img->rows;
        maxSize.width = img->cols;
    }

    /* --- working buffers --- */
    temp.reset(cvCreateMat( img->rows, img->cols, CV_8UC1 ));
    sum.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
    sqsum.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_64FC1 ));

    if( !cascade->hid_cascade )
        icvCreateHidHaarClassifierCascade(cascade);

    if( cascade->hid_cascade->has_tilted_features )
        tilted.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));

    result_seq = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvAvgComp), storage );

    if( CV_MAT_CN(img->type) > 1 )
    {
        cvCvtColor( img, temp, CV_BGR2GRAY );
        img = temp;
    }

    if( findBiggestObject )
        flags &= ~(CV_HAAR_SCALE_IMAGE|CV_HAAR_DO_CANNY_PRUNING);

    if( flags & CV_HAAR_SCALE_IMAGE )
    {
        /* Path 1: shrink the image per scale; cascade runs at original size. */
        CvSize winSize0 = cascade->orig_window_size;
        imgSmall.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_8UC1 ));

        for( factor = 1; ; factor *= scaleFactor )
        {
            CvSize winSize = { cvRound(winSize0.width*factor),
                                cvRound(winSize0.height*factor) };
            CvSize sz = { cvRound(img->cols/factor), cvRound(img->rows/factor) };
            CvSize sz1 = { sz.width - winSize0.width + 1, sz.height - winSize0.height + 1 };

            CvRect equRect = { icv_object_win_border, icv_object_win_border,
                winSize0.width - icv_object_win_border*2,
                winSize0.height - icv_object_win_border*2 };

            CvMat img1, sum1, sqsum1, norm1, tilted1, mask1;
            CvMat* _tilted = 0;

            if( sz1.width <= 0 || sz1.height <= 0 )
                break;
            if( winSize.width > maxSize.width || winSize.height > maxSize.height )
                break;
            if( winSize.width < minSize.width || winSize.height < minSize.height )
                continue;

            // Headers over the preallocated buffers for the current scale.
            img1 = cvMat( sz.height, sz.width, CV_8UC1, imgSmall->data.ptr );
            sum1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, sum->data.ptr );
            sqsum1 = cvMat( sz.height+1, sz.width+1, CV_64FC1, sqsum->data.ptr );
            if( tilted )
            {
                tilted1 = cvMat( sz.height+1, sz.width+1, CV_32SC1, tilted->data.ptr );
                _tilted = &tilted1;
            }
            norm1 = cvMat( sz1.height, sz1.width, CV_32FC1, normImg ? normImg->data.ptr : 0 );
            mask1 = cvMat( sz1.height, sz1.width, CV_8UC1, temp->data.ptr );

            cvResize( img, &img1, cv::INTER_LINEAR_EXACT );
            cvIntegral( &img1, &sum1, &sqsum1, _tilted );

            int ystep = factor > 2 ? 1 : 2;
            const int LOCS_PER_THREAD = 1000;
            int stripCount = ((sz1.width/ystep)*(sz1.height + ystep-1)/ystep + LOCS_PER_THREAD/2)/LOCS_PER_THREAD;
            stripCount = std::min(std::max(stripCount, 1), 100);

            cvSetImagesForHaarClassifierCascade( cascade, &sum1, &sqsum1, _tilted, 1. );

            cv::Mat _norm1 = cv::cvarrToMat(&norm1), _mask1 = cv::cvarrToMat(&mask1);
            cv::parallel_for_(cv::Range(0, stripCount),
                         cv::HaarDetectObjects_ScaleImage_Invoker(cascade,
                                (((sz1.height + stripCount - 1)/stripCount + ystep-1)/ystep)*ystep,
                                factor, cv::cvarrToMat(&sum1), cv::cvarrToMat(&sqsum1), &_norm1, &_mask1,
                                cv::Rect(equRect), allCandidates, rejectLevels, levelWeights, outputRejectLevels, &mtx));
        }
    }
    else
    {
        /* Path 2: rescale the cascade; integral images computed once. */
        int n_factors = 0;
        cv::Rect scanROI;

        cvIntegral( img, sum, sqsum, tilted );

        if( doCannyPruning )
        {
            sumcanny.reset(cvCreateMat( img->rows + 1, img->cols + 1, CV_32SC1 ));
            cvCanny( img, temp, 0, 50, 3 );
            cvIntegral( temp, sumcanny );
        }

        // Count how many scales fit in the image.
        for( n_factors = 0, factor = 1;
             factor*cascade->orig_window_size.width < img->cols - 10 &&
             factor*cascade->orig_window_size.height < img->rows - 10;
             n_factors++, factor *= scaleFactor )
            ;

        // Biggest-object mode scans from the largest scale downwards.
        if( findBiggestObject )
        {
            scaleFactor = 1./scaleFactor;
            factor *= scaleFactor;
        }
        else
            factor = 1;

        for( ; n_factors-- > 0; factor *= scaleFactor )
        {
            const double ystep = std::max( 2., factor );
            cv::Size winSize(cvRound(cascade->orig_window_size.width * factor),
                             cvRound(cascade->orig_window_size.height * factor));
            cv::Rect equRect;
            int *p[4] = {0,0,0,0};
            int *pq[4] = {0,0,0,0};
            int startX = 0, startY = 0;
            int endX = cvRound((img->cols - winSize.width) / ystep);
            int endY = cvRound((img->rows - winSize.height) / ystep);

            if( winSize.width < minSize.width || winSize.height < minSize.height )
            {
                if( findBiggestObject )
                    break;
                continue;
            }

            if ( winSize.width > maxSize.width || winSize.height > maxSize.height )
            {
                if( !findBiggestObject )
                    break;
                continue;
            }

            cvSetImagesForHaarClassifierCascade( cascade, sum, sqsum, tilted, factor );
            cvZero( temp );

            if( doCannyPruning )
            {
                // Corner pointers into the edge and plain integral images over
                // the central 70% of the window, used for fast rejection.
                equRect.x = cvRound(winSize.width*0.15);
                equRect.y = cvRound(winSize.height*0.15);
                equRect.width = cvRound(winSize.width*0.7);
                equRect.height = cvRound(winSize.height*0.7);

                p[0] = (int*)(sumcanny->data.ptr + equRect.y*sumcanny->step) + equRect.x;
                p[1] = (int*)(sumcanny->data.ptr + equRect.y*sumcanny->step)
                            + equRect.x + equRect.width;
                p[2] = (int*)(sumcanny->data.ptr + (equRect.y + equRect.height)*sumcanny->step) + equRect.x;
                p[3] = (int*)(sumcanny->data.ptr + (equRect.y + equRect.height)*sumcanny->step)
                            + equRect.x + equRect.width;

                pq[0] = (int*)(sum->data.ptr + equRect.y*sum->step) + equRect.x;
                pq[1] = (int*)(sum->data.ptr + equRect.y*sum->step)
                            + equRect.x + equRect.width;
                pq[2] = (int*)(sum->data.ptr + (equRect.y + equRect.height)*sum->step) + equRect.x;
                pq[3] = (int*)(sum->data.ptr + (equRect.y + equRect.height)*sum->step)
                            + equRect.x + equRect.width;
            }

            if( !scanROI.empty() )
            {
                //adjust start_height and stop_height
                startY = cvRound(scanROI.y / ystep);
                endY = cvRound((scanROI.y + scanROI.height - winSize.height) / ystep);

                startX = cvRound(scanROI.x / ystep);
                endX = cvRound((scanROI.x + scanROI.width - winSize.width) / ystep);
            }

            cv::parallel_for_(cv::Range(startY, endY),
                cv::HaarDetectObjects_ScaleCascade_Invoker(cascade, winSize, cv::Range(startX, endX),
                                                           ystep, sum->step, (const int**)p,
                                                           (const int**)pq, allCandidates, &mtx ));

            if( findBiggestObject && !allCandidates.empty() && scanROI.empty() )
            {
                // First hits in biggest-object mode: keep only the largest
                // grouped rectangle and restrict further scales to an expanded
                // ROI around it.
                rectList.resize(allCandidates.size());
                std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());

                groupRectangles(rectList, std::max(minNeighbors, 1), GROUP_EPS);

                if( !rectList.empty() )
                {
                    size_t i, sz = rectList.size();
                    cv::Rect maxRect;

                    for( i = 0; i < sz; i++ )
                    {
                        if( rectList[i].area() > maxRect.area() )
                            maxRect = rectList[i];
                    }

                    allCandidates.push_back(maxRect);

                    scanROI = maxRect;
                    int dx = cvRound(maxRect.width*GROUP_EPS);
                    int dy = cvRound(maxRect.height*GROUP_EPS);
                    scanROI.x = std::max(scanROI.x - dx, 0);
                    scanROI.y = std::max(scanROI.y - dy, 0);
                    scanROI.width = std::min(scanROI.width + dx*2, img->cols-1-scanROI.x);
                    scanROI.height = std::min(scanROI.height + dy*2, img->rows-1-scanROI.y);

                    double minScale = roughSearch ? 0.6 : 0.4;
                    minSize.width = cvRound(maxRect.width*minScale);
                    minSize.height = cvRound(maxRect.height*minScale);
                }
            }
        }
    }

    /* --- group raw candidates and emit the result sequence --- */
    rectList.resize(allCandidates.size());
    if(!allCandidates.empty())
        std::copy(allCandidates.begin(), allCandidates.end(), rectList.begin());

    if( minNeighbors != 0 || findBiggestObject )
    {
        if( outputRejectLevels )
        {
            groupRectangles(rectList, rejectLevels, levelWeights, minNeighbors, GROUP_EPS );
        }
        else
        {
            groupRectangles(rectList, rweights, std::max(minNeighbors, 1), GROUP_EPS);
        }
    }
    else
        rweights.resize(rectList.size(),0);

    if( findBiggestObject && rectList.size() )
    {
        // Only the single largest grouped rectangle is returned.
        CvAvgComp result_comp = {{0, 0, 0, 0},0};

        for( size_t i = 0; i < rectList.size(); i++ )
        {
            cv::Rect r = rectList[i];
            if( r.area() > cv::Rect(result_comp.rect).area() )
            {
                result_comp.rect = cvRect(r);
                result_comp.neighbors = rweights[i];
            }
        }
        cvSeqPush( result_seq, &result_comp );
    }
    else
    {
        for( size_t i = 0; i < rectList.size(); i++ )
        {
            CvAvgComp c;
            c.rect = cvRect(rectList[i]);
            c.neighbors = !rweights.empty() ? rweights[i] : 0;
            cvSeqPush( result_seq, &c );
        }
    }

    return result_seq;
}
1362
1363
/* Public C API entry point: detects objects without reporting rejection
   levels. Thin wrapper that forwards to cvHaarDetectObjectsForROC with
   throwaway level/weight vectors and outputRejectLevels == false. */
CV_IMPL CvSeq*
cvHaarDetectObjects( const CvArr* _img,
                     CvHaarClassifierCascade* cascade, CvMemStorage* storage,
                     double scaleFactor,
                     int minNeighbors, int flags, CvSize minSize, CvSize maxSize )
{
    std::vector<int> fakeLevels;
    std::vector<double> fakeWeights;
    return cvHaarDetectObjectsForROC( _img, cascade, storage, fakeLevels, fakeWeights,
                                      scaleFactor, minNeighbors, flags, minSize, maxSize, false );
}
1375
1376
1377
/* Parses 'n' text-serialized stages (one string per stage, the legacy
   AdaBoostCARTHaarClassifier.txt format) into a freshly allocated
   CvHaarClassifierCascade. Each stage string encodes: classifier count,
   then per classifier its CART nodes (rects, tilted flag, threshold,
   left/right links), the leaf alphas, the stage threshold, and optional
   parent/next tree links. Caller owns the returned cascade. */
static CvHaarClassifierCascade*
icvLoadCascadeCART( const char** input_cascade, int n, CvSize orig_window_size )
{
    int i;
    CvHaarClassifierCascade* cascade = icvCreateHaarClassifierCascade(n);
    cascade->orig_window_size = orig_window_size;

    for( i = 0; i < n; i++ )
    {
        int j, count, l;
        float threshold = 0;
        const char* stage = input_cascade[i];
        int dl = 0;  // %n: number of characters consumed by the last sscanf

        /* tree links */
        int parent = -1;
        int next = -1;

        sscanf( stage, "%d%n", &count, &dl );
        stage += dl;

        CV_Assert( count > 0 && count < CV_HAAR_STAGE_MAX);
        cascade->stage_classifier[i].count = count;
        cascade->stage_classifier[i].classifier =
            (CvHaarClassifier*)cvAlloc( count*sizeof(cascade->stage_classifier[i].classifier[0]));

        for( j = 0; j < count; j++ )
        {
            CvHaarClassifier* classifier = cascade->stage_classifier[i].classifier + j;
            int k, rects = 0;
            char str[100];

            sscanf( stage, "%d%n", &classifier->count, &dl );
            stage += dl;

            CV_Assert( classifier->count > 0 && classifier->count< CV_HAAR_STAGE_MAX);
            // Single allocation holding features, thresholds, left/right
            // links and alphas back-to-back; the sub-pointers below carve
            // it up. Only haar_feature is ever passed to cvFree.
            classifier->haar_feature = (CvHaarFeature*) cvAlloc(
                classifier->count * ( sizeof( *classifier->haar_feature ) +
                                      sizeof( *classifier->threshold ) +
                                      sizeof( *classifier->left ) +
                                      sizeof( *classifier->right ) ) +
                (classifier->count + 1) * sizeof( *classifier->alpha ) );
            classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
            classifier->left = (int*) (classifier->threshold + classifier->count);
            classifier->right = (int*) (classifier->left + classifier->count);
            classifier->alpha = (float*) (classifier->right + classifier->count);

            for( l = 0; l < classifier->count; l++ )
            {
                sscanf( stage, "%d%n", &rects, &dl );
                stage += dl;

                CV_Assert( rects >= 2 && rects <= CV_HAAR_FEATURE_MAX );

                for( k = 0; k < rects; k++ )
                {
                    cv::Rect r;
                    int band = 0;  // read but unused by this loader
                    sscanf( stage, "%d%d%d%d%d%f%n",
                            &r.x, &r.y, &r.width, &r.height, &band,
                            &(classifier->haar_feature[l].rect[k].weight), &dl );
                    stage += dl;
                    classifier->haar_feature[l].rect[k].r = cvRect(r);
                }
                sscanf( stage, "%99s%n", str, &dl );
                stage += dl;

                classifier->haar_feature[l].tilted = strncmp( str, "tilted", 6 ) == 0;

                // Zero-fill the unused rect slots.
                for( k = rects; k < CV_HAAR_FEATURE_MAX; k++ )
                {
                    memset( classifier->haar_feature[l].rect + k, 0,
                            sizeof(classifier->haar_feature[l].rect[k]) );
                }

                sscanf( stage, "%f%d%d%n", &(classifier->threshold[l]),
                                       &(classifier->left[l]),
                                       &(classifier->right[l]), &dl );
                stage += dl;
            }
            for( l = 0; l <= classifier->count; l++ )
            {
                sscanf( stage, "%f%n", &(classifier->alpha[l]), &dl );
                stage += dl;
            }
        }

        sscanf( stage, "%f%n", &threshold, &dl );
        stage += dl;

        cascade->stage_classifier[i].threshold = threshold;

        /* load tree links; default to a linear chain when absent */
        if( sscanf( stage, "%d%d%n", &parent, &next, &dl ) != 2 )
        {
            parent = i - 1;
            next = -1;
        }
        stage += dl;

        CV_Assert(parent >= 0 && parent < i);
        cascade->stage_classifier[i].parent = parent;
        cascade->stage_classifier[i].next = next;
        cascade->stage_classifier[i].child = -1;

        if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
        {
            cascade->stage_classifier[parent].child = i;
        }
    }

    return cascade;
}
1490
1491
#ifndef _MAX_PATH
1492
#define _MAX_PATH 1024
1493
#endif
1494
1495
CV_IMPL CvHaarClassifierCascade*
1496
cvLoadHaarClassifierCascade( const char* directory, CvSize orig_window_size )
1497
{
1498
if( !directory )
1499
CV_Error( CV_StsNullPtr, "Null path is passed" );
1500
1501
char name[_MAX_PATH];
1502
1503
int n = (int)strlen(directory)-1;
1504
const char* slash = directory[n] == '\\' || directory[n] == '/' ? "" : "/";
1505
int size = 0;
1506
1507
/* try to read the classifier from directory */
1508
for( n = 0; ; n++ )
1509
{
1510
sprintf( name, "%s%s%d/AdaBoostCARTHaarClassifier.txt", directory, slash, n );
1511
FILE* f = fopen( name, "rb" );
1512
if( !f )
1513
break;
1514
fseek( f, 0, SEEK_END );
1515
size += ftell( f ) + 1;
1516
fclose(f);
1517
}
1518
1519
if( n == 0 && slash[0] )
1520
return (CvHaarClassifierCascade*)cvLoad( directory );
1521
1522
if( n == 0 )
1523
CV_Error( CV_StsBadArg, "Invalid path" );
1524
1525
size += (n+1)*sizeof(char*);
1526
const char** input_cascade = (const char**)cvAlloc( size );
1527
1528
if( !input_cascade )
1529
CV_Error( CV_StsNoMem, "Could not allocate memory for input_cascade" );
1530
1531
char* ptr = (char*)(input_cascade + n + 1);
1532
1533
for( int i = 0; i < n; i++ )
1534
{
1535
sprintf( name, "%s/%d/AdaBoostCARTHaarClassifier.txt", directory, i );
1536
FILE* f = fopen( name, "rb" );
1537
if( !f )
1538
CV_Error( CV_StsError, "" );
1539
fseek( f, 0, SEEK_END );
1540
size = (int)ftell( f );
1541
fseek( f, 0, SEEK_SET );
1542
size_t elements_read = fread( ptr, 1, size, f );
1543
CV_Assert(elements_read == (size_t)(size));
1544
fclose(f);
1545
input_cascade[i] = ptr;
1546
ptr += size;
1547
*ptr++ = '\0';
1548
}
1549
1550
input_cascade[n] = 0;
1551
1552
CvHaarClassifierCascade* cascade = icvLoadCascadeCART( input_cascade, n, orig_window_size );
1553
1554
if( input_cascade )
1555
cvFree( &input_cascade );
1556
1557
return cascade;
1558
}
1559
1560
1561
/* Releases a cascade and all memory it owns (per-classifier feature blocks,
   per-stage classifier arrays, the hidden cascade, and the cascade struct
   itself), then clears the caller's pointer. Safe to call with a NULL
   pointer or a pointer to NULL. */
CV_IMPL void
cvReleaseHaarClassifierCascade( CvHaarClassifierCascade** _cascade )
{
    if( _cascade && *_cascade )
    {
        int i, j;
        CvHaarClassifierCascade* cascade = *_cascade;

        for( i = 0; i < cascade->count; i++ )
        {
            // haar_feature is the base of each classifier's single combined
            // allocation (see icvLoadCascadeCART), so one cvFree suffices.
            for( j = 0; j < cascade->stage_classifier[i].count; j++ )
                cvFree( &cascade->stage_classifier[i].classifier[j].haar_feature );
            cvFree( &cascade->stage_classifier[i].classifier );
        }
        icvReleaseHidHaarClassifierCascade( &cascade->hid_cascade );
        cvFree( _cascade );
    }
}
1579
1580
1581
/****************************************************************************************\
*                                 Persistence functions                                  *
\****************************************************************************************/

/* field names used in the XML/YAML cascade representation */

#define ICV_HAAR_SIZE_NAME             "size"
#define ICV_HAAR_STAGES_NAME           "stages"
#define ICV_HAAR_TREES_NAME            "trees"
#define ICV_HAAR_FEATURE_NAME          "feature"
#define ICV_HAAR_RECTS_NAME            "rects"
#define ICV_HAAR_TILTED_NAME           "tilted"
#define ICV_HAAR_THRESHOLD_NAME        "threshold"
#define ICV_HAAR_LEFT_NODE_NAME        "left_node"
#define ICV_HAAR_LEFT_VAL_NAME         "left_val"
#define ICV_HAAR_RIGHT_NODE_NAME       "right_node"
#define ICV_HAAR_RIGHT_VAL_NAME        "right_val"
#define ICV_HAAR_STAGE_THRESHOLD_NAME  "stage_threshold"
#define ICV_HAAR_PARENT_NAME           "parent"
#define ICV_HAAR_NEXT_NAME             "next"
1602
static int
1603
icvIsHaarClassifier( const void* struct_ptr )
1604
{
1605
return CV_IS_HAAR_CLASSIFIER( struct_ptr );
1606
}
1607
1608
static void*
1609
icvReadHaarClassifier( CvFileStorage* fs, CvFileNode* node )
1610
{
1611
CvHaarClassifierCascade* cascade = NULL;
1612
1613
char buf[256];
1614
CvFileNode* seq_fn = NULL; /* sequence */
1615
CvFileNode* fn = NULL;
1616
CvFileNode* stages_fn = NULL;
1617
CvSeqReader stages_reader;
1618
int n;
1619
int i, j, k, l;
1620
int parent, next;
1621
1622
stages_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_STAGES_NAME );
1623
if( !stages_fn || !CV_NODE_IS_SEQ( stages_fn->tag) )
1624
CV_Error( CV_StsError, "Invalid stages node" );
1625
1626
n = stages_fn->data.seq->total;
1627
cascade = icvCreateHaarClassifierCascade(n);
1628
1629
/* read size */
1630
seq_fn = cvGetFileNodeByName( fs, node, ICV_HAAR_SIZE_NAME );
1631
if( !seq_fn || !CV_NODE_IS_SEQ( seq_fn->tag ) || seq_fn->data.seq->total != 2 )
1632
CV_Error( CV_StsError, "size node is not a valid sequence." );
1633
fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 0 );
1634
if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1635
CV_Error( CV_StsError, "Invalid size node: width must be positive integer" );
1636
cascade->orig_window_size.width = fn->data.i;
1637
fn = (CvFileNode*) cvGetSeqElem( seq_fn->data.seq, 1 );
1638
if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0 )
1639
CV_Error( CV_StsError, "Invalid size node: height must be positive integer" );
1640
cascade->orig_window_size.height = fn->data.i;
1641
1642
cvStartReadSeq( stages_fn->data.seq, &stages_reader );
1643
for( i = 0; i < n; ++i )
1644
{
1645
CvFileNode* stage_fn;
1646
CvFileNode* trees_fn;
1647
CvSeqReader trees_reader;
1648
1649
stage_fn = (CvFileNode*) stages_reader.ptr;
1650
if( !CV_NODE_IS_MAP( stage_fn->tag ) )
1651
{
1652
sprintf( buf, "Invalid stage %d", i );
1653
CV_Error( CV_StsError, buf );
1654
}
1655
1656
trees_fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_TREES_NAME );
1657
if( !trees_fn || !CV_NODE_IS_SEQ( trees_fn->tag )
1658
|| trees_fn->data.seq->total <= 0 )
1659
{
1660
sprintf( buf, "Trees node is not a valid sequence. (stage %d)", i );
1661
CV_Error( CV_StsError, buf );
1662
}
1663
1664
cascade->stage_classifier[i].classifier =
1665
(CvHaarClassifier*) cvAlloc( trees_fn->data.seq->total
1666
* sizeof( cascade->stage_classifier[i].classifier[0] ) );
1667
for( j = 0; j < trees_fn->data.seq->total; ++j )
1668
{
1669
cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
1670
}
1671
cascade->stage_classifier[i].count = trees_fn->data.seq->total;
1672
1673
cvStartReadSeq( trees_fn->data.seq, &trees_reader );
1674
for( j = 0; j < trees_fn->data.seq->total; ++j )
1675
{
1676
CvFileNode* tree_fn;
1677
CvSeqReader tree_reader;
1678
CvHaarClassifier* classifier;
1679
int last_idx;
1680
1681
classifier = &cascade->stage_classifier[i].classifier[j];
1682
tree_fn = (CvFileNode*) trees_reader.ptr;
1683
if( !CV_NODE_IS_SEQ( tree_fn->tag ) || tree_fn->data.seq->total <= 0 )
1684
{
1685
sprintf( buf, "Tree node is not a valid sequence."
1686
" (stage %d, tree %d)", i, j );
1687
CV_Error( CV_StsError, buf );
1688
}
1689
1690
classifier->count = tree_fn->data.seq->total;
1691
classifier->haar_feature = (CvHaarFeature*) cvAlloc(
1692
classifier->count * ( sizeof( *classifier->haar_feature ) +
1693
sizeof( *classifier->threshold ) +
1694
sizeof( *classifier->left ) +
1695
sizeof( *classifier->right ) ) +
1696
(classifier->count + 1) * sizeof( *classifier->alpha ) );
1697
classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
1698
classifier->left = (int*) (classifier->threshold + classifier->count);
1699
classifier->right = (int*) (classifier->left + classifier->count);
1700
classifier->alpha = (float*) (classifier->right + classifier->count);
1701
1702
cvStartReadSeq( tree_fn->data.seq, &tree_reader );
1703
for( k = 0, last_idx = 0; k < tree_fn->data.seq->total; ++k )
1704
{
1705
CvFileNode* node_fn;
1706
CvFileNode* feature_fn;
1707
CvFileNode* rects_fn;
1708
CvSeqReader rects_reader;
1709
1710
node_fn = (CvFileNode*) tree_reader.ptr;
1711
if( !CV_NODE_IS_MAP( node_fn->tag ) )
1712
{
1713
sprintf( buf, "Tree node %d is not a valid map. (stage %d, tree %d)",
1714
k, i, j );
1715
CV_Error( CV_StsError, buf );
1716
}
1717
feature_fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_FEATURE_NAME );
1718
if( !feature_fn || !CV_NODE_IS_MAP( feature_fn->tag ) )
1719
{
1720
sprintf( buf, "Feature node is not a valid map. "
1721
"(stage %d, tree %d, node %d)", i, j, k );
1722
CV_Error( CV_StsError, buf );
1723
}
1724
rects_fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_RECTS_NAME );
1725
if( !rects_fn || !CV_NODE_IS_SEQ( rects_fn->tag )
1726
|| rects_fn->data.seq->total < 1
1727
|| rects_fn->data.seq->total > CV_HAAR_FEATURE_MAX )
1728
{
1729
sprintf( buf, "Rects node is not a valid sequence. "
1730
"(stage %d, tree %d, node %d)", i, j, k );
1731
CV_Error( CV_StsError, buf );
1732
}
1733
cvStartReadSeq( rects_fn->data.seq, &rects_reader );
1734
for( l = 0; l < rects_fn->data.seq->total; ++l )
1735
{
1736
CvFileNode* rect_fn;
1737
cv::Rect r;
1738
1739
rect_fn = (CvFileNode*) rects_reader.ptr;
1740
if( !CV_NODE_IS_SEQ( rect_fn->tag ) || rect_fn->data.seq->total != 5 )
1741
{
1742
sprintf( buf, "Rect %d is not a valid sequence. "
1743
"(stage %d, tree %d, node %d)", l, i, j, k );
1744
CV_Error( CV_StsError, buf );
1745
}
1746
1747
fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 0 );
1748
if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1749
{
1750
sprintf( buf, "x coordinate must be non-negative integer. "
1751
"(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1752
CV_Error( CV_StsError, buf );
1753
}
1754
r.x = fn->data.i;
1755
fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 1 );
1756
if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i < 0 )
1757
{
1758
sprintf( buf, "y coordinate must be non-negative integer. "
1759
"(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1760
CV_Error( CV_StsError, buf );
1761
}
1762
r.y = fn->data.i;
1763
fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 2 );
1764
if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1765
|| r.x + fn->data.i > cascade->orig_window_size.width )
1766
{
1767
sprintf( buf, "width must be positive integer and "
1768
"(x + width) must not exceed window width. "
1769
"(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1770
CV_Error( CV_StsError, buf );
1771
}
1772
r.width = fn->data.i;
1773
fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 3 );
1774
if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= 0
1775
|| r.y + fn->data.i > cascade->orig_window_size.height )
1776
{
1777
sprintf( buf, "height must be positive integer and "
1778
"(y + height) must not exceed window height. "
1779
"(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1780
CV_Error( CV_StsError, buf );
1781
}
1782
r.height = fn->data.i;
1783
fn = CV_SEQ_ELEM( rect_fn->data.seq, CvFileNode, 4 );
1784
if( !CV_NODE_IS_REAL( fn->tag ) )
1785
{
1786
sprintf( buf, "weight must be real number. "
1787
"(stage %d, tree %d, node %d, rect %d)", i, j, k, l );
1788
CV_Error( CV_StsError, buf );
1789
}
1790
1791
classifier->haar_feature[k].rect[l].weight = (float) fn->data.f;
1792
classifier->haar_feature[k].rect[l].r = cvRect(r);
1793
1794
CV_NEXT_SEQ_ELEM( sizeof( *rect_fn ), rects_reader );
1795
} /* for each rect */
1796
for( l = rects_fn->data.seq->total; l < CV_HAAR_FEATURE_MAX; ++l )
1797
{
1798
classifier->haar_feature[k].rect[l].weight = 0;
1799
classifier->haar_feature[k].rect[l].r = cvRect( 0, 0, 0, 0 );
1800
}
1801
1802
fn = cvGetFileNodeByName( fs, feature_fn, ICV_HAAR_TILTED_NAME);
1803
if( !fn || !CV_NODE_IS_INT( fn->tag ) )
1804
{
1805
sprintf( buf, "tilted must be 0 or 1. "
1806
"(stage %d, tree %d, node %d)", i, j, k );
1807
CV_Error( CV_StsError, buf );
1808
}
1809
classifier->haar_feature[k].tilted = ( fn->data.i != 0 );
1810
fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_THRESHOLD_NAME);
1811
if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1812
{
1813
sprintf( buf, "threshold must be real number. "
1814
"(stage %d, tree %d, node %d)", i, j, k );
1815
CV_Error( CV_StsError, buf );
1816
}
1817
classifier->threshold[k] = (float) fn->data.f;
1818
fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_NODE_NAME);
1819
if( fn )
1820
{
1821
if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1822
|| fn->data.i >= tree_fn->data.seq->total )
1823
{
1824
sprintf( buf, "left node must be valid node number. "
1825
"(stage %d, tree %d, node %d)", i, j, k );
1826
CV_Error( CV_StsError, buf );
1827
}
1828
/* left node */
1829
classifier->left[k] = fn->data.i;
1830
}
1831
else
1832
{
1833
fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_LEFT_VAL_NAME );
1834
if( !fn )
1835
{
1836
sprintf( buf, "left node or left value must be specified. "
1837
"(stage %d, tree %d, node %d)", i, j, k );
1838
CV_Error( CV_StsError, buf );
1839
}
1840
if( !CV_NODE_IS_REAL( fn->tag ) )
1841
{
1842
sprintf( buf, "left value must be real number. "
1843
"(stage %d, tree %d, node %d)", i, j, k );
1844
CV_Error( CV_StsError, buf );
1845
}
1846
/* left value */
1847
if( last_idx >= classifier->count + 1 )
1848
{
1849
sprintf( buf, "Tree structure is broken: too many values. "
1850
"(stage %d, tree %d, node %d)", i, j, k );
1851
CV_Error( CV_StsError, buf );
1852
}
1853
classifier->left[k] = -last_idx;
1854
classifier->alpha[last_idx++] = (float) fn->data.f;
1855
}
1856
fn = cvGetFileNodeByName( fs, node_fn,ICV_HAAR_RIGHT_NODE_NAME);
1857
if( fn )
1858
{
1859
if( !CV_NODE_IS_INT( fn->tag ) || fn->data.i <= k
1860
|| fn->data.i >= tree_fn->data.seq->total )
1861
{
1862
sprintf( buf, "right node must be valid node number. "
1863
"(stage %d, tree %d, node %d)", i, j, k );
1864
CV_Error( CV_StsError, buf );
1865
}
1866
/* right node */
1867
classifier->right[k] = fn->data.i;
1868
}
1869
else
1870
{
1871
fn = cvGetFileNodeByName( fs, node_fn, ICV_HAAR_RIGHT_VAL_NAME );
1872
if( !fn )
1873
{
1874
sprintf( buf, "right node or right value must be specified. "
1875
"(stage %d, tree %d, node %d)", i, j, k );
1876
CV_Error( CV_StsError, buf );
1877
}
1878
if( !CV_NODE_IS_REAL( fn->tag ) )
1879
{
1880
sprintf( buf, "right value must be real number. "
1881
"(stage %d, tree %d, node %d)", i, j, k );
1882
CV_Error( CV_StsError, buf );
1883
}
1884
/* right value */
1885
if( last_idx >= classifier->count + 1 )
1886
{
1887
sprintf( buf, "Tree structure is broken: too many values. "
1888
"(stage %d, tree %d, node %d)", i, j, k );
1889
CV_Error( CV_StsError, buf );
1890
}
1891
classifier->right[k] = -last_idx;
1892
classifier->alpha[last_idx++] = (float) fn->data.f;
1893
}
1894
1895
CV_NEXT_SEQ_ELEM( sizeof( *node_fn ), tree_reader );
1896
} /* for each node */
1897
if( last_idx != classifier->count + 1 )
1898
{
1899
sprintf( buf, "Tree structure is broken: too few values. "
1900
"(stage %d, tree %d)", i, j );
1901
CV_Error( CV_StsError, buf );
1902
}
1903
1904
CV_NEXT_SEQ_ELEM( sizeof( *tree_fn ), trees_reader );
1905
} /* for each tree */
1906
1907
fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_STAGE_THRESHOLD_NAME);
1908
if( !fn || !CV_NODE_IS_REAL( fn->tag ) )
1909
{
1910
sprintf( buf, "stage threshold must be real number. (stage %d)", i );
1911
CV_Error( CV_StsError, buf );
1912
}
1913
cascade->stage_classifier[i].threshold = (float) fn->data.f;
1914
1915
parent = i - 1;
1916
next = -1;
1917
1918
fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_PARENT_NAME );
1919
if( !fn || !CV_NODE_IS_INT( fn->tag )
1920
|| fn->data.i < -1 || fn->data.i >= cascade->count )
1921
{
1922
sprintf( buf, "parent must be integer number. (stage %d)", i );
1923
CV_Error( CV_StsError, buf );
1924
}
1925
parent = fn->data.i;
1926
fn = cvGetFileNodeByName( fs, stage_fn, ICV_HAAR_NEXT_NAME );
1927
if( !fn || !CV_NODE_IS_INT( fn->tag )
1928
|| fn->data.i < -1 || fn->data.i >= cascade->count )
1929
{
1930
sprintf( buf, "next must be integer number. (stage %d)", i );
1931
CV_Error( CV_StsError, buf );
1932
}
1933
next = fn->data.i;
1934
1935
cascade->stage_classifier[i].parent = parent;
1936
cascade->stage_classifier[i].next = next;
1937
cascade->stage_classifier[i].child = -1;
1938
1939
if( parent != -1 && cascade->stage_classifier[parent].child == -1 )
1940
{
1941
cascade->stage_classifier[parent].child = i;
1942
}
1943
1944
CV_NEXT_SEQ_ELEM( sizeof( *stage_fn ), stages_reader );
1945
} /* for each stage */
1946
1947
return cascade;
1948
}
1949
1950
/* Serializes a CvHaarClassifierCascade into a CvFileStorage node (the legacy
   XML/YAML haar cascade format read back by icvReadHaarClassifier).
   Layout written: root map { size, stages[ { trees[ [ split-nodes ] ],
   stage_threshold, parent, next } ] }.  `struct_ptr` must point to a valid
   cascade; `name`/`attributes` are forwarded to the root cvStartWriteStruct. */
static void
icvWriteHaarClassifier( CvFileStorage* fs, const char* name, const void* struct_ptr,
                        CvAttrList attributes )
{
    int i, j, k, l;
    char buf[256];
    const CvHaarClassifierCascade* cascade = (const CvHaarClassifierCascade*) struct_ptr;

    /* TODO: parameters check */

    cvStartWriteStruct( fs, name, CV_NODE_MAP, CV_TYPE_NAME_HAAR, attributes );

    /* original training window size, written as a flow sequence [w, h] */
    cvStartWriteStruct( fs, ICV_HAAR_SIZE_NAME, CV_NODE_SEQ | CV_NODE_FLOW );
    cvWriteInt( fs, NULL, cascade->orig_window_size.width );
    cvWriteInt( fs, NULL, cascade->orig_window_size.height );
    cvEndWriteStruct( fs ); /* size */

    cvStartWriteStruct( fs, ICV_HAAR_STAGES_NAME, CV_NODE_SEQ );
    for( i = 0; i < cascade->count; ++i )
    {
        cvStartWriteStruct( fs, NULL, CV_NODE_MAP );
        sprintf( buf, "stage %d", i );
        cvWriteComment( fs, buf, 1 );

        cvStartWriteStruct( fs, ICV_HAAR_TREES_NAME, CV_NODE_SEQ );

        for( j = 0; j < cascade->stage_classifier[i].count; ++j )
        {
            CvHaarClassifier* tree = &cascade->stage_classifier[i].classifier[j];

            cvStartWriteStruct( fs, NULL, CV_NODE_SEQ );
            sprintf( buf, "tree %d", j );
            cvWriteComment( fs, buf, 1 );

            /* each tree is a flat sequence of split nodes; node 0 is the root */
            for( k = 0; k < tree->count; ++k )
            {
                CvHaarFeature* feature = &tree->haar_feature[k];

                cvStartWriteStruct( fs, NULL, CV_NODE_MAP );
                if( k )
                {
                    sprintf( buf, "node %d", k );
                }
                else
                {
                    sprintf( buf, "root node" );
                }
                cvWriteComment( fs, buf, 1 );

                cvStartWriteStruct( fs, ICV_HAAR_FEATURE_NAME, CV_NODE_MAP );

                /* rect list ends at the first slot with zero width (unused
                   entries of the fixed-size rect array are zero-filled) */
                cvStartWriteStruct( fs, ICV_HAAR_RECTS_NAME, CV_NODE_SEQ );
                for( l = 0; l < CV_HAAR_FEATURE_MAX && feature->rect[l].r.width != 0; ++l )
                {
                    cvStartWriteStruct( fs, NULL, CV_NODE_SEQ | CV_NODE_FLOW );
                    cvWriteInt( fs, NULL, feature->rect[l].r.x );
                    cvWriteInt( fs, NULL, feature->rect[l].r.y );
                    cvWriteInt( fs, NULL, feature->rect[l].r.width );
                    cvWriteInt( fs, NULL, feature->rect[l].r.height );
                    cvWriteReal( fs, NULL, feature->rect[l].weight );
                    cvEndWriteStruct( fs ); /* rect */
                }
                cvEndWriteStruct( fs ); /* rects */
                cvWriteInt( fs, ICV_HAAR_TILTED_NAME, feature->tilted );
                cvEndWriteStruct( fs ); /* feature */

                cvWriteReal( fs, ICV_HAAR_THRESHOLD_NAME, tree->threshold[k]);

                /* child encoding: a positive value is the index of the child
                   node; a non-positive value means "leaf", and the leaf value
                   is alpha[-left[k]] / alpha[-right[k]] */
                if( tree->left[k] > 0 )
                {
                    cvWriteInt( fs, ICV_HAAR_LEFT_NODE_NAME, tree->left[k] );
                }
                else
                {
                    cvWriteReal( fs, ICV_HAAR_LEFT_VAL_NAME,
                        tree->alpha[-tree->left[k]] );
                }

                if( tree->right[k] > 0 )
                {
                    cvWriteInt( fs, ICV_HAAR_RIGHT_NODE_NAME, tree->right[k] );
                }
                else
                {
                    cvWriteReal( fs, ICV_HAAR_RIGHT_VAL_NAME,
                        tree->alpha[-tree->right[k]] );
                }

                cvEndWriteStruct( fs ); /* split */
            }

            cvEndWriteStruct( fs ); /* tree */
        }

        cvEndWriteStruct( fs ); /* trees */

        cvWriteReal( fs, ICV_HAAR_STAGE_THRESHOLD_NAME, cascade->stage_classifier[i].threshold);
        cvWriteInt( fs, ICV_HAAR_PARENT_NAME, cascade->stage_classifier[i].parent );
        cvWriteInt( fs, ICV_HAAR_NEXT_NAME, cascade->stage_classifier[i].next );

        cvEndWriteStruct( fs ); /* stage */
    } /* for each stage */

    cvEndWriteStruct( fs ); /* stages */
    cvEndWriteStruct( fs ); /* root */
}
2056
2057
static void*
2058
icvCloneHaarClassifier( const void* struct_ptr )
2059
{
2060
CvHaarClassifierCascade* cascade = NULL;
2061
2062
int i, j, k, n;
2063
const CvHaarClassifierCascade* cascade_src =
2064
(const CvHaarClassifierCascade*) struct_ptr;
2065
2066
n = cascade_src->count;
2067
cascade = icvCreateHaarClassifierCascade(n);
2068
cascade->orig_window_size = cascade_src->orig_window_size;
2069
2070
for( i = 0; i < n; ++i )
2071
{
2072
cascade->stage_classifier[i].parent = cascade_src->stage_classifier[i].parent;
2073
cascade->stage_classifier[i].next = cascade_src->stage_classifier[i].next;
2074
cascade->stage_classifier[i].child = cascade_src->stage_classifier[i].child;
2075
cascade->stage_classifier[i].threshold = cascade_src->stage_classifier[i].threshold;
2076
2077
cascade->stage_classifier[i].count = 0;
2078
cascade->stage_classifier[i].classifier =
2079
(CvHaarClassifier*) cvAlloc( cascade_src->stage_classifier[i].count
2080
* sizeof( cascade->stage_classifier[i].classifier[0] ) );
2081
2082
cascade->stage_classifier[i].count = cascade_src->stage_classifier[i].count;
2083
2084
for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2085
cascade->stage_classifier[i].classifier[j].haar_feature = NULL;
2086
2087
for( j = 0; j < cascade->stage_classifier[i].count; ++j )
2088
{
2089
const CvHaarClassifier* classifier_src =
2090
&cascade_src->stage_classifier[i].classifier[j];
2091
CvHaarClassifier* classifier =
2092
&cascade->stage_classifier[i].classifier[j];
2093
2094
classifier->count = classifier_src->count;
2095
classifier->haar_feature = (CvHaarFeature*) cvAlloc(
2096
classifier->count * ( sizeof( *classifier->haar_feature ) +
2097
sizeof( *classifier->threshold ) +
2098
sizeof( *classifier->left ) +
2099
sizeof( *classifier->right ) ) +
2100
(classifier->count + 1) * sizeof( *classifier->alpha ) );
2101
classifier->threshold = (float*) (classifier->haar_feature+classifier->count);
2102
classifier->left = (int*) (classifier->threshold + classifier->count);
2103
classifier->right = (int*) (classifier->left + classifier->count);
2104
classifier->alpha = (float*) (classifier->right + classifier->count);
2105
for( k = 0; k < classifier->count; ++k )
2106
{
2107
classifier->haar_feature[k] = classifier_src->haar_feature[k];
2108
classifier->threshold[k] = classifier_src->threshold[k];
2109
classifier->left[k] = classifier_src->left[k];
2110
classifier->right[k] = classifier_src->right[k];
2111
classifier->alpha[k] = classifier_src->alpha[k];
2112
}
2113
classifier->alpha[classifier->count] =
2114
classifier_src->alpha[classifier->count];
2115
}
2116
}
2117
2118
return cascade;
2119
}
2120
2121
2122
/* Global registration object: the CvType constructor registers the haar
   cascade type with the persistence layer, wiring the is/release/read/write/
   clone callbacks defined above so that cvLoad/cvSave and cvClone can handle
   CvHaarClassifierCascade objects by their CV_TYPE_NAME_HAAR type name. */
CvType haar_type( CV_TYPE_NAME_HAAR, icvIsHaarClassifier,
                  (CvReleaseFunc)cvReleaseHaarClassifierCascade,
                  icvReadHaarClassifier, icvWriteHaarClassifier,
                  icvCloneHaarClassifier );
2126
2127
/* End of file. */
2128
2129