boost.cpp boost.h features.cpp features.h
haarfeatures.cpp haarfeatures.h
lbpfeatures.cpp lbpfeatures.h
- imagestorage.cpp _imagestorage.h
- inner_functions.cpp _inner_functions.h)
+ imagestorage.cpp imagestorage.h)
add_executable(traincascade ${traincascade_files})
+++ /dev/null
-#ifndef _IMAGESTORAGE_H\r
-#define _IMAGESTORAGE_H\r
-\r
-#include <highgui.h>\r
-\r
-//------------------- Background reading ---------------------\r
-struct CvBackgroundReader
-{
- CvBackgroundReader();
- virtual ~CvBackgroundReader();
-
- CvMat src;
- CvMat img;
- CvPoint offset;
- float scale;
- float scaleFactor;
- float stepFactor;
- CvPoint point;
-};\r
-\r
-struct CvBackgroundData
-{
- CvBackgroundData();
- CvBackgroundData( const char* fileName, CvSize _winSize );
- virtual ~CvBackgroundData();
- bool getImage( CvMat* img, bool reset );
- bool getNext( bool reset);
-
- CvBackgroundReader* bgReader;
- int count;
- char** fileName;
- int last;
- int round;
- CvSize winSize;
-};
-\r
-//--------------------- VecFile reading ------------------------\r
-struct CvVecFile
-{
- CvVecFile();
- CvVecFile( const char* _vecFileName );
- virtual ~CvVecFile();
-
- FILE* input;
- int count;
- int vecSize;
- int last;
- short* vector;
- int base;
-};\r
-\r
-//----------------------- CvImageReader -----------------------\r
-class CvImageReader\r
-{\r
-public:\r
- CvImageReader( const char* _vecFileName, const char* _bgfileName, CvSize _winSize );\r
- virtual ~CvImageReader();\r
-\r
- bool getNegImage(CvMat* img, bool reset = false);\r
- bool getPosImage(CvMat* img, bool reset = false);\r
-private:\r
- CvBackgroundData* bgData;\r
- CvVecFile* vecFile;\r
-};\r
-\r
-#endif
\ No newline at end of file
+++ /dev/null
-#ifndef _INNER_FUNCTIONS_H
-#define _INNER_FUNCTIONS_H
-
-#include "cv.h"
-#include "ml.h"
-
-#define CC_PATH_MAX 512
-
-#include <ctime>
-#ifdef _WIN32
-#define TIME( arg ) (((double) clock()) / CLOCKS_PER_SEC)
-#else
-#define TIME( arg ) (time( arg ))
-#endif /* _WIN32 */
-
-void cvGetSortedIndices( CvMat* val, CvMat* idx, int sortcols CV_DEFAULT( 0 ) );
-
-CvMat*
-cvPreprocessIndexArray( const CvMat* idx_arr, int data_arr_size, bool check_for_duplicates );
-
-static inline double
-log_ratio( double val )
-{
- const double eps = 1e-5;
-
- val = MAX( val, eps );
- val = MIN( val, 1. - eps );
- return log( val/(1. - val) );
-}
-
-// For old Haar Classifier file
-#define ICV_HAAR_SIZE_NAME "size"
-#define ICV_HAAR_STAGES_NAME "stages"
-#define ICV_HAAR_TREES_NAME "trees"
-#define ICV_HAAR_FEATURE_NAME "feature"
-#define ICV_HAAR_RECTS_NAME "rects"
-#define ICV_HAAR_TILTED_NAME "tilted"
-#define ICV_HAAR_THRESHOLD_NAME "threshold"
-#define ICV_HAAR_LEFT_NODE_NAME "left_node"
-#define ICV_HAAR_LEFT_VAL_NAME "left_val"
-#define ICV_HAAR_RIGHT_NODE_NAME "right_node"
-#define ICV_HAAR_RIGHT_VAL_NAME "right_val"
-#define ICV_HAAR_STAGE_THRESHOLD_NAME "stage_threshold"
-#define ICV_HAAR_PARENT_NAME "parent"
-#define ICV_HAAR_NEXT_NAME "next"
-
-#endif
\ No newline at end of file
#include "boost.h"
-#include "_inner_functions.h"
#include "cascadeclassifier.h"
#include <queue>
using namespace std;
+static inline double
+logRatio( double val )
+{
+ const double eps = 1e-5;
+
+ val = max( val, eps );
+ val = min( val, 1. - eps );
+ return log( val/(1. - val) );
+}
#define CV_CMP_FLT(i,j) (i < j)
-//static CV_IMPLEMENT_QSORT_EX( icvSortInt, int, CV_CMP_FLT, const int* )
static CV_IMPLEMENT_QSORT_EX( icvSortFlt, float, CV_CMP_FLT, const float* )
#define CV_CMP_NUM_IDX(i,j) (aux[i] < aux[j])
static CV_IMPLEMENT_QSORT_EX( icvSortIntAux, int, CV_CMP_NUM_IDX, const float* )
static CV_IMPLEMENT_QSORT_EX( icvSortUShAux, unsigned short, CV_CMP_NUM_IDX, const float* )
-#define CV_CMP_PAIRS(a,b) (*((a).i) < *((b).i))
-//static CV_IMPLEMENT_QSORT_EX( icvSortPairs, CvPair16u32s, CV_CMP_PAIRS, int )
-
-#define CV_CMP_NUM_PTR(a,b) (*(a) < *(b))
-//static CV_IMPLEMENT_QSORT_EX( icvSortIntPtr, int*, CV_CMP_NUM_PTR, int )
-//static CV_IMPLEMENT_QSORT_EX( icvSortDblPtr, double*, CV_CMP_NUM_PTR, int )
-
#define CV_THRESHOLD_EPS (0.00001F)
static const int MinBlockSize = 1 << 16;
//----------------------------- CascadeBoostParams -------------------------------------------------
+CvCascadeBoostParams::CvCascadeBoostParams() : minHitRate( 0.995F), maxFalseAlarm( 0.5F )
+{
+ boost_type = CvBoost::GENTLE;
+}
+
CvCascadeBoostParams::CvCascadeBoostParams( int _boostType,
float _minHitRate, float _maxFalseAlarm,
- double _weightTrimRate, int _maxDepth, int _maxWeakCount, const float* priors ) :
- CvBoostParams( _boostType, _maxWeakCount, _weightTrimRate, _maxDepth, false, priors )
+ double _weightTrimRate, int _maxDepth, int _maxWeakCount ) :
+ CvBoostParams( _boostType, _maxWeakCount, _weightTrimRate, _maxDepth, false, 0 )
{
boost_type = CvBoost::GENTLE;
minHitRate = _minHitRate;
maxFalseAlarm = _maxFalseAlarm;
}
-void CvCascadeBoostParams::write( CvFileStorage* fs ) const
+void CvCascadeBoostParams::write( FileStorage &fs ) const
{
- CV_FUNCNAME( "CvCascadeBoostParams::write" );
- __BEGIN__;
-
- const char* boostTypeStr;
- boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
- boost_type == CvBoost::REAL ? CC_REAL_BOOST :
- boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
- boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : 0;
- if( boostTypeStr )
- {
- CV_CALL( cvWriteString( fs, CC_BOOST_TYPE, boostTypeStr ) );
- }
- else
- {
- CV_CALL( cvWriteInt( fs, CC_BOOST_TYPE, boost_type ) );
- }
-
- CV_CALL( cvWriteReal( fs, CC_MINHITRATE, minHitRate ) );
- CV_CALL( cvWriteReal( fs, CC_MAXFALSEALARM, maxFalseAlarm ) );
- CV_CALL( cvWriteReal( fs, CC_TRIM_RATE, weight_trim_rate ) );
- CV_CALL( cvWriteInt( fs, CC_MAX_DEPTH, max_depth ) );
- CV_CALL( cvWriteInt( fs, CC_WEAK_COUNT, weak_count ) );
-
- __END__;
+ String boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
+ boost_type == CvBoost::REAL ? CC_REAL_BOOST :
+ boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
+ boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : String();
+ CV_Assert( !boostTypeStr.empty() );
+ fs << CC_BOOST_TYPE << boostTypeStr;
+ fs << CC_MINHITRATE << minHitRate;
+ fs << CC_MAXFALSEALARM << maxFalseAlarm;
+ fs << CC_TRIM_RATE << weight_trim_rate;
+ fs << CC_MAX_DEPTH << max_depth;
+ fs << CC_WEAK_COUNT << weak_count;
}
-bool CvCascadeBoostParams::read( CvFileStorage* fs, CvFileNode* map )
+bool CvCascadeBoostParams::read( const FileNode &node )
{
- bool res = false;
-
- CV_FUNCNAME( "CvCascadeBoostParams::read" );
- __BEGIN__;
-
- const char* boostTypeStr;
- CV_CALL( boostTypeStr = cvReadStringByName( fs, map, CC_BOOST_TYPE ) );
- if ( !boostTypeStr )
- EXIT;
- CV_CALL( boost_type = strcmp( boostTypeStr, CC_DISCRETE_BOOST ) == 0 ? CvBoost::DISCRETE :
- strcmp( boostTypeStr, CC_REAL_BOOST ) == 0 ? CvBoost::REAL :
- strcmp( boostTypeStr, CC_LOGIT_BOOST ) == 0 ? CvBoost::LOGIT :
- strcmp( boostTypeStr, CC_GENTLE_BOOST ) == 0 ? CvBoost::GENTLE : cvReadIntByName( fs, map, CC_BOOST_TYPE ) );
- CV_CALL( minHitRate = (float)cvReadRealByName( fs, map, CC_MINHITRATE ) );
- CV_CALL( maxFalseAlarm = (float)cvReadRealByName( fs, map, CC_MAXFALSEALARM ) );
- CV_CALL( weight_trim_rate = cvReadRealByName( fs, map, CC_TRIM_RATE ) );
- CV_CALL( max_depth = cvReadIntByName( fs, map, CC_MAX_DEPTH ) );
- CV_CALL( weak_count = cvReadIntByName( fs, map, CC_WEAK_COUNT ) );
+ String boostTypeStr;
+ FileNode rnode = node[CC_BOOST_TYPE];
+ rnode >> boostTypeStr;
+ boost_type = !boostTypeStr.compare( CC_DISCRETE_BOOST ) ? CvBoost::DISCRETE :
+ !boostTypeStr.compare( CC_REAL_BOOST ) ? CvBoost::REAL :
+ !boostTypeStr.compare( CC_LOGIT_BOOST ) ? CvBoost::LOGIT :
+ !boostTypeStr.compare( CC_GENTLE_BOOST ) ? CvBoost::GENTLE : -1;
+ if (boost_type == -1)
+ CV_Error( CV_StsBadArg, "unsupported Boost type" );
+ node[CC_MINHITRATE] >> minHitRate;
+ node[CC_MAXFALSEALARM] >> maxFalseAlarm;
+ node[CC_TRIM_RATE] >> weight_trim_rate ;
+ node[CC_MAX_DEPTH] >> max_depth ;
+ node[CC_WEAK_COUNT] >> weak_count ;
if ( minHitRate <= 0 || minHitRate > 1 ||
maxFalseAlarm <= 0 || maxFalseAlarm > 1 ||
weight_trim_rate <= 0 || weight_trim_rate > 1 ||
- max_depth <= 0 ||
- weak_count <= 0)
- EXIT;
-
- res = true;
-
- __END__;
-
- return res;
+ max_depth <= 0 || weak_count <= 0 )
+ CV_Error( CV_StsBadArg, "bad parameters range");
+ return true;
}
-void CvCascadeBoostParams::printDefault()
+void CvCascadeBoostParams::printDefaults() const
{
- printf( " [-bt <{%s, %s, %s, %s (default)}>]\n"
- " [-minHitRate <min_hit_rate> = %f]\n"
- " [-maxFalseAlarmRate <max_false_alarm_rate = %f>]\n"
- " [-weightTrimRate <weight_trim_rate = %f>]\n"
- " [-maxDepth <max_depth_of_weak_tree = %d>]\n"
- " [-maxWeakCount <max_weak_tree_count = %d>]\n",
- CC_DISCRETE_BOOST, CC_REAL_BOOST, CC_LOGIT_BOOST, CC_GENTLE_BOOST,
- minHitRate, maxFalseAlarm, weight_trim_rate, max_depth, weak_count );
+ cout << "--boostParams--" << endl;
+ cout << " [-bt <{" << CC_DISCRETE_BOOST << ", "
+ << CC_REAL_BOOST << ", "
+ << CC_LOGIT_BOOST ", "
+ << CC_GENTLE_BOOST << "(default)}>]" << endl;
+ cout << " [-minHitRate <min_hit_rate> = " << minHitRate << ">]" << endl;
+ cout << " [-maxFalseAlarmRate <max_false_alarm_rate = " << maxFalseAlarm << ">]" << endl;
+ cout << " [-weightTrimRate <weight_trim_rate = " << weight_trim_rate << ">]" << endl;
+ cout << " [-maxDepth <max_depth_of_weak_tree = " << max_depth << ">]" << endl;
+ cout << " [-maxWeakCount <max_weak_tree_count = " << weak_count << ">]" << endl;
}
-void CvCascadeBoostParams::printAttrs()
+void CvCascadeBoostParams::printAttrs() const
{
- const char* boostTypeStr;
- boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
- boost_type == CvBoost::REAL ? CC_REAL_BOOST :
- boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
- boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : 0;
- printf( "boostType: %s\n", boostTypeStr );
- printf( "minHitRate: %f\n", minHitRate );
- printf( "maxFalseAlarmRate: %f\n", maxFalseAlarm );
- printf( "weightTrimRate: %f\n", weight_trim_rate );
- printf( "maxTreeDepth: %d\n", max_depth );
- printf( "maxWeakCount: %d\n", weak_count );
+ String boostTypeStr = boost_type == CvBoost::DISCRETE ? CC_DISCRETE_BOOST :
+ boost_type == CvBoost::REAL ? CC_REAL_BOOST :
+ boost_type == CvBoost::LOGIT ? CC_LOGIT_BOOST :
+ boost_type == CvBoost::GENTLE ? CC_GENTLE_BOOST : String();
+ CV_Assert( !boostTypeStr.empty() );
+ cout << "boostType: " << boostTypeStr << endl;
+ cout << "minHitRate: " << minHitRate << endl;
+ cout << "maxFalseAlarmRate: " << maxFalseAlarm << endl;
+ cout << "weightTrimRate: " << weight_trim_rate << endl;
+ cout << "maxTreeDepth: " << max_depth << endl;
+ cout << "maxWeakCount: " << weak_count << endl;
}
-bool CvCascadeBoostParams::scanAttr( const char* prmName, const char* val)
+bool CvCascadeBoostParams::scanAttr( const String prmName, const String val)
{
bool res = true;
- if( !strcmp( prmName, "-bt" ) )
+ if( !prmName.compare( "-bt" ) )
{
- boost_type = !strcmp( val, CC_DISCRETE_BOOST ) ? CvBoost::DISCRETE :
- !strcmp( val, CC_REAL_BOOST ) ? CvBoost::REAL :
- !strcmp( val, CC_LOGIT_BOOST ) ? CvBoost::LOGIT :
- !strcmp( val, CC_GENTLE_BOOST ) ? CvBoost::GENTLE : -1;
+ boost_type = !val.compare( CC_DISCRETE_BOOST ) ? CvBoost::DISCRETE :
+ !val.compare( CC_REAL_BOOST ) ? CvBoost::REAL :
+ !val.compare( CC_LOGIT_BOOST ) ? CvBoost::LOGIT :
+ !val.compare( CC_GENTLE_BOOST ) ? CvBoost::GENTLE : -1;
if (boost_type == -1)
res = false;
}
- else if( !strcmp( prmName, "-minHitRate" ) )
+ else if( !prmName.compare( "-minHitRate" ) )
{
- minHitRate = (float) atof( val );
+ minHitRate = (float) atof( val.c_str() );
}
- else if( !strcmp( prmName, "-maxFalseAlarmRate" ) )
+ else if( !prmName.compare( "-maxFalseAlarmRate" ) )
{
- weight_trim_rate = (float) atof( val );
+ weight_trim_rate = (float) atof( val.c_str() );
}
- else if( !strcmp( prmName, "-weightTrimRate" ) )
+ else if( !prmName.compare( "-weightTrimRate" ) )
{
- weight_trim_rate = (float) atof( val );
+ weight_trim_rate = (float) atof( val.c_str() );
}
- else if( !strcmp( prmName, "-maxDepth" ) )
+ else if( !prmName.compare( "-maxDepth" ) )
{
- max_depth = atoi( val );
+ max_depth = atoi( val.c_str() );
}
- else if( !strcmp( prmName, "-maxWeakCount" ) )
+ else if( !prmName.compare( "-maxWeakCount" ) )
{
- weak_count = atoi( val );
+ weak_count = atoi( val.c_str() );
}
else
res = false;
//---------------------------- CascadeBoostTrainData -----------------------------
-CvCascadeBoostTrainData::CvCascadeBoostTrainData()
-{
- valCache = 0;
- cascadeData = 0;
- clear();
-}
-
-CvCascadeBoostTrainData::CvCascadeBoostTrainData( CvCascadeData* _cascadeData )
+CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
+ const CvDTreeParams& _params )
{
- CV_FUNCNAME( "CvCascadeBoostTrainData::CvCascadeBoostTrainData" );
- __BEGIN__;
-
- int maxSplitSize, treeBlockSize;
-
is_classifier = true;
- var_all = var_count = _cascadeData->getNumFeatures();
+ var_all = var_count = (int)_featureEvaluator->getNumFeatures();
- cascadeData = _cascadeData;
+ featureEvaluator = _featureEvaluator;
shared = true;
- valCache = 0;
-
- max_c_count = MAX( 2, cascadeData->getMaxCatCount() );
- assert( max_c_count >= 2 );
-
- CV_CALL( var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 ));
-
- if ( cascadeData->getMaxCatCount() > 0 )
+ set_params( _params );
+ max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() );
+ var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 );
+ if ( featureEvaluator->getMaxCatCount() > 0 )
{
numPrecalcIdx = 0;
cat_var_count = var_count;
}
var_type->data.i[var_count] = cat_var_count;
var_type->data.i[var_count+1] = cat_var_count+1;
-
- max_c_count = MAX( 2, cascadeData->getMaxCatCount() );
-
- maxSplitSize = cvAlign(sizeof(CvDTreeSplit) +
- (MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
- treeBlockSize = MAX((int)sizeof(CvDTreeNode)*8, maxSplitSize);
+ int maxSplitSize = cvAlign(sizeof(CvDTreeSplit) + (MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
+ int treeBlockSize = MAX((int)sizeof(CvDTreeNode)*8, maxSplitSize);
treeBlockSize = MAX(treeBlockSize + BlockSizeDelta, MinBlockSize);
- CV_CALL( tree_storage = cvCreateMemStorage( treeBlockSize ));
- CV_CALL( node_heap = cvCreateSet( 0, sizeof(node_heap[0]),
- sizeof(CvDTreeNode), tree_storage ));
- CV_CALL( split_heap = cvCreateSet( 0, sizeof(split_heap[0]),
- maxSplitSize, tree_storage ));
- __END__;
+ tree_storage = cvCreateMemStorage( treeBlockSize );
+ node_heap = cvCreateSet( 0, sizeof(node_heap[0]), sizeof(CvDTreeNode), tree_storage );
+ split_heap = cvCreateSet( 0, sizeof(split_heap[0]), maxSplitSize, tree_storage );
}
-CvCascadeBoostTrainData::CvCascadeBoostTrainData( CvCascadeData* _cascadeData,
- int _numPrecalcVal, int _numPrecalcIdx,
- const CvDTreeParams& _params )
+CvCascadeBoostTrainData::CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
+ int _numSamples,
+ int _numPrecalcVal, int _numPrecalcIdx,
+ const CvDTreeParams& _params )
{
- valCache = 0;
- set_data( _cascadeData, _numPrecalcVal, _numPrecalcIdx, _params );
+ setData( _featureEvaluator, _numSamples, _numPrecalcVal, _numPrecalcIdx, _params );
}
-CvCascadeBoostTrainData::~CvCascadeBoostTrainData()
-{
- clear();
-}
-
-void CvCascadeBoostTrainData::set_data( CvCascadeData* _cascadeData,
+void CvCascadeBoostTrainData::setData( const CvFeatureEvaluator* _featureEvaluator,
+ int _numSamples,
int _numPrecalcVal, int _numPrecalcIdx,
- const CvDTreeParams& _params,
- bool _updateData )
+ const CvDTreeParams& _params )
{
-
- CvCascadeBoostTrainData *data = 0;
-
- CV_FUNCNAME( "CvCascadeBoostTrainData::set_data" );
- __BEGIN__;
-
int* idst = 0;
unsigned short* udst = 0;
- //int totalCatCount = 0;
- int treeBlockSize, tempBlockSize, maxSplitSize, nvSize, size;
- int step = 0;
-
- if( _updateData && data_root )
- {
- CV_ERROR( CV_StsNotImplemented, "data update is not supported" );
- }
clear();
-
shared = true;
have_labels = true;
+ have_priors = false;
+ is_classifier = true;
rng = cvRNG(-1);
- CV_CALL( set_params( _params ));
-
- assert( _cascadeData );
+ set_params( _params );
- cascadeData = _cascadeData;
+ CV_Assert( _featureEvaluator );
+ featureEvaluator = _featureEvaluator;
- responses = cascadeData->getCls();
+ max_c_count = MAX( 2, featureEvaluator->getMaxCatCount() );
+ _resp = featureEvaluator->getCls();
+ responses = &_resp;
// TODO: check responses: elements must be 0 or 1
if( _numPrecalcVal < 0 || _numPrecalcIdx < 0)
- {
- CV_ERROR( CV_StsOutOfRange, "_numPrecalcVal <= 0 and _numPrecalcIdx must be positive or 0" );
- }
-
- var_count = var_all = cascadeData->getNumFeatures();
- sample_count = cascadeData->getNumSamples();
-
- numPrecalcVal = MIN( _numPrecalcVal, var_count );
- numPrecalcIdx = MIN( _numPrecalcIdx, var_count );
+ CV_Error( CV_StsOutOfRange, "_numPrecalcVal and _numPrecalcIdx must be positive or 0" );
+
+ var_count = var_all = featureEvaluator->getNumFeatures();
+ sample_count = _numSamples;
+ numPrecalcVal = min( _numPrecalcVal, var_count );
+ numPrecalcIdx = min( _numPrecalcIdx, var_count );
is_buf_16u = false;
if (sample_count < 65536)
is_buf_16u = true;
- CV_CALL( valCache = cvCreateMat( numPrecalcVal ? numPrecalcVal : 1, sample_count, CV_32FC1 ) );
+ valCache.create( numPrecalcVal ? numPrecalcVal : 1, sample_count, CV_32FC1 );
+ var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 );
- CV_CALL( var_type = cvCreateMat( 1, var_count + 2, CV_32SC1 ));
-
- is_classifier = true;
-
- if ( cascadeData->getMaxCatCount() > 0 )
+ if ( featureEvaluator->getMaxCatCount() > 0 )
{
numPrecalcIdx = 0;
cat_var_count = var_count;
}
var_type->data.i[var_count] = cat_var_count;
var_type->data.i[var_count+1] = cat_var_count+1;
-
work_var_count = ( cat_var_count ? var_count : numPrecalcIdx ) + 1;
buf_size = (work_var_count + 1) * sample_count;
buf_count = 2;
if ( is_buf_16u )
- {
- CV_CALL( buf = cvCreateMat( buf_count, buf_size, CV_16UC1 ));
- }
+ buf = cvCreateMat( buf_count, buf_size, CV_16UC1 );
else
- {
- CV_CALL( buf = cvCreateMat( buf_count, buf_size, CV_32SC1 ));
- }
+ buf = cvCreateMat( buf_count, buf_size, CV_32SC1 );
- size = cat_var_count + 1;
- CV_CALL( cat_count = cvCreateMat( 1, size, CV_32SC1 ));
-
- CV_CALL( pred_float_buf = (float*)cvAlloc(sample_count*sizeof(pred_float_buf[0])) );
- CV_CALL( pred_int_buf = (int*)cvAlloc(sample_count*sizeof(pred_int_buf[0])) );
- CV_CALL( resp_float_buf = (float*)cvAlloc(sample_count*sizeof(resp_float_buf[0])) );
- CV_CALL( resp_int_buf = (int*)cvAlloc(sample_count*sizeof(resp_int_buf[0])) );
- CV_CALL( cv_lables_buf = (int*)cvAlloc(sample_count*sizeof(cv_lables_buf[0])) );
- CV_CALL( sample_idx_buf = (int*)cvAlloc(sample_count*sizeof(sample_idx_buf[0])) );
+ cat_count = cvCreateMat( 1, cat_var_count + 1, CV_32SC1 );
+ pred_float_buf = (float*)cvAlloc(sample_count*sizeof(pred_float_buf[0]));
+ pred_int_buf = (int*)cvAlloc(sample_count*sizeof(pred_int_buf[0]));
+ resp_float_buf = (float*)cvAlloc(sample_count*sizeof(resp_float_buf[0]));
+ resp_int_buf = (int*)cvAlloc(sample_count*sizeof(resp_int_buf[0]));
+ cv_lables_buf = (int*)cvAlloc(sample_count*sizeof(cv_lables_buf[0]));
+ sample_idx_buf = (int*)cvAlloc(sample_count*sizeof(sample_idx_buf[0]));
// precalculate valCache and set indices in buf
precalculate();
// now calculate the maximum size of split,
// create memory storage that will keep nodes and splits of the decision tree
// allocate root node and the buffer for the whole training data
- maxSplitSize = cvAlign(sizeof(CvDTreeSplit) +
+ int maxSplitSize = cvAlign(sizeof(CvDTreeSplit) +
(MAX(0,sample_count - 33)/32)*sizeof(int),sizeof(void*));
- treeBlockSize = MAX((int)sizeof(CvDTreeNode)*8, maxSplitSize);
+ int treeBlockSize = MAX((int)sizeof(CvDTreeNode)*8, maxSplitSize);
treeBlockSize = MAX(treeBlockSize + BlockSizeDelta, MinBlockSize);
- CV_CALL( tree_storage = cvCreateMemStorage( treeBlockSize ));
- CV_CALL( node_heap = cvCreateSet( 0, sizeof(*node_heap), sizeof(CvDTreeNode), tree_storage ));
+ tree_storage = cvCreateMemStorage( treeBlockSize );
+ node_heap = cvCreateSet( 0, sizeof(*node_heap), sizeof(CvDTreeNode), tree_storage );
- nvSize = var_count*sizeof(int);
+ int nvSize = var_count*sizeof(int);
nvSize = cvAlign(MAX( nvSize, (int)sizeof(CvSetElem) ), sizeof(void*));
- tempBlockSize = nvSize;
+ int tempBlockSize = nvSize;
tempBlockSize = MAX( tempBlockSize + BlockSizeDelta, MinBlockSize );
- CV_CALL( temp_storage = cvCreateMemStorage( tempBlockSize ));
- CV_CALL( nv_heap = cvCreateSet( 0, sizeof(*nv_heap), nvSize, temp_storage ));
+ temp_storage = cvCreateMemStorage( tempBlockSize );
+ nv_heap = cvCreateSet( 0, sizeof(*nv_heap), nvSize, temp_storage );
- CV_CALL( data_root = new_node( 0, sample_count, 0, 0 ));
+ data_root = new_node( 0, sample_count, 0, 0 );
// set sample labels
if (is_buf_16u)
else
idst[si] = si;
}
-
- max_c_count = MAX( 2, cascadeData->getMaxCatCount() );
- assert( max_c_count >= 2 );
-
- step = valCache->step / CV_ELEM_SIZE(valCache->type);
for( int vi = 0; vi < var_count; vi++ )
- {
data_root->set_num_valid(vi, sample_count);
- }
for( int vi = 0; vi < cat_var_count; vi++ )
- {
cat_count->data.i[vi] = max_c_count;
- }
+
cat_count->data.i[cat_var_count] = 2;
maxSplitSize = cvAlign(sizeof(CvDTreeSplit) +
(MAX(0,max_c_count - 33)/32)*sizeof(int),sizeof(void*));
- CV_CALL( split_heap = cvCreateSet( 0, sizeof(*split_heap), maxSplitSize, tree_storage ));
-
- have_priors = is_classifier && params.priors;
- if( is_classifier ) // is_classifier == true
- {
- int m = get_num_classes();
- double sum = 0;
- CV_CALL( priors = cvCreateMat( 1, m, CV_64F ));
- for( int i = 0; i < m; i++ )
- {
- double val = have_priors ? params.priors[i] : 1.;
- if( val <= 0 )
- CV_ERROR( CV_StsOutOfRange, "Every class weight should be positive" );
- priors->data.db[i] = val;
- sum += val;
- }
-
- // normalize weights
- if( have_priors )
- cvScale( priors, priors, 1./sum );
-
- CV_CALL( priors_mult = cvCloneMat( priors ));
- CV_CALL( counts = cvCreateMat( 1, m, CV_32SC1 ));
- }
-
- CV_CALL( direction = cvCreateMat( 1, sample_count, CV_8UC1 ));
- CV_CALL( split_buf = cvCreateMat( 1, sample_count, CV_32SC1 ));
-
- __END__;
-
- if( data )
- delete data;
+ split_heap = cvCreateSet( 0, sizeof(*split_heap), maxSplitSize, tree_storage );
+
+ priors = cvCreateMat( 1, get_num_classes(), CV_64F );
+ cvSet(priors, cvScalar(1));
+ priors_mult = cvCloneMat( priors );
+ counts = cvCreateMat( 1, get_num_classes(), CV_32SC1 );
+ direction = cvCreateMat( 1, sample_count, CV_8UC1 );
+ split_buf = cvCreateMat( 1, sample_count, CV_32SC1 );
}
-void CvCascadeBoostTrainData::clear()
+void CvCascadeBoostTrainData::free_train_data()
{
- CvDTreeTrainData::clear();
- cvReleaseMat( &valCache );
- cascadeData = 0;
- numPrecalcVal = numPrecalcIdx = 0;
+ CvDTreeTrainData::free_train_data();
+ valCache.release();
}
void CvCascadeBoostTrainData::get_class_labels( CvDTreeNode* n, int* labelsBuf, const int** labels )
int nodeSampleCount = n->sample_count;
int* sampleIndicesBuf = sample_idx_buf;
const int* sampleIndices = 0;
- int rStep = CV_IS_MAT_CONT( responses->type ) ?
- 1 : responses->step / CV_ELEM_SIZE( responses->type );
+ int rStep = CV_IS_MAT_CONT( responses->type ) ? 1 : responses->step / CV_ELEM_SIZE( responses->type );
get_sample_indices(n, sampleIndicesBuf, &sampleIndices);
void CvCascadeBoostTrainData::get_cv_labels( CvDTreeNode* n, int* labels_buf, const int** labels )
{
- if (have_labels)
- CvDTreeTrainData::get_cat_var_data( n, get_work_var_count()- 1, labels_buf, labels );
+ CvDTreeTrainData::get_cat_var_data( n, get_work_var_count()- 1, labels_buf, labels );
}
int CvCascadeBoostTrainData::get_ord_var_data( CvDTreeNode* n, int vi, float* ordValuesBuf, int* indicesBuf,
const float** ordValues, const int** indices )
{
- int valStep = valCache->step / CV_ELEM_SIZE(valCache->type);
int nodeSampleCount = n->sample_count;
int* sampleIndicesBuf = sample_idx_buf;
const int* sampleIndices = 0;
if ( vi < numPrecalcIdx )
{
if( !is_buf_16u )
- *indices = buf->data.i + n->buf_idx*buf->cols +
- vi*sample_count + n->offset;
- else {
+ *indices = buf->data.i + n->buf_idx*buf->cols + vi*sample_count + n->offset;
+ else
+ {
const unsigned short* shortIndices = (const unsigned short*)(buf->data.s + n->buf_idx*buf->cols +
- vi*sample_count + n->offset );
+ vi*sample_count + n->offset );
for( int i = 0; i < nodeSampleCount; i++ )
indicesBuf[i] = shortIndices[i];
*indices = indicesBuf;
{
int idx = (*indices)[i];
idx = sampleIndices[idx];
- ordValuesBuf[i] = *(valCache->data.fl + vi * valStep + idx);
+ ordValuesBuf[i] = valCache.at<float>( vi, idx);
}
}
else
{
int idx = (*indices)[i];
idx = sampleIndices[idx];
- ordValuesBuf[i] = cascadeData->calcFeature( vi, idx);
+ ordValuesBuf[i] = (*featureEvaluator)( vi, idx);
}
}
}
{
for( int i = 0; i < nodeSampleCount; i++ )
{
- int idx = sampleIndices[i];
indicesBuf[i] = i;
- ((float*)sampleIndices)[i] = *(valCache->data.fl + vi * valStep + idx);
+ ((float*)sampleIndices)[i] = valCache.at<float>( vi, sampleIndices[i] );
}
}
else
{
for( int i = 0; i < nodeSampleCount; i++ )
{
- int idx = sampleIndices[i];
indicesBuf[i] = i;
- ((float*)sampleIndices)[i] = cascadeData->calcFeature( vi, idx);
+ ((float*)sampleIndices)[i] = (*featureEvaluator)( vi, sampleIndices[i]);
}
}
icvSortIntAux( indicesBuf, sample_count, (float *)sampleIndices );
for( int i = 0; i < nodeSampleCount; i++ )
- {
- int idx = indicesBuf[i];
- ordValuesBuf[i] = ((float*)sampleIndices)[idx];
- }
+ ordValuesBuf[i] = ((float*)sampleIndices)[indicesBuf[i]];
*indices = indicesBuf;
}
int CvCascadeBoostTrainData::get_cat_var_data( CvDTreeNode* n, int vi, int* catValuesBuf, const int** catValues )
{
- int valStep = valCache->step / CV_ELEM_SIZE(valCache->type);
int nodeSampleCount = n->sample_count;
int* sampleIndicesBuf = sample_idx_buf;
const int* sampleIndices = 0;
if ( vi < numPrecalcVal )
{
for( int i = 0; i < nodeSampleCount; i++ )
- {
- int idx = sampleIndices[i];
- catValuesBuf[i] = (int)*(valCache->data.fl + vi * valStep + idx);
- }
+ catValuesBuf[i] = (int) valCache.at<float>( vi, sampleIndices[i]);
}
else
{
for( int i = 0; i < nodeSampleCount; i++ )
- {
- int idx = sampleIndices[i];
- catValuesBuf[i] = (int)cascadeData->calcFeature( vi, idx);
- }
+ catValuesBuf[i] = (int)(*featureEvaluator)( vi, sampleIndices[i] );
}
*catValues = catValuesBuf;
- return 0; //TODO: return the number of non-missing values
+ return 0;
}
float CvCascadeBoostTrainData::getVarValue( int vi, int si )
{
- if ( vi < numPrecalcVal && valCache )
- {
- int val_step = valCache->step / CV_ELEM_SIZE(valCache->type);
- return *(valCache->data.fl + vi * val_step + si);
- }
- return cascadeData->calcFeature( vi, si);
-}
-
-void CvCascadeBoostTrainData::free_train_data()
-{
- CvDTreeTrainData::free_train_data();
-
- cvReleaseMat( &valCache );
+ if ( vi < numPrecalcVal && !valCache.empty() )
+ return valCache.at<float>( vi, si );
+ return (*featureEvaluator)( vi, si );
}
void CvCascadeBoostTrainData::precalculate()
{
- //CV_FUNCNAME( "CvCascadeBoostTrainData::precalculate" );
- __BEGIN__;
-
- double proctime;
int minNum = MIN( numPrecalcVal, numPrecalcIdx);
unsigned short* udst = (unsigned short*)buf->data.s;
int* idst = buf->data.i;
- int valStep = valCache ? valCache->step / CV_ELEM_SIZE(valCache->type) : 0;
- assert( valCache );
-
- proctime = -TIME( 0 );
+ CV_DbgAssert( !valCache.empty() );
+ double proctime = -TIME( 0 );
for ( int fi = numPrecalcVal; fi < numPrecalcIdx; fi++)
{
for( int si = 0; si < sample_count; si++ )
{
- float val = cascadeData->calcFeature( fi, si );
- CV_MAT_ELEM( *valCache, float, 0, si ) = val;
+ valCache.ptr<float>(0)[si] = (*featureEvaluator)( fi, si );
if ( is_buf_16u )
- *(udst + fi*sample_count + si) = si;
+ *(udst + fi*sample_count + si) = (unsigned short)si;
else
*(idst + fi*sample_count + si) = si;
}
if ( is_buf_16u )
- icvSortUShAux( udst + fi*sample_count, sample_count,
- valCache->data.fl );
+ icvSortUShAux( udst + fi*sample_count, sample_count, (float*)valCache.data );
else
- icvSortIntAux( idst + fi*sample_count, sample_count,
- valCache->data.fl );
+ icvSortIntAux( idst + fi*sample_count, sample_count, (float*)valCache.data );
}
for ( int fi = 0; fi < minNum; fi++)
{
for( int si = 0; si < sample_count; si++ )
{
- float val = cascadeData->calcFeature( fi, si );
- CV_MAT_ELEM( *valCache, float, fi, si ) = val;
+ valCache.ptr<float>(fi)[si] = (*featureEvaluator)( fi, si );
if ( is_buf_16u )
- *(udst + fi*sample_count + si) = si;
+ *(udst + fi*sample_count + si) = (unsigned short)si;
else
*(idst + fi*sample_count + si) = si;
}
if ( is_buf_16u )
- icvSortUShAux( udst + fi*sample_count, sample_count,
- valCache->data.fl + fi*valStep);
+ icvSortUShAux( udst + fi*sample_count, sample_count, (float*)valCache.data );
else
- icvSortIntAux( idst + fi*sample_count, sample_count,
- valCache->data.fl + fi*valStep );
+ icvSortIntAux( idst + fi*sample_count, sample_count, (float*)valCache.data );
}
for ( int fi = minNum; fi < numPrecalcVal; fi++)
- {
for( int si = 0; si < sample_count; si++ )
- {
- float val = cascadeData->calcFeature( fi, si );
- CV_MAT_ELEM( *valCache, float, fi, si ) = val;
- }
- }
-
- printf( "Precalculation time: %.2f\n", (proctime + TIME( 0 )) );
- __END__;
+ valCache.ptr<float>(fi)[si] = (*featureEvaluator)( fi, si );
+
+ cout << "Precalculation time: " << (proctime + TIME( 0 )) << endl;
}
//-------------------------------- CascadeBoostTree ----------------------------------------
CvDTreeNode* CvCascadeBoostTree::predict( int sampleIdx ) const
{
- CvDTreeNode* result = 0;
-
- CV_FUNCNAME( "CvCascadeBoostTree::predict" );
- __BEGIN__;
-
CvDTreeNode* node = root;
-
if( !node )
- CV_ERROR( CV_StsError, "The tree has not been trained yet" );
-
- if ( ((CvCascadeBoostTrainData*)data)->cascadeData->getMaxCatCount() == 0 ) // ordered
+ CV_Error( CV_StsError, "The tree has not been trained yet" );
+
+ if ( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount() == 0 ) // ordered
{
- while( node->left )
+ while( node->left )
{
CvDTreeSplit* split = node->split;
- int vi = split->var_idx;
- float val = ((CvCascadeBoostTrainData*)data)->getVarValue( vi, sampleIdx );
- int dir = val <= split->ord.c ? -1 : 1;
- node = dir < 0 ? node->left : node->right;
+ float val = ((CvCascadeBoostTrainData*)data)->getVarValue( split->var_idx, sampleIdx );
+ node = val <= split->ord.c ? node->left : node->right;
}
}
else // categorical
while( node->left )
{
CvDTreeSplit* split = node->split;
- int vi = split->var_idx;
- int c = (int)((CvCascadeBoostTrainData*)data)->getVarValue( vi, sampleIdx );
- int dir = CV_DTREE_CAT_DIR(c, split->subset);
- node = dir < 0 ? node->left : node->right;
+ int c = (int)((CvCascadeBoostTrainData*)data)->getVarValue( split->var_idx, sampleIdx );
+ node = CV_DTREE_CAT_DIR(c, split->subset) < 0 ? node->left : node->right;
}
}
- result = node;
-
- __END__;
-
- return result;
+ return node;
}
-void CvCascadeBoostTree::write( CvFileStorage* fs, const CvMat* featureMap )
+void CvCascadeBoostTree::write( FileStorage &fs, const Mat& featureMap )
{
- CV_FUNCNAME( "CvCascadeBoostTree::write" );
- __BEGIN__;
-
- int maxCatCount = ((CvCascadeBoostTrainData*)data)->cascadeData->getMaxCatCount();
+ int maxCatCount = ((CvCascadeBoostTrainData*)data)->featureEvaluator->getMaxCatCount();
int subsetN = (maxCatCount + 31)/32;
queue<CvDTreeNode*> internalNodesQueue;
- float* leafVals = (float *)cvAlloc( (int)pow(2.f, (float)ensemble->get_data()->params.max_depth) *
- sizeof(leafVals[0]) );
+ int size = (int)pow( 2.f, (float)ensemble->get_params().max_depth);
+ Ptr<float> leafVals = new float[size];
int leafValIdx = 0;
int internalNodeIdx = 1;
CvDTreeNode* tempNode;
- assert( root );
+ CV_DbgAssert( root );
internalNodesQueue.push( root );
- CV_CALL( cvStartWriteStruct( fs, 0, CV_NODE_MAP ) );
-
- CV_CALL( cvStartWriteStruct( fs, CC_INTERNAL_NODES, CV_NODE_SEQ | CV_NODE_FLOW ) );
+ fs << "{";
+ fs << CC_INTERNAL_NODES << "[:";
while (!internalNodesQueue.empty())
{
- int fidx;
tempNode = internalNodesQueue.front();
-
- assert ( tempNode->left );
+ CV_Assert( tempNode->left );
if ( !tempNode->left->left && !tempNode->left->right) // left node is leaf
{
leafVals[-leafValIdx] = (float)tempNode->left->value;
- CV_CALL( cvWriteInt( fs, NULL, leafValIdx-- ) );
+ fs << leafValIdx-- ;
}
else
{
internalNodesQueue.push( tempNode->left );
- CV_CALL( cvWriteInt( fs, NULL, internalNodeIdx++ ) );
+ fs << internalNodeIdx++;
}
-
- assert( tempNode->right );
+ CV_Assert( tempNode->right );
if ( !tempNode->right->left && !tempNode->right->right) // right node is leaf
{
leafVals[-leafValIdx] = (float)tempNode->right->value;
- CV_CALL( cvWriteInt( fs, NULL, leafValIdx-- ) );
+ fs << leafValIdx--;
}
else
{
internalNodesQueue.push( tempNode->right );
- CV_CALL( cvWriteInt( fs, NULL, internalNodeIdx++ ) );
+ fs << internalNodeIdx++;
}
- fidx = tempNode->split->var_idx;
- fidx = featureMap ? featureMap->data.i[fidx] : fidx;
- CV_CALL( cvWriteInt( fs, NULL, fidx ) );
+ int fidx = tempNode->split->var_idx;
+ fidx = featureMap.empty() ? fidx : featureMap.at<int>(0, fidx);
+ fs << fidx;
if ( !maxCatCount )
- {
- CV_CALL( cvWriteReal( fs, NULL, tempNode->split->ord.c ) );
- }
+ fs << tempNode->split->ord.c;
else
- {
for( int i = 0; i < subsetN; i++ )
- CV_CALL( cvWriteInt( fs, NULL, tempNode->split->subset[i]) );
- }
-
+ fs << tempNode->split->subset[i];
internalNodesQueue.pop();
}
- CV_CALL( cvEndWriteStruct( fs ) ); //internalNodes
+ fs << "]"; // CC_INTERNAL_NODES
- CV_CALL( cvStartWriteStruct( fs, CC_LEAF_VALUES, CV_NODE_SEQ | CV_NODE_FLOW ) );
+ fs << CC_LEAF_VALUES << "[:";
for (int ni = 0; ni < -leafValIdx; ni++)
- CV_CALL( cvWriteReal( fs, NULL, leafVals[ni] ) );
- CV_CALL( cvEndWriteStruct( fs ) ); //leafValsues
-
- CV_CALL( cvEndWriteStruct( fs ) );
- cvFree( &leafVals );
- __END__;
+ fs << leafVals[ni];
+ fs << "]"; // CC_LEAF_VALUES
+ fs << "}";
}
-void CvCascadeBoostTree::read( CvFileStorage* fs, CvFileNode* node, CvBoost* _ensemble,
+void CvCascadeBoostTree::read( const FileNode &node, CvBoost* _ensemble,
CvDTreeTrainData* _data )
{
- CV_FUNCNAME( "CvCascadeBoostTree::read" );
- __BEGIN__;
-
- int maxCatCount = ((CvCascadeBoostTrainData*)_data)->cascadeData->getMaxCatCount();
+ int maxCatCount = ((CvCascadeBoostTrainData*)_data)->featureEvaluator->getMaxCatCount();
int subsetN = (maxCatCount + 31)/32;
int step = 3 + ( maxCatCount>0 ? subsetN : 1 );
queue<CvDTreeNode*> internalNodesQueue;
-
- CvSeq* internalNodes, *leafValsues;
+ FileNodeIterator internalNodesIt, leafValsuesIt;
CvDTreeNode* prntNode, *cldNode;
- int intIdx, leafIdx;
clear();
data = _data;
pruned_tree_idx = 0;
// read tree nodes
- CV_CALL( internalNodes = ((CvFileNode*)cvGetFileNodeByName( fs, node, CC_INTERNAL_NODES ))->data.seq );
- CV_CALL( leafValsues = ((CvFileNode*)cvGetFileNodeByName( fs, node, CC_LEAF_VALUES ))->data.seq );
-
- intIdx = internalNodes->total;
- leafIdx = leafValsues->total;
- for( int i = 0; i < internalNodes->total/step; i++)
+ FileNode rnode = node[CC_INTERNAL_NODES];
+ internalNodesIt = rnode.end();
+ leafValsuesIt = node[CC_LEAF_VALUES].end();
+ internalNodesIt--; leafValsuesIt--;
+ for( size_t i = 0; i < rnode.size()/step; i++ )
{
prntNode = data->new_node( 0, 0, 0, 0 );
if ( maxCatCount > 0 )
prntNode->split = data->new_split_cat( 0, 0 );
for( int j = subsetN-1; j>=0; j--)
{
- int c = ((CvFileNode*)cvGetSeqElem( internalNodes, --intIdx ))->data.i;
- prntNode->split->subset[j] = c;
+ internalNodesIt >> prntNode->split->subset[j]; internalNodesIt -=2;
}
}
else
{
- float split_value = (float)((CvFileNode*)cvGetSeqElem( internalNodes, --intIdx ))->data.f;
+ float split_value;
+ internalNodesIt >> split_value; internalNodesIt -=2;
prntNode->split = data->new_split_ord( 0, split_value, 0, 0, 0);
}
- int split_var_idx = ((CvFileNode*)cvGetSeqElem( internalNodes, --intIdx ))->data.i;
- prntNode->split->var_idx = split_var_idx;
- int ridx = ((CvFileNode*)cvGetSeqElem( internalNodes, --intIdx ))->data.i;
- int lidx = ((CvFileNode*)cvGetSeqElem( internalNodes, --intIdx ))->data.i;
-
+ internalNodesIt >> prntNode->split->var_idx; internalNodesIt -=2;
+ int ridx, lidx;
+ internalNodesIt >> ridx; internalNodesIt -=2;
+ internalNodesIt >> lidx;internalNodesIt -=2;
if ( ridx <= 0)
{
- float leafValsue = (float)((CvFileNode*)cvGetSeqElem( leafValsues, --leafIdx ))->data.f;
prntNode->right = cldNode = data->new_node( 0, 0, 0, 0 );
- cldNode->value = leafValsue;
+ leafValsuesIt >> cldNode->value; leafValsuesIt-=2;
cldNode->parent = prntNode;
}
else
if ( lidx <= 0)
{
- float leafValsue = (float)((CvFileNode*)cvGetSeqElem( leafValsues, --leafIdx ))->data.f;
prntNode->left = cldNode = data->new_node( 0, 0, 0, 0 );
- cldNode->value = leafValsue;
+ leafValsuesIt >> cldNode->value; leafValsuesIt-=2;
cldNode->parent = prntNode;
}
else
root = internalNodesQueue.front();
internalNodesQueue.pop();
-
- __END__;
}
void CvCascadeBoostTree::split_node_data( CvDTreeNode* node )
{
- int i, n = node->sample_count, nl, nr, scount = data->sample_count;
+ int n = node->sample_count, nl, nr, scount = data->sample_count;
char* dir = (char*)data->direction->data.ptr;
CvDTreeNode *left = 0, *right = 0;
int* newIdx = data->split_buf->data.i;
complete_node_dir(node);
- for( i = nl = nr = 0; i < n; i++ )
+ for( int i = nl = nr = 0; i < n; i++ )
{
int d = dir[i];
// initialize new indices for splitting ordered variables
data->get_ord_var_data(node, vi, src_val_buf, src_idx_buf, &src_val, &src_idx);
- for(i = 0; i < n; i++)
+ for(int i = 0; i < n; i++)
tempBuf[i] = src_idx[i];
if (data->is_buf_16u)
rdst0 = rdst = (unsigned short*)(ldst + nl);
// split sorted
- for( i = 0; i < n1; i++ )
+ for( int i = 0; i < n1; i++ )
{
int idx = tempBuf[i];
int d = dir[idx];
vi*scount + right->offset;
// split sorted
- for( i = 0; i < n1; i++ )
+ for( int i = 0; i < n1; i++ )
{
int idx = tempBuf[i];
int d = dir[idx];
left->set_num_valid(vi, (int)(ldst - ldst0));
right->set_num_valid(vi, (int)(rdst - rdst0));
-
- assert( n1 == n);
+ CV_Assert( n1 == n);
}
}
const int* src_lbls = 0;
data->get_cv_labels(node, src_lbls_buf, &src_lbls);
- for(i = 0; i < n; i++)
+ for(int i = 0; i < n; i++)
tempBuf[i] = src_lbls[i];
if (data->is_buf_16u)
unsigned short *rdst = (unsigned short *)(buf->data.s + right->buf_idx*buf->cols +
(workVarCount-1)*scount + right->offset);
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
- int d = dir[i];
int idx = tempBuf[i];
- if (d)
+ if (dir[i])
{
*rdst = (unsigned short)idx;
rdst++;
int *rdst = buf->data.i + right->buf_idx*buf->cols +
(workVarCount-1)*scount + right->offset;
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
- int d = dir[i];
int idx = tempBuf[i];
- if (d)
+ if (dir[i])
{
*rdst = idx;
rdst++;
*ldst = idx;
ldst++;
}
-
}
}
for( int vi = 0; vi < data->var_count; vi++ )
const int* sampleIdx_src = 0;
data->get_sample_indices(node, sampleIdx_src_buf, &sampleIdx_src);
- for(i = 0; i < n; i++)
+ for(int i = 0; i < n; i++)
tempBuf[i] = sampleIdx_src[i];
if (data->is_buf_16u)
workVarCount*scount + left->offset);
unsigned short* rdst = (unsigned short*)(buf->data.s + right->buf_idx*buf->cols +
workVarCount*scount + right->offset);
- for (i = 0; i < n; i++)
+ for (int i = 0; i < n; i++)
{
- int d = dir[i];
unsigned short idx = (unsigned short)tempBuf[i];
- if (d)
+ if (dir[i])
{
*rdst = idx;
rdst++;
workVarCount*scount + left->offset;
int* rdst = buf->data.i + right->buf_idx*buf->cols +
workVarCount*scount + right->offset;
- for (i = 0; i < n; i++)
+ for (int i = 0; i < n; i++)
{
- int d = dir[i];
int idx = tempBuf[i];
- if (d)
+ if (dir[i])
{
*rdst = idx;
rdst++;
data->free_node_data(node);
}
-void CvCascadeBoostTree::markFeaturesInMap( CvMat* featureMap )
-{
- auxMarkFeaturesInMap( root, featureMap );
-}
-
-void CvCascadeBoostTree::auxMarkFeaturesInMap( const CvDTreeNode* node, CvMat* featureMap)
+void auxMarkFeaturesInMap( const CvDTreeNode* node, Mat& featureMap)
{
if ( node && node->split )
{
- featureMap->data.i[node->split->var_idx] = 1;
+ featureMap.ptr<int>(0)[node->split->var_idx] = 1;
auxMarkFeaturesInMap( node->left, featureMap );
auxMarkFeaturesInMap( node->right, featureMap );
}
}
-//----------------------------------- CascadeBoost --------------------------------------
-CvCascadeBoost::CvCascadeBoost()
+void CvCascadeBoostTree::markFeaturesInMap( Mat& featureMap )
{
- data = 0;
- weak = 0;
- active_vars = active_vars_abs = orig_response = sum_response = weak_eval =
- subsample_mask = weights = subtree_weights = 0;
- have_active_cat_vars = have_subsample = false;
- threshold = -1;
-
- clear();
-}
-
-CvCascadeBoost::CvCascadeBoost( CvCascadeData* _cascadeData,
- int _numPrecalcVal, int _numPrecalcIdx,
- CvCascadeBoostParams _params )
-{
- weak = 0;
- data = 0;
- orig_response = sum_response = weak_eval = subsample_mask = weights = 0;
- threshold = -1;
-
- train( _cascadeData, _numPrecalcVal, _numPrecalcIdx, _params );
+ auxMarkFeaturesInMap( root, featureMap );
}
-bool CvCascadeBoost::set_params( const CvBoostParams& _params )
-{
- minHitRate = ((CvCascadeBoostParams&)_params).minHitRate;
- maxFalseAlarm = ((CvCascadeBoostParams&)_params).maxFalseAlarm;
- return ( ( minHitRate > 0 ) && ( minHitRate < 1) &&
- ( maxFalseAlarm > 0 ) && ( maxFalseAlarm < 1) &&
- CvBoost::set_params( _params ));
-}
+//----------------------------------- CascadeBoost --------------------------------------
-bool CvCascadeBoost::train( CvCascadeData* _cascadeData,
- int _numPrecalcVal, int _numPrecalcIdx,
- CvCascadeBoostParams _params,
- bool _update )
+bool CvCascadeBoost::train( const CvFeatureEvaluator* _featureEvaluator,
+ int _numSamples,
+ int _numPrecalcVal, int _numPrecalcIdx,
+ const CvCascadeBoostParams& _params )
{
- bool ok = false;
- CvMemStorage* storage = 0;
-
- CV_FUNCNAME( "CvCascadeBoost::train" );
- __BEGIN__;
+ CV_Assert( !data );
+ clear();
+ data = new CvCascadeBoostTrainData( _featureEvaluator, _numSamples,
+ _numPrecalcVal, _numPrecalcIdx, _params );
+ CvMemStorage *storage = cvCreateMemStorage();
+ weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
+ storage = 0;
set_params( _params );
-
- cvReleaseMat( &active_vars );
- cvReleaseMat( &active_vars_abs );
-
- if( !_update || !data )
- {
- clear();
- data = new CvCascadeBoostTrainData( _cascadeData, _numPrecalcVal, _numPrecalcIdx, _params );
-
- /*if( data->get_num_classes() != 2 )
- CV_ERROR( CV_StsNotImplemented,
- "Boosted trees can only be used for 2-class classification." );*/
- CV_CALL( storage = cvCreateMemStorage() );
- weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
- storage = 0;
- }
- else
- {
- ((CvCascadeBoostTrainData*)data)->set_data( _cascadeData, _numPrecalcVal, _numPrecalcIdx, _params );
- }
-
if ( (_params.boost_type == LOGIT) || (_params.boost_type == GENTLE) )
data->do_responses_copy();
update_weights( 0 );
- printf( "+----+---------+---------+\n" );
- printf( "| N | HR | FA |\n" );
- printf( "+----+---------+---------+\n" );
+ cout << "+----+---------+---------+" << endl;
+ cout << "| N | HR | FA |" << endl;
+ cout << "+----+---------+---------+" << endl;
do
{
}
while( !isErrDesired() && (weak->total < params.weak_count) );
- get_active_vars();
data->is_classifier = true;
- ok = true;
-
data->free_train_data();
-
- __END__;
-
- return ok;
+ return true;
}
float CvCascadeBoost::predict( int sampleIdx, bool returnSum ) const
{
- //float* buf = 0;
- float value = -FLT_MAX;
-
- CV_FUNCNAME( "CvCascadeBoost::predict" );
- __BEGIN__;
-
- int i, varCount;
- CvSeqReader reader;
+ CV_Assert( weak );
double sum = 0;
-
- CvSize winSize = ((CvCascadeBoostTrainData*)data)->cascadeData->getWinSize();
- //int maxCatCount = ((CvCascadeBoostTrainData*)data)->cascadeData->getMaxCatCount();
- varCount = ((CvCascadeBoostTrainData*)data)->cascadeData->getNumFeatures();
-
- if( !weak )
- CV_ERROR( CV_StsError, "The boosted tree ensemble has not been trained yet" );
-
+ CvSeqReader reader;
cvStartReadSeq( weak, &reader );
cvSetSeqReaderPos( &reader, 0 );
-
- for( i = 0; i < weak->total; i++ )
+ for( int i = 0; i < weak->total; i++ )
{
CvBoostTree* wtree;
CV_READ_SEQ_ELEM( wtree, reader );
sum += ((CvCascadeBoostTree*)wtree)->predict(sampleIdx)->value;
}
- if( returnSum )
- value = (float)sum;
- else
- {
- int clsIdx = sum < threshold - CV_THRESHOLD_EPS ? 0 : 1;
- value = (float)clsIdx;
- }
-
- __END__;
-
- return value;
+ if( !returnSum )
+ sum = sum < threshold - CV_THRESHOLD_EPS ? 0.0 : 1.0;
+ return (float)sum;
}
-const CvCascadeBoostTrainData* CvCascadeBoost::get_data() const
+bool CvCascadeBoost::set_params( const CvBoostParams& _params )
{
- return (CvCascadeBoostTrainData*)data;
+ minHitRate = ((CvCascadeBoostParams&)_params).minHitRate;
+ maxFalseAlarm = ((CvCascadeBoostParams&)_params).maxFalseAlarm;
+ return ( ( minHitRate > 0 ) && ( minHitRate < 1) &&
+ ( maxFalseAlarm > 0 ) && ( maxFalseAlarm < 1) &&
+ CvBoost::set_params( _params ));
}
void CvCascadeBoost::update_weights( CvBoostTree* tree )
{
- CV_FUNCNAME( "CvCascadeBoost::update_weights" );
- __BEGIN__;
-
- int i, n = data->sample_count;
+ int n = data->sample_count;
double sumW = 0.;
int step = 0;
float* fdata = 0;
CvMat* buf = data->buf;
if( !tree ) // before training the first tree, initialize weights and other parameters
{
- int n = data->sample_count;
int* classLabelsBuf = data->resp_int_buf;
const int* classLabels = 0;
data->get_class_labels(data->data_root, classLabelsBuf, &classLabels);
cvReleaseMat( &subsample_mask );
cvReleaseMat( &weights );
- CV_CALL( orig_response = cvCreateMat( 1, n, CV_32S ));
- CV_CALL( weak_eval = cvCreateMat( 1, n, CV_64F ));
- CV_CALL( subsample_mask = cvCreateMat( 1, n, CV_8U ));
- CV_CALL( weights = cvCreateMat( 1, n, CV_64F ));
- CV_CALL( subtree_weights = cvCreateMat( 1, n + 2, CV_64F ));
-
- if( data->have_priors )
- {
- // compute weight scale for each class from their prior probabilities
- int c1 = 0;
- for( i = 0; i < n; i++ )
- c1 += classLabels[i];
- p[0] = data->priors->data.db[0]*(c1 < n ? 1./(n - c1) : 0.);
- p[1] = data->priors->data.db[1]*(c1 > 0 ? 1./c1 : 0.);
- p[0] /= p[0] + p[1];
- p[1] = 1. - p[0];
- }
+ orig_response = cvCreateMat( 1, n, CV_32S );
+ weak_eval = cvCreateMat( 1, n, CV_64F );
+ subsample_mask = cvCreateMat( 1, n, CV_8U );
+ weights = cvCreateMat( 1, n, CV_64F );
+ subtree_weights = cvCreateMat( 1, n + 2, CV_64F );
if (data->is_buf_16u)
{
unsigned short* labels = (unsigned short*)(buf->data.s + data->data_root->buf_idx*buf->cols +
data->data_root->offset + (data->work_var_count-1)*data->sample_count);
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
// save original categorical responses {0,1}, convert them to {-1,1}
orig_response->data.i[i] = classLabels[i]*2 - 1;
int* labels = buf->data.i + data->data_root->buf_idx*buf->cols +
data->data_root->offset + (data->work_var_count-1)*data->sample_count;
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
// save original categorical responses {0,1}, convert them to {-1,1}
orig_response->data.i[i] = classLabels[i]*2 - 1;
- // make all the samples active at start.
- // later, in trim_weights() deactivate/reactive again some, if need
subsample_mask->data.ptr[i] = (uchar)1;
- // make all the initial weights the same.
weights->data.db[i] = w0*p[classLabels[i]];
- // set the labels to find (from within weak tree learning proc)
- // the particular sample weight, and where to store the response.
labels[i] = i;
}
}
if( params.boost_type == LOGIT )
{
- CV_CALL( sum_response = cvCreateMat( 1, n, CV_64F ));
+ sum_response = cvCreateMat( 1, n, CV_64F );
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
sum_response->data.db[i] = 0;
fdata[sampleIdx[i]*step] = orig_response->data.i[i] > 0 ? 2.f : -2.f;
}
else if( params.boost_type == GENTLE )
{
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
fdata[sampleIdx[i]*step] = (float)orig_response->data.i[i];
data->is_classifier = false;
cvXorS( subsample_mask, cvScalar(1.), subsample_mask );
// run tree through all the non-processed samples
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
if( subsample_mask->data.ptr[i] )
{
weak_eval->data.db[i] = ((CvCascadeBoostTree*)tree)->predict( i )->value;
double C, err = 0.;
double scale[] = { 1., 0. };
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
double w = weights->data.db[i];
sumW += w;
if( sumW != 0 )
err /= sumW;
- C = err = -log_ratio( err );
+ C = err = -logRatio( err );
scale[1] = exp(err);
sumW = 0;
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
double w = weights->data.db[i]*
scale[weak_eval->data.db[i] != orig_response->data.i[i]];
// weak_eval[i] = f(x_i) = 0.5*log(p(x_i)/(1-p(x_i))), p(x_i)=P(y=1|x_i)
// w_i *= exp(-y_i*f(x_i))
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
weak_eval->data.db[i] *= -orig_response->data.i[i];
cvExp( weak_eval, weak_eval );
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
double w = weights->data.db[i]*weak_eval->data.db[i];
sumW += w;
const float* responses = 0;
data->get_ord_responses(data->data_root, responsesBuf, &responses);
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
double s = sum_response->data.db[i] + 0.5*weak_eval->data.db[i];
sum_response->data.db[i] = s;
cvExp( weak_eval, weak_eval );
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
double p = 1./(1. + weak_eval->data.db[i]);
double w = p*(1 - p), z;
if( orig_response->data.i[i] > 0 )
{
z = 1./p;
- fdata[sampleIdx[i]*step] = (float)MIN(z, lbZMax);
+ fdata[sampleIdx[i]*step] = (float)min(z, lbZMax);
}
else
{
z = 1./(1-p);
- fdata[sampleIdx[i]*step] = (float)-MIN(z, lbZMax);
+ fdata[sampleIdx[i]*step] = (float)-min(z, lbZMax);
}
}
}
// w_i *= exp(-y_i*f(x_i))
assert( params.boost_type == GENTLE );
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
weak_eval->data.db[i] *= -orig_response->data.i[i];
cvExp( weak_eval, weak_eval );
- for( i = 0; i < n; i++ )
+ for( int i = 0; i < n; i++ )
{
double w = weights->data.db[i] * weak_eval->data.db[i];
weights->data.db[i] = w;
if( sumW > FLT_EPSILON )
{
sumW = 1./sumW;
- for( i = 0; i < n; ++i )
+ for( int i = 0; i < n; ++i )
weights->data.db[i] *= sumW;
}
-
- __END__;
}
bool CvCascadeBoost::isErrDesired()
{
- int sCount = data->sample_count, numPos = 0, numNeg = 0, numFalse = 0, numPosTrue = 0;
+ int sCount = data->sample_count,
+ numPos = 0, numNeg = 0, numFalse = 0, numPosTrue = 0;
float* eval = (float*) cvStackAlloc( sizeof(eval[0]) * sCount );
- float hitRate, falseAlarm;
- int thresholdIdx;
+
for( int i = 0; i < sCount; i++ )
- {
- if( ((CvCascadeBoostTrainData*)data)->cascadeData->getCls( i ) == 1.0F )
- {
- eval[numPos] = predict( i, true );
- numPos++;
- }
- }
+ if( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 1.0F )
+ eval[numPos++] = predict( i, true );
icvSortFlt( eval, numPos, 0 );
- thresholdIdx = (int)((1.0F - minHitRate) * numPos);
+ int thresholdIdx = (int)((1.0F - minHitRate) * numPos);
threshold = eval[ thresholdIdx ];
numPosTrue = numPos - thresholdIdx;
for( int i = thresholdIdx - 1; i >= 0; i--)
- {
if ( abs( eval[i] - threshold) < FLT_EPSILON )
numPosTrue++;
- }
- hitRate = ((float) numPosTrue) / ((float) numPos);
+ float hitRate = ((float) numPosTrue) / ((float) numPos);
+
for( int i = 0; i < sCount; i++ )
{
- if( ((CvCascadeBoostTrainData*)data)->cascadeData->getCls( i ) == 0.0F )
+ if( ((CvCascadeBoostTrainData*)data)->featureEvaluator->getCls( i ) == 0.0F )
{
numNeg++;
if( predict( i ) )
numFalse++;
}
}
- falseAlarm = ((float) numFalse) / ((float) numNeg);
+ float falseAlarm = ((float) numFalse) / ((float) numNeg);
- printf( "|%4d|%9f|%9f|\n", weak->total, hitRate, falseAlarm );
- printf( "+----+---------+---------+\n" );
+ cout << "|"; cout.width(4); cout << right << weak->total;
+ cout << "|"; cout.width(9); cout << right << hitRate;
+ cout << "|"; cout.width(9); cout << right << falseAlarm;
+ cout << "|" << endl;
+ cout << "+----+---------+---------+" << endl;
return falseAlarm <= maxFalseAlarm;
}
-void CvCascadeBoost::write( CvFileStorage* fs, const CvMat* featureMap )
+void CvCascadeBoost::write( FileStorage &fs, const Mat& featureMap ) const
{
- const char treeStr[] = "tree";
- char cmnt[30];
-
+// char cmnt[30];
CvCascadeBoostTree* weakTree;
- cvWriteInt( fs, CC_WEAK_COUNT, weak->total );
- cvWriteReal( fs, CC_STAGE_THRESHOLD, threshold );
- cvStartWriteStruct( fs, CC_WEAK_CLASSIFIERS, CV_NODE_SEQ );
+ fs << CC_WEAK_COUNT << weak->total;
+ fs << CC_STAGE_THRESHOLD << threshold;
+ fs << CC_WEAK_CLASSIFIERS << "[";
for( int wi = 0; wi < weak->total; wi++)
{
- sprintf( cmnt, "%s %i", treeStr, wi );
- cvWriteComment( fs, cmnt, 0 );
+ /*sprintf( cmnt, "tree %i", wi );
+ cvWriteComment( fs, cmnt, 0 );*/
weakTree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
weakTree->write( fs, featureMap );
}
- cvEndWriteStruct( fs ); // weak_classifiers
+ fs << "]";
}
-bool CvCascadeBoost::read( CvFileStorage* fs, CvFileNode* node, CvCascadeData* _cascadeData,
- CvCascadeBoostParams* _params )
+bool CvCascadeBoost::read( const FileNode &node,
+ const CvFeatureEvaluator* _featureEvaluator,
+ const CvCascadeBoostParams& _params )
{
- bool res = false;
- CV_FUNCNAME( "CvCascadeBoost::read" );
- __BEGIN__;
-
- CvSeqReader reader;
- CvFileNode* tempNode;
CvMemStorage* storage;
- int i, numTrees;
-
clear();
-
- data = new CvCascadeBoostTrainData( _cascadeData );
- params = *_params;
- minHitRate = _params->minHitRate;
- maxFalseAlarm = _params->maxFalseAlarm;
- CV_CALL( tempNode = cvGetFileNodeByName( fs, node, CC_STAGE_THRESHOLD ) );
- if ( !tempNode )
- EXIT;
- threshold = (float)tempNode->data.f;
- CV_CALL( tempNode = cvGetFileNodeByName( fs, node, CC_WEAK_CLASSIFIERS ) );
- if ( !tempNode )
- EXIT;
- cvStartReadSeq( tempNode->data.seq, &reader );
- numTrees = tempNode->data.seq->total;
-
- CV_CALL( storage = cvCreateMemStorage() );
- weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
+ data = new CvCascadeBoostTrainData( _featureEvaluator, _params );
+ set_params( _params );
+
+ node[CC_STAGE_THRESHOLD] >> threshold;
+ FileNode rnode = node[CC_WEAK_CLASSIFIERS];
- for( i = 0; i < numTrees; i++ )
+ storage = cvCreateMemStorage();
+ weak = cvCreateSeq( 0, sizeof(CvSeq), sizeof(CvBoostTree*), storage );
+ for( FileNodeIterator it = rnode.begin(); it != rnode.end(); it++ )
{
CvCascadeBoostTree* tree = new CvCascadeBoostTree();
- CV_CALL(tree->read( fs, (CvFileNode*)reader.ptr, this, data ));
- CV_NEXT_SEQ_ELEM( reader.seq->elem_size, reader );
+ tree->read( *it, this, data );
cvSeqPush( weak, &tree );
}
-
- res = true;
-
- __END__;
-
- return res;
+ return true;
}
-void CvCascadeBoost::markFeaturesInMap( CvMat* featureMap )
+void CvCascadeBoost::markUsedFeaturesInMap( Mat& featureMap )
{
- CvCascadeBoostTree* weakTree;
for( int wi = 0; wi < weak->total; wi++ )
{
- weakTree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
+ CvCascadeBoostTree* weakTree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
weakTree->markFeaturesInMap( featureMap );
}
}
\ No newline at end of file
-#ifndef BOOST_H
-#define BOOST_H
-
+#pragma once
#include "features.h"
#include "ml.h"
float minHitRate;
float maxFalseAlarm;
- CvCascadeBoostParams() : minHitRate( 0.995F), maxFalseAlarm( 0.5F )
- { boost_type = CvBoost::GENTLE; }
- CvCascadeBoostParams( int _boostType,
- float _minHitRate, float _maxFalseAlarm,
- double _weightTrimRate, int _maxDepth, int _maxWeakCount, const float* priors = 0 );
- virtual ~CvCascadeBoostParams() {}
- void write( CvFileStorage* fs ) const;
- bool read( CvFileStorage* fs, CvFileNode* node );
-
- virtual void printDefault();
- virtual void printAttrs();
- virtual bool scanAttr( const char* prmName, const char* val);
+ CvCascadeBoostParams();
+ CvCascadeBoostParams( int _boostType, float _minHitRate, float _maxFalseAlarm,
+ double _weightTrimRate, int _maxDepth, int _maxWeakCount );
+ void write( FileStorage &fs ) const;
+ bool read( const FileNode &node );
+ virtual void printDefaults() const;
+ virtual void printAttrs() const;
+ virtual bool scanAttr( const String prmName, const String val);
};
-
struct CvCascadeBoostTrainData : CvDTreeTrainData
{
- CvCascadeBoostTrainData();
- CvCascadeBoostTrainData( CvCascadeData* _cascadeData );
- CvCascadeBoostTrainData( CvCascadeData* _cascadeData,
- int _numPrecalcVal, int _numPrecalcIdx, const CvDTreeParams& _params = CvDTreeParams() );
- virtual ~CvCascadeBoostTrainData();
-
- virtual void set_data( CvCascadeData* _cascadeData,
- int _numPrecalcVal, int _numPrecalcIdx, const CvDTreeParams& _params=CvDTreeParams(),
- bool _updateData=false );
+ CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
+ const CvDTreeParams& _params );
+ CvCascadeBoostTrainData( const CvFeatureEvaluator* _featureEvaluator,
+ int _numSamples, int _numPrecalcVal, int _numPrecalcIdx,
+ const CvDTreeParams& _params = CvDTreeParams() );
+ virtual void setData( const CvFeatureEvaluator* _featureEvaluator,
+ int _numSamples, int _numPrecalcVal, int _numPrecalcIdx,
+ const CvDTreeParams& _params=CvDTreeParams() );
+ void precalculate();
virtual void get_class_labels( CvDTreeNode* n, int* labelsBuf, const int** labels );
virtual void get_cv_labels( CvDTreeNode* n, int* labelsBuf, const int** labels );
virtual void get_sample_indices( CvDTreeNode* n, int* indicesBuf, const int** labels );
virtual int get_ord_var_data( CvDTreeNode* n, int vi, float* ordValuesBuf, int* indicesBuf,
- const float** ordValues, const int** indices );
+ const float** ordValues, const int** indices );
virtual int get_cat_var_data( CvDTreeNode* n, int vi, int* catValuesBuf, const int** catValues );
-
virtual float getVarValue( int vi, int si );
-
- virtual void clear();
virtual void free_train_data();
- void precalculate();
-
- CvCascadeData* cascadeData;
-
- CvMat* valCache; /* precalculated feature values (CV_32FC1) */
+ const CvFeatureEvaluator* featureEvaluator;
+ Mat valCache; // precalculated feature values (CV_32FC1)
+ CvMat _resp; // for casting
int numPrecalcVal, numPrecalcIdx;
};
{
public:
virtual CvDTreeNode* predict( int sampleIdx ) const;
- virtual void write( CvFileStorage* fs, const CvMat* featureMap );
- virtual void read( CvFileStorage* fs, CvFileNode* node, CvBoost* _ensemble,
- CvDTreeTrainData* _data );
- void markFeaturesInMap( CvMat* featureMap );
+ void write( FileStorage &fs, const Mat& featureMap );
+ void read( const FileNode &node, CvBoost* _ensemble, CvDTreeTrainData* _data );
+ void markFeaturesInMap( Mat& featureMap );
protected:
- void auxMarkFeaturesInMap( const CvDTreeNode* node, CvMat* featureMap );
virtual void split_node_data( CvDTreeNode* n );
};
class CvCascadeBoost : public CvBoost
{
public:
- CvCascadeBoost();
- CvCascadeBoost( CvCascadeData* _cascadeData,
- int _numPrecalcVal, int _numPrecalcIdx,
- CvCascadeBoostParams _params=CvCascadeBoostParams() );
- virtual bool train( CvCascadeData* _cascadeData,
- int _numPrecalcVal, int _numPrecalcIdx,
- CvCascadeBoostParams _params=CvCascadeBoostParams(),
- bool _update=false );
-
+ virtual bool train( const CvFeatureEvaluator* _featureEvaluator,
+ int _numSamples, int _numPrecalcVal, int _numPrecalcIdx,
+ const CvCascadeBoostParams& _params=CvCascadeBoostParams() );
virtual float predict( int sampleIdx, bool returnSum = false ) const;
- const CvCascadeBoostTrainData* get_data() const;
- float getThreshold() { return threshold; };
-
- virtual void write( CvFileStorage* fs, const CvMat* featureMap );
- virtual bool read( CvFileStorage* fs, CvFileNode* node, CvCascadeData* _cascadeData,
- CvCascadeBoostParams* _params );
-
- void markFeaturesInMap( CvMat* featureMap);
+ float getThreshold() const { return threshold; };
+ void write( FileStorage &fs, const Mat& featureMap ) const;
+ bool read( const FileNode &node, const CvFeatureEvaluator* _featureEvaluator,
+ const CvCascadeBoostParams& _params );
+ void markUsedFeaturesInMap( Mat& featureMap );
protected:
virtual bool set_params( const CvBoostParams& _params );
virtual void update_weights( CvBoostTree* tree );
float threshold;
float minHitRate, maxFalseAlarm;
-};
-
-
-#endif
\ No newline at end of file
+};
\ No newline at end of file
#include "cascadeclassifier.h"
-#include "_inner_functions.h"
-
#include <queue>
+
using namespace std;
-static const char* stagetypes[] = { CC_BOOST };
-static const char* featuretypes[] = { CC_HAAR, CC_LBP };
+static const char* stageTypes[] = { CC_BOOST };
+static const char* featureTypes[] = { CC_HAAR, CC_LBP };
+
+CvCascadeParams::CvCascadeParams() : stageType( defaultStageType ),
+ featureType( defaultFeatureType ), winSize( cvSize(24, 24) )
+{
+ name = CC_CASCADE_PARAMS;
+}
+CvCascadeParams::CvCascadeParams( int _stageType, int _featureType ) : stageType( _stageType ),
+ featureType( _featureType ), winSize( cvSize(24, 24) )
+{
+ name = CC_CASCADE_PARAMS;
+}
//---------------------------- CascadeParams --------------------------------------
-void CvCascadeParams::write( CvFileStorage* fs ) const
+void CvCascadeParams::write( FileStorage &fs ) const
{
- const char* stageTypeStr = 0;
- const char* featureTypeStr = 0;
-
- stageTypeStr = stageType == BOOST ? CC_BOOST : 0;
- if( stageTypeStr )
- cvWriteString( fs, CC_STAGE_TYPE, stageTypeStr );
- else
- cvWriteInt( fs, CC_STAGE_TYPE, stageType );
-
- featureTypeStr = featureType == HAAR ? CC_HAAR :
- featureType == LBP ? CC_LBP : 0;
- if( stageTypeStr )
- cvWriteString( fs, CC_FEATURE_TYPE, featureTypeStr );
- else
- cvWriteInt( fs, CC_FEATURE_TYPE, featureType );
- cvWriteInt( fs, CC_HEIGHT, winSize.height );
- cvWriteInt( fs, CC_WIDTH, winSize.width );
-
+ String stageTypeStr = stageType == BOOST ? CC_BOOST : String();
+ CV_Assert( !stageTypeStr.empty() );
+ fs << CC_STAGE_TYPE << stageTypeStr;
+ String featureTypeStr = featureType == CvFeatureParams::HAAR ? CC_HAAR :
+        featureType == CvFeatureParams::LBP ? CC_LBP : String();
+    CV_Assert( !featureTypeStr.empty() );
+ fs << CC_FEATURE_TYPE << featureTypeStr;
+ fs << CC_HEIGHT << winSize.height;
+ fs << CC_WIDTH << winSize.width;
}
-bool CvCascadeParams::read( CvFileStorage* fs, CvFileNode* node )
+bool CvCascadeParams::read( const FileNode &node )
{
- bool res = false;
-
- CV_FUNCNAME( "CvCascadeClassifier::read_cascadeParams" );
- __BEGIN__;
-
- const char* stageTypeStr;
- const char* featureTypeStr;
-
- CV_CALL( stageTypeStr = cvReadStringByName( fs, node, CC_STAGE_TYPE ) );
- if ( !stageTypeStr )
- EXIT;
- CV_CALL( stageType = strcmp( stageTypeStr, CC_BOOST ) == 0 ?
- BOOST : cvReadIntByName( fs, node, CC_STAGE_TYPE ) );
-
- CV_CALL( featureTypeStr = cvReadStringByName( fs, node, CC_FEATURE_TYPE ) );
- if ( !featureTypeStr )
- EXIT;
- CV_CALL( featureType = strcmp( featureTypeStr, CC_HAAR ) == 0 ? HAAR :
- strcmp( featureTypeStr, CC_LBP ) == 0 ? LBP :
- cvReadIntByName( fs, node, CC_FEATURE_TYPE ) );
- CV_CALL( winSize.height = cvReadIntByName( fs, node, CC_HEIGHT ) );
- CV_CALL( winSize.width = cvReadIntByName( fs, node, CC_WIDTH ) );
- if ( winSize.height <= 0 || winSize.width <= 0 )
- EXIT;
-
- res = true;
-
- __END__;
-
- return res;
+ if ( node.empty() )
+ return false;
+ String stageTypeStr, featureTypeStr;
+ FileNode rnode = node[CC_STAGE_TYPE];
+ if ( !rnode.isString() )
+ return false;
+ rnode >> stageTypeStr;
+ stageType = !stageTypeStr.compare( CC_BOOST ) ? BOOST : -1;
+ if (stageType == -1)
+ return false;
+ rnode = node[CC_FEATURE_TYPE];
+ if ( !rnode.isString() )
+ return false;
+ rnode >> featureTypeStr;
+ featureType = !featureTypeStr.compare( CC_HAAR ) ? CvFeatureParams::HAAR :
+ !featureTypeStr.compare( CC_LBP ) ? CvFeatureParams::LBP : -1;
+ if (featureType == -1)
+ return false;
+ node[CC_HEIGHT] >> winSize.height;
+ node[CC_WIDTH] >> winSize.width;
+ return winSize.height > 0 && winSize.width > 0;
}
-void CvCascadeParams::printDefaults()
+void CvCascadeParams::printDefaults() const
{
CvParams::printDefaults();
- printf(" [-stageType <" );
- int i;
- for( i = 0; i < (int)(sizeof(stagetypes)/sizeof(stagetypes[0])); i++ )
+ cout << " [-stageType <";
+ for( int i = 0; i < (int)(sizeof(stageTypes)/sizeof(stageTypes[0])); i++ )
{
- printf("%s%s", i ? " | " : "", stagetypes[i] );
+ cout << (i ? " | " : "") << stageTypes[i];
if ( i == defaultStageType )
- printf("(default)");
+ cout << "(default)";
}
- printf(">]\n" );
+ cout << ">]" << endl;
- printf(" [-featureType <{" );
- for( i = 0; i < (int)(sizeof(featuretypes)/sizeof(featuretypes[0])); i++ )
+ cout << " [-featureType <{";
+ for( int i = 0; i < (int)(sizeof(featureTypes)/sizeof(featureTypes[0])); i++ )
{
- printf("%s%s", i ? ", " : "", featuretypes[i] );
+ cout << (i ? ", " : "") << featureTypes[i];
if ( i == defaultStageType )
- printf("(default)");
+ cout << "(default)";
}
- printf("}>]\n" );
- printf( " [-w <sampleWidth = %d>]\n"
- " [-h <sampleHeight = %d>]\n",
- winSize.width, winSize.height );
+ cout << "}>]" << endl;
+ cout << " [-w <sampleWidth = " << winSize.width << ">]" << endl;
+ cout << " [-h <sampleHeight = " << winSize.height << ">]" << endl;
}
-void CvCascadeParams::printAttrs()
+void CvCascadeParams::printAttrs() const
{
- printf( "stageType: %s\n", stagetypes[stageType] );
- printf( "featureType: %s\n", featuretypes[featureType] );
- printf( "sampleWidth: %d\n", winSize.width );
- printf( "sampleHeight: %d\n", winSize.height );
+ cout << "stageType: " << stageTypes[stageType] << endl;
+ cout << "featureType: " << featureTypes[featureType] << endl;
+ cout << "sampleWidth: " << winSize.width << endl;
+ cout << "sampleHeight: " << winSize.height << endl;
}
-bool CvCascadeParams::scanAttr( const char* prmName, const char* val )
+bool CvCascadeParams::scanAttr( const String prmName, const String val )
{
bool res = true;
-
- if( !strcmp( prmName, "-stageType" ) )
+ if( !prmName.compare( "-stageType" ) )
{
- for( int i = 0; i < (int)(sizeof(stagetypes)/sizeof(stagetypes[0])); i++ )
- {
- if( !strcmp( val, stagetypes[i] ) )
+ for( int i = 0; i < (int)(sizeof(stageTypes)/sizeof(stageTypes[0])); i++ )
+ if( !val.compare( stageTypes[i] ) )
stageType = i;
- }
}
- else if( !strcmp( prmName, "-featureType" ) )
+ else if( !prmName.compare( "-featureType" ) )
{
- for( int i = 0; i < (int)(sizeof(featuretypes)/sizeof(featuretypes[0])); i++ )
- {
- if( !strcmp( val, featuretypes[i] ) )
+ for( int i = 0; i < (int)(sizeof(featureTypes)/sizeof(featureTypes[0])); i++ )
+ if( !val.compare( featureTypes[i] ) )
featureType = i;
- }
}
- else if( !strcmp( prmName, "-w" ) )
+ else if( !prmName.compare( "-w" ) )
{
- winSize.width = atoi( val );
+ winSize.width = atoi( val.c_str() );
}
- else if( !strcmp( prmName, "-h" ) )
+ else if( !prmName.compare( "-h" ) )
{
- winSize.height = atoi( val );
+ winSize.height = atoi( val.c_str() );
}
else
res = false;
-
return res;
}
//---------------------------- CascadeClassifier --------------------------------------
-CvCascadeClassifier::CvCascadeClassifier()
-{
- cascadeData = 0;
- featureParams = 0;
- stageParams = 0;
- stageClassifiers = 0;
-}
-
-CvCascadeClassifier::~CvCascadeClassifier()
-{
- if ( cascadeData )
- {
- delete cascadeData;
- cascadeData = 0;
- }
- if ( featureParams )
- {
- delete featureParams;
- featureParams = 0;
- }
- if ( stageParams )
- {
- delete stageParams;
- stageParams = 0;
- }
- if( stageClassifiers )
- {
- for( int i = 0; i < numCurStages; i++ )
- delete stageClassifiers[i];
- }
- cvFree( &stageClassifiers );
-}
-
-void CvCascadeClassifier::createStageParams()
-{
- CV_FUNCNAME( "CvCascadeClassifier::createStageParams" );
- __BEGIN__;
- switch ( cascadeParams.stageType )
- {
- case CvCascadeParams::BOOST :
- stageParams = new CvCascadeBoostParams();
- break;
- default: CV_ERROR(CV_StsBadFunc, "unsupported stage type");
- }
- __END__;
-}
-
-void CvCascadeClassifier::createCurStage()
-{
- CV_FUNCNAME( "CvCascadeClassifier::createCurStage" );
- __BEGIN__;
- switch ( cascadeParams.stageType )
- {
- case CvCascadeParams::BOOST :
- stageClassifiers[numCurStages] = new CvCascadeBoost();
- break;
- default: CV_ERROR(CV_StsBadFunc, "unsupported stage type");
- }
- __END__;
-}
-
-void CvCascadeClassifier::createFeatureParams()
-{
- CV_FUNCNAME( "CvCascadeClassifier::createCurStage" );
- __BEGIN__;
- switch ( cascadeParams.featureType )
- {
- case CvCascadeParams::HAAR :
- featureParams = new CvHaarFeatureParams();
- break;
- case CvCascadeParams::LBP :
- featureParams = new CvLBPFeatureParams();
- break;
- default: CV_ERROR(CV_StsBadFunc, "unsupported feature type");
- }
- __END__;
-}
-
-void CvCascadeClassifier::createCascadeData()
-{
- CV_FUNCNAME( "CvCascadeClassifier::createCascadeData" );
- __BEGIN__;
- switch ( cascadeParams.featureType )
- {
- case CvCascadeParams::HAAR :
- cascadeData = new CvHaarCascadeData();
- break;
- case CvCascadeParams::LBP :
- cascadeData = new CvLBPCascadeData();
- break;
- default: CV_ERROR(CV_StsBadFunc, "unsupported feature type");
-
- }
- __END__;
-}
-
-bool CvCascadeClassifier::train( const char* _cascadeDirName,
- const char* _vecFileName,
- const char* _bgfileName,
- int _numPos, int _numNeg,
- int _numPrecalcVal, int _numPrecalcIdx,
- int _numStages,
- const CvCascadeParams& _cascadeParams,
- const CvFeatureParams& _featureParams,
- const CvCascadeBoostParams& _stageParams,
- bool baseFormatSave )
+bool CvCascadeClassifier::train( const String _cascadeDirName,
+ const String _posFilename,
+ const String _negFilename,
+ int _numPos, int _numNeg,
+ int _numPrecalcVal, int _numPrecalcIdx,
+ int _numStages,
+ const CvCascadeParams& _cascadeParams,
+ const CvFeatureParams& _featureParams,
+ const CvCascadeBoostParams& _stageParams,
+ bool baseFormatSave )
{
- bool res = false;
+ if( _cascadeDirName.empty() || _posFilename.empty() || _negFilename.empty() )
+ CV_Error( CV_StsBadArg, "_cascadeDirName or _posFilename or _negFilename is empty" );
- CV_FUNCNAME( "CvCascadeClassifier::train" );
- __BEGIN__;
-
- char* nullname = (char*)"(NULL)";
- CvFileStorage* fs;
- char buf[200];
- //int consumed = 0;
- double tempLeafFARate, requiredLeafFARate;
-
- assert( _cascadeDirName );
+ String dirName;
+ if ( _cascadeDirName.find('/') != String::npos )
+ dirName = _cascadeDirName + '/';
+ else
+ dirName = _cascadeDirName + '\\';
- // TODO: check input params
- // TODO: other stages
- cascadeParams = _cascadeParams;
+ numPos = _numPos;
+ numNeg = _numNeg;
numStages = _numStages;
- numCurStages = 0;
-
- if( !(_bgfileName && _vecFileName) )
- CV_ERROR( CV_StsBadArg, "_bgfileName or _vecFileName is NULL" );
- if ( !loadTempInfo( _cascadeDirName, _vecFileName, _bgfileName, _numPos, _numNeg ) )
+ imgReader.create( _posFilename, _negFilename, cascadeParams.winSize );
+ if ( !load( dirName ) )
{
- createStageParams();
+ cascadeParams = _cascadeParams;
+ featureParams = CvFeatureParams::create(cascadeParams.featureType);
+ featureParams->init(_featureParams);
+ stageParams = new CvCascadeBoostParams;
*stageParams = _stageParams;
-
- createFeatureParams();
- featureParams->set( &_featureParams );
-
- createCascadeData();
- cascadeData->setData( this, _vecFileName, _bgfileName, _numPos, _numNeg, featureParams );
-
- stageClassifiers = (CvCascadeBoost**) cvAlloc( numStages * sizeof(stageClassifiers[0]) );
- memset( stageClassifiers, 0, sizeof(stageClassifiers[0])*numStages);
-
- // save params
- sprintf( buf, "%s/%s", _cascadeDirName, CC_PARAMS_FILENAME );
- fs = cvOpenFileStorage( buf, 0, CV_STORAGE_WRITE );
- if ( !fs )
- EXIT;
- writeParams( fs );
- cvReleaseFileStorage( &fs );
+ featureEvaluator = CvFeatureEvaluator::create(cascadeParams.featureType);
+ featureEvaluator->init( (CvFeatureParams*)featureParams, numPos + numNeg, cascadeParams.winSize );
+ stageClassifiers.reserve( numStages );
}
- // print used parameters
- printf( "cascadeDirName: %s\n", ((_cascadeDirName == NULL) ? nullname : _cascadeDirName ) );
- printf( "vecFileName: %s\n", ((_vecFileName == NULL) ? nullname : _vecFileName ) );
- printf( "bgFileName: %s\n", ((_bgfileName == NULL) ? nullname : _bgfileName ) );
- printf( "numPos: %d\n", _numPos );
- printf( "numNeg: %d\n", _numNeg );
- printf( "numStages: %d\n", numStages );
- printf( "numPrecalcValues : %d\n", _numPrecalcVal );
- printf( "numPrecalcIndices : %d\n", _numPrecalcIdx );
+ cout << "PARAMETERS:" << endl;
+ cout << "cascadeDirName: " << _cascadeDirName << endl;
+ cout << "vecFileName: " << _posFilename << endl;
+ cout << "bgFileName: " << _negFilename << endl;
+ cout << "numPos: " << _numPos << endl;
+ cout << "numNeg: " << _numNeg << endl;
+ cout << "numStages: " << numStages << endl;
+ cout << "numPrecalcValues : " << _numPrecalcVal << endl;
+ cout << "numPrecalcIndices : " << _numPrecalcIdx << endl;
cascadeParams.printAttrs();
stageParams->printAttrs();
featureParams->printAttrs();
- if ( numCurStages > 1 )
- printf("\nStages %d-%d are loaded\n", 0, numCurStages-1);
- else if ( numCurStages == 1)
- printf("\nStage 0 is loaded\n");
+
+ int startNumStages = (int)stageClassifiers.size();
+ if ( startNumStages > 1 )
+ cout << endl << "Stages 0-" << startNumStages-1 << " are loaded" << endl;
+ else if ( startNumStages == 1)
+ cout << endl << "Stage 0 is loaded" << endl;
- requiredLeafFARate = pow( (double) stageParams->maxFalseAlarm, (double) numStages ) / (double)stageParams->max_depth;
+ double requiredLeafFARate = pow( (double) stageParams->maxFalseAlarm, (double) numStages ) /
+ (double)stageParams->max_depth;
+ double tempLeafFARate;
- for( ; numCurStages < numStages; numCurStages++ )
+ for( int i = startNumStages; i < numStages; i++ )
{
- printf( "\nStages\n" );
- for( int i = 0; i <= numCurStages; i++ ) printf( "+---" );
- printf( "+\n" );
- for( int i = 0; i <= numCurStages; i++ ) printf( "|%3d", i );
- printf( "|\n" );
- for( int i = 0; i <= numCurStages; i++ ) printf( "+---" );
- printf( "+\n\n" );
-
- if ( !cascadeData->updateForNextStage( tempLeafFARate ) )
+ cout << endl << "===== TRAINING " << i << "-stage =====" << endl;
+ cout << "<BEGIN" << endl;
+
+ if ( !updateTrainingSet( tempLeafFARate ) )
{
- printf("Train dataset for temp stage can not be filled\n");
- printf("\n===Cascade training ended===\n");
+ cout << "Train dataset for temp stage can not be filled."
+ "Branch training terminated." << endl;
break;
}
-
if( tempLeafFARate <= requiredLeafFARate )
{
- printf( "Required leaf false alarm rate achieved. "
- "Branch training terminated.\n" );
+ cout << "Required leaf false alarm rate achieved. "
+ "Branch training terminated." << endl;
break;
}
- createCurStage();
- stageClassifiers[numCurStages]->train( cascadeData, _numPrecalcVal, _numPrecalcIdx, *stageParams );
+ CvCascadeBoost* tempStage = new CvCascadeBoost;
+ tempStage->train( (CvFeatureEvaluator*)featureEvaluator,
+ curNumSamples, _numPrecalcVal, _numPrecalcIdx,
+ *((CvCascadeBoostParams*)stageParams) );
+ stageClassifiers.push_back( tempStage );
- sprintf( buf, "%s/%d%s", _cascadeDirName, numCurStages, ".xml" );
- fs = cvOpenFileStorage( buf, 0, CV_STORAGE_WRITE );
- if ( !fs )
- EXIT;
- stageClassifiers[numCurStages]->write( fs, 0);
- cvReleaseFileStorage( &fs );
+ cout << "END>" << endl;
+
+ // save params
+ String filename;
+ if ( i == 0)
+ {
+ filename = dirName + CC_PARAMS_FILENAME;
+ FileStorage fs( filename, FileStorage::WRITE);
+ if ( !fs.isOpened() )
+ return false;
+ fs << FileStorage::getDefaultObjectName(filename) << "{";
+ writeParams( fs );
+ fs << "}";
+ }
+ // save temp stage
+ char buf[10];
+ sprintf(buf, "%s%d", "stage", i );
+ filename = dirName + buf + ".xml";
+ FileStorage fs( filename, FileStorage::WRITE );
+ if ( !fs.isOpened() )
+ return false;
+ fs << FileStorage::getDefaultObjectName(filename) << "{";
+ tempStage->write( fs, Mat() );
+ fs << "}";
}
-
- save( _cascadeDirName, baseFormatSave );
-
- cascadeData->clear();
- res = true;
-
- __END__;
- return res;
+ save( dirName + CC_CASCADE_FILENAME, baseFormatSave );
+ return true;
}
int CvCascadeClassifier::predict( int sampleIdx )
{
- //CvCascadeBoost* cur_classifier = stageClassifiers[0];
- for (int i = 0; i < numCurStages; i++ )
+ CV_DbgAssert( sampleIdx < numPos + numNeg );
+ for (Vector<Ptr<CvCascadeBoost> >::iterator it = stageClassifiers.begin();
+ it != stageClassifiers.end(); it++ )
{
- if ( stageClassifiers[i]->predict( sampleIdx ) == 0.f )
- {
+ if ( (*it)->predict( sampleIdx ) == 0.f )
return 0;
- }
}
return 1;
}
-void CvCascadeClassifier::writeParams( CvFileStorage* fs ) const
+bool CvCascadeClassifier::updateTrainingSet( double& acceptanceRatio)
{
- cascadeParams.write( fs );
-
- cvStartWriteStruct( fs, CC_STAGE_PARAMS, CV_NODE_MAP );
- stageParams->write( fs );
- cvEndWriteStruct( fs );
+ int64 posConsumed = 0, negConsumed = 0;
+ imgReader.restart();
+ int posCount = fillPassedSamles( 0, numPos, true, posConsumed );
+ if( !posCount )
+ return false;
+ cout << "POS count : consumed " << posCount << " : " << (int)posConsumed << endl;
+
+ int negCount = fillPassedSamles( numPos, numNeg, false, negConsumed );
+ if ( !negCount )
+ return false;
+ curNumSamples = posCount + negCount;
+ acceptanceRatio = negConsumed == 0 ? 0 : ( (double)negCount/(double)(int64)negConsumed );
+ cout << "NEG count : acceptanceRatio " << negCount << " : " << acceptanceRatio << endl;
+ return true;
+}
- cvStartWriteStruct( fs, CC_FEATURE_PARAMS, CV_NODE_MAP );
- featureParams->write( fs );
- cvEndWriteStruct( fs );
+int CvCascadeClassifier::fillPassedSamles( int first, int count, bool isPositive, int64& consumed )
+{
+ int getcount = 0;
+ Mat img(cascadeParams.winSize, CV_8UC1);
+ for( int i = first; i < first + count; i++ )
+ {
+ for( ; ; )
+ {
+ bool isGetImg = isPositive ? imgReader.getPos( img ) :
+ imgReader.getNeg( img );
+ if( !isGetImg )
+ return getcount;
+ consumed++;
+
+ featureEvaluator->setImage( img, isPositive ? 1 : 0, i );
+ if( predict( i ) == 1.0F )
+ {
+ getcount++;
+ break;
+ }
+ }
+ }
+ return getcount;
}
-void CvCascadeClassifier::writeFeatures( CvFileStorage* fs, const CvMat* featureMap ) const
+void CvCascadeClassifier::writeParams( FileStorage &fs ) const
{
- cascadeData->writeFeatures( fs, featureMap );
+ cascadeParams.write( fs );
+ fs << CC_STAGE_PARAMS << "{"; stageParams->write( fs ); fs << "}";
+ fs << CC_FEATURE_PARAMS << "{"; featureParams->write( fs ); fs << "}";
}
-void CvCascadeClassifier::writeStages( CvFileStorage* fs, const CvMat* featureMap ) const
+void CvCascadeClassifier::writeFeatures( FileStorage &fs, const Mat& featureMap ) const
{
- CV_FUNCNAME( "CvCascadeClassifier::writeStages" );
- __BEGIN__;
+ ((CvFeatureEvaluator*)((Ptr<CvFeatureEvaluator>)featureEvaluator))->writeFeatures( fs, featureMap );
+}
- const char stage_str[] = "stage";
- char cmnt[30];
- CV_CALL( cvStartWriteStruct( fs, CC_STAGES, CV_NODE_SEQ ) );
- for( int i = 0; i < numCurStages; i++ )
+void CvCascadeClassifier::writeStages( FileStorage &fs, const Mat& featureMap ) const
+{
+ //char cmnt[30];
+ //int i = 0;
+ fs << CC_STAGES << "[";
+ for( Vector<Ptr<CvCascadeBoost> >::const_iterator it = stageClassifiers.begin();
+ it != stageClassifiers.end(); it++/*, i++*/ )
{
- sprintf( cmnt, "%s %i", stage_str, i );
- CV_CALL( cvWriteComment( fs, cmnt, 0 ) );
- cvStartWriteStruct( fs, 0, CV_NODE_MAP );
- stageClassifiers[i]->write( fs, featureMap );
- cvEndWriteStruct( fs );
+ /*sprintf( cmnt, "stage %d", i );
+ CV_CALL( cvWriteComment( fs, cmnt, 0 ) );*/
+ fs << "{";
+ ((CvCascadeBoost*)((Ptr<CvCascadeBoost>)*it))->write( fs, featureMap );
+ fs << "}";
}
- CV_CALL( cvEndWriteStruct( fs ) );
-
- __END__;
+ fs << "]";
}
-bool CvCascadeClassifier::readParams( CvFileStorage* fs, CvFileNode* _node )
+bool CvCascadeClassifier::readParams( const FileNode &node )
{
- bool res = false;
-
- //CV_FUNCNAME( "CvCascadeClassifier::readParams" );
- __BEGIN__;
-
- CvFileNode *node;
-
- if ( !cascadeParams.read( fs, _node ) )
- EXIT;
+ if ( !node.isMap() || !cascadeParams.read( node ) )
+ return false;
- createStageParams();
- node = cvGetFileNodeByName( fs, _node, CC_STAGE_PARAMS);
- if ( !node )
- EXIT;
- if ( !stageParams->read( fs, node ) )
- EXIT;
+ stageParams = new CvCascadeBoostParams;
+ FileNode rnode = node[CC_STAGE_PARAMS];
+ if ( !stageParams->read( rnode ) )
+ return false;
- createFeatureParams();
- node = cvGetFileNodeByName( fs, _node, CC_FEATURE_PARAMS);
- if ( !node )
- EXIT;
- if ( !featureParams->read( fs, node ) )
- EXIT;
-
- res = true;
-
- __END__;
-
- return res;
+ featureParams = CvFeatureParams::create(cascadeParams.featureType);
+ rnode = node[CC_FEATURE_PARAMS];
+ if ( !featureParams->read( rnode ) )
+ return false;
+ return true;
}
-bool CvCascadeClassifier::readStages( CvFileStorage* fs, CvFileNode* _node )
+bool CvCascadeClassifier::readStages( const FileNode &node)
{
- bool res = false;
-
- CV_FUNCNAME( "CvCascadeClassifier::readStages" );
- __BEGIN__;
-
- CvFileNode* node = cvGetFileNodeByName( fs, _node, CC_STAGES );
- CvSeq* stageClassifiers_seq;
- if ( !node )
- EXIT;
- CV_CALL( stageClassifiers = (CvCascadeBoost**) cvAlloc( numStages * sizeof(stageClassifiers[0]) ) );
- memset( stageClassifiers, 0, sizeof(stageClassifiers[0])*numStages);
-
- stageClassifiers_seq = node->data.seq;
- for( numCurStages = 0; numCurStages < MIN( stageClassifiers_seq->total, numStages ); numCurStages++ )
+ FileNode rnode = node[CC_STAGES];
+ if ( rnode.empty() || !rnode.isSeq() )
+ return false;
+ stageClassifiers.reserve(numStages);
+ FileNodeIterator it = rnode.begin();
+ for( int i = 0; i < min( (int)rnode.size(), numStages ); i++, it++ )
{
- createCurStage();
- node = ((CvFileNode*)cvGetSeqElem( stageClassifiers_seq, numCurStages ));
- if ( !stageClassifiers[numCurStages]->read( fs, node, cascadeData, stageParams ) )
- EXIT;
+ CvCascadeBoost* tempStage = new CvCascadeBoost;
+ if ( !tempStage->read( *it, (CvFeatureEvaluator *)featureEvaluator, *((CvCascadeBoostParams*)stageParams) ) )
+ {
+ delete tempStage;
+ return false;
+ }
+ stageClassifiers.push_back(tempStage);
}
- res = true;
-
- __END__;
-
- return res;
+ return true;
}
-bool CvCascadeClassifier::save( const char* cascadeDirName, bool baseFormat )
+// For old Haar Classifier file saving
+#define ICV_HAAR_SIZE_NAME "size"
+#define ICV_HAAR_STAGES_NAME "stages"
+#define ICV_HAAR_TREES_NAME "trees"
+#define ICV_HAAR_FEATURE_NAME "feature"
+#define ICV_HAAR_RECTS_NAME "rects"
+#define ICV_HAAR_TILTED_NAME "tilted"
+#define ICV_HAAR_THRESHOLD_NAME "threshold"
+#define ICV_HAAR_LEFT_NODE_NAME "left_node"
+#define ICV_HAAR_LEFT_VAL_NAME "left_val"
+#define ICV_HAAR_RIGHT_NODE_NAME "right_node"
+#define ICV_HAAR_RIGHT_VAL_NAME "right_val"
+#define ICV_HAAR_STAGE_THRESHOLD_NAME "stage_threshold"
+#define ICV_HAAR_PARENT_NAME "parent"
+#define ICV_HAAR_NEXT_NAME "next"
+
+void CvCascadeClassifier::save( const String filename, bool baseFormat )
{
- bool res = false;
-
- CV_FUNCNAME( "CvCascadeClassifier::save" );
- __BEGIN__;
+ FileStorage fs( filename, FileStorage::WRITE );
- cv::String cascadeName = cv::FileStorage::getDefaultObjectName(CC_CASCADE_FILENAME);
- cv::String fileName = cv::String(cascadeDirName) + '/' + cv::String(CC_CASCADE_FILENAME);
- CvFileStorage* fs = cvOpenFileStorage( fileName.c_str(), 0, CV_STORAGE_WRITE );
+ if ( !fs.isOpened() )
+ return;
- if ( !fs )
- EXIT;
-
+ fs << FileStorage::getDefaultObjectName(filename) << "{";
if ( !baseFormat )
{
- CvMat* featureMap = cvCreateMat( 1, cascadeData->getNumFeatures(), CV_32SC1 );
- cvSet( featureMap, cvScalar(-1) );
- markFeaturesInMap( featureMap );
-
- cvStartWriteStruct(fs, cascadeName.c_str(), CV_NODE_MAP, "opencv-cascade-classifier");
+ Mat featureMap;
+ getUsedFeaturesIdxMap( featureMap );
writeParams( fs );
- cvWriteInt( fs, CC_STAGE_NUM, numCurStages );
- writeStages( fs, featureMap);
- writeFeatures(fs, featureMap);
- cvReleaseMat( &featureMap );
- cvEndWriteStruct(fs);
+ fs << CC_STAGE_NUM << (int)stageClassifiers.size();
+ writeStages( fs, featureMap );
+ writeFeatures( fs, featureMap );
}
else
{
- char buf[256];
+ //char buf[256];
CvSeq* weak;
- if ( cascadeParams.featureType != CvCascadeParams::HAAR )
- CV_ERROR( CV_StsBadFunc, "old file format is used for Haar-like features only");
- cvStartWriteStruct(fs, cascadeName.c_str(), CV_NODE_MAP, CV_TYPE_NAME_HAAR );
- CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_SIZE_NAME, CV_NODE_SEQ | CV_NODE_FLOW ) );
- CV_CALL( cvWriteInt( fs, NULL, cascadeParams.winSize.width ) );
- CV_CALL( cvWriteInt( fs, NULL, cascadeParams.winSize.height ) );
- CV_CALL( cvEndWriteStruct( fs ) ); /* ICV_HAAR_SIZE_NAME */
-
- CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_STAGES_NAME, CV_NODE_SEQ ) );
- for( int si = 0; si < numCurStages; si++ )
+ if ( cascadeParams.featureType != CvFeatureParams::HAAR )
+ CV_Error( CV_StsBadFunc, "old file format is used for Haar-like features only");
+ fs << ICV_HAAR_SIZE_NAME << "[:" << cascadeParams.winSize.width <<
+ cascadeParams.winSize.height << "]";
+ fs << ICV_HAAR_STAGES_NAME << "[";
+ for( size_t si = 0; si < stageClassifiers.size(); si++ )
{
- CV_CALL( cvStartWriteStruct( fs, NULL, CV_NODE_MAP ) );
- sprintf( buf, "stage %d", si );
- CV_CALL( cvWriteComment( fs, buf, 1 ) );
+ fs << "{"; //stage
+ /*sprintf( buf, "stage %d", si );
+ CV_CALL( cvWriteComment( fs, buf, 1 ) );*/
weak = stageClassifiers[si]->get_weak_predictors();
-
- CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_TREES_NAME, CV_NODE_SEQ ) );
+ fs << ICV_HAAR_TREES_NAME << "[";
for( int wi = 0; wi < weak->total; wi++ )
{
int inner_node_idx = -1, total_inner_node_idx = -1;
queue<const CvDTreeNode*> inner_nodes_queue;
CvCascadeBoostTree* tree = *((CvCascadeBoostTree**) cvGetSeqElem( weak, wi ));
- CV_CALL( cvStartWriteStruct( fs, NULL, CV_NODE_SEQ ) );
- sprintf( buf, "tree %d", wi );
- CV_CALL( cvWriteComment( fs, buf, 1 ) );
+ fs << "[";
+ /*sprintf( buf, "tree %d", wi );
+ CV_CALL( cvWriteComment( fs, buf, 1 ) );*/
const CvDTreeNode* tempNode;
tempNode = inner_nodes_queue.front();
inner_node_idx++;
- CV_CALL( cvStartWriteStruct( fs, NULL, CV_NODE_MAP ) );
- if( inner_node_idx != 0 )
- {
- sprintf( buf, "node %d", inner_node_idx );
- }
- else
- {
- sprintf( buf, "root node" );
- }
- CV_CALL( cvWriteComment( fs, buf, 1 ) );
-
- CV_CALL( cvStartWriteStruct( fs, ICV_HAAR_FEATURE_NAME, CV_NODE_MAP ) );
- cascadeData->writeFeature( fs, tempNode->split->var_idx );
- CV_CALL( cvEndWriteStruct( fs ) ); /* feature */
+ fs << "{";
+ fs << ICV_HAAR_FEATURE_NAME << "{";
+ ((CvHaarEvaluator*)((CvFeatureEvaluator*)featureEvaluator))->writeFeature( fs, tempNode->split->var_idx );
+ fs << "}";
- CV_CALL( cvWriteReal( fs, ICV_HAAR_THRESHOLD_NAME, tempNode->split->ord.c) );
+ fs << ICV_HAAR_THRESHOLD_NAME << tempNode->split->ord.c;
if( tempNode->left->left || tempNode->left->right )
{
inner_nodes_queue.push( tempNode->left );
total_inner_node_idx++;
- CV_CALL( cvWriteInt( fs, ICV_HAAR_LEFT_NODE_NAME, total_inner_node_idx ) );
+ fs << ICV_HAAR_LEFT_NODE_NAME << total_inner_node_idx;
}
else
- {
- CV_CALL( cvWriteReal( fs, ICV_HAAR_LEFT_VAL_NAME, tempNode->left->value ) );
- }
+ fs << ICV_HAAR_LEFT_VAL_NAME << tempNode->left->value;
if( tempNode->right->left || tempNode->right->right )
{
inner_nodes_queue.push( tempNode->right );
total_inner_node_idx++;
- CV_CALL( cvWriteInt( fs, ICV_HAAR_RIGHT_NODE_NAME, total_inner_node_idx ) );
+ fs << ICV_HAAR_RIGHT_NODE_NAME << total_inner_node_idx;
}
else
- {
- CV_CALL( cvWriteReal( fs, ICV_HAAR_RIGHT_VAL_NAME, tempNode->right->value ) );
- }
-
- CV_CALL( cvEndWriteStruct( fs ) );
+ fs << ICV_HAAR_RIGHT_VAL_NAME << tempNode->right->value;
+ fs << "}"; // ICV_HAAR_FEATURE_NAME
inner_nodes_queue.pop();
}
- CV_CALL( cvEndWriteStruct( fs ) );
+ fs << "]";
}
-
- CV_CALL( cvEndWriteStruct( fs ) ); /* trees */
-
- CV_CALL( cvWriteReal( fs, ICV_HAAR_STAGE_THRESHOLD_NAME,
- stageClassifiers[si]->getThreshold() ) );
-
- CV_CALL( cvWriteInt( fs, ICV_HAAR_PARENT_NAME, si-1 ) );
- CV_CALL( cvWriteInt( fs, ICV_HAAR_NEXT_NAME, -1) );
-
- CV_CALL( cvEndWriteStruct( fs ) ); /* stage */
+ fs << "]"; //ICV_HAAR_TREES_NAME
+ fs << ICV_HAAR_STAGE_THRESHOLD_NAME << stageClassifiers[si]->getThreshold();
+ fs << ICV_HAAR_PARENT_NAME << (int)si-1 << ICV_HAAR_NEXT_NAME << -1;
+ fs << "}"; //stage
} /* for each stage */
-
- CV_CALL( cvEndWriteStruct( fs ) ); /* stages */
- cvEndWriteStruct(fs);
+ fs << "]"; //ICV_HAAR_STAGES_NAME
}
-
- cvReleaseFileStorage( &fs );
- res = true;
-
- __END__;
-
- return res;
+ fs << "}";
}
-bool CvCascadeClassifier::loadTempInfo( const char* cascadeDirName,
- const char* _vecFileName, const char* _bgfileName, int _numPos, int _numNeg )
+bool CvCascadeClassifier::load( const String cascadeDirName )
{
- bool res = false;
-
- CV_FUNCNAME( "CvCascadeClassifier::loadTempInfo" );
- __BEGIN__;
-
- char buf[200];
-
- sprintf( buf, "%s/%s", cascadeDirName, CC_PARAMS_FILENAME );
- // features are not read
- CvFileStorage* fs = cvOpenFileStorage( buf, 0, CV_STORAGE_READ );
- if ( !fs )
- EXIT;
-
- if ( !readParams( fs, 0 ) )
- EXIT;
-
- createCascadeData();
- cascadeData->setData( this, _vecFileName, _bgfileName, _numPos, _numNeg, featureParams );
- cvReleaseFileStorage( &fs );
-
- CV_CALL( stageClassifiers = (CvCascadeBoost**) cvAlloc( numStages * sizeof(stageClassifiers[0]) ) );
- memset( stageClassifiers, 0, sizeof(stageClassifiers[0])*numStages);
-
- numCurStages = 0;
+ FileStorage fs( cascadeDirName + CC_PARAMS_FILENAME, FileStorage::READ );
+ if ( !fs.isOpened() )
+ return false;
+ FileNode node = fs.getFirstTopLevelNode();
+ if ( !readParams( node ) )
+ return false;
+ featureEvaluator = CvFeatureEvaluator::create(cascadeParams.featureType);
+ featureEvaluator->init( ((CvFeatureParams*)featureParams), numPos + numNeg, cascadeParams.winSize );
+ fs.release();
+
+ char buf[10];
for ( int si = 0; si < numStages; si++ )
{
- sprintf( buf, "%s/%d%s", cascadeDirName, si, ".xml" );
- fs = cvOpenFileStorage( buf, 0, CV_STORAGE_READ );
- if ( !fs )
+ sprintf( buf, "%s%d", "stage", si);
+ fs.open( cascadeDirName + buf + ".xml", FileStorage::READ );
+ node = fs.isOpened() ? fs.getFirstTopLevelNode() : FileNode();
+ if ( !fs.isOpened() )
break;
- createCurStage();
- if ( !stageClassifiers[si]->read( fs, 0, cascadeData, stageParams ) )
+ CvCascadeBoost *tempStage = new CvCascadeBoost;
+
+ if ( !tempStage->read( node, (CvFeatureEvaluator*)featureEvaluator, *((CvCascadeBoostParams*)stageParams )) )
{
- delete stageClassifiers[si];
- if ( fs )
- cvReleaseFileStorage( &fs );
+ delete tempStage;
+ fs.release();
break;
}
- cvReleaseFileStorage( &fs );
- numCurStages++;
+ stageClassifiers.push_back(tempStage);
}
-
- res = true;
-
- __END__;
-
- return res;
+ return true;
}
-void CvCascadeClassifier::markFeaturesInMap( CvMat* featureMap )
+void CvCascadeClassifier::getUsedFeaturesIdxMap( Mat& featureMap )
{
- int idx = 0;
- for( int si = 0; si < numCurStages; si++ )
- stageClassifiers[si]->markFeaturesInMap( featureMap );
+ featureMap.create( 1, featureEvaluator->getNumFeatures(), CV_32SC1 );
+ featureMap.setTo(Scalar(-1));
+
+ for( Vector<Ptr<CvCascadeBoost> >::const_iterator it = stageClassifiers.begin();
+ it != stageClassifiers.end(); it++ )
+ ((CvCascadeBoost*)((Ptr<CvCascadeBoost>)(*it)))->markUsedFeaturesInMap( featureMap );
- for( int fi = 0; fi < cascadeData->getNumFeatures(); fi++ )
- if ( featureMap->data.i[fi] >= 0 )
- featureMap->data.i[fi] = idx++;
+ for( int fi = 0, idx = 0; fi < featureEvaluator->getNumFeatures(); fi++ )
+ if ( featureMap.at<int>(0, fi) >= 0 )
+ featureMap.ptr<int>(0)[fi] = idx++;
}
\ No newline at end of file
-#ifndef CASCADECLASSIFIER_H
-#define CASCADECLASSIFIER_H
-
+#pragma once
+#include <ctime>
#include "features.h"
#include "haarfeatures.h"
#include "lbpfeatures.h"
#define CC_INTERNAL_NODES "internalNodes"
#define CC_LEAF_VALUES "leafValues"
-#define CC_FEATURES "features"
+#define CC_FEATURES FEATURES
#define CC_FEATURE_PARAMS "featureParams"
#define CC_MAX_CAT_COUNT "maxCatCount"
#define CC_LBP "LBP"
#define CC_RECT "rect"
-#define CV_NEW_SAVE_FORMAT 0
-#define CV_OLD_SAVE_FORMAT 1
+#ifdef _WIN32
+#define TIME( arg ) (((double) clock()) / CLOCKS_PER_SEC)
+#else
+#define TIME( arg ) (time( arg ))
+#endif
-struct CvCascadeParams : CvParams
+class CvCascadeParams : public CvParams
{
+public:
enum { BOOST = 0 };
- enum { HAAR = 0, LBP = 1 };
-
static const int defaultStageType = BOOST;
- static const int defaultFeatureType = HAAR;
+ static const int defaultFeatureType = CvFeatureParams::HAAR;
- CvCascadeParams() : stageType( defaultStageType ), featureType( defaultFeatureType ), winSize( cvSize(24, 24) )
- { name = CC_CASCADE_PARAMS; }
- CvCascadeParams( int _stageType, int _featureType ) :
- stageType( _stageType ), featureType( _featureType ), winSize( cvSize(24, 24) )
- { name = CC_CASCADE_PARAMS; }
- virtual ~CvCascadeParams() {}
- void write( CvFileStorage* fs ) const;
- bool read( CvFileStorage* fs, CvFileNode* node );
+ CvCascadeParams();
+ CvCascadeParams( int _stageType, int _featureType );
+ void write( FileStorage &fs ) const;
+ bool read( const FileNode &node );
- void printDefaults();
- void printAttrs();
- bool scanAttr( const char* prmName, const char* val );
+ void printDefaults() const;
+ void printAttrs() const;
+ bool scanAttr( const String prmName, const String val );
int stageType;
int featureType;
- CvSize winSize;
+ Size winSize;
};
class CvCascadeClassifier
{
public:
- CvCascadeClassifier();
- virtual ~CvCascadeClassifier();
-
- virtual bool train( const char* _cascadeDirName,
- const char* _vecFileName,
- const char* _bgfileName,
- int _numPos, int _numNeg,
- int _numPrecalcVal, int _numPrecalcIdx,
- int _numStages,
- const CvCascadeParams& _cascadeParams,
- const CvFeatureParams& _featureParams,
- const CvCascadeBoostParams& _stageParams,
- bool baseFormatSave = false );
- virtual int predict( int sampleIdx );
- virtual bool save( const char* cascadeDirName, bool baseFormat = false );
- const CvCascadeParams* getParams() const { return &cascadeParams; }
-protected:
- void createStageParams();
- void createCurStage();
- void createFeatureParams();
- void createCascadeData();
-
- virtual void writeParams( CvFileStorage* fs ) const;
- virtual void writeStages( CvFileStorage* fs, const CvMat* featureMap ) const;
- virtual void writeFeatures( CvFileStorage* fs, const CvMat* featureMap ) const;
+ bool train( const String _cascadeDirName,
+ const String _posFilename,
+ const String _negFilename,
+ int _numPos, int _numNeg,
+ int _numPrecalcVal, int _numPrecalcIdx,
+ int _numStages,
+ const CvCascadeParams& _cascadeParams,
+ const CvFeatureParams& _featureParams,
+ const CvCascadeBoostParams& _stageParams,
+ bool baseFormatSave = false );
+private:
+ int predict( int sampleIdx );
+ void save( const String cascadeDirName, bool baseFormat = false );
+ bool load( const String cascadeDirName );
+ bool updateTrainingSet( double& acceptanceRatio );
+ int fillPassedSamles( int first, int count, bool isPositive, int64& consumed );
+
+ void writeParams( FileStorage &fs ) const;
+ void writeStages( FileStorage &fs, const Mat& featureMap ) const;
+ void writeFeatures( FileStorage &fs, const Mat& featureMap ) const;
+ bool readParams( const FileNode &node );
+ bool readStages( const FileNode &node );
- virtual bool readParams( CvFileStorage* fs, CvFileNode* node );
- virtual bool readStages( CvFileStorage* fs, CvFileNode* node );
-
- virtual bool loadTempInfo( const char* cascadeDirName, const char* _vecFileName, const char* _bgFileName,
- int _numPos, int _numNeg );
- void markFeaturesInMap( CvMat* featureMap);
-
- int numStages, numCurStages;
-
- CvCascadeData* cascadeData;
- CvCascadeBoost** stageClassifiers;
+ void getUsedFeaturesIdxMap( Mat& featureMap );
CvCascadeParams cascadeParams;
- CvCascadeBoostParams* stageParams;
- CvFeatureParams* featureParams;
-};
-
-#endif
\ No newline at end of file
+ Ptr<CvFeatureParams> featureParams;
+ Ptr<CvCascadeBoostParams> stageParams;
+
+ Ptr<CvFeatureEvaluator> featureEvaluator;
+ Vector<Ptr<CvCascadeBoost> > stageClassifiers;
+ CvCascadeImageReader imgReader;
+ int numStages, curNumSamples;
+ int numPos, numNeg;
+};
\ No newline at end of file
#include "features.h"
#include "cascadeclassifier.h"
-//---------------------------- FeatureParams --------------------------------------
-
-CvFeatureParams::CvFeatureParams() : maxCatCount( 0 )
-{ name = CC_FEATURE_PARAMS; }
-
-CvFeatureParams::CvFeatureParams( CvSize _winSize ) : maxCatCount( 0 )
-{ name = CC_FEATURE_PARAMS; }
-
-void CvFeatureParams::set( const CvFeatureParams* fp )
+float calcNormFactor( const Mat& sum, const Mat& sqSum )
{
- maxCatCount = fp->maxCatCount;
+ CV_DbgAssert( sum.cols > 3 && sqSum.rows > 3 );
+ Rect normrect( 1, 1, sum.cols - 3, sum.rows - 3 );
+ size_t p0, p1, p2, p3;
+ CV_SUM_OFFSETS( p0, p1, p2, p3, normrect, sum.step1() )
+ double area = normrect.width * normrect.height;
+ const int *sp = (const int*)sum.data;
+ int valSum = sp[p0] - sp[p1] - sp[p2] + sp[p3];
+ const double *sqp = (const double *)sqSum.data;
+ double valSqSum = sqp[p0] - sqp[p1] - sqp[p2] + sqp[p3];
+ return (float) sqrt( (double) (area * valSqSum - (double)valSum * valSum) );
}
-void CvFeatureParams::write( CvFileStorage* fs ) const
-{
- CV_FUNCNAME( "CvFeatureParams::write" );
- __BEGIN__;
-
- CV_CALL( cvWriteInt( fs, CC_MAX_CAT_COUNT, maxCatCount ) );
+CvParams::CvParams() : name( "params" ) {}
+void CvParams::printDefaults() const
+{ cout << "--" << name << "--" << endl; }
+void CvParams::printAttrs() const {}
+bool CvParams::scanAttr( const String prmName, const String val ) { return false; }
- __END__;
-}
-bool CvFeatureParams::read( CvFileStorage* fs, CvFileNode* map )
-{
- bool res = false;
-
- CV_FUNCNAME( "CvFeatureParams::read" );
- __BEGIN__;
-
- CV_CALL( maxCatCount = cvReadIntByName( fs, map, CC_MAX_CAT_COUNT ) );
-
- res = true;
-
- __END__;
- return res;
-}
-
-//---------------------------- CascadeData --------------------------------------
+//---------------------------- FeatureParams --------------------------------------
-CvCascadeData::CvCascadeData()
+CvFeatureParams::CvFeatureParams() : maxCatCount( 0 )
{
- cls = 0;
- cascade = 0;
- imgreader = 0;
- features = 0;
+ name = CC_FEATURE_PARAMS;
}
-CvCascadeData::~CvCascadeData()
+void CvFeatureParams::init( const CvFeatureParams& fp )
{
- clear();
+ maxCatCount = fp.maxCatCount;
}
-void CvCascadeData::setData( CvCascadeClassifier* _cascade,
- const char* _vecFileName, const char* _bgFileName,
- int _numPos, int _numNeg, const CvFeatureParams* _featureParams )
+void CvFeatureParams::write( FileStorage &fs ) const
{
- numPos = _numPos;
- numNeg = _numNeg;
-
- cascade = _cascade;
- winSize = _cascade->getParams()->winSize;
- featureParams = _featureParams;
-
- maxCatCount = _featureParams->maxCatCount;
-
- cls = numPos + numNeg == 0 ? 0 : cvCreateMat( numPos + numNeg, 1, CV_32FC1 );
-
- imgreader = _vecFileName && _bgFileName ?
- new CvImageReader( _vecFileName, _bgFileName, winSize ) : 0;
- assert( !features );
- generateFeatures();
+ fs << CC_MAX_CAT_COUNT << maxCatCount;
}
-void CvCascadeData::clear()
+bool CvFeatureParams::read( const FileNode &node )
{
- cvReleaseMat( &cls );
- if ( imgreader )
- {
- delete imgreader;
- imgreader = 0;
- }
-
- if ( features )
- {
- for( int i = 0; i < numFeatures; i++ )
- if ( features[i] )
- delete features[i];
- cvFree( &features );
- features = 0;
- }
+ if ( node.empty() )
+ return false;
+ maxCatCount = node[CC_MAX_CAT_COUNT];
+ return maxCatCount >= 0;
}
-void CvCascadeData::calcNormfactor( const CvMat* _sum, const CvMat* _sqSum, float& _normfactor )
+Ptr<CvFeatureParams> CvFeatureParams::create( int featureType )
{
- CvRect normrect = cvRect( 1, 1, _sum->cols - 3, _sum->rows - 3 );
- int p0, p1, p2, p3;
- int valSum = 0;
- double valSqSum = 0;
- double area = 0.0;
- int offest = _sum->step/sizeof(_sum->data.i[0]);
-
- CV_SUM_OFFSETS( p0, p1, p2, p3, normrect, offest )
-
- area = normrect.width * normrect.height;
- valSum = _sum->data.i[p0] - _sum->data.i[p1]
- - _sum->data.i[p2] + _sum->data.i[p3];
- valSqSum = _sqSum->data.db[p0]
- - _sqSum->data.db[p1]
- - _sqSum->data.db[p2]
- + _sqSum->data.db[p3];
-
- _normfactor = (float) sqrt( (double) (area * valSqSum - (double)valSum * valSum) );
+ return featureType == HAAR ? Ptr<CvFeatureParams>(new CvHaarFeatureParams) :
+ featureType == LBP ? Ptr<CvFeatureParams>(new CvLBPFeatureParams) : new CvFeatureParams;
}
-float CvCascadeData::getCls( int sampleIdx ) const
-{
- int clsStep = cls->step / CV_ELEM_SIZE(cls->type);
- assert( cls );
- return cls->data.fl[sampleIdx*clsStep];
-}
+//------------------------------------- FeatureEvaluator ---------------------------------------
-bool CvCascadeData::updateForNextStage( double& acceptanceRatio )
+void CvFeatureEvaluator::init(const CvFeatureParams *_featureParams,
+ int _maxSampleCount, Size _winSize )
{
- int64 posConsumed = 0, negConsumed = 0;
- int posCount = fillPassedSamles( 0, numPos, true, posConsumed ), negCount;
- if( !posCount )
- return false;
-
- negCount = fillPassedSamles( numPos, numNeg, false, negConsumed );
- if ( !negCount )
- return false;
- acceptanceRatio = CCOUNTER_DIV(negCount, negConsumed);
- printf( "POS count : consumed %d : %d\n", posCount, (int)posConsumed );
- printf( "NEG count : acceptanceRatio %d : %f\n", negCount, acceptanceRatio );
-
- numImg = posCount + negCount;
- return true;
+ CV_Assert(_maxSampleCount > 0);
+ featureParams = (CvFeatureParams *)_featureParams;
+ winSize = _winSize;
+ numFeatures = 0;
+ cls.create( (int)_maxSampleCount, 1, CV_32FC1 );
+ generateFeatures();
}
-void CvCascadeData::writeFeature( CvFileStorage* fs, int fi )
+void CvFeatureEvaluator::setImage(const Mat &img, uchar clsLabel, int idx)
{
- if ((fi < numFeatures) && ( fi >= 0))
- features[fi]->write( fs );
+ CV_Assert(img.cols == winSize.width);
+ CV_Assert(img.rows == winSize.height);
+ CV_Assert(idx < cls.rows);
+ cls.ptr<float>(idx)[0] = clsLabel;
}
-void CvCascadeData::writeFeatures( CvFileStorage* fs, const CvMat* featureMap )
+Ptr<CvFeatureEvaluator> CvFeatureEvaluator::create(int type)
{
- CV_FUNCNAME( "CvCascadeData::writeFeatures" );
- __BEGIN__;
-
- CV_CALL( cvStartWriteStruct( fs, CC_FEATURES, CV_NODE_SEQ ) );
- for ( int fi = 0; fi < featureMap->cols; fi++ )
- {
- if ( featureMap->data.i[fi] >= 0 )
- {
- CV_CALL( cvStartWriteStruct( fs, 0, CV_NODE_MAP ) );
- features[fi]->write( fs );
- CV_CALL( cvEndWriteStruct( fs ) );
- }
- }
- CV_CALL( cvEndWriteStruct( fs ) );
-
- __END__;
+ return type == CvFeatureParams::HAAR ? Ptr<CvFeatureEvaluator>(new CvHaarEvaluator) :
+ type == CvFeatureParams::LBP ? Ptr<CvFeatureEvaluator>(new CvLBPEvaluator) : Ptr<CvFeatureEvaluator>();
}
\ No newline at end of file
-#ifndef FEATURES_H
-#define FEATURES_H
-
-#include "_imagestorage.h"
+#pragma once
+#include "imagestorage.h"
#include "cxcore.h"
#include "cv.h"
#include "ml.h"
#include <stdio.h>
-typedef uint64 ccounter_t;
-#define CCOUNTER_DIV(cc0, cc1) ( ((cc1) == 0) ? 0 : ( ((double)(cc0))/(double)(int64)(cc1) ) )
+#define FEATURES "features"
#define CV_SUM_OFFSETS( p0, p1, p2, p3, rect, step )                      \
    /* (x, y) */                                                          \
    (p0) = (rect).x + (step) * (rect).y;                                  \
    /* (x + w, y) */                                                      \
    (p1) = (rect).x + (rect).width + (step) * (rect).y;                   \
    /* (x, y + h) */                                                      \
    (p2) = (rect).x + (step) * ((rect).y + (rect).height);                \
    /* (x + w, y + h) */                                                  \
    (p3) = (rect).x + (rect).width + (step) * ((rect).y + (rect).height);
-//-------------------------------------- Params ---------------------------------------------
+float calcNormFactor( const Mat& sum, const Mat& sqSum );
-struct CvParams
+template<class Feature>
+void _writeFeatures( const Vector<Feature> features, FileStorage &fs, const Mat& featureMap )
+{
+ fs << FEATURES << "[";
+ for ( int fi = 0; fi < featureMap.cols; fi++ )
+ if ( featureMap.at<int>(0, fi) >= 0 )
+ {
+ fs << "{";
+ features[fi].write( fs );
+ fs << "}";
+ }
+ fs << "]";
+}
+
+class CvParams
{
- CvParams() : name( "params" ) {}
- virtual ~CvParams() {}
+public:
+ CvParams();
// from|to file
- virtual void write( CvFileStorage* fs ) const = 0;
- virtual bool read( CvFileStorage* fs, CvFileNode* node ) = 0;
-
+ virtual void write( FileStorage &fs ) const = 0;
+ virtual bool read( const FileNode &node ) = 0;
// from|to screen
- virtual void printDefaults()
- { printf( "--%s--\n", name ); };
- virtual void printAttrs(){};
- virtual bool scanAttr( const char* prmName, const char* val ){ return false; }
- const char* name;
+ virtual void printDefaults() const;
+ virtual void printAttrs() const;
+ virtual bool scanAttr( const String prmName, const String val );
+ String name;
};
-//---------------------------------- FeatureParams ----------------------------------------
-struct CvFeatureParams : CvParams
+class CvFeatureParams : public CvParams
{
+public:
+ enum { HAAR = 0, LBP = 1 };
CvFeatureParams();
- CvFeatureParams( CvSize _winSize );
- virtual ~CvFeatureParams()
- {}
-
- virtual void set( const CvFeatureParams* fp );
-
- virtual void write( CvFileStorage* fs ) const;
- virtual bool read( CvFileStorage* fs, CvFileNode* node );
-
+ virtual void init( const CvFeatureParams& fp );
+ virtual void write( FileStorage &fs ) const;
+ virtual bool read( const FileNode &node );
+ static Ptr<CvFeatureParams> create( int featureType );
int maxCatCount; // 0 in case of numerical features
};
-//---------------------------------- Features ----------------------------------------------
-
-struct CvFeature
-{
- CvFeature() {}
- virtual ~CvFeature() {}
- virtual void write( CvFileStorage* fs ) const = 0;
-};
-
-//---------------------------------- CascadeData ----------------------------------------
-
-class CvCascadeClassifier;
-
-class CvCascadeData
+class CvFeatureEvaluator
{
public:
- CvCascadeData();
- virtual ~CvCascadeData();
-
- virtual void setData( CvCascadeClassifier* _cascade,
- const char* _vecFileName, const char* _bgFileName,
- int _numPos, int _numNeg, const CvFeatureParams* _featureParams );
- virtual void clear();
- virtual void generateFeatures() = 0;
- virtual float calcFeature( int featureIdx, int sampleIdx ) = 0;
-
- static void calcNormfactor( const CvMat* _sum, const CvMat* _sqSum, float& _normFactor );
-
- virtual bool updateForNextStage( double& acceptanceRatio );
+ virtual void init(const CvFeatureParams *_featureParams,
+ int _maxSampleCount, Size _winSize );
+ virtual void setImage(const Mat& img, uchar clsLabel, int idx);
+ virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const = 0;
+ virtual float operator()(int featureIdx, int sampleIdx) const = 0;
+ static Ptr<CvFeatureEvaluator> create(int type);
- int getMaxCatCount() const { return featureParams->maxCatCount; }
- CvSize getWinSize() const { return winSize; }
int getNumFeatures() const { return numFeatures; }
- int getNumSamples() const { return numImg; }
- float getCls( int sampleIdx ) const;
- const CvMat* getCls() const { return cls; }
-
- virtual void writeFeature( CvFileStorage* fs, int fi ); // for old file format
- virtual void writeFeatures( CvFileStorage* fs, const CvMat* featureMap );
+ int getMaxCatCount() const { return featureParams->maxCatCount; }
+ const Mat& getCls() const { return cls; }
+ float getCls(int si) const { return cls.at<float>(si, 0); }
+protected:
+ virtual void generateFeatures() = 0;
-protected:
- virtual int fillPassedSamles( int first, int count, bool isPositive, int64& consumed ) = 0;
- int maxCatCount;
+ int npos, nneg;
int numFeatures;
- int numPos, numNeg, numImg;
- CvSize winSize;
-
- CvMat* cls; /* classes. 1.0 - object, 0.0 - background */
-
- CvCascadeClassifier* cascade;
- const CvFeatureParams* featureParams;
- CvImageReader* imgreader;
- CvFeature** features;
+ Size winSize;
+ CvFeatureParams *featureParams;
+ Mat cls;
};
-
-#endif
\ No newline at end of file
#include "haarfeatures.h"
#include "cascadeclassifier.h"
-//------------------------------ HaarFeatureParams -----------------------------------
+CvHaarFeatureParams::CvHaarFeatureParams() : mode(BASIC)
+{
+ name = HFP_NAME;
+}
-void CvHaarFeatureParams::set( const CvFeatureParams* fp )
+CvHaarFeatureParams::CvHaarFeatureParams( int _mode ) : mode( _mode )
{
- CvFeatureParams::set( fp );
- mode = ((const CvHaarFeatureParams*)fp)->mode;
+ name = HFP_NAME;
}
-void CvHaarFeatureParams::write( CvFileStorage* fs ) const
+void CvHaarFeatureParams::init( const CvFeatureParams& fp )
{
- CV_FUNCNAME( "CvHaarFeatureParams::write" );
- __BEGIN__;
- const char* mode_str;
+ CvFeatureParams::init( fp );
+ mode = ((const CvHaarFeatureParams&)fp).mode;
+}
+void CvHaarFeatureParams::write( FileStorage &fs ) const
+{
CvFeatureParams::write( fs );
-
- mode_str = mode == BASIC ? CC_MODE_BASIC :
- mode == CORE ? CC_MODE_CORE :
- mode == ALL ? CC_MODE_ALL : 0;
- if ( mode_str )
- { CV_CALL( cvWriteString( fs, CC_MODE, mode_str ) ); }
- else
- { CV_CALL( cvWriteInt( fs, CC_MODE, mode ) ); }
-
- __END__;
+ String modeStr = mode == BASIC ? CC_MODE_BASIC :
+ mode == CORE ? CC_MODE_CORE :
+ mode == ALL ? CC_MODE_ALL : String();
+ CV_Assert( !modeStr.empty() );
+ fs << CC_MODE << modeStr;
}
-void CvHaarFeatureParams::printDefaults()
+void CvHaarFeatureParams::printDefaults() const
{
CvFeatureParams::printDefaults();
- printf(" [-mode <%s (default) | %s | %s>]\n", CC_MODE_BASIC, CC_MODE_CORE, CC_MODE_ALL );
+    cout << "  [-mode <" << CC_MODE_BASIC << " (default) | "
+        << CC_MODE_CORE << " | " << CC_MODE_ALL << ">]" << endl;
}
-void CvHaarFeatureParams::printAttrs()
+void CvHaarFeatureParams::printAttrs() const
{
CvFeatureParams::printAttrs();
- const char* mode_str = mode == BASIC ? CC_MODE_BASIC :
- mode == CORE ? CC_MODE_CORE :
- mode == ALL ? CC_MODE_ALL : 0;
- printf("mode: %s\n", mode_str);
+    String mode_str = mode == BASIC ? CC_MODE_BASIC :
+                      mode == CORE ? CC_MODE_CORE :
+                      mode == ALL ? CC_MODE_ALL : String();
+ cout << "mode: " << mode_str << endl;
}
-bool CvHaarFeatureParams::scanAttr( const char* prmName, const char* val)
+bool CvHaarFeatureParams::scanAttr( const String prmName, const String val)
{
if ( !CvFeatureParams::scanAttr( prmName, val ) )
{
- if( !strcmp( prmName, "-mode" ) )
+ if( !prmName.compare("-mode") )
{
- mode = !strcmp( val, CC_MODE_CORE ) ? CORE :
- !strcmp( val, CC_MODE_ALL ) ? ALL :
- !strcmp( val, CC_MODE_BASIC ) ? BASIC : -1;
+ mode = !val.compare( CC_MODE_CORE ) ? CORE :
+ !val.compare( CC_MODE_ALL ) ? ALL :
+ !val.compare( CC_MODE_BASIC ) ? BASIC : -1;
if (mode == -1)
return false;
}
}
return true;
}
-//--------------------------------- HaarFeature --------------------------------------
-CV_INLINE CvHaarFeature1::CvHaarFeature1()
-{
- tilted = 0;
- rect[0].r = rect[1].r = rect[2].r = cvRect(0,0,0,0);
- rect[0].weight = rect[1].weight = rect[2].weight = 0;
-}
-
-CV_INLINE CvHaarFeature1::CvHaarFeature1( bool _tilted,
- int x0, int y0, int w0, int h0, float wt0,
- int x1, int y1, int w1, int h1, float wt1,
- int x2, int y2, int w2, int h2, float wt2 )
-{
- assert( CV_HAAR_FEATURE_MAX >= 3 );
-
- tilted = _tilted;
-
- rect[0].r.x = x0;
- rect[0].r.y = y0;
- rect[0].r.width = w0;
- rect[0].r.height = h0;
- rect[0].weight = wt0;
-
- rect[1].r.x = x1;
- rect[1].r.y = y1;
- rect[1].r.width = w1;
- rect[1].r.height = h1;
- rect[1].weight = wt1;
-
- rect[2].r.x = x2;
- rect[2].r.y = y2;
- rect[2].r.width = w2;
- rect[2].r.height = h2;
- rect[2].weight = wt2;
-}
+//--------------------- HaarFeatureEvaluator ----------------
-CV_INLINE void CvHaarFeature1::write( CvFileStorage* fs ) const
+void CvHaarEvaluator::init(const CvFeatureParams *_featureParams,
+ int _maxSampleCount, Size _winSize )
{
- CV_FUNCNAME( "CvHaarFeature1::write" );
- __BEGIN__;
-
- CV_CALL( cvStartWriteStruct( fs, CC_RECTS, CV_NODE_SEQ ) );
- for( int ri = 0; ri < CV_HAAR_FEATURE_MAX && rect[ri].r.width != 0; ++ri )
- {
- CV_CALL( cvStartWriteStruct( fs, NULL, CV_NODE_SEQ | CV_NODE_FLOW ) );
- CV_CALL( cvWriteInt( fs, NULL, rect[ri].r.x ) );
- CV_CALL( cvWriteInt( fs, NULL, rect[ri].r.y ) );
- CV_CALL( cvWriteInt( fs, NULL, rect[ri].r.width ) );
- CV_CALL( cvWriteInt( fs, NULL, rect[ri].r.height ) );
- CV_CALL( cvWriteReal( fs, NULL, rect[ri].weight ) );
- CV_CALL( cvEndWriteStruct( fs ) );
- }
- CV_CALL( cvEndWriteStruct( fs ) ); /* HF_RECTS */
- CV_CALL( cvWriteInt( fs, CC_TILTED, tilted ) );
-
- __END__;
+ CV_Assert(_maxSampleCount > 0);
+ int cols = (_winSize.width + 1) * (_winSize.height + 1);
+ sum.create((int)_maxSampleCount, cols, CV_32SC1);
+ tilted.create((int)_maxSampleCount, cols, CV_32SC1);
+ normfactor.create(1, (int)_maxSampleCount, CV_32FC1);
+ CvFeatureEvaluator::init( _featureParams, _maxSampleCount, _winSize );
}
-//---------------------------- HaarCascadeData --------------------------------------
-
-CvHaarCascadeData::CvHaarCascadeData()
+void CvHaarEvaluator::setImage(const Mat& img, uchar clsLabel, int idx)
{
- sum = tilted = normfactor = 0;
+ CV_DbgAssert( !sum.empty() && !tilted.empty() && !normfactor.empty() );
+ CvFeatureEvaluator::setImage( img, clsLabel, idx);
+ Mat innSum(winSize.height + 1, winSize.width + 1, sum.type(), sum.ptr<int>((int)idx));
+ Mat innTilted(winSize.height + 1, winSize.width + 1, tilted.type(), tilted.ptr<int>((int)idx));
+ Mat innSqSum;
+ integral(img, innSum, innSqSum, innTilted);
+ normfactor.ptr<float>(0)[idx] = calcNormFactor( innSum, innSqSum );
}
-void CvHaarCascadeData::setData( CvCascadeClassifier* _cascade,
- const char* _vecFileName, const char* _bgfileName,
- int _numPos, int _numNeg, const CvFeatureParams* _featureParams )
+void CvHaarEvaluator::writeFeatures( FileStorage &fs, const Mat& featureMap ) const
{
- int maxNumSamples = _numPos + _numNeg;
- CvSize ws = _cascade->getParams()->winSize;
- int sumCols = (ws.width + 1) * (ws.height + 1);
-
- sum = cvCreateMat( maxNumSamples, sumCols, CV_32SC1);
- tilted = cvCreateMat( maxNumSamples, sumCols, CV_32SC1);
- normfactor = cvCreateMat( 1, maxNumSamples, CV_32FC1 );
-
- CvCascadeData::setData( _cascade, _vecFileName, _bgfileName, _numPos, _numNeg, _featureParams );
+ _writeFeatures( features, fs, featureMap );
}
-void CvHaarCascadeData::clear()
+void CvHaarEvaluator::writeFeature(FileStorage &fs, int fi) const
{
- cvReleaseMat( &sum );
- cvReleaseMat( &tilted );
- cvReleaseMat( &normfactor );
- CvCascadeData::clear();
+ CV_DbgAssert( fi < (int)features.size() );
+ features[fi].write(fs);
}
-void CvHaarCascadeData::generateFeatures()
+void CvHaarEvaluator::generateFeatures()
{
- CvHaarFeature1* haarFeature;
- CvMemStorage* storage = cvCreateMemStorage();
- CvSeq* seq = NULL;
- CvSeqWriter writer;
- int mode = ((const CvHaarFeatureParams*)featureParams)->mode;
-
- cvStartWriteSeq( 0, sizeof( CvSeq ), sizeof( haarFeature ), storage, &writer );
-
+ int mode = ((const CvHaarFeatureParams*)((CvFeatureParams*)featureParams))->mode;
+ int offset = winSize.width + 1;
for( int x = 0; x < winSize.width; x++ )
{
for( int y = 0; y < winSize.height; y++ )
if ( (x+dx*2 <= winSize.width) && (y+dy <= winSize.height) )
{
if ( x+x+dx*2 <= winSize.width )
- {
- haarFeature = new CvHaarFeature1( false,
- x, y, dx*2, dy, -1,
- x+dx, y, dx , dy, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, false,
+ x, y, dx*2, dy, -1,
+ x+dx, y, dx , dy, +2 ) );
}
// haar_y2
if ( (x+dx*2 <= winSize.height) && (y+dy <= winSize.width) )
{
- if ( y+y+dy <= winSize.width ) {
- haarFeature = new CvHaarFeature1( false,
- y, x, dy, dx*2, -1,
- y, x+dx, dy, dx, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ if ( y+y+dy <= winSize.width )
+ features.push_back( HaarFeature( offset, false,
+ y, x, dy, dx*2, -1,
+ y, x+dx, dy, dx, +2 ) );
}
-
// haar_x3
if ( (x+dx*3 <= winSize.width) && (y+dy <= winSize.height) )
{
if ( x+x+dx*3 <= winSize.width )
- {
- haarFeature = new CvHaarFeature1( false,
- x, y, dx*3, dy, -1,
- x+dx, y, dx, dy, +3 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, false,
+ x, y, dx*3, dy, -1,
+ x+dx, y, dx , dy, +3 ) );
}
// haar_y3
- if ( (x+dx*3 <= winSize.height) && (y+dy <= winSize.width) ) {
- if ( y+y+dy <= winSize.width ) {
- haarFeature = new CvHaarFeature1( false,
- y, x, dy, dx*3, -1,
- y, x+dx, dy, dx, +3 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ if ( (x+dx*3 <= winSize.height) && (y+dy <= winSize.width) )
+ {
+ if ( y+y+dy <= winSize.width )
+ features.push_back( HaarFeature( offset, false,
+ y, x, dy, dx*3, -1,
+ y, x+dx, dy, dx, +3 ) );
}
if( mode != CvHaarFeatureParams::BASIC )
{
// haar_x4
if ( (x+dx*4 <= winSize.width) && (y+dy <= winSize.height) )
{
- if ( x+x+dx*4 <= winSize.width ) {
- haarFeature = new CvHaarFeature1( false,
- x, y, dx*4, dy, -1,
- x+dx, y, dx*2, dy, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ if ( x+x+dx*4 <= winSize.width )
+ features.push_back( HaarFeature( offset, false,
+ x, y, dx*4, dy, -1,
+ x+dx, y, dx*2, dy, +2 ) );
}
// haar_y4
if ( (x+dx*4 <= winSize.height) && (y+dy <= winSize.width ) )
{
- if ( y+y+dy <= winSize.width ) {
- haarFeature = new CvHaarFeature1( false,
- y, x, dy, dx*4, -1,
- y, x+dx, dy, dx*2, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ if ( y+y+dy <= winSize.width )
+ features.push_back( HaarFeature( offset, false,
+ y, x, dy, dx*4, -1,
+ y, x+dx, dy, dx*2, +2 ) );
}
}
// x2_y2
if ( (x+dx*2 <= winSize.width) && (y+dy*2 <= winSize.height) )
{
if ( x+x+dx*2 <= winSize.width )
- {
- haarFeature = new CvHaarFeature1( false,
- x , y, dx*2, dy*2, -1,
- x , y , dx , dy, +2,
- x+dx, y+dy, dx , dy, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, false,
+ x, y, dx*2, dy*2, -1,
+ x, y, dx, dy, +2,
+ x+dx, y+dy, dx, dy, +2 ) );
}
if (mode != CvHaarFeatureParams::BASIC)
{
if ( (x+dx*3 <= winSize.width) && (y+dy*3 <= winSize.height) )
{
if ( x+x+dx*3 <= winSize.width )
- {
- haarFeature = new CvHaarFeature1( false,
- x , y, dx*3, dy*3, -1,
- x+dx, y+dy, dx , dy , +9);
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, false,
+ x , y , dx*3, dy*3, -1,
+ x+dx, y+dy, dx , dy , +9) );
}
}
if (mode == CvHaarFeatureParams::ALL)
{
- // tilted haar_x2 (x, y, w, h, b, weight)
+ // tilted haar_x2
if ( (x+2*dx <= winSize.width) && (y+2*dx+dy <= winSize.height) && (x-dy>= 0) )
{
if ( x <= (winSize.width / 2) )
- {
- haarFeature = new CvHaarFeature1( true,
- x, y, dx*2, dy, -1,
- x, y, dx , dy, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, true,
+ x, y, dx*2, dy, -1,
+ x, y, dx, dy, +2 ) );
}
- // tilted haar_y2 (x, y, w, h, b, weight)
+ // tilted haar_y2
if ( (x+dx <= winSize.width) && (y+dx+2*dy <= winSize.height) && (x-2*dy>= 0) )
{
if ( x <= (winSize.width / 2) )
- {
- haarFeature = new CvHaarFeature1( true,
- x, y, dx, 2*dy, -1,
- x, y, dx, dy, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, true,
+ x, y, dx, 2*dy, -1,
+ x, y, dx, dy, +2 ) );
}
- // tilted haar_x3 (x, y, w, h, b, weight)
+ // tilted haar_x3
if ( (x+3*dx <= winSize.width) && (y+3*dx+dy <= winSize.height) && (x-dy>= 0) )
{
if ( x <= (winSize.width / 2) )
- {
- haarFeature = new CvHaarFeature1( true,
- x, y, dx*3, dy, -1,
- x+dx, y+dx, dx , dy, +3 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, true,
+ x, y, dx*3, dy, -1,
+ x+dx, y+dx, dx, dy, +3 ) );
}
- // tilted haar_y3 (x, y, w, h, b, weight)
+ // tilted haar_y3
if ( (x+dx <= winSize.width) && (y+dx+3*dy <= winSize.height) && (x-3*dy>= 0) )
{
if ( x <= (winSize.width / 2) )
- {
- haarFeature = new CvHaarFeature1( true,
- x, y, dx, 3*dy, -1,
- x-dy, y+dy, dx, dy, +3 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, true,
+ x, y, dx, 3*dy, -1,
+ x-dy, y+dy, dx, dy, +3 ) );
}
- // tilted haar_x4 (x, y, w, h, b, weight)
+ // tilted haar_x4
if ( (x+4*dx <= winSize.width) && (y+4*dx+dy <= winSize.height) && (x-dy>= 0) )
{
if ( x <= (winSize.width / 2) )
- {
- haarFeature = new CvHaarFeature1( true,
- x, y, dx*4, dy, -1,
- x+dx, y+dx, dx*2, dy, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, true,
+ x, y, dx*4, dy, -1,
+ x+dx, y+dx, dx*2, dy, +2 ) );
}
- // tilted haar_y4 (x, y, w, h, b, weight)
+ // tilted haar_y4
if ( (x+dx <= winSize.width) && (y+dx+4*dy <= winSize.height) && (x-4*dy>= 0) )
{
if ( x <= (winSize.width / 2) )
- {
- haarFeature = new CvHaarFeature1( true,
- x, y, dx, 4*dy, -1,
- x-dy, y+dy, dx, 2*dy, +2 );
- CV_WRITE_SEQ_ELEM( haarFeature, writer );
- }
+ features.push_back( HaarFeature( offset, true,
+ x, y, dx, 4*dy, -1,
+ x-dy, y+dy, dx, 2*dy, +2 ) );
}
}
}
}
}
}
- seq = cvEndWriteSeq( &writer );
- numFeatures = seq->total;
- features = (CvFeature**) cvAlloc( sizeof( CvHaarFeature1* ) * numFeatures );
- cvCvtSeqToArray( seq, (CvArr*)features );
- updateFastFeatures( winSize.width + 1 );
-
- cvReleaseMemStorage( &storage );
+ numFeatures = (int)features.size();
}
-void CvHaarCascadeData::updateFastFeatures(int step)
+CvHaarEvaluator::HaarFeature::HaarFeature()
{
- for( int fi = 0; fi < numFeatures; fi++)
- {
- if( !((CvHaarFeature1*)features[fi])->tilted )
- {
- for( int j = 0; j < CV_HAAR_FEATURE_MAX; j++ )
- {
- if( ((CvHaarFeature1*)features[fi])->rect[j].weight == 0.0F )
- {
- break;
- }
- CV_SUM_OFFSETS( ((CvHaarFeature1*)features[fi])->fastRect[j].p0,
- ((CvHaarFeature1*)features[fi])->fastRect[j].p1,
- ((CvHaarFeature1*)features[fi])->fastRect[j].p2,
- ((CvHaarFeature1*)features[fi])->fastRect[j].p3,
- ((CvHaarFeature1*)features[fi])->rect[j].r, step )
- }
- }
- else
- {
- for( int j = 0; j < CV_HAAR_FEATURE_MAX; j++ )
- {
- if( ((CvHaarFeature1*)features[fi])->rect[j].weight == 0.0F )
- {
- break;
- }
- CV_TILTED_OFFSETS( ((CvHaarFeature1*)features[fi])->fastRect[j].p0,
- ((CvHaarFeature1*)features[fi])->fastRect[j].p1,
- ((CvHaarFeature1*)features[fi])->fastRect[j].p2,
- ((CvHaarFeature1*)features[fi])->fastRect[j].p3,
- ((CvHaarFeature1*)features[fi])->rect[j].r, step )
- }
- }
- }
+ tilted = false;
+ rect[0].r = rect[1].r = rect[2].r = Rect(0,0,0,0);
+ rect[0].weight = rect[1].weight = rect[2].weight = 0;
}
-int CvHaarCascadeData::fillPassedSamles( int first, int count, bool isPositive, int64& consumed )
+CvHaarEvaluator::HaarFeature::HaarFeature( int offset, bool _tilted,
+ int x0, int y0, int w0, int h0, float wt0,
+ int x1, int y1, int w1, int h1, float wt1,
+ int x2, int y2, int w2, int h2, float wt2 )
{
- int getcount = 0;
-
- //CV_FUNCNAME( "CvHaarCascadeData::fillPassedSamles" );
- __BEGIN__;
-
- bool reset = true;
- CvMat innSum, innSqSum, innTilted, img;
- int step = sum->step / CV_ELEM_SIZE(sum->type),
- clsStep = cls->step / CV_ELEM_SIZE(cls->type);
-
- assert( first + count <= numPos + numNeg );
-
- img = cvMat( winSize.height, winSize.width, CV_8UC1,
- cvStackAlloc( sizeof( uchar ) * winSize.height * winSize.width ) );
+ tilted = _tilted;
- innSum = cvMat( winSize.height + 1, winSize.width + 1,
- CV_32SC1, NULL );
+ rect[0].r.x = x0;
+ rect[0].r.y = y0;
+ rect[0].r.width = w0;
+ rect[0].r.height = h0;
+ rect[0].weight = wt0;
- innSqSum = cvMat( winSize.height + 1, winSize.width + 1, CV_64F,
- cvStackAlloc( (winSize.height + 1) * (winSize.width + 1) * CV_ELEM_SIZE(CV_64F)) );
+ rect[1].r.x = x1;
+ rect[1].r.y = y1;
+ rect[1].r.width = w1;
+ rect[1].r.height = h1;
+ rect[1].weight = wt1;
- innTilted = cvMat( winSize.height + 1, winSize.width + 1,
- CV_32SC1, NULL );
+ rect[2].r.x = x2;
+ rect[2].r.y = y2;
+ rect[2].r.width = w2;
+ rect[2].r.height = h2;
+ rect[2].weight = wt2;
- for( int i = first; i < first + count; i++ )
+ if( !tilted )
{
- for( ; ; )
+ for( int j = 0; j < CV_HAAR_FEATURE_MAX; j++ )
{
- bool is_get_img = isPositive ? imgreader->getPosImage( &img, reset ) :
- imgreader->getNegImage( &img, reset );
- reset = false;
- if( !is_get_img )
- EXIT;
-
- innSum.data.i = sum->data.i + i * step;
- innTilted.data.i = tilted->data.i + i * step;
-
- cvIntegralImage( &img, &innSum, &innSqSum, &innTilted );
- calcNormfactor( &innSum, &innSqSum, normfactor->data.fl[i]);
- consumed++;
-
- if( cascade->predict( i ) == 1.0F )
- {
- getcount++;
- cls->data.fl[i*clsStep] = isPositive ? 1.f : 0.f;
+ if( rect[j].weight == 0.0F )
break;
- }
+ CV_SUM_OFFSETS( fastRect[j].p0, fastRect[j].p1, fastRect[j].p2, fastRect[j].p3, rect[j].r, offset )
+ }
+ }
+ else
+ {
+ for( int j = 0; j < CV_HAAR_FEATURE_MAX; j++ )
+ {
+ if( rect[j].weight == 0.0F )
+ break;
+ CV_TILTED_OFFSETS( fastRect[j].p0, fastRect[j].p1, fastRect[j].p2, fastRect[j].p3, rect[j].r, offset )
}
}
+}
- __END__;
- return getcount;
+void CvHaarEvaluator::HaarFeature::write( FileStorage &fs ) const
+{
+ fs << CC_RECTS << "[";
+ for( int ri = 0; ri < CV_HAAR_FEATURE_MAX && rect[ri].r.width != 0; ++ri )
+ {
+ fs << "[:" << rect[ri].r.x << rect[ri].r.y <<
+ rect[ri].r.width << rect[ri].r.height << rect[ri].weight << "]";
+ }
+ fs << "]" << CC_TILTED << tilted;
}
\ No newline at end of file
-#ifndef HAARFEATURES_H
-#define HAARFEATURES_H
-
+#pragma once
#include "features.h"
#define CV_HAAR_FEATURE_MAX 3
-#define CV_HAAR_FEATURE_DESC_MAX 20
-
#define HFP_NAME "haarFeatureParams"
-struct CvHaarFeatureParams : CvFeatureParams
+class CvHaarFeatureParams : public CvFeatureParams
{
+public:
enum { BASIC = 0, CORE = 1, ALL = 2 };
/* 0 - BASIC = Viola
* 1 - CORE = All upright
- * 2 - ALL = All features
- */
-
- CvHaarFeatureParams() : mode(BASIC)
- { name = HFP_NAME; }
- CvHaarFeatureParams( CvSize _winSize, int _mode ) :
- CvFeatureParams( _winSize ), mode( _mode )
- { name = HFP_NAME; }
- virtual ~CvHaarFeatureParams()
- {}
+ * 2 - ALL = All features */
+
+ CvHaarFeatureParams();
+ CvHaarFeatureParams( int _mode );
- virtual void set( const CvFeatureParams* fp );
- virtual void write( CvFileStorage* fs ) const;
+ virtual void init( const CvFeatureParams& fp );
+ virtual void write( FileStorage &fs ) const;
- virtual void printDefaults();
- virtual void printAttrs();
- virtual bool scanAttr( const char* prm, const char* val);
+ virtual void printDefaults() const;
+ virtual void printAttrs() const;
+ virtual bool scanAttr( const String prm, const String val);
int mode;
};
-struct CvHaarFeature1 : public CvFeature
+class CvHaarEvaluator : public CvFeatureEvaluator
{
- CvHaarFeature1();
- CvHaarFeature1( bool _tilted,
- int x0, int y0, int w0, int h0, float wt0,
- int x1, int y1, int w1, int h1, float wt1,
- int x2 CV_DEFAULT( 0 ), int y2 CV_DEFAULT( 0 ),
- int w2 CV_DEFAULT( 0 ), int h2 CV_DEFAULT( 0 ),
- float wt2 CV_DEFAULT( 0.0F ) );
- virtual ~CvHaarFeature1() {}
-
- virtual void write( CvFileStorage* fs ) const;
- float calc( const int* _sum_row, const int* _tilted_row );
-
- bool tilted;
- struct
- {
- CvRect r;
- float weight;
- } rect[CV_HAAR_FEATURE_MAX];
+public:
+ virtual void init(const CvFeatureParams *_featureParams,
+ int _maxSampleCount, Size _winSize );
+ virtual void setImage(const Mat& img, uchar clsLabel, int idx);
+ virtual float operator()(int featureIdx, int sampleIdx) const;
+ virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const;
+    void writeFeature( FileStorage &fs, int fi ) const; // for old file format
+protected:
+ virtual void generateFeatures();
- struct
+ class HaarFeature
{
- int p0, p1, p2, p3;
- } fastRect[CV_HAAR_FEATURE_MAX];
+ public:
+ HaarFeature();
+ HaarFeature( int offset, bool _tilted,
+ int x0, int y0, int w0, int h0, float wt0,
+ int x1, int y1, int w1, int h1, float wt1,
+ int x2 = 0, int y2 = 0, int w2 = 0, int h2 = 0, float wt2 = 0.0F );
+ float calc( const Mat &sum, const Mat &tilted, size_t y) const;
+ void write( FileStorage &fs ) const;
+
+ bool tilted;
+ struct
+ {
+ Rect r;
+ float weight;
+ } rect[CV_HAAR_FEATURE_MAX];
+
+ struct
+ {
+ int p0, p1, p2, p3;
+ } fastRect[CV_HAAR_FEATURE_MAX];
+ };
+
+ Vector<HaarFeature> features;
+ Mat sum; /* sum images (each row represents image) */
+ Mat tilted; /* tilted sum images (each row represents image) */
+ Mat normfactor; /* normalization factor */
};
-class CvHaarCascadeData : public CvCascadeData
+inline float CvHaarEvaluator::operator()(int featureIdx, int sampleIdx) const
{
-public:
- CvHaarCascadeData();
- virtual void setData( CvCascadeClassifier* _cascade,
- const char* _vecFileName, const char* _bgFileName,
- int _numPos, int _numNeg, const CvFeatureParams* _featureParams );
-
- virtual void clear();
-
- virtual void generateFeatures();
- void updateFastFeatures(int step); // offset - row step for the integral image ( weight + 1)
-
- virtual float calcFeature( int featureIdx, int sampleIdx );
-protected:
- virtual int fillPassedSamles( int first, int count, bool isPositive, int64& consumed );
- CvMat* sum; /* sum images (each row represents image) */
- CvMat* tilted; /* tilted sum images (each row represents image) */
- CvMat* normfactor; /* normalization factor */
-};
+ float nf = normfactor.at<float>(0, sampleIdx);
+ return !nf ? 0.0f : (features[featureIdx].calc( sum, tilted, sampleIdx)/nf);
+}
-float CV_INLINE CvHaarFeature1::calc( const int* _sum_row, const int* _tilted_row )
+inline float CvHaarEvaluator::HaarFeature::calc( const Mat &_sum, const Mat &_tilted, size_t y) const
{
- const int* img = tilted ? _tilted_row : _sum_row;
+ const int* img = tilted ? _tilted.ptr<int>((int)y) : _sum.ptr<int>((int)y);
float ret = rect[0].weight * (img[fastRect[0].p0] - img[fastRect[0].p1] - img[fastRect[0].p2] + img[fastRect[0].p3] ) +
rect[1].weight * (img[fastRect[1].p0] - img[fastRect[1].p1] - img[fastRect[1].p2] + img[fastRect[1].p3] );
    if( rect[2].weight != 0.0f )
        ret += rect[2].weight * (img[fastRect[2].p0] - img[fastRect[2].p1] - img[fastRect[2].p2] + img[fastRect[2].p3] );
    return ret;
}
-float CV_INLINE CvHaarCascadeData::calcFeature( int featureIdx, int sampleIdx )
-{
- int sumStep = sum->step / CV_ELEM_SIZE( sum->type );
- int tiltedStep = sum->step / CV_ELEM_SIZE( tilted->type );
- float nf, val;
-
- assert( features );
- nf = normfactor->data.fl[sampleIdx];
- val = ((CvHaarFeature1*)(features[featureIdx]))->calc( sum->data.i + sampleIdx * sumStep,
- tilted->data.i + sampleIdx * tiltedStep );
- val = ( nf == 0.0F ) ? 0.0F : (val / nf);
- return val;
-}
-#endif
#include "cv.h"\r
-#include "_imagestorage.h"\r
-#include "_inner_functions.h"\r
+#include "imagestorage.h"\r
#include <stdio.h>\r
-\r
-//---------------------------------------- Background reading ------------------------------------------------
+#include <iostream>\r
+#include <fstream>\r
-CvBackgroundReader::CvBackgroundReader()
+bool CvCascadeImageReader::create( const String _posFilename, const String _negFilename, Size _winSize )\r
+{\r
+ return posReader.create(_posFilename) && negReader.create(_negFilename, _winSize);\r
+}\r
+\r
+CvCascadeImageReader::NegReader::NegReader()
{
- src = cvMat( 1, 1, CV_8UC1, NULL );
- img = cvMat( 1, 1, CV_8UC1, NULL );
- offset = cvPoint( 0, 0 );
+ src.create( 0, 0 , CV_8UC1 );
+ img.create( 0, 0, CV_8UC1 );
+ point = offset = Point( 0, 0 );
scale = 1.0F;
scaleFactor = 1.4142135623730950488016887242097F;
stepFactor = 0.5F;
- point = offset;
-}
-
-CvBackgroundReader::~CvBackgroundReader()
-{
- if( src.data.ptr != NULL )
- {
- cvFree( &src.data.ptr );
- }
- if( img.data.ptr != NULL )
- {
- cvFree( &img.data.ptr );
- }
-}
-
-
-CvBackgroundData::CvBackgroundData()
-{
- fileName = 0;
}
-CvBackgroundData::CvBackgroundData( const char* _fileName, CvSize _winSize )
+bool CvCascadeImageReader::NegReader::create( const String _filename, Size _winSize )
{
- const char* dir = NULL;
- char full[CC_PATH_MAX];
- char* imgFileName = NULL;
- FILE* input = NULL;
- size_t len = 0, fileNamesLen = 0;
+ String dirname, str;
+ std::ifstream file(_filename.c_str());
+ if ( !file.is_open() )
+ return false;
- assert( _fileName );
-
- dir = strrchr( _fileName, '\\' );
- if( dir == NULL )
- {
- dir = strrchr( _fileName, '/' );
- }
- if( dir == NULL )
- {
- imgFileName = &(full[0]);
- }
- else
+ size_t pos = _filename.rfind('\\');
+ char dlmrt = '\\';
+ if (pos == String::npos)
{
- strncpy( &(full[0]), _fileName, (dir - _fileName + 1) );
- imgFileName = &(full[(dir - _fileName + 1)]);
+ pos = _filename.rfind('/');
+ dlmrt = '/';
}
-
- input = fopen( _fileName, "r" );
- if( input != NULL )
+ dirname = _filename.substr(0, pos) + dlmrt;
+ while( !file.eof() )
{
- count = 0;
- fileNamesLen = 0;
-
- /* count */
- while( !feof( input ) )
- {
- *imgFileName = '\0';
- if( !fscanf( input, "%s", imgFileName ))
- break;
- len = strlen( imgFileName );
- if( len > 0 )
- {
- if( (*imgFileName) == '#' ) continue; /* comment */
- count++;
- fileNamesLen += sizeof( char ) * (strlen( &(full[0]) ) + 1);
- }
- }
- if( count > 0 )
- {
- char* tmp;
- fseek( input, 0, SEEK_SET );
- fileNamesLen += sizeof( char* ) * count;
- fileName = (char**) cvAlloc( fileNamesLen );
- memset( (void*) fileName, 0, fileNamesLen );
- last = round = 0;
- winSize = _winSize;
- tmp = (char*)(fileName + count);
- count = 0;
- while( !feof( input ) )
- {
- *imgFileName = '\0';
- if( !fscanf( input, "%s", imgFileName ))
- break;
- len = strlen( imgFileName );
- if( len > 0 )
- {
- if( (*imgFileName) == '#' ) continue; /* comment */
- fileName[count++] = tmp;
- strcpy( tmp, &(full[0]) );
- tmp += strlen( &(full[0]) ) + 1;
- }
- }
- }
- fclose( input );
+ std::getline(file, str);
+ if (str.empty()) break;
+ if (str.at(0) == '#' ) continue; /* comment */
+ imgFilenames.push_back(dirname + str);
}
+ file.close();
- bgReader = new CvBackgroundReader();
-}
-
-CvBackgroundData::~CvBackgroundData()
-{
- delete bgReader;
- char* fb = (char*)fileName;
- cvFree( &fb );
+ winSize = _winSize;
+ last = round = 0;
+ return true;
}
-bool CvBackgroundData::getNext( bool reset )
+bool CvCascadeImageReader::NegReader::nextImg()
{
- IplImage* img = NULL;
- size_t dataSize = 0;
- int i = 0;
- CvPoint offset = cvPoint(0,0);
-
- if( bgReader->src.data.ptr != NULL )
- {
- cvFree( &(bgReader->src.data.ptr) );
- bgReader->src.data.ptr = NULL;
- }
- if( bgReader->img.data.ptr != NULL )
- {
- cvFree( &(bgReader->img.data.ptr) );
- bgReader->img.data.ptr = NULL;
- }
- if ( reset )
- last = 0;
- {
- for( i = 0; i < count; i++ )
- {
- img = cvLoadImage( fileName[last++], 0 );
- if( !img )
- continue;
- round += last / count;
- round = round % (winSize.width * winSize.height);
- last %= count;
-
- offset.x = round % winSize.width;
- offset.y = round / winSize.width;
-
- offset.x = MIN( offset.x, img->width - winSize.width );
- offset.y = MIN( offset.y, img->height - winSize.height );
-
- if( img != NULL && img->depth == IPL_DEPTH_8U && img->nChannels == 1 &&
- offset.x >= 0 && offset.y >= 0 )
- {
- break;
- }
- if( img != NULL )
- cvReleaseImage( &img );
- img = NULL;
- }
- }
- if( img == NULL )
- {
- /* no appropriate image */
- return 0;
- }
- dataSize = sizeof( uchar ) * img->width * img->height;
- bgReader->src = cvMat( img->height, img->width, CV_8UC1, (void*) cvAlloc( dataSize ) );
- cvCopy( img, &bgReader->src, NULL );
- cvReleaseImage( &img );
- img = NULL;
-
- bgReader->offset = offset;
- bgReader->point = bgReader->offset;
- bgReader->scale = MAX(
- ((float) winSize.width + bgReader->point.x) / ((float) bgReader->src.cols),
- ((float) winSize.height + bgReader->point.y) / ((float) bgReader->src.rows) );
+ Point _offset = Point(0,0);
+ size_t count = imgFilenames.size();
+ for( size_t i = 0; i < count; i++ )
+ {
+ src = imread( imgFilenames[last++], 0 );
+ if( src.empty() )
+ continue;
+ round += last / count;
+ round = round % (winSize.width * winSize.height);
+ last %= count;
+
+ _offset.x = min( (int)round % winSize.width, src.cols - winSize.width );
+ _offset.y = min( (int)round / winSize.width, src.rows - winSize.height );
+        if( !src.empty() && src.type() == CV_8UC1
+            && _offset.x >= 0 && _offset.y >= 0 )
+ break;
+ }
+
+ if( src.empty() )
+ return false; // no appropriate image
+ point = offset = _offset;
+ scale = max( ((float)winSize.width + point.x) / ((float)src.cols),
+ ((float)winSize.height + point.y) / ((float)src.rows) );
- bgReader->img = cvMat( (int) (bgReader->scale * bgReader->src.rows + 0.5F),
- (int) (bgReader->scale * bgReader->src.cols + 0.5F),
- CV_8UC1, (void*) cvAlloc( dataSize ) );
- cvResize( &(bgReader->src), &(bgReader->img) );
-
- return 1;
+ Size sz( (int)(scale*src.cols + 0.5F), (int)(scale*src.rows + 0.5F) );
+ resize( src, img, sz );
+ return true;
}
-
-bool CvBackgroundData::getImage( CvMat* img, bool reset )
+bool CvCascadeImageReader::NegReader::get( Mat& _img )
{
- CvMat mat;
-
- assert( img != NULL );
- assert( CV_MAT_TYPE( img->type ) == CV_8UC1 );
- assert( img->cols == winSize.width );
- assert( img->rows == winSize.height );
+ CV_Assert( !_img.empty() );
+ CV_Assert( _img.type() == CV_8UC1 );
+ CV_Assert( _img.cols == winSize.width );
+ CV_Assert( _img.rows == winSize.height );
- if( bgReader->img.data.ptr == NULL )
- {
- if ( !getNext( reset ) )
- return 0;
- }
+ if( img.empty() )
+ if ( !nextImg() )
+ return false;
- mat = cvMat( winSize.height, winSize.width, CV_8UC1 );
- cvSetData( &mat, (void*) (bgReader->img.data.ptr + bgReader->point.y * bgReader->img.step
- + bgReader->point.x * sizeof( uchar )), bgReader->img.step );
+ Mat mat( winSize.height, winSize.width, CV_8UC1,
+ (void*)(img.data + point.y * img.step + point.x * img.elemSize()), img.step );
+ mat.copyTo(_img);
- cvCopy( &mat, img, 0 );
- if( (int) ( bgReader->point.x + (1.0F + bgReader->stepFactor ) * winSize.width )
- < bgReader->img.cols )
- {
- bgReader->point.x += (int) (bgReader->stepFactor * winSize.width);
- }
+ if( (int)( point.x + (1.0F + stepFactor ) * winSize.width ) < img.cols )
+ point.x += (int)(stepFactor * winSize.width);
else
{
- bgReader->point.x = bgReader->offset.x;
- if( (int) ( bgReader->point.y + (1.0F + bgReader->stepFactor ) * winSize.height )
- < bgReader->img.rows )
- {
- bgReader->point.y += (int) (bgReader->stepFactor * winSize.height);
- }
+ point.x = offset.x;
+ if( (int)( point.y + (1.0F + stepFactor ) * winSize.height ) < img.rows )
+ point.y += (int)(stepFactor * winSize.height);
else
{
- bgReader->point.y = bgReader->offset.y;
- bgReader->scale *= bgReader->scaleFactor;
- if( bgReader->scale <= 1.0F )
- {
- bgReader->img = cvMat( (int) (bgReader->scale * bgReader->src.rows),
- (int) (bgReader->scale * bgReader->src.cols),
- CV_8UC1, (void*) (bgReader->img.data.ptr) );
- cvResize( &(bgReader->src), &(bgReader->img) );
- }
+ point.y = offset.y;
+ scale *= scaleFactor;
+ if( scale <= 1.0F )
+ resize( src, img, Size( (int)(scale*src.cols), (int)(scale*src.rows) ) );
else
{
- if ( !getNext( reset ) )
- return 0;
+ if ( !nextImg() )
+ return false;
}
}
}
-
- return 1;
+ return true;
}
-//------------------------------------------- VecFile reading ------------------------------------------------
-
-CvVecFile::CvVecFile()
+CvCascadeImageReader::PosReader::PosReader()
{
- input = 0;
- vector = 0;
+ file = 0;
+ vec = 0;
}
-CvVecFile::CvVecFile( const char* _vecFileName )
+bool CvCascadeImageReader::PosReader::create( const String _filename )
{
- CV_FUNCNAME( "CvVecFile::CvVecFile" );
- __BEGIN__;
+ if ( file )
+ fclose( file );
+ file = fopen( _filename.c_str(), "rb" );
+ if( !file )
+ return false;
short tmp = 0;
-
- input = NULL;
- if( _vecFileName ) input = fopen( _vecFileName, "rb" );
-
- if( input != NULL )
- {
- fread( &count, sizeof( count ), 1, input );
- fread( &vecSize, sizeof( vecSize ), 1, input );
- fread( &tmp, sizeof( tmp ), 1, input );
- fread( &tmp, sizeof( tmp ), 1, input );
- base = sizeof( count ) + sizeof( vecSize ) + sizeof( tmp ) + sizeof( tmp );
- if( !feof( input ) )
- {
- last = 0;
- CV_CALL( vector = (short*) cvAlloc( sizeof( *vector ) * vecSize ) );
- }
- }
- else
- CV_ERROR( CV_StsNullPtr, "vecfile can not be opened" );
-
- __END__;
+ fread( &count, sizeof( count ), 1, file );
+ fread( &vecSize, sizeof( vecSize ), 1, file );
+ fread( &tmp, sizeof( tmp ), 1, file );
+ fread( &tmp, sizeof( tmp ), 1, file );
+ base = sizeof( count ) + sizeof( vecSize ) + 2*sizeof( tmp );
+ if( feof( file ) )
+ return false;
+ last = 0;
+ vec = (short*) cvAlloc( sizeof( *vec ) * vecSize );
+ CV_Assert( vec );
+ return true;
}
-CvVecFile::~CvVecFile()
+bool CvCascadeImageReader::PosReader::get( Mat &_img )
{
- fclose( input );
- cvFree( &vector );
-}
-
-//------------------------------------------- CvImageReader --------------------------------------------------
-
-CvImageReader::CvImageReader( const char* _vecFileName, const char* _bgfileName, CvSize _winSize )\r
-{\r
- bgData = new CvBackgroundData( _bgfileName, _winSize );\r
- vecFile = new CvVecFile( _vecFileName );\r
-}\r
-\r
-CvImageReader::~CvImageReader()\r
-{\r
- delete bgData;\r
- delete vecFile;\r
-}\r
-\r
-bool CvImageReader::getNegImage(CvMat* img, bool reset )\r
-{\r
- return bgData->getImage( img, reset );\r
-}\r
-\r
-bool CvImageReader::getPosImage(CvMat* img, bool reset )\r
-{\r
+ CV_Assert( _img.rows * _img.cols == vecSize );
uchar tmp = 0;\r
- int r = 0;
- int c = 0;\r
-\r
- assert( img->rows * img->cols == vecFile->vecSize );
-
- if ( reset )
- {
- vecFile->last = 0;
- fseek( vecFile->input, vecFile->base, SEEK_SET );
- }
+ fread( &tmp, sizeof( tmp ), 1, file );
+ fread( vec, sizeof( vec[0] ), vecSize, file );
- fread( &tmp, sizeof( tmp ), 1, vecFile->input );
- fread( vecFile->vector, sizeof( short ), vecFile->vecSize, vecFile->input );
+ if( feof( file ) || last++ >= count )
+ return false;
- if( feof( vecFile->input ) || vecFile->last++ >= vecFile->count )
- {
- return 0;
- }
-
- for( r = 0; r < img->rows; r++ )
+ for( int r = 0; r < _img.rows; r++ )
{
- for( c = 0; c < img->cols; c++ )
- {
- CV_MAT_ELEM( *img, uchar, r, c ) =
- (uchar) vecFile->vector[r * img->cols + c];
- }
+ for( int c = 0; c < _img.cols; c++ )
+ _img.ptr(r)[c] = (uchar)vec[r * _img.cols + c];
}\r
- return 1;\r
+ return true;
+}
+
+void CvCascadeImageReader::PosReader::restart()
+{
+ CV_Assert( file );
+ last = 0;
+ fseek( file, base, SEEK_SET );
+}
+
+CvCascadeImageReader::PosReader::~PosReader()
+{
+    if ( file )
+        fclose( file );
+ cvFree( &vec );
}
\ No newline at end of file
--- /dev/null
+#pragma once\r
+#include <highgui.h>\r
+\r
+using namespace cv;\r
+\r
+class CvCascadeImageReader\r
+{\r
+public:\r
+ bool create( const String _posFilename, const String _negFilename, Size _winSize );\r
+ void restart() { posReader.restart(); }\r
+ bool getNeg(Mat &_img) { return negReader.get( _img ); }\r
+ bool getPos(Mat &_img) { return posReader.get( _img ); }\r
+\r
+private:\r
+ class PosReader
+ {
+ public:
+ PosReader();
+ virtual ~PosReader();
+ bool create( const String _filename );
+ bool get( Mat &_img );
+ void restart();
+
+ short* vec;
+ FILE* file;
+ int count;
+ int vecSize;
+ int last;
+ int base;
+ } posReader;\r
+\r
+ class NegReader
+ {
+ public:
+ NegReader();
+ bool create( const String _filename, Size _winSize );
+ bool get( Mat& _img );
+ bool nextImg();
+
+ Mat src, img;
+ Vector<String> imgFilenames;
+ Point offset, point;
+ float scale;
+ float scaleFactor;
+ float stepFactor;
+ size_t last, round;
+ Size winSize;
+ } negReader;\r
+};
\ No newline at end of file
+++ /dev/null
-#include "_inner_functions.h"
-
-struct CvValArray
-{
- uchar* data;
- size_t step;
-};
-
-#define CMP_VALUES( idx1, idx2 ) \
- ( *( (float*) (aux->data + ((int) (idx1)) * aux->step ) ) < \
- *( (float*) (aux->data + ((int) (idx2)) * aux->step ) ) )
-
-CV_IMPLEMENT_QSORT_EX( icvSortIndexedValArray_16s, short, CMP_VALUES, CvValArray* )
-
-CV_IMPLEMENT_QSORT_EX( icvSortIndexedValArray_32s, int, CMP_VALUES, CvValArray* )
-
-CV_IMPLEMENT_QSORT_EX( icvSortIndexedValArray_32f, float, CMP_VALUES, CvValArray* )
-
-
-#include <sys/stat.h>
-#include <sys/types.h>
-#ifdef _WIN32
-#include <direct.h>
-#endif /* _WIN32 */
-
-static int CV_CDECL
-icvCmpIntegers( const void* a, const void* b )
-{
- return *(const int*)a - *(const int*)b;
-}
-
-CvMat*
-cvPreprocessIndexArray( const CvMat* idx_arr, int data_arr_size, bool check_for_duplicates )
-{
- CvMat* idx = 0;
-
- CV_FUNCNAME( "cvPreprocessIndexArray" );
- __BEGIN__;
-
- int i, idx_total, idx_selected = 0, step, type, prev = INT_MIN, is_sorted = 1;
- uchar* srcb = 0;
- int* srci = 0;
- int* dsti;
-
- if( !CV_IS_MAT(idx_arr) )
- CV_ERROR( CV_StsBadArg, "Invalid index array" );
-
- if( idx_arr->rows != 1 && idx_arr->cols != 1 )
- CV_ERROR( CV_StsBadSize, "the index array must be 1-dimensional" );
-
- idx_total = idx_arr->rows + idx_arr->cols - 1;
- srcb = idx_arr->data.ptr;
- srci = idx_arr->data.i;
-
- type = CV_MAT_TYPE(idx_arr->type);
- step = CV_IS_MAT_CONT(idx_arr->type) ? 1 : idx_arr->step/CV_ELEM_SIZE(type);
-
- switch( type )
- {
- case CV_8UC1:
- case CV_8SC1:
- // idx_arr is array of 1's and 0's -
- // i.e. it is a mask of the selected components
- if( idx_total != data_arr_size )
- CV_ERROR( CV_StsUnmatchedSizes,
- "Component mask should contain as many elements as the total number of input variables" );
-
- for( i = 0; i < idx_total; i++ )
- idx_selected += srcb[i*step] != 0;
-
- if( idx_selected == 0 )
- CV_ERROR( CV_StsOutOfRange, "No components/input_variables is selected!" );
-
- if( idx_selected == idx_total )
- EXIT;
- break;
- case CV_32SC1:
- // idx_arr is array of integer indices of selected components
- if( idx_total > data_arr_size )
- CV_ERROR( CV_StsOutOfRange,
- "index array may not contain more elements than the total number of input variables" );
- idx_selected = idx_total;
- // check if sorted already
- for( i = 0; i < idx_total; i++ )
- {
- int val = srci[i*step];
- if( val >= prev )
- {
- is_sorted = 0;
- break;
- }
- prev = val;
- }
- break;
- default:
- CV_ERROR( CV_StsUnsupportedFormat, "Unsupported index array data type "
- "(it should be 8uC1, 8sC1 or 32sC1)" );
- }
-
- CV_CALL( idx = cvCreateMat( 1, idx_selected, CV_32SC1 ));
- dsti = idx->data.i;
-
- if( type < CV_32SC1 )
- {
- for( i = 0; i < idx_total; i++ )
- if( srcb[i*step] )
- *dsti++ = i;
- }
- else
- {
- for( i = 0; i < idx_total; i++ )
- dsti[i] = srci[i*step];
-
- if( !is_sorted )
- qsort( dsti, idx_total, sizeof(dsti[0]), icvCmpIntegers );
-
- if( dsti[0] < 0 || dsti[idx_total-1] >= data_arr_size )
- CV_ERROR( CV_StsOutOfRange, "the index array elements are out of range" );
-
- if( check_for_duplicates )
- {
- for( i = 1; i < idx_total; i++ )
- if( dsti[i] <= dsti[i-1] )
- CV_ERROR( CV_StsBadArg, "There are duplicated index array elements" );
- }
- }
-
- __END__;
-
- if( cvGetErrStatus() < 0 )
- cvReleaseMat( &idx );
-
- return idx;
-}
\ No newline at end of file
#include "lbpfeatures.h"
#include "cascadeclassifier.h"
-//------------------------------------- LBPFeature -------------------------------------
-
-CvLBPFeature::CvLBPFeature()
-{
- rect = cvRect(0, 0, 0, 0);
-}
-
-CvLBPFeature::CvLBPFeature( int x, int y, int _blockWidth, int _blockHeight )
-{
- rect = cvRect(x, y, _blockWidth, _blockHeight);
-}
-
-CV_INLINE void CvLBPFeature::write( CvFileStorage* fs ) const
+CvLBPFeatureParams::CvLBPFeatureParams()
{
- CV_FUNCNAME( "CvLBPFeature::write" );
- __BEGIN__;
-
- CV_CALL( cvStartWriteStruct( fs, CC_RECT, CV_NODE_SEQ ) );
-
- CV_CALL( cvWriteInt( fs, NULL, rect.x ) );
- CV_CALL( cvWriteInt( fs, NULL, rect.y ) );
- CV_CALL( cvWriteInt( fs, NULL, rect.width ) );
- CV_CALL( cvWriteInt( fs, NULL, rect.height ) );
-
- CV_CALL( cvEndWriteStruct( fs ) ); /* LBP_RECT */
-
- __END__;
+ maxCatCount = 256;
+ name = LBPF_NAME;
}
-//------------------------------------- LBPCascadeData ---------------------------------
-
-CvLBPCascadeData::CvLBPCascadeData()
+void CvLBPEvaluator::init(const CvFeatureParams *_featureParams, int _maxSampleCount, Size _winSize)
{
- sum = 0;
+ CV_Assert( _maxSampleCount > 0);
+ sum.create((int)_maxSampleCount, (_winSize.width + 1) * (_winSize.height + 1), CV_32SC1);
+ CvFeatureEvaluator::init( _featureParams, _maxSampleCount, _winSize );
}
-void CvLBPCascadeData::setData( CvCascadeClassifier* _cascade,
- const char* _vecFileName, const char* _bgfileName,
- int _numPos, int _numNeg, const CvFeatureParams* _featureParams )
+void CvLBPEvaluator::setImage(const Mat &img, uchar clsLabel, int idx)
{
- CvSize ws = _cascade->getParams()->winSize;
- int sumCols = (ws.width + 1) * (ws.height + 1);
-
- sum = cvCreateMat( _numPos + _numNeg, sumCols, CV_32SC1);
- CvCascadeData::setData( _cascade, _vecFileName, _bgfileName, _numPos, _numNeg, _featureParams );
+ CV_DbgAssert( !sum.empty() );
+ CvFeatureEvaluator::setImage( img, clsLabel, idx );
+    Mat innSum(winSize.height + 1, winSize.width + 1, sum.type(), sum.ptr<int>((int)idx));
+    integral( img, innSum );
}
-void CvLBPCascadeData::clear()
+void CvLBPEvaluator::writeFeatures( FileStorage &fs, const Mat& featureMap ) const
{
- cvReleaseMat( &sum );
-
- CvCascadeData::clear();
+ _writeFeatures( features, fs, featureMap );
}
-void CvLBPCascadeData::generateFeatures()
+void CvLBPEvaluator::generateFeatures()
{
- CvLBPFeature* lbpFeature;
-
- CvMemStorage* storage = NULL;
- CvSeq* seq = NULL;
- CvSeqWriter writer;
-
- storage = cvCreateMemStorage();
- cvStartWriteSeq( 0, sizeof( CvSeq ), sizeof( lbpFeature ), storage, &writer );
-
+ int offset = winSize.width + 1;
for( int x = 0; x < winSize.width; x++ )
- {
for( int y = 0; y < winSize.height; y++ )
- {
for( int w = 1; w <= winSize.width / 3; w++ )
- {
for( int h = 1; h <= winSize.height / 3; h++ )
- {
if ( (x+3*w <= winSize.width) && (y+3*h <= winSize.height) )
- {
- lbpFeature = new CvLBPFeature( x, y, w, h );
- CV_WRITE_SEQ_ELEM( lbpFeature, writer );
- }
- }
- }
- }
- }
- seq = cvEndWriteSeq( &writer );
- numFeatures = seq->total;
- features = (CvFeature**) cvAlloc( sizeof( CvLBPFeature* ) * numFeatures );
-
- cvCvtSeqToArray( seq, (CvArr*)features );
- updateFastFeatures( winSize.width + 1 );
- cvReleaseMemStorage( &storage );
+ features.push_back( LBPFeature(offset, x, y, w, h ) );
+ numFeatures = (int)features.size();
}
-void CvLBPCascadeData::updateFastFeatures(int offset)
+CvLBPEvaluator::LBPFeature::LBPFeature()
{
- for( int fi = 0; fi < numFeatures; fi++ )
- {
- CvLBPFeature* tempFeature = (CvLBPFeature*)(features[fi]);
- int *p = tempFeature->p;
- CvRect tr;
- tr = tempFeature->rect;
- CV_SUM_OFFSETS( p[0], p[1], p[4], p[5], tr, offset )
- tr.x += 2*tempFeature->rect.width;
- CV_SUM_OFFSETS( p[2], p[3], p[6], p[7], tr, offset )
- tr.y +=2*tempFeature->rect.height;
- CV_SUM_OFFSETS( p[10], p[11], p[14], p[15], tr, offset )
- tr.x -= 2*tempFeature->rect.width;
- CV_SUM_OFFSETS( p[8], p[9], p[12], p[13], tr, offset )
- }
+ rect = cvRect(0, 0, 0, 0);
}
-int CvLBPCascadeData::fillPassedSamles( int first, int count, bool isPositive, int64& consumed )
+CvLBPEvaluator::LBPFeature::LBPFeature( int offset, int x, int y, int _blockWidth, int _blockHeight )
{
- int getcount = 0;
-
- //CV_FUNCNAME( "CvLBPCascadeData::fillPassedSamles" );
- __BEGIN__;
-
- bool reset = true;
- CvMat innSum, img;
- int step = sum->step / CV_ELEM_SIZE(sum->type),
- clsStep = cls->step / CV_ELEM_SIZE(cls->type);
-
- assert( first + count <= numPos + numNeg );
-
- img = cvMat( winSize.height, winSize.width, CV_8UC1,
- cvStackAlloc( sizeof( uchar ) * winSize.height * winSize.width ) );
- innSum = cvMat( winSize.height + 1, winSize.width + 1,
- CV_32SC1, NULL );
-
- for( int i = first; i < first + count; i++ )
- {
- for( ; ; )
- {
- bool is_get_img = isPositive ? imgreader->getPosImage( &img, reset ) :
- imgreader->getNegImage( &img, reset );
- reset = false;
- if( !is_get_img )
- EXIT;
-
- innSum.data.i = sum->data.i + i * step;
- cvIntegralImage( &img, &innSum, 0, 0 );
- consumed++;
-
- if( cascade->predict( i ) == 1.0F )
- {
- getcount++;
- cls->data.fl[i*clsStep] = isPositive ? 1.f : 0.f;
- break;
- }
- }
- }
+ Rect tr = rect = cvRect(x, y, _blockWidth, _blockHeight);
+ CV_SUM_OFFSETS( p[0], p[1], p[4], p[5], tr, offset )
+ tr.x += 2*rect.width;
+ CV_SUM_OFFSETS( p[2], p[3], p[6], p[7], tr, offset )
+ tr.y +=2*rect.height;
+ CV_SUM_OFFSETS( p[10], p[11], p[14], p[15], tr, offset )
+ tr.x -= 2*rect.width;
+ CV_SUM_OFFSETS( p[8], p[9], p[12], p[13], tr, offset )
+}
- __END__;
- return getcount;
+void CvLBPEvaluator::LBPFeature::write(FileStorage &fs) const
+{
+ fs << CC_RECT << "[:" << rect.x << rect.y << rect.width << rect.height << "]";
}
\ No newline at end of file
-#ifndef LBP_H
-#define LBP_H
-
+#pragma once
#include "features.h"
#define LBPF_NAME "lbpFeatureParams"
struct CvLBPFeatureParams : CvFeatureParams
{
- CvLBPFeatureParams()
- { maxCatCount = 256; name = LBPF_NAME; }
- CvLBPFeatureParams( CvSize _winSize ) :
- CvFeatureParams( _winSize )
- { maxCatCount = 256; name = LBPF_NAME; }
- virtual ~CvLBPFeatureParams()
- {}
-};
-
-struct CvLBPFeature : public CvFeature
-{
- CvLBPFeature();
- CvLBPFeature( int x, int y, int _block_w, int _block_h );
- virtual ~CvLBPFeature() {}
-
- virtual void write( CvFileStorage* fs ) const;
-
- float calc( const int* _sum_row, int offset );
-
- CvRect rect;
- int p[16];
+ CvLBPFeatureParams();
+
};
-class CvLBPCascadeData : public CvCascadeData
+class CvLBPEvaluator : public CvFeatureEvaluator
{
public:
- CvLBPCascadeData();
- virtual void setData( CvCascadeClassifier* _cascade,
- const char* _vecFileName, const char* _bgFileName,
- int _numPos, int _numNeg, const CvFeatureParams* _featureParams );
- virtual void clear();
+ virtual void init(const CvFeatureParams *_featureParams,
+ int _maxSampleCount, Size _winSize );
+ virtual void setImage(const Mat& img, uchar clsLabel, int idx);
+ virtual float operator()(int featureIdx, int sampleIdx) const
+ { return (float)features[featureIdx].calc( sum, sampleIdx); }
+ virtual void writeFeatures( FileStorage &fs, const Mat& featureMap ) const;
+protected:
virtual void generateFeatures();
- void updateFastFeatures(int step); // offset - row step for the integral image ( weight + 1)
- virtual float calcFeature( int featureIdx, int sampleIdx );
-protected:
- virtual int fillPassedSamles( int first, int count, bool isPositive, int64& consumed );
- int sumCols;
- CvMat* sum; /* sum images (each row represents image) */
-};
+ class LBPFeature
+ {
+ public:
+ LBPFeature();
+ LBPFeature( int offset, int x, int y, int _block_w, int _block_h );
+ uchar calc( const Mat& _sum, size_t y ) const;
+ void write( FileStorage &fs ) const;
-float CV_INLINE CvLBPFeature::calc( const int* _sum_row, int offset )
-{
- int cval = _sum_row[p[5]] - _sum_row[p[6]] - _sum_row[p[9]] + _sum_row[p[10]];
+ Rect rect;
+ int p[16];
+ };
+ Vector<LBPFeature> features;
- return (float)((_sum_row[p[0]] - _sum_row[p[1]] - _sum_row[p[4]] + _sum_row[p[5]] >= cval ? 128 : 0) | // 0
- (_sum_row[p[1]] - _sum_row[p[2]] - _sum_row[p[5]] + _sum_row[p[6]] >= cval ? 64 : 0) | // 1
- (_sum_row[p[2]] - _sum_row[p[3]] - _sum_row[p[6]] + _sum_row[p[7]] >= cval ? 32 : 0) | // 2
- (_sum_row[p[6]] - _sum_row[p[7]] - _sum_row[p[10]] + _sum_row[p[11]] >= cval ? 16 : 0) | // 5
- (_sum_row[p[10]] - _sum_row[p[11]] - _sum_row[p[14]] + _sum_row[p[15]] >= cval ? 8 : 0)| // 8
- (_sum_row[p[9]] - _sum_row[p[10]] - _sum_row[p[13]] + _sum_row[p[14]] >= cval ? 4 : 0)| // 7
- (_sum_row[p[8]] - _sum_row[p[9]] - _sum_row[p[12]] + _sum_row[p[13]] >= cval ? 2 : 0)| // 6
- (_sum_row[p[4]] - _sum_row[p[5]] - _sum_row[p[8]] + _sum_row[p[9]] >= cval ? 1 : 0)); // 3
-}
+ Mat sum;
+};
-float CV_INLINE CvLBPCascadeData::calcFeature( int featureIdx, int sampleIdx )
+inline uchar CvLBPEvaluator::LBPFeature::calc(const Mat &_sum, size_t y) const
{
- int sumStep = sum->step / CV_ELEM_SIZE( sum->type );
- assert( features );
- return ((CvLBPFeature*)(features[featureIdx]))->calc( sum->data.i + sampleIdx * sumStep,
- winSize.width + 1 );
+ const int* sum = _sum.ptr<int>((int)y);
+ int cval = sum[p[5]] - sum[p[6]] - sum[p[9]] + sum[p[10]];
+
+ return (uchar)((sum[p[0]] - sum[p[1]] - sum[p[4]] + sum[p[5]] >= cval ? 128 : 0) | // 0
+ (sum[p[1]] - sum[p[2]] - sum[p[5]] + sum[p[6]] >= cval ? 64 : 0) | // 1
+ (sum[p[2]] - sum[p[3]] - sum[p[6]] + sum[p[7]] >= cval ? 32 : 0) | // 2
+ (sum[p[6]] - sum[p[7]] - sum[p[10]] + sum[p[11]] >= cval ? 16 : 0) | // 5
+ (sum[p[10]] - sum[p[11]] - sum[p[14]] + sum[p[15]] >= cval ? 8 : 0) | // 8
+ (sum[p[9]] - sum[p[10]] - sum[p[13]] + sum[p[14]] >= cval ? 4 : 0) | // 7
+ (sum[p[8]] - sum[p[9]] - sum[p[12]] + sum[p[13]] >= cval ? 2 : 0) | // 6
+ (sum[p[4]] - sum[p[5]] - sum[p[8]] + sum[p[9]] >= cval ? 1 : 0)); // 3
}
-
-#endif
\ No newline at end of file
int main( int argc, char* argv[] )
{
CvCascadeClassifier classifier;
- char* cascadeDirName = NULL;
- char* vecName = NULL;
- char* bgName = NULL;
- int numPos = 1000;
+ String cascadeDirName, vecName, bgName;
+ int numPos = 2000;
int numNeg = 1000;
int numStages = 20;
    int numPrecalcVal = 10000,
        numPrecalcIdx = 10000;
bool baseFormatSave = false;
CvCascadeParams cascadeParams;
-
CvCascadeBoostParams stageParams;
-
- CvFeatureParams* featureParams[] = { new CvHaarFeatureParams(),
- new CvLBPFeatureParams() };
+ Ptr<CvFeatureParams> featureParams[] = { Ptr<CvFeatureParams>(new CvHaarFeatureParams),
+ Ptr<CvFeatureParams>(new CvLBPFeatureParams)
+ };
int fc = sizeof(featureParams)/sizeof(featureParams[0]);
-
if( argc == 1 )
{
- printf( "Usage: %s\n"
- " -data <cascade_dir_name>\n"
- " -vec <vec_file_name>\n"
- " -bg <background_file_name>\n"
- " [-numPos <number_of_positive_samples = %d>]\n"
- " [-numNeg <number_of_negative_samples = %d>]\n"
- " [-numStages <number_of_stages = %d>]\n"
- " [-numPrecalcVal <number_of_precalculated_vals = %d>]\n"
- " [-numPrecalcIdx <number_of_precalculated_idxs = %d>]\n"
- " [-baseFormatSave]\n",
- argv[0], numPos, numNeg, numStages, numPrecalcVal, numPrecalcIdx );
-
+ cout << "Usage: " << argv[0] << endl;
+ cout << " -data <cascade_dir_name>" << endl;
+ cout << " -vec <vec_file_name>" << endl;
+ cout << " -bg <background_file_name>" << endl;
+ cout << " [-numPos <number_of_positive_samples = " << numPos << ">]" << endl;
+ cout << " [-numNeg <number_of_negative_samples = " << numNeg << ">]" << endl;
+ cout << " [-numStages <number_of_stages = " << numStages << ">]" << endl;
+ cout << " [-numPrecalcVal <number_of_precalculated_vals = " << numPrecalcVal << ">]" << endl;
+ cout << " [-numPrecalcIdx <number_of_precalculated_idxs = " << numPrecalcIdx << ">]" << endl;
+ cout << " [-baseFormatSave]" << endl;
cascadeParams.printDefaults();
-
- printf("--boostStageParams--\n");
- stageParams.printDefault();
-
+ stageParams.printDefaults();
for( int fi = 0; fi < fc; fi++ )
featureParams[fi]->printDefaults();
-
return 0;
}
*featureParams[cascadeParams.featureType],
stageParams,
baseFormatSave );
-
- for( int fi = 0; fi < fc; fi++ )
- delete featureParams[fi];
-
return 0;
}
\ No newline at end of file
virtual bool setImage(const Mat&, Size origWinSize);
virtual int setWindow(Point);
- virtual double calcOrd(int featureIdx, int pOffset) const;
- virtual int calcCat(int featureIdx, int pOffset) const;
+ /*virtual double calcOrd(int featureIdx, int pOffset) const;
+ virtual int calcCat(int featureIdx, int pOffset) const;*/
enum { HAAR = 0, LBP = 1 };
static Ptr<FeatureEvaluator> create(int type);
MY_DEFINE_EXAMPLE(distrans distrans.c)
MY_DEFINE_EXAMPLE(drawing drawing.c)
MY_DEFINE_EXAMPLE(edge edge.c)
- MY_DEFINE_EXAMPLE(facedetect facedetect.c)
+ MY_DEFINE_EXAMPLE(facedetect facedetect.cpp)
MY_DEFINE_EXAMPLE(ffilldemo ffilldemo.c)
MY_DEFINE_EXAMPLE(find_obj find_obj.cpp)
MY_DEFINE_EXAMPLE(fitellipse fitellipse.cpp)
+++ /dev/null
-#define CV_NO_BACKWARD_COMPATIBILITY
-
-#include "cv.h"
-#include "highgui.h"
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <assert.h>
-#include <math.h>
-#include <float.h>
-#include <limits.h>
-#include <time.h>
-#include <ctype.h>
-
-#ifdef _EiC
-#define WIN32
-#endif
-
-static CvMemStorage* storage = 0;
-static CvHaarClassifierCascade* cascade = 0;
-static CvHaarClassifierCascade* nested_cascade = 0;
-int use_nested_cascade = 0;
-
-void detect_and_draw( IplImage* image );
-
-const char* cascade_name =
- "../../data/haarcascades/haarcascade_frontalface_alt.xml";
-/* "haarcascade_profileface.xml";*/
-const char* nested_cascade_name =
- "../../data/haarcascades/haarcascade_eye_tree_eyeglasses.xml";
-// "../../data/haarcascades/haarcascade_eye.xml";
-double scale = 1;
-
-int main( int argc, char** argv )
-{
- CvCapture* capture = 0;
- IplImage *frame, *frame_copy = 0;
- IplImage *image = 0;
- const char* scale_opt = "--scale=";
- int scale_opt_len = (int)strlen(scale_opt);
- const char* cascade_opt = "--cascade=";
- int cascade_opt_len = (int)strlen(cascade_opt);
- const char* nested_cascade_opt = "--nested-cascade";
- int nested_cascade_opt_len = (int)strlen(nested_cascade_opt);
- int i;
- const char* input_name = 0;
-
- for( i = 1; i < argc; i++ )
- {
- if( strncmp( argv[i], cascade_opt, cascade_opt_len) == 0 )
- cascade_name = argv[i] + cascade_opt_len;
- else if( strncmp( argv[i], nested_cascade_opt, nested_cascade_opt_len ) == 0 )
- {
- if( argv[i][nested_cascade_opt_len] == '=' )
- nested_cascade_name = argv[i] + nested_cascade_opt_len + 1;
- nested_cascade = (CvHaarClassifierCascade*)cvLoad( nested_cascade_name, 0, 0, 0 );
- if( !nested_cascade )
- fprintf( stderr, "WARNING: Could not load classifier cascade for nested objects\n" );
- }
- else if( strncmp( argv[i], scale_opt, scale_opt_len ) == 0 )
- {
- if( !sscanf( argv[i] + scale_opt_len, "%lf", &scale ) || scale < 1 )
- scale = 1;
- }
- else if( argv[i][0] == '-' )
- {
- fprintf( stderr, "WARNING: Unknown option %s\n", argv[i] );
- }
- else
- input_name = argv[i];
- }
-
- cascade = (CvHaarClassifierCascade*)cvLoad( cascade_name, 0, 0, 0 );
-
- if( !cascade )
- {
- fprintf( stderr, "ERROR: Could not load classifier cascade\n" );
- fprintf( stderr,
- "Usage: facedetect [--cascade=\"<cascade_path>\"]\n"
- " [--nested-cascade[=\"nested_cascade_path\"]]\n"
- " [--scale[=<image scale>\n"
- " [filename|camera_index]\n" );
- return -1;
- }
- storage = cvCreateMemStorage(0);
-
- if( !input_name || (isdigit(input_name[0]) && input_name[1] == '\0') )
- capture = cvCaptureFromCAM( !input_name ? 0 : input_name[0] - '0' );
- else if( input_name )
- {
- image = cvLoadImage( input_name, 1 );
- if( !image )
- capture = cvCaptureFromAVI( input_name );
- }
- else
- image = cvLoadImage( "lena.jpg", 1 );
-
- cvNamedWindow( "result", 1 );
-
- if( capture )
- {
- for(;;)
- {
- frame = cvQueryFrame( capture );
- if( !frame )
- break;
- if( !frame_copy )
- frame_copy = cvCreateImage( cvSize(frame->width,frame->height),
- IPL_DEPTH_8U, frame->nChannels );
- if( frame->origin == IPL_ORIGIN_TL )
- cvCopy( frame, frame_copy, 0 );
- else
- cvFlip( frame, frame_copy, 0 );
-
- detect_and_draw( frame_copy );
-
- if( cvWaitKey( 10 ) >= 0 )
- goto _cleanup_;
- }
-
- cvWaitKey(0);
-_cleanup_:
- cvReleaseImage( &frame_copy );
- cvReleaseCapture( &capture );
- }
- else
- {
- if( image )
- {
- detect_and_draw( image );
- cvWaitKey(0);
- cvReleaseImage( &image );
- }
- else if( input_name )
- {
- /* assume it is a text file containing the
- list of the image filenames to be processed - one per line */
- FILE* f = fopen( input_name, "rt" );
- if( f )
- {
- char buf[1000+1];
- while( fgets( buf, 1000, f ) )
- {
- int len = (int)strlen(buf), c;
- while( len > 0 && isspace(buf[len-1]) )
- len--;
- buf[len] = '\0';
- printf( "file %s\n", buf );
- image = cvLoadImage( buf, 1 );
- if( image )
- {
- detect_and_draw( image );
- c = cvWaitKey(0);
- if( c == 27 || c == 'q' || c == 'Q' )
- break;
- cvReleaseImage( &image );
- }
- }
- fclose(f);
- }
- }
- }
-
- cvDestroyWindow("result");
-
- if (storage)
- {
- cvReleaseMemStorage(&storage);
- }
- if (cascade)
- {
- cvReleaseHaarClassifierCascade(&cascade);
- }
-
- return 0;
-}
-
-void detect_and_draw( IplImage* img )
-{
- static CvScalar colors[] =
- {
- {{0,0,255}},
- {{0,128,255}},
- {{0,255,255}},
- {{0,255,0}},
- {{255,128,0}},
- {{255,255,0}},
- {{255,0,0}},
- {{255,0,255}}
- };
-
- IplImage *gray, *small_img;
- int i, j;
-
- gray = cvCreateImage( cvSize(img->width,img->height), 8, 1 );
- small_img = cvCreateImage( cvSize( cvRound (img->width/scale),
- cvRound (img->height/scale)), 8, 1 );
-
- cvCvtColor( img, gray, CV_BGR2GRAY );
- cvResize( gray, small_img, CV_INTER_LINEAR );
- cvEqualizeHist( small_img, small_img );
- cvClearMemStorage( storage );
-
- if( cascade )
- {
- double t = (double)cvGetTickCount();
- CvSeq* faces = cvHaarDetectObjects( small_img, cascade, storage,
- 1.1, 2, 0
- //|CV_HAAR_FIND_BIGGEST_OBJECT
- //|CV_HAAR_DO_ROUGH_SEARCH
- |CV_HAAR_DO_CANNY_PRUNING
- //|CV_HAAR_SCALE_IMAGE
- ,
- cvSize(30, 30) );
- t = (double)cvGetTickCount() - t;
- printf( "detection time = %gms\n", t/((double)cvGetTickFrequency()*1000.) );
- for( i = 0; i < (faces ? faces->total : 0); i++ )
- {
- CvRect* r = (CvRect*)cvGetSeqElem( faces, i );
- CvMat small_img_roi;
- CvSeq* nested_objects;
- CvPoint center;
- CvScalar color = colors[i%8];
- int radius;
- center.x = cvRound((r->x + r->width*0.5)*scale);
- center.y = cvRound((r->y + r->height*0.5)*scale);
- radius = cvRound((r->width + r->height)*0.25*scale);
- cvCircle( img, center, radius, color, 3, 8, 0 );
- if( !nested_cascade )
- continue;
- cvGetSubRect( small_img, &small_img_roi, *r );
- nested_objects = cvHaarDetectObjects( &small_img_roi, nested_cascade, storage,
- 1.1, 2, 0
- //|CV_HAAR_FIND_BIGGEST_OBJECT
- //|CV_HAAR_DO_ROUGH_SEARCH
- //|CV_HAAR_DO_CANNY_PRUNING
- //|CV_HAAR_SCALE_IMAGE
- ,
- cvSize(0, 0) );
- for( j = 0; j < (nested_objects ? nested_objects->total : 0); j++ )
- {
- CvRect* nr = (CvRect*)cvGetSeqElem( nested_objects, j );
- center.x = cvRound((r->x + nr->x + nr->width*0.5)*scale);
- center.y = cvRound((r->y + nr->y + nr->height*0.5)*scale);
- radius = cvRound((nr->width + nr->height)*0.25*scale);
- cvCircle( img, center, radius, color, 3, 8, 0 );
- }
- }
- }
-
- cvShowImage( "result", img );
- cvReleaseImage( &gray );
- cvReleaseImage( &small_img );
-}
-
//-----------------------------------------------------------------------------------------------------------------
#define CC_CASCADE_PARAMS "cascadeParams"
-#define CC_STAGE_TYPE "stageType"
-#define CC_FEATURE_TYPE "featureType"
-#define CC_HEIGHT "height"
-#define CC_WIDTH "width"
+#define CC_STAGE_TYPE "stageType"
+#define CC_FEATURE_TYPE "featureType"
+#define CC_HEIGHT "height"
+#define CC_WIDTH "width"
#define CC_STAGE_NUM "stageNum"
#define CC_STAGES "stages"
#define CC_INTERNAL_NODES "internalNodes"
#define CC_LEAF_VALUES "leafValues"
-#define CC_FEATURES "features"
+#define CC_FEATURES "features"
#define CC_FEATURE_PARAMS "featureParams"
#define CC_MAX_CAT_COUNT "maxCatCount"
-#define CC_HAAR "HAAR"
-#define CC_RECTS "rects"
-#define CC_TILTED "tilted"
+#define CC_HAAR "HAAR"
+#define CC_RECTS "rects"
+#define CC_TILTED "tilted"
#define CC_LBP "LBP"
#define CC_RECT "rect"
bool FeatureEvaluator::setImage(const Mat&, Size) { return true; }
int FeatureEvaluator::setWindow(Point) { return true; }
-double FeatureEvaluator::calcOrd(int, int) const { return 0.; }
-int FeatureEvaluator::calcCat(int, int) const { return 0; }
-
-
class HaarEvaluator : public FeatureEvaluator
{
public:
double operator()(int featureIdx, int pOffset) const
{ return features[featureIdx].calc(pOffset) * varianceNormFactor; }
- virtual double calcOrd(int featureIdx, int pOffset) const
- { return (*this)(featureIdx, pOffset); }
-
private:
Size origWinSize;
Vector<Feature> features;
void updatePtrs( const Mat& sum );
bool read(const FileNode& node );
- enum { POINT_NUM = 16 };
-
Rect rect; // weight and height for block
- const int* p[POINT_NUM]; // fast
+ const int* p[16]; // fast
};
LBPEvaluator();
inline LBPEvaluator::Feature :: Feature()
{
rect = Rect();
- for( int i = 0; i < POINT_NUM; i++ )
+ for( int i = 0; i < 16; i++ )
p[i] = 0;
}