// Source: OpenCV ML module, modules/ml/src/rtrees.cpp (Tetragramm/opencv fork, blob/master).
1
/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                          License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Copyright (C) 2014, Itseez Inc, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"

namespace cv {
namespace ml {

//////////////////////////////////////////////////////////////////////////////////////////
//                                   Random trees                                       //
//////////////////////////////////////////////////////////////////////////////////////////
50
// Default random-forest parameters: no variable-importance computation,
// automatic active-variable count (0 => sqrt(nvars) chosen at training time),
// and termination after 50 trees or once the OOB error drops below 0.1.
RTreeParams::RTreeParams()
{
    CV_TRACE_FUNCTION();
    calcVarImportance = false;
    nactiveVars = 0;
    termCrit = TermCriteria(TermCriteria::EPS + TermCriteria::COUNT, 50, 0.1);
}
57
58
// Explicit-parameter constructor; simply stores the caller-supplied settings.
// _calcVarImportance: whether to accumulate per-variable importance during training.
// _nactiveVars: number of randomly chosen variables tested per split (0 => auto).
// _termCrit: forest-size / OOB-error stopping criteria.
RTreeParams::RTreeParams(bool _calcVarImportance,
                         int _nactiveVars,
                         TermCriteria _termCrit )
{
    CV_TRACE_FUNCTION();
    calcVarImportance = _calcVarImportance;
    nactiveVars = _nactiveVars;
    termCrit = _termCrit;
}
68
69
// Decision-tree implementation specialized for random forests: bootstrap
// sampling per tree, random subsets of active variables per split, and
// out-of-bag (OOB) error / permutation-based variable importance.
class DTreesImplForRTrees CV_FINAL : public DTreesImpl
{
public:
    DTreesImplForRTrees()
    {
        CV_TRACE_FUNCTION();
        // Random forests grow many shallow, unpruned trees: cross-validation
        // pruning (CVFolds) and the 1SE rule are disabled by design.
        params.setMaxDepth(5);
        params.setMinSampleCount(10);
        params.setRegressionAccuracy(0.f);
        params.useSurrogates = false;
        params.setMaxCategories(10);
        params.setCVFolds(0);
        params.use1SERule = false;
        params.truncatePrunedTree = false;
        params.priors = Mat();
        oobError = 0;
    }
    virtual ~DTreesImplForRTrees() {}

    void clear() CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        DTreesImpl::clear();
        oobError = 0.;
        rng = RNG((uint64)-1);
    }

    // Returns a fresh random subset of variables for the next split:
    // partially shuffles allVars with random swaps, then exposes the
    // first m entries through activeVars.
    const vector<int>& getActiveVars() CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        int i, nvars = (int)allVars.size(), m = (int)activeVars.size();
        for( i = 0; i < nvars; i++ )
        {
            int i1 = rng.uniform(0, nvars);
            int i2 = rng.uniform(0, nvars);
            std::swap(allVars[i1], allVars[i2]);
        }
        for( i = 0; i < m; i++ )
            activeVars[i] = allVars[i];
        return activeVars;
    }

    void startTraining( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        DTreesImpl::startTraining(trainData, flags);
        int nvars = w->data->getNVars();
        // nactiveVars == 0 selects the conventional default of sqrt(nvars),
        // clamped to [1, nvars].
        int i, m = rparams.nactiveVars > 0 ? rparams.nactiveVars : cvRound(std::sqrt((double)nvars));
        m = std::min(std::max(m, 1), nvars);
        allVars.resize(nvars);
        activeVars.resize(m);
        for( i = 0; i < nvars; i++ )
            allVars[i] = varIdx[i];
    }

    void endTraining() CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        DTreesImpl::endTraining();
        // Release the scratch vectors' memory (swap with empty temporaries).
        vector<int> a, b;
        std::swap(allVars, a);
        std::swap(activeVars, b);
    }

    // Grows the forest: one bootstrap-sampled tree per iteration until the
    // tree count or OOB-error termination criterion is met. Optionally
    // accumulates permutation-based variable importance.
    bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        startTraining(trainData, flags);
        int treeidx, ntrees = (rparams.termCrit.type & TermCriteria::COUNT) != 0 ?
            rparams.termCrit.maxCount : 10000;
        int i, j, k, vi, vi_, n = (int)w->sidx.size();
        int nclasses = (int)classLabels.size();
        double eps = (rparams.termCrit.type & TermCriteria::EPS) != 0 &&
            rparams.termCrit.epsilon > 0 ? rparams.termCrit.epsilon : 0.;
        vector<int> sidx(n);
        vector<uchar> oobmask(n);       // 1 => sample is out-of-bag for the current tree
        vector<int> oobidx;
        vector<int> oobperm;
        vector<double> oobres(n, 0.);   // running sum of regression predictions per sample
        vector<int> oobcount(n, 0);
        vector<int> oobvotes(n*nclasses, 0);  // per-sample class votes across trees
        int nvars = w->data->getNVars();
        int nallvars = w->data->getNAllVars();
        const int* vidx = !varIdx.empty() ? &varIdx[0] : 0;
        vector<float> samplebuf(nallvars);
        Mat samples = w->data->getSamples();
        float* psamples = samples.ptr<float>();
        size_t sstep0 = samples.step1(), sstep1 = 1;
        Mat sample0, sample(nallvars, 1, CV_32F, &samplebuf[0]);
        int predictFlags = _isClassifier ? (PREDICT_MAX_VOTE + RAW_OUTPUT) : PREDICT_SUM;

        bool calcOOBError = eps > 0 || rparams.calcVarImportance;
        double max_response = 0.;

        if( w->data->getLayout() == COL_SAMPLE )
            std::swap(sstep0, sstep1);

        if( !_isClassifier )
        {
            // max_response normalizes regression errors below; it must be nonzero.
            for( i = 0; i < n; i++ )
            {
                double val = std::abs(w->ord_responses[w->sidx[i]]);
                max_response = std::max(max_response, val);
            }
            CV_Assert(fabs(max_response) > 0);
        }

        if( rparams.calcVarImportance )
            varImportance.resize(nallvars, 0.f);

        for( treeidx = 0; treeidx < ntrees; treeidx++ )
        {
            // Bootstrap: draw n samples with replacement; samples never drawn
            // remain out-of-bag for this tree.
            for( i = 0; i < n; i++ )
                oobmask[i] = (uchar)1;

            for( i = 0; i < n; i++ )
            {
                j = rng.uniform(0, n);
                sidx[i] = w->sidx[j];
                oobmask[j] = (uchar)0;
            }
            int root = addTree( sidx );
            if( root < 0 )
                return false;

            if( calcOOBError )
            {
                oobidx.clear();
                for( i = 0; i < n; i++ )
                {
                    if( oobmask[i] )
                        oobidx.push_back(i);
                }
                int n_oob = (int)oobidx.size();
                // if there is no out-of-bag samples, we can not compute OOB error
                // nor update the variable importance vector; so we proceed to the next tree
                if( n_oob == 0 )
                    continue;
                double ncorrect_responses = 0.;

                oobError = 0.;
                for( i = 0; i < n_oob; i++ )
                {
                    j = oobidx[i];
                    sample = Mat( nallvars, 1, CV_32F, psamples + sstep0*w->sidx[j], sstep1*sizeof(psamples[0]) );

                    double val = predictTrees(Range(treeidx, treeidx+1), sample, predictFlags);
                    if( !_isClassifier )
                    {
                        oobres[j] += val;
                        oobcount[j]++;
                        double true_val = w->ord_responses[w->sidx[j]];
                        double a = oobres[j]/oobcount[j] - true_val;
                        oobError += a*a;
                        val = (val - true_val)/max_response;
                        ncorrect_responses += std::exp( -val*val );
                    }
                    else
                    {
                        int ival = cvRound(val);
                        //Voting scheme to combine OOB errors of each tree
                        int* votes = &oobvotes[j*nclasses];
                        votes[ival]++;
                        int best_class = 0;
                        for( k = 1; k < nclasses; k++ )
                            if( votes[best_class] < votes[k] )
                                best_class = k;
                        int diff = best_class != w->cat_responses[w->sidx[j]];
                        oobError += diff;
                        ncorrect_responses += diff == 0;
                    }
                }

                oobError /= n_oob;
                if( rparams.calcVarImportance && n_oob > 1 )
                {
                    // Permutation importance: shuffle one variable's values across
                    // the OOB samples and measure how much prediction quality drops.
                    Mat sample_clone;
                    oobperm.resize(n_oob);
                    for( i = 0; i < n_oob; i++ )
                        oobperm[i] = oobidx[i];
                    for (i = n_oob - 1; i > 0; --i)  //Randomly shuffle indices so we can permute features
                    {
                        int r_i = rng.uniform(0, n_oob);
                        std::swap(oobperm[i], oobperm[r_i]);
                    }

                    for( vi_ = 0; vi_ < nvars; vi_++ )
                    {
                        vi = vidx ? vidx[vi_] : vi_; //Ensure that only the user specified predictors are used for training
                        double ncorrect_responses_permuted = 0;

                        for( i = 0; i < n_oob; i++ )
                        {
                            j = oobidx[i];
                            int vj = oobperm[i];
                            sample0 = Mat( nallvars, 1, CV_32F, psamples + sstep0*w->sidx[j], sstep1*sizeof(psamples[0]) );
                            sample0.copyTo(sample_clone); //create a copy so we don't mess up the original data
                            sample_clone.at<float>(vi) = psamples[sstep0*w->sidx[vj] + sstep1*vi];

                            double val = predictTrees(Range(treeidx, treeidx+1), sample_clone, predictFlags);
                            if( !_isClassifier )
                            {
                                val = (val - w->ord_responses[w->sidx[j]])/max_response;
                                ncorrect_responses_permuted += exp( -val*val );
                            }
                            else
                            {
                                ncorrect_responses_permuted += cvRound(val) == w->cat_responses[w->sidx[j]];
                            }
                        }
                        varImportance[vi] += (float)(ncorrect_responses - ncorrect_responses_permuted);
                    }
                }
            }
            if( calcOOBError && oobError < eps )
                break;
        }

        if( rparams.calcVarImportance )
        {
            // Clamp negative importances to zero and L1-normalize the vector.
            for( vi_ = 0; vi_ < nallvars; vi_++ )
                varImportance[vi_] = std::max(varImportance[vi_], 0.f);
            normalize(varImportance, varImportance, 1., 0, NORM_L1);
        }
        endTraining();
        return true;
    }

    void writeTrainingParams( FileStorage& fs ) const CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        DTreesImpl::writeTrainingParams(fs);
        fs << "nactive_vars" << rparams.nactiveVars;
    }

    // Serializes the trained forest: params, OOB error, optional variable
    // importance, and each tree under the "trees" sequence.
    void write( FileStorage& fs ) const CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        if( roots.empty() )
            CV_Error( Error::StsBadArg, "RTrees have not been trained" );  // consistent with RTreesImpl::train

        writeFormat(fs);
        writeParams(fs);

        fs << "oob_error" << oobError;
        if( !varImportance.empty() )
            fs << "var_importance" << varImportance;

        int k, ntrees = (int)roots.size();

        fs << "ntrees" << ntrees
           << "trees" << "[";

        for( k = 0; k < ntrees; k++ )
        {
            fs << "{";
            writeTree(fs, roots[k]);
            fs << "}";
        }

        fs << "]";
    }

    void readParams( const FileNode& fn ) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        DTreesImpl::readParams(fn);

        FileNode tparams_node = fn["training_params"];
        rparams.nactiveVars = (int)tparams_node["nactive_vars"];
    }

    // Deserializes a forest previously written by write().
    void read( const FileNode& fn ) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        clear();

        //int nclasses = (int)fn["nclasses"];
        //int nsamples = (int)fn["nsamples"];
        oobError = (double)fn["oob_error"];
        int ntrees = (int)fn["ntrees"];

        readVectorOrMat(fn["var_importance"], varImportance);

        readParams(fn);

        FileNode trees_node = fn["trees"];
        FileNodeIterator it = trees_node.begin();
        CV_Assert( ntrees == (int)trees_node.size() );

        for( int treeidx = 0; treeidx < ntrees; treeidx++, ++it )
        {
            FileNode nfn = (*it)["nodes"];
            readTree(nfn);
        }
    }

    // For PREDICT_SUM: outputs an nsamples x ntrees float matrix of per-tree
    // predictions. Otherwise: an (nsamples+1) x nclasses int matrix whose
    // first row holds the class labels and subsequent rows hold vote counts.
    void getVotes( InputArray input, OutputArray output, int flags ) const
    {
        CV_TRACE_FUNCTION();
        CV_Assert( !roots.empty() );
        int nclasses = (int)classLabels.size(), ntrees = (int)roots.size();
        Mat samples = input.getMat(), results;
        int i, j, nsamples = samples.rows;

        int predictType = flags & PREDICT_MASK;
        if( predictType == PREDICT_AUTO )
        {
            predictType = !_isClassifier || (classLabels.size() == 2 && (flags & RAW_OUTPUT) != 0) ?
                PREDICT_SUM : PREDICT_MAX_VOTE;
        }

        if( predictType == PREDICT_SUM )
        {
            output.create(nsamples, ntrees, CV_32F);
            results = output.getMat();
            for( i = 0; i < nsamples; i++ )
            {
                for( j = 0; j < ntrees; j++ )
                {
                    float val = predictTrees( Range(j, j+1), samples.row(i), flags);
                    results.at<float> (i, j) = val;
                }
            }
        } else
        {
            vector<int> votes;
            output.create(nsamples+1, nclasses, CV_32S);
            results = output.getMat();

            for ( j = 0; j < nclasses; j++)
            {
                results.at<int> (0, j) = classLabels[j];
            }

            for( i = 0; i < nsamples; i++ )
            {
                votes.clear();
                for( j = 0; j < ntrees; j++ )
                {
                    int val = (int)predictTrees( Range(j, j+1), samples.row(i), flags);
                    votes.push_back(val);
                }

                for ( j = 0; j < nclasses; j++)
                {
                    results.at<int> (i+1, j) = (int)std::count(votes.begin(), votes.end(), classLabels[j]);
                }
            }
        }
    }

    RTreeParams rparams;
    double oobError;                 // OOB error of the most recently evaluated tree set
    vector<float> varImportance;     // L1-normalized after training when requested
    vector<int> allVars, activeVars; // training-time scratch; cleared by endTraining()
    RNG rng;
};
427
428
429
// Public RTrees facade: thin forwarding wrapper around DTreesImplForRTrees,
// exposing the getter/setter pairs required by the ml::RTrees interface.
class RTreesImpl CV_FINAL : public RTrees
{
public:
    inline bool getCalculateVarImportance() const CV_OVERRIDE { return impl.rparams.calcVarImportance; }
    inline void setCalculateVarImportance(bool val) CV_OVERRIDE { impl.rparams.calcVarImportance = val; }
    inline int getActiveVarCount() const CV_OVERRIDE { return impl.rparams.nactiveVars; }
    inline void setActiveVarCount(int val) CV_OVERRIDE { impl.rparams.nactiveVars = val; }
    inline TermCriteria getTermCriteria() const CV_OVERRIDE { return impl.rparams.termCrit; }
    inline void setTermCriteria(const TermCriteria& val) CV_OVERRIDE { impl.rparams.termCrit = val; }

    inline int getMaxCategories() const CV_OVERRIDE { return impl.params.getMaxCategories(); }
    inline void setMaxCategories(int val) CV_OVERRIDE { impl.params.setMaxCategories(val); }
    inline int getMaxDepth() const CV_OVERRIDE { return impl.params.getMaxDepth(); }
    inline void setMaxDepth(int val) CV_OVERRIDE { impl.params.setMaxDepth(val); }
    inline int getMinSampleCount() const CV_OVERRIDE { return impl.params.getMinSampleCount(); }
    inline void setMinSampleCount(int val) CV_OVERRIDE { impl.params.setMinSampleCount(val); }
    inline int getCVFolds() const CV_OVERRIDE { return impl.params.getCVFolds(); }
    inline void setCVFolds(int val) CV_OVERRIDE { impl.params.setCVFolds(val); }
    inline bool getUseSurrogates() const CV_OVERRIDE { return impl.params.getUseSurrogates(); }
    inline void setUseSurrogates(bool val) CV_OVERRIDE { impl.params.setUseSurrogates(val); }
    inline bool getUse1SERule() const CV_OVERRIDE { return impl.params.getUse1SERule(); }
    inline void setUse1SERule(bool val) CV_OVERRIDE { impl.params.setUse1SERule(val); }
    inline bool getTruncatePrunedTree() const CV_OVERRIDE { return impl.params.getTruncatePrunedTree(); }
    inline void setTruncatePrunedTree(bool val) CV_OVERRIDE { impl.params.setTruncatePrunedTree(val); }
    inline float getRegressionAccuracy() const CV_OVERRIDE { return impl.params.getRegressionAccuracy(); }
    inline void setRegressionAccuracy(float val) CV_OVERRIDE { impl.params.setRegressionAccuracy(val); }
    inline cv::Mat getPriors() const CV_OVERRIDE { return impl.params.getPriors(); }
    inline void setPriors(const cv::Mat& val) CV_OVERRIDE { impl.params.setPriors(val); }
    inline void getVotes(InputArray input, OutputArray output, int flags) const CV_OVERRIDE {return impl.getVotes(input,output,flags);}

    RTreesImpl() {}
    virtual ~RTreesImpl() CV_OVERRIDE {}

    String getDefaultName() const CV_OVERRIDE { return "opencv_ml_rtrees"; }

    bool train( const Ptr<TrainData>& trainData, int flags ) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        // Random forests never prune with cross-validation; reject a nonzero
        // fold count explicitly rather than silently ignoring it.
        if (impl.getCVFolds() != 0)
            CV_Error(Error::StsBadArg, "Cross validation for RTrees is not implemented");
        return impl.train(trainData, flags);
    }

    float predict( InputArray samples, OutputArray results, int flags ) const CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        return impl.predict(samples, results, flags);
    }

    void write( FileStorage& fs ) const CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        impl.write(fs);
    }

    void read( const FileNode& fn ) CV_OVERRIDE
    {
        CV_TRACE_FUNCTION();
        impl.read(fn);
    }

    // Returns a deep copy of the importance vector (second arg: copyData=true).
    Mat getVarImportance() const CV_OVERRIDE { return Mat_<float>(impl.varImportance, true); }
    int getVarCount() const CV_OVERRIDE { return impl.getVarCount(); }

    bool isTrained() const CV_OVERRIDE { return impl.isTrained(); }
    bool isClassifier() const CV_OVERRIDE { return impl.isClassifier(); }

    const vector<int>& getRoots() const CV_OVERRIDE { return impl.getRoots(); }
    const vector<Node>& getNodes() const CV_OVERRIDE { return impl.getNodes(); }
    const vector<Split>& getSplits() const CV_OVERRIDE { return impl.getSplits(); }
    const vector<int>& getSubsets() const CV_OVERRIDE { return impl.getSubsets(); }

    DTreesImplForRTrees impl;
};
503
504
505
// Factory: returns an empty (untrained) random-forest model.
Ptr<RTrees> RTrees::create()
{
    CV_TRACE_FUNCTION();
    return makePtr<RTreesImpl>();
}
510
511
//Function needed for Python and Java wrappers
512
Ptr<RTrees> RTrees::load(const String& filepath, const String& nodeName)
513
{
514
CV_TRACE_FUNCTION();
515
return Algorithm::load<RTrees>(filepath, nodeName);
516
}
517
518
}}
519
520
// End of file.
521
522