Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/ml/test/test_mltests.cpp
16354 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// Intel License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2000, Intel Corporation, all rights reserved.
14
// Third party copyrights are property of their respective owners.
15
//
16
// Redistribution and use in source and binary forms, with or without modification,
17
// are permitted provided that the following conditions are met:
18
//
19
// * Redistribution's of source code must retain the above copyright notice,
20
// this list of conditions and the following disclaimer.
21
//
22
// * Redistribution's in binary form must reproduce the above copyright notice,
23
// this list of conditions and the following disclaimer in the documentation
24
// and/or other materials provided with the distribution.
25
//
26
// * The name of Intel Corporation may not be used to endorse or promote products
27
// derived from this software without specific prior written permission.
28
//
29
// This software is provided by the copyright holders and contributors "as is" and
30
// any express or implied warranties, including, but not limited to, the implied
31
// warranties of merchantability and fitness for a particular purpose are disclaimed.
32
// In no event shall the Intel Corporation or contributors be liable for any direct,
33
// indirect, incidental, special, exemplary, or consequential damages
34
// (including, but not limited to, procurement of substitute goods or services;
35
// loss of use, data, or profits; or business interruption) however caused
36
// and on any theory of liability, whether in contract, strict liability,
37
// or tort (including negligence or otherwise) arising in any way out of
38
// the use of this software, even if advised of the possibility of such damage.
39
//
40
//M*/
41
42
#include "test_precomp.hpp"

#include <cmath>
43
44
namespace opencv_test {
45
46
// Construct the aggregated ML regression test for the given model name.
// Expected accuracy statistics (per-dataset mean/sigma of the test error)
// are read from "avalidation.xml" by the CV_MLBaseTest machinery.
CV_AMLTest::CV_AMLTest( const char* _modelName ) : CV_MLBaseTest( _modelName )
{
    validationFN = "avalidation.xml";
}
50
51
// Run one test case: prepare the dataset, shuffle the train/test split and
// train the model. Returns a cvtest::TS status code (OK on success).
//
// When GET_STAT is defined (developer-only; off by default) the train/error
// cycle is instead repeated 100 times to collect the mean and standard
// deviation of the error — the numbers recorded in avalidation.xml. Note the
// loop braces open and close inside the #ifdef blocks, so the code is only
// well-formed with GET_STAT consistently defined or undefined.
int CV_AMLTest::run_test_case( int testCaseIdx )
{
    CV_TRACE_FUNCTION();
    int code = cvtest::TS::OK;
    code = prepare_test_case( testCaseIdx );

    if (code == cvtest::TS::OK)
    {
        //#define GET_STAT
#ifdef GET_STAT
        const char* data_name = ((CvFileNode*)cvGetSeqElem( dataSetNames, testCaseIdx ))->data.str.ptr;
        printf("%s, %s ", name, data_name);
        const int icount = 100;
        float res[icount];
        for (int k = 0; k < icount; k++)
        {
#endif
            // normal path: a single shuffle + train per test case
            data->shuffleTrainTest();
            code = train( testCaseIdx );
#ifdef GET_STAT
            float case_result = get_error();

            res[k] = case_result;
        }
        // aggregate the per-iteration errors into mean and sigma
        float mean = 0, sigma = 0;
        for (int k = 0; k < icount; k++)
        {
            mean += res[k];
        }
        mean = mean /icount;
        for (int k = 0; k < icount; k++)
        {
            sigma += (res[k] - mean)*(res[k] - mean);
        }
        sigma = sqrt(sigma/icount);
        printf("%f, %f\n", mean, sigma);
#endif
    }
    return code;
}
91
92
int CV_AMLTest::validate_test_results( int testCaseIdx )
93
{
94
CV_TRACE_FUNCTION();
95
int iters;
96
float mean, sigma;
97
// read validation params
98
FileNode resultNode =
99
validationFS.getFirstTopLevelNode()["validation"][modelName][dataSetNames[testCaseIdx]]["result"];
100
resultNode["iter_count"] >> iters;
101
if ( iters > 0)
102
{
103
resultNode["mean"] >> mean;
104
resultNode["sigma"] >> sigma;
105
model->save(format("/Users/vp/tmp/dtree/testcase_%02d.cur.yml", testCaseIdx));
106
float curErr = get_test_error( testCaseIdx );
107
const int coeff = 4;
108
ts->printf( cvtest::TS::LOG, "Test case = %d; test error = %f; mean error = %f (diff=%f), %d*sigma = %f\n",
109
testCaseIdx, curErr, mean, abs( curErr - mean), coeff, coeff*sigma );
110
if ( abs( curErr - mean) > coeff*sigma )
111
{
112
ts->printf( cvtest::TS::LOG, "abs(%f - %f) > %f - OUT OF RANGE!\n", curErr, mean, coeff*sigma, coeff );
113
return cvtest::TS::FAIL_BAD_ACCURACY;
114
}
115
else
116
ts->printf( cvtest::TS::LOG, ".\n" );
117
118
}
119
else
120
{
121
ts->printf( cvtest::TS::LOG, "validation info is not suitable" );
122
return cvtest::TS::FAIL_INVALID_TEST_DATA;
123
}
124
return cvtest::TS::OK;
125
}
126
127
namespace {
128
129
// Aggregated regression tests: each trains its model type on the datasets
// listed in avalidation.xml and checks the test error against the recorded
// mean/sigma (see CV_AMLTest::validate_test_results).
TEST(ML_DTree, regression) { CV_AMLTest test( CV_DTREE ); test.safe_run(); }
TEST(ML_Boost, regression) { CV_AMLTest test( CV_BOOST ); test.safe_run(); }
TEST(ML_RTrees, regression) { CV_AMLTest test( CV_RTREES ); test.safe_run(); }
TEST(DISABLED_ML_ERTrees, regression) { CV_AMLTest test( CV_ERTREES ); test.safe_run(); }
133
134
// Regression test for issue #5911: NormalBayesClassifier::predictProb must
// produce identical results whether samples are predicted one row at a time,
// in one bulk call on a continuous matrix, or in a bulk call writing into
// non-continuous output submatrices.
TEST(ML_NBAYES, regression_5911)
{
    const int N = 12;
    Ptr<ml::NormalBayesClassifier> nb = cv::ml::NormalBayesClassifier::create();

    // training data: three classes, four identical 4-d samples per class
    Mat_<float> X(N,4);
    X << 1,2,3,4, 1,2,3,4, 1,2,3,4, 1,2,3,4,
         5,5,5,5, 5,5,5,5, 5,5,5,5, 5,5,5,5,
         4,3,2,1, 4,3,2,1, 4,3,2,1, 4,3,2,1;

    // labels:
    Mat_<int> Y(N,1);
    Y << 0,0,0,0, 1,1,1,1, 2,2,2,2;
    nb->train(X, ml::ROW_SAMPLE, Y);

    // reference: predict every sample one row at a time
    Mat singleRes, singleProb;
    for (int row = 0; row < N; row++)
    {
        Mat r, p;
        nb->predictProb(X.row(row), r, p);
        singleRes.push_back(r);
        singleProb.push_back(p);
    }

    // bulk prediction over the whole (continuous) sample matrix
    Mat bulkRes, bulkProb;
    nb->predictProb(X, bulkRes, bulkProb);

    // element-wise '==' yields 255 per matching element, so a full match
    // sums to 255 * total()
    EXPECT_EQ(sum(singleRes == bulkRes)[0], 255 * bulkRes.total());
    EXPECT_EQ(sum(singleProb == bulkProb)[0], 255 * bulkProb.total());

    // bulk prediction again, writing into non-continuous column views
    Mat stridedRes(N, 1+1, CV_32S),
        stridedProb(N, 3+1, CV_32F);
    nb->predictProb(X, stridedRes.col(0), stridedProb.colRange(0,3));
    Mat R3 = stridedRes.col(0).clone(),
        P3 = stridedProb.colRange(0,3).clone();

    EXPECT_EQ(sum(singleRes == R3)[0], 255 * R3.total());
    EXPECT_EQ(sum(singleProb == P3)[0], 255 * P3.total());
}
177
178
// Check RTrees::getVotes(): the per-class vote counts must sum to the number
// of trees in the forest, and the class with the most votes must match the
// label returned by predict().
TEST(ML_RTrees, getVotes)
{
    int n = 12;
    const int label_size = 3;
    // RTrees for classification
    Ptr<ml::RTrees> rt = cv::ml::RTrees::create();

    // data: random 4-d feature vectors in [0, 10)
    Mat data(n, 4, CV_32F);
    randu(data, 0, 10);

    // labels: three classes, four samples each
    Mat labels = (Mat_<int>(n,1) << 0,0,0,0, 1,1,1,1, 2,2,2,2);

    rt->train(data, ml::ROW_SAMPLE, labels);

    // run function on a single random test sample
    Mat test(1, 4, CV_32F);
    Mat result;
    randu(test, 0, 10);
    rt->getVotes(test, result, 0);

    // result is a CV_32S matrix: row 0 holds the class labels, row 1 holds
    // the vote count each class received for the test sample.
    // count vote amount and find highest vote
    int count = 0;
    int predicted_class = 0;
    int max_votes = -1;
    const int* result_row = result.ptr<int>(1);
    for( int i = 0; i < label_size; i++ )
    {
        int val = result_row[i];
        if( max_votes < val )
        {
            max_votes = val;
            predicted_class = i;
        }
        count += val;
    }

    // every tree casts exactly one vote
    EXPECT_EQ(count, (int)rt->getRoots().size());
    // fixed: the votes matrix is CV_32S (see result.ptr<int> above), so row 0
    // must be read with at<int>(), not at<float>() which reinterpreted the
    // integer bits as float and only compared equal for class label 0.
    EXPECT_EQ(result.at<int>(0, predicted_class), (int)rt->predict(test));
}
222
223
}} // namespace
224
/* End of file. */
225
226