Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/features2d/src/matchers.cpp
16337 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// Intel License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2000, Intel Corporation, all rights reserved.
14
// Third party copyrights are property of their respective owners.
15
//
16
// Redistribution and use in source and binary forms, with or without modification,
17
// are permitted provided that the following conditions are met:
18
//
19
// * Redistribution's of source code must retain the above copyright notice,
20
// this list of conditions and the following disclaimer.
21
//
22
// * Redistribution's in binary form must reproduce the above copyright notice,
23
// this list of conditions and the following disclaimer in the documentation
24
// and/or other materials provided with the distribution.
25
//
26
// * The name of Intel Corporation may not be used to endorse or promote products
27
// derived from this software without specific prior written permission.
28
//
29
// This software is provided by the copyright holders and contributors "as is" and
30
// any express or implied warranties, including, but not limited to, the implied
31
// warranties of merchantability and fitness for a particular purpose are disclaimed.
32
// In no event shall the Intel Corporation or contributors be liable for any direct,
33
// indirect, incidental, special, exemplary, or consequential damages
34
// (including, but not limited to, procurement of substitute goods or services;
35
// loss of use, data, or profits; or business interruption) however caused
36
// and on any theory of liability, whether in contract, strict liability,
37
// or tort (including negligence or otherwise) arising in any way out of
38
// the use of this software, even if advised of the possibility of such damage.
39
//
40
//M*/
41
42
#include "precomp.hpp"
43
#ifdef HAVE_OPENCV_FLANN
44
#include "opencv2/flann/miniflann.hpp"
45
#endif
46
#include <limits>
47
#include "opencl_kernels_features2d.hpp"
48
49
#if defined(HAVE_EIGEN) && EIGEN_WORLD_VERSION == 2
50
# if defined(_MSC_VER)
51
# pragma warning(push)
52
# pragma warning(disable:4701) // potentially uninitialized local variable
53
# pragma warning(disable:4702) // unreachable code
54
# pragma warning(disable:4714) // const marked as __forceinline not inlined
55
# endif
56
# include <Eigen/Array>
57
# if defined(_MSC_VER)
58
# pragma warning(pop)
59
# endif
60
#endif
61
62
namespace cv
63
{
64
65
/////////////////////// ocl functions for BFMatcher ///////////////////////////
66
67
#ifdef HAVE_OPENCL
68
static void ensureSizeIsEnough(int rows, int cols, int type, UMat &m)
69
{
70
if (m.type() == type && m.rows >= rows && m.cols >= cols)
71
m = m(Rect(0, 0, cols, rows));
72
else
73
m.create(rows, cols, type);
74
}
75
76
// Launch the OpenCL brute-force 1-NN kernel: for every query row, find the
// single closest train row. Outputs a 1xN trainIdx row (CV_32S) and a 1xN
// distance row (CV_32F). Returns false on any failure so the caller can
// fall back to the CPU implementation.
static bool ocl_matchSingle(InputArray query, InputArray train,
        UMat &trainIdx, UMat &distance, int distType)
{
    if (query.empty() || train.empty())
        return false;

    const int query_rows = query.rows();
    const int query_cols = query.cols();

    ensureSizeIsEnough(1, query_rows, CV_32S, trainIdx);
    ensureSizeIsEnough(1, query_rows, CV_32F, distance);

    ocl::Device devDef = ocl::Device::getDefault();

    UMat uquery = query.getUMat(), utrain = train.getUMat();
    int kercn = 1;
    // On Intel devices, process 4 elements per work-item when both matrices
    // are fully 4-byte aligned (step, width and offset all divisible by 4).
    if (devDef.isIntel() &&
        (0 == (uquery.step % 4)) && (0 == (uquery.cols % 4)) && (0 == (uquery.offset % 4)) &&
        (0 == (utrain.step % 4)) && (0 == (utrain.cols % 4)) && (0 == (utrain.offset % 4)))
        kercn = 4;

    int block_size = 16;
    int max_desc_len = 0;
    bool is_cpu = devDef.type() == ocl::Device::TYPE_CPU;
    // Short descriptors enable the kernel's unrolled fixed-length path;
    // the 128-wide variant is skipped on CPU devices.
    if (query_cols <= 64)
        max_desc_len = 64 / kercn;
    else if (query_cols <= 128 && !is_cpu)
        max_desc_len = 128 / kercn;

    int depth = query.depth();
    cv::String opts;
    opts = cv::format("-D T=%s -D TN=%s -D kercn=%d %s -D DIST_TYPE=%d -D BLOCK_SIZE=%d -D MAX_DESC_LEN=%d",
        ocl::typeToStr(depth), ocl::typeToStr(CV_MAKETYPE(depth, kercn)), kercn, depth == CV_32F ? "-D T_FLOAT" : "", distType, block_size, max_desc_len);
    ocl::Kernel k("BruteForceMatch_Match", ocl::features2d::brute_force_match_oclsrc, opts);
    if(k.empty())
        return false;

    // Global size: query rows rounded up to a whole number of blocks.
    size_t globalSize[] = {((size_t)query.size().height + block_size - 1) / block_size * block_size, (size_t)block_size};
    size_t localSize[] = {(size_t)block_size, (size_t)block_size};

    // Argument order must match the kernel signature exactly.
    int idx = 0;
    idx = k.set(idx, ocl::KernelArg::PtrReadOnly(uquery));
    idx = k.set(idx, ocl::KernelArg::PtrReadOnly(utrain));
    idx = k.set(idx, ocl::KernelArg::PtrWriteOnly(trainIdx));
    idx = k.set(idx, ocl::KernelArg::PtrWriteOnly(distance));
    idx = k.set(idx, uquery.rows);
    idx = k.set(idx, uquery.cols);
    idx = k.set(idx, utrain.rows);
    idx = k.set(idx, utrain.cols);
    // Row step in elements; NOTE(review): assumes 4-byte elements — confirm
    // against the kernel source for non-float depths.
    idx = k.set(idx, (int)(uquery.step / sizeof(float)));

    return k.run(2, globalSize, localSize, false);
}
129
130
static bool ocl_matchConvert(const Mat &trainIdx, const Mat &distance, std::vector< std::vector<DMatch> > &matches)
131
{
132
if (trainIdx.empty() || distance.empty())
133
return false;
134
135
if( (trainIdx.type() != CV_32SC1) || (distance.type() != CV_32FC1 || distance.cols != trainIdx.cols) )
136
return false;
137
138
const int nQuery = trainIdx.cols;
139
140
matches.clear();
141
matches.reserve(nQuery);
142
143
const int *trainIdx_ptr = trainIdx.ptr<int>();
144
const float *distance_ptr = distance.ptr<float>();
145
for (int queryIdx = 0; queryIdx < nQuery; ++queryIdx, ++trainIdx_ptr, ++distance_ptr)
146
{
147
int trainIndex = *trainIdx_ptr;
148
149
if (trainIndex == -1)
150
continue;
151
152
float dst = *distance_ptr;
153
154
DMatch m(queryIdx, trainIndex, 0, dst);
155
156
std::vector<DMatch> temp;
157
temp.push_back(m);
158
matches.push_back(temp);
159
}
160
return true;
161
}
162
163
static bool ocl_matchDownload(const UMat &trainIdx, const UMat &distance, std::vector< std::vector<DMatch> > &matches)
164
{
165
if (trainIdx.empty() || distance.empty())
166
return false;
167
168
Mat trainIdxCPU = trainIdx.getMat(ACCESS_READ);
169
Mat distanceCPU = distance.getMat(ACCESS_READ);
170
171
return ocl_matchConvert(trainIdxCPU, distanceCPU, matches);
172
}
173
174
// Launch the OpenCL brute-force 2-NN kernel: for every query row, find the
// two closest train rows (used for ratio-test matching). trainIdx/distance
// are 1xN two-channel rows. Returns false on any failure so the caller can
// fall back to the CPU implementation.
static bool ocl_knnMatchSingle(InputArray query, InputArray train, UMat &trainIdx,
        UMat &distance, int distType)
{
    if (query.empty() || train.empty())
        return false;

    const int query_rows = query.rows();
    const int query_cols = query.cols();

    ensureSizeIsEnough(1, query_rows, CV_32SC2, trainIdx);
    ensureSizeIsEnough(1, query_rows, CV_32FC2, distance);

    // Pre-mark every slot as "no match"; the kernel overwrites found pairs.
    trainIdx.setTo(Scalar::all(-1));

    ocl::Device devDef = ocl::Device::getDefault();

    UMat uquery = query.getUMat(), utrain = train.getUMat();
    int kercn = 1;
    // Vectorize by 4 on Intel devices when both matrices are 4-byte aligned.
    if (devDef.isIntel() &&
        (0 == (uquery.step % 4)) && (0 == (uquery.cols % 4)) && (0 == (uquery.offset % 4)) &&
        (0 == (utrain.step % 4)) && (0 == (utrain.cols % 4)) && (0 == (utrain.offset % 4)))
        kercn = 4;

    int block_size = 16;
    int max_desc_len = 0;
    bool is_cpu = devDef.type() == ocl::Device::TYPE_CPU;
    // Short descriptors enable the kernel's unrolled fixed-length path;
    // the 128-wide variant is skipped on CPU devices.
    if (query_cols <= 64)
        max_desc_len = 64 / kercn;
    else if (query_cols <= 128 && !is_cpu)
        max_desc_len = 128 / kercn;

    int depth = query.depth();
    cv::String opts;
    opts = cv::format("-D T=%s -D TN=%s -D kercn=%d %s -D DIST_TYPE=%d -D BLOCK_SIZE=%d -D MAX_DESC_LEN=%d",
        ocl::typeToStr(depth), ocl::typeToStr(CV_MAKETYPE(depth, kercn)), kercn, depth == CV_32F ? "-D T_FLOAT" : "", distType, block_size, max_desc_len);
    ocl::Kernel k("BruteForceMatch_knnMatch", ocl::features2d::brute_force_match_oclsrc, opts);
    if(k.empty())
        return false;

    // Global size: query rows rounded up to a whole number of blocks.
    size_t globalSize[] = {((size_t)query_rows + block_size - 1) / block_size * block_size, (size_t)block_size};
    size_t localSize[] = {(size_t)block_size, (size_t)block_size};

    // Argument order must match the kernel signature exactly.
    int idx = 0;
    idx = k.set(idx, ocl::KernelArg::PtrReadOnly(uquery));
    idx = k.set(idx, ocl::KernelArg::PtrReadOnly(utrain));
    idx = k.set(idx, ocl::KernelArg::PtrWriteOnly(trainIdx));
    idx = k.set(idx, ocl::KernelArg::PtrWriteOnly(distance));
    idx = k.set(idx, uquery.rows);
    idx = k.set(idx, uquery.cols);
    idx = k.set(idx, utrain.rows);
    idx = k.set(idx, utrain.cols);
    // Row step in elements; NOTE(review): assumes 4-byte elements — confirm
    // against the kernel source for non-float depths.
    idx = k.set(idx, (int)(uquery.step / sizeof(float)));

    return k.run(2, globalSize, localSize, false);
}
229
230
static bool ocl_knnMatchConvert(const Mat &trainIdx, const Mat &distance, std::vector< std::vector<DMatch> > &matches, bool compactResult)
231
{
232
if (trainIdx.empty() || distance.empty())
233
return false;
234
235
if(trainIdx.type() != CV_32SC2 && trainIdx.type() != CV_32SC1) return false;
236
if(distance.type() != CV_32FC2 && distance.type() != CV_32FC1)return false;
237
if(distance.size() != trainIdx.size()) return false;
238
if(!trainIdx.isContinuous() || !distance.isContinuous()) return false;
239
240
const int nQuery = trainIdx.type() == CV_32SC2 ? trainIdx.cols : trainIdx.rows;
241
const int k = trainIdx.type() == CV_32SC2 ? 2 : trainIdx.cols;
242
243
matches.clear();
244
matches.reserve(nQuery);
245
246
const int *trainIdx_ptr = trainIdx.ptr<int>();
247
const float *distance_ptr = distance.ptr<float>();
248
249
for (int queryIdx = 0; queryIdx < nQuery; ++queryIdx)
250
{
251
matches.push_back(std::vector<DMatch>());
252
std::vector<DMatch> &curMatches = matches.back();
253
curMatches.reserve(k);
254
255
for (int i = 0; i < k; ++i, ++trainIdx_ptr, ++distance_ptr)
256
{
257
int trainIndex = *trainIdx_ptr;
258
259
if (trainIndex != -1)
260
{
261
float dst = *distance_ptr;
262
263
DMatch m(queryIdx, trainIndex, 0, dst);
264
265
curMatches.push_back(m);
266
}
267
}
268
269
if (compactResult && curMatches.empty())
270
matches.pop_back();
271
}
272
return true;
273
}
274
275
static bool ocl_knnMatchDownload(const UMat &trainIdx, const UMat &distance, std::vector< std::vector<DMatch> > &matches, bool compactResult)
276
{
277
if (trainIdx.empty() || distance.empty())
278
return false;
279
280
Mat trainIdxCPU = trainIdx.getMat(ACCESS_READ);
281
Mat distanceCPU = distance.getMat(ACCESS_READ);
282
283
return ocl_knnMatchConvert(trainIdxCPU, distanceCPU, matches, compactResult);
284
}
285
286
// Launch the OpenCL brute-force radius-match kernel: collect every train row
// within maxDistance of each query row. trainIdx/distance store up to
// trainIdx.cols hits per query; nMatches counts how many the kernel found
// (which may exceed the buffer capacity). Returns false on any failure so
// the caller can fall back to the CPU implementation.
static bool ocl_radiusMatchSingle(InputArray query, InputArray train,
        UMat &trainIdx, UMat &distance, UMat &nMatches, float maxDistance, int distType)
{
    if (query.empty() || train.empty())
        return false;

    const int query_rows = query.rows();
    const int train_rows = train.rows();

    ensureSizeIsEnough(1, query_rows, CV_32SC1, nMatches);

    if (trainIdx.empty())
    {
        // Default per-query capacity: ~1% of the train set, but at least 10.
        ensureSizeIsEnough(query_rows, std::max((train_rows / 100), 10), CV_32SC1, trainIdx);
        ensureSizeIsEnough(query_rows, std::max((train_rows / 100), 10), CV_32FC1, distance);
    }

    // Hit counters start at zero; the kernel increments them as it writes.
    nMatches.setTo(Scalar::all(0));

    ocl::Device devDef = ocl::Device::getDefault();
    UMat uquery = query.getUMat(), utrain = train.getUMat();
    int kercn = 1;
    // Vectorize by 4 on Intel devices when both matrices are 4-byte aligned.
    if (devDef.isIntel() &&
        (0 == (uquery.step % 4)) && (0 == (uquery.cols % 4)) && (0 == (uquery.offset % 4)) &&
        (0 == (utrain.step % 4)) && (0 == (utrain.cols % 4)) && (0 == (utrain.offset % 4)))
        kercn = 4;

    int block_size = 16;
    int depth = query.depth();
    cv::String opts;
    opts = cv::format("-D T=%s -D TN=%s -D kercn=%d %s -D DIST_TYPE=%d -D BLOCK_SIZE=%d",
        ocl::typeToStr(depth), ocl::typeToStr(CV_MAKETYPE(depth, kercn)), kercn, depth == CV_32F ? "-D T_FLOAT" : "", distType, block_size);
    ocl::Kernel k("BruteForceMatch_RadiusMatch", ocl::features2d::brute_force_match_oclsrc, opts);
    if (k.empty())
        return false;

    // 2-D launch: train rows along X, query rows along Y, each rounded up
    // to a whole number of blocks.
    size_t globalSize[] = {((size_t)train_rows + block_size - 1) / block_size * block_size, ((size_t)query_rows + block_size - 1) / block_size * block_size};
    size_t localSize[] = {(size_t)block_size, (size_t)block_size};

    // Argument order must match the kernel signature exactly.
    int idx = 0;
    idx = k.set(idx, ocl::KernelArg::PtrReadOnly(uquery));
    idx = k.set(idx, ocl::KernelArg::PtrReadOnly(utrain));
    idx = k.set(idx, maxDistance);
    idx = k.set(idx, ocl::KernelArg::PtrWriteOnly(trainIdx));
    idx = k.set(idx, ocl::KernelArg::PtrWriteOnly(distance));
    idx = k.set(idx, ocl::KernelArg::PtrWriteOnly(nMatches));
    idx = k.set(idx, uquery.rows);
    idx = k.set(idx, uquery.cols);
    idx = k.set(idx, utrain.rows);
    idx = k.set(idx, utrain.cols);
    idx = k.set(idx, trainIdx.cols);
    // Row steps in elements; NOTE(review): assumes 4-byte elements —
    // confirm against the kernel source for non-float depths.
    idx = k.set(idx, (int)(uquery.step / sizeof(float)));
    idx = k.set(idx, (int)(trainIdx.step / sizeof(int)));

    return k.run(2, globalSize, localSize, false);
}
342
343
static bool ocl_radiusMatchConvert(const Mat &trainIdx, const Mat &distance, const Mat &_nMatches,
344
std::vector< std::vector<DMatch> > &matches, bool compactResult)
345
{
346
if (trainIdx.empty() || distance.empty() || _nMatches.empty())
347
return false;
348
349
if( (trainIdx.type() != CV_32SC1) ||
350
(distance.type() != CV_32FC1 || distance.size() != trainIdx.size()) ||
351
(_nMatches.type() != CV_32SC1 || _nMatches.cols != trainIdx.rows) )
352
return false;
353
354
const int nQuery = trainIdx.rows;
355
356
matches.clear();
357
matches.reserve(nQuery);
358
359
const int *nMatches_ptr = _nMatches.ptr<int>();
360
361
for (int queryIdx = 0; queryIdx < nQuery; ++queryIdx)
362
{
363
const int *trainIdx_ptr = trainIdx.ptr<int>(queryIdx);
364
const float *distance_ptr = distance.ptr<float>(queryIdx);
365
366
const int nMatches = std::min(nMatches_ptr[queryIdx], trainIdx.cols);
367
368
if (nMatches == 0)
369
{
370
if (!compactResult)
371
matches.push_back(std::vector<DMatch>());
372
continue;
373
}
374
375
matches.push_back(std::vector<DMatch>(nMatches));
376
std::vector<DMatch> &curMatches = matches.back();
377
378
for (int i = 0; i < nMatches; ++i, ++trainIdx_ptr, ++distance_ptr)
379
{
380
int trainIndex = *trainIdx_ptr;
381
382
float dst = *distance_ptr;
383
384
DMatch m(queryIdx, trainIndex, 0, dst);
385
386
curMatches[i] = m;
387
}
388
389
std::sort(curMatches.begin(), curMatches.end());
390
}
391
return true;
392
}
393
394
static bool ocl_radiusMatchDownload(const UMat &trainIdx, const UMat &distance, const UMat &nMatches,
395
std::vector< std::vector<DMatch> > &matches, bool compactResult)
396
{
397
if (trainIdx.empty() || distance.empty() || nMatches.empty())
398
return false;
399
400
Mat trainIdxCPU = trainIdx.getMat(ACCESS_READ);
401
Mat distanceCPU = distance.getMat(ACCESS_READ);
402
Mat nMatchesCPU = nMatches.getMat(ACCESS_READ);
403
404
return ocl_radiusMatchConvert(trainIdxCPU, distanceCPU, nMatchesCPU, matches, compactResult);
405
}
406
#endif
407
408
/****************************************************************************************\
409
* DescriptorMatcher *
410
\****************************************************************************************/
411
// Empty collection: no merged descriptor matrix, no per-image offsets.
DescriptorMatcher::DescriptorCollection::DescriptorCollection()
{}
413
414
// Deep-copy another collection: the merged descriptor matrix is cloned so
// the two collections do not share storage, and the per-image start
// offsets are copied.
//
// Bug fix: the original called std::copy(begin, begin, startIdxs.begin())
// — an empty source range into an empty destination — so startIdxs was
// never actually copied.
DescriptorMatcher::DescriptorCollection::DescriptorCollection( const DescriptorCollection& collection )
{
    mergedDescriptors = collection.mergedDescriptors.clone();
    startIdxs = collection.startIdxs;
}
419
420
// Members (Mat, std::vector) release themselves; nothing to do.
DescriptorMatcher::DescriptorCollection::~DescriptorCollection()
{}
422
423
// Merge per-image descriptor matrices into one contiguous matrix and record
// each image's starting row in startIdxs. Empty images contribute zero rows
// but keep their slot in the offset table. All non-empty images must share
// the same column count and type.
void DescriptorMatcher::DescriptorCollection::set( const std::vector<Mat>& descriptors )
{
    clear();

    size_t imageCount = descriptors.size();
    CV_Assert( imageCount > 0 );

    startIdxs.resize( imageCount );

    int dim = -1;
    int type = -1;
    startIdxs[0] = 0;
    // Cumulative row offsets; dim/type end up taken from the last non-empty
    // image seen in descriptors[0 .. imageCount-2].
    for( size_t i = 1; i < imageCount; i++ )
    {
        int s = 0;
        if( !descriptors[i-1].empty() )
        {
            dim = descriptors[i-1].cols;
            type = descriptors[i-1].type();
            s = descriptors[i-1].rows;
        }
        startIdxs[i] = startIdxs[i-1] + s;
    }
    // Single-image case: the loop above never ran, so read dim/type here.
    if( imageCount == 1 )
    {
        if( descriptors[0].empty() ) return;

        dim = descriptors[0].cols;
        type = descriptors[0].type();
    }
    CV_Assert( dim > 0 );

    // Total rows = last image's start offset plus its own row count.
    int count = startIdxs[imageCount-1] + descriptors[imageCount-1].rows;

    if( count > 0 )
    {
        mergedDescriptors.create( count, dim, type );
        for( size_t i = 0; i < imageCount; i++ )
        {
            if( !descriptors[i].empty() )
            {
                CV_Assert( descriptors[i].cols == dim && descriptors[i].type() == type );
                // Copy each image's rows into its reserved slice.
                Mat m = mergedDescriptors.rowRange( startIdxs[i], startIdxs[i] + descriptors[i].rows );
                descriptors[i].copyTo(m);
            }
        }
    }
}
471
472
void DescriptorMatcher::DescriptorCollection::clear()
473
{
474
startIdxs.clear();
475
mergedDescriptors.release();
476
}
477
478
// Fetch one descriptor row addressed by (image index, row within image),
// translating to a global row via the offset table.
const Mat DescriptorMatcher::DescriptorCollection::getDescriptor( int imgIdx, int localDescIdx ) const
{
    CV_Assert( imgIdx < (int)startIdxs.size() );
    const int globalRow = startIdxs[imgIdx] + localDescIdx;
    CV_Assert( globalRow < (int)size() );

    return getDescriptor( globalRow );
}
486
487
// Read-only access to the whole merged descriptor matrix.
const Mat& DescriptorMatcher::DescriptorCollection::getDescriptors() const
{
    return mergedDescriptors;
}
491
492
// Fetch one descriptor row by its global (merged) row index.
// Returns a header sharing storage with mergedDescriptors.
const Mat DescriptorMatcher::DescriptorCollection::getDescriptor( int globalDescIdx ) const
{
    CV_Assert( globalDescIdx < size() );
    return mergedDescriptors.row( globalDescIdx );
}
497
498
void DescriptorMatcher::DescriptorCollection::getLocalIdx( int globalDescIdx, int& imgIdx, int& localDescIdx ) const
499
{
500
CV_Assert( (globalDescIdx>=0) && (globalDescIdx < size()) );
501
std::vector<int>::const_iterator img_it = std::upper_bound(startIdxs.begin(), startIdxs.end(), globalDescIdx);
502
--img_it;
503
imgIdx = (int)(img_it - startIdxs.begin());
504
localDescIdx = globalDescIdx - (*img_it);
505
}
506
507
// Total number of descriptors (rows) across all merged images.
int DescriptorMatcher::DescriptorCollection::size() const
{
    return mergedDescriptors.rows;
}
511
512
/*
513
* DescriptorMatcher
514
*/
515
static void convertMatches( const std::vector<std::vector<DMatch> >& knnMatches, std::vector<DMatch>& matches )
516
{
517
matches.clear();
518
matches.reserve( knnMatches.size() );
519
for( size_t i = 0; i < knnMatches.size(); i++ )
520
{
521
CV_Assert( knnMatches[i].size() <= 1 );
522
if( !knnMatches[i].empty() )
523
matches.push_back( knnMatches[i][0] );
524
}
525
}
526
527
// Containers of Mat/UMat clean themselves up; nothing to do.
DescriptorMatcher::~DescriptorMatcher()
{}
529
530
// Append training descriptors. Device-side (UMat) inputs accumulate in
// utrainDescCollection, host-side (Mat) inputs in trainDescCollection;
// a single matrix is treated as a one-element batch.
void DescriptorMatcher::add( InputArrayOfArrays _descriptors )
{
    if( _descriptors.isUMatVector() )
    {
        std::vector<UMat> batch;
        _descriptors.getUMatVector( batch );
        utrainDescCollection.insert( utrainDescCollection.end(), batch.begin(), batch.end() );
    }
    else if( _descriptors.isUMat() )
    {
        utrainDescCollection.push_back( _descriptors.getUMat() );
    }
    else if( _descriptors.isMatVector() )
    {
        std::vector<Mat> batch;
        _descriptors.getMatVector( batch );
        trainDescCollection.insert( trainDescCollection.end(), batch.begin(), batch.end() );
    }
    else if( _descriptors.isMat() )
    {
        trainDescCollection.push_back( _descriptors.getMat() );
    }
    else
    {
        // Unsupported container kind: fail loudly with the accepted set.
        CV_Assert( _descriptors.isUMat() || _descriptors.isUMatVector() || _descriptors.isMat() || _descriptors.isMatVector() );
    }
}
559
560
// Read-only access to the host-side training set. Note: descriptors added
// as UMat live in utrainDescCollection and are not visible here.
const std::vector<Mat>& DescriptorMatcher::getTrainDescriptors() const
{
    return trainDescCollection;
}
564
565
void DescriptorMatcher::clear()
566
{
567
utrainDescCollection.clear();
568
trainDescCollection.clear();
569
}
570
571
// True when no training descriptors have been added (neither Mat nor UMat).
bool DescriptorMatcher::empty() const
{
    return trainDescCollection.empty() && utrainDescCollection.empty();
}
575
576
// Training is a no-op by default; subclasses that build an index override it.
void DescriptorMatcher::train()
{}
578
579
void DescriptorMatcher::match( InputArray queryDescriptors, InputArray trainDescriptors,
580
std::vector<DMatch>& matches, InputArray mask ) const
581
{
582
CV_INSTRUMENT_REGION();
583
584
Ptr<DescriptorMatcher> tempMatcher = clone(true);
585
tempMatcher->add(trainDescriptors);
586
tempMatcher->match( queryDescriptors, matches, std::vector<Mat>(1, mask.getMat()) );
587
}
588
589
void DescriptorMatcher::knnMatch( InputArray queryDescriptors, InputArray trainDescriptors,
590
std::vector<std::vector<DMatch> >& matches, int knn,
591
InputArray mask, bool compactResult ) const
592
{
593
CV_INSTRUMENT_REGION();
594
595
Ptr<DescriptorMatcher> tempMatcher = clone(true);
596
tempMatcher->add(trainDescriptors);
597
tempMatcher->knnMatch( queryDescriptors, matches, knn, std::vector<Mat>(1, mask.getMat()), compactResult );
598
}
599
600
void DescriptorMatcher::radiusMatch( InputArray queryDescriptors, InputArray trainDescriptors,
601
std::vector<std::vector<DMatch> >& matches, float maxDistance, InputArray mask,
602
bool compactResult ) const
603
{
604
CV_INSTRUMENT_REGION();
605
606
Ptr<DescriptorMatcher> tempMatcher = clone(true);
607
tempMatcher->add(trainDescriptors);
608
tempMatcher->radiusMatch( queryDescriptors, matches, maxDistance, std::vector<Mat>(1, mask.getMat()), compactResult );
609
}
610
611
// Best-match search expressed as knnMatch with k = 1, then flattened into
// a plain match list (queries with no match are dropped).
void DescriptorMatcher::match( InputArray queryDescriptors, std::vector<DMatch>& matches, InputArrayOfArrays masks )
{
    CV_INSTRUMENT_REGION();

    std::vector<std::vector<DMatch> > nearest;
    knnMatch( queryDescriptors, nearest, 1, masks, true /*compactResult*/ );
    convertMatches( nearest, matches );
}
619
620
// Validate user-supplied masks against the training set: one mask per
// train image, each queryDescriptorsCount x trainRows of type CV_8UC1.
// Masks are only checked when the matcher supports them.
//
// Bug fix: the original indexed BOTH trainDescCollection[i] and
// utrainDescCollection[i] for every i < max(sizes). Since normally only
// one of the two collections is populated, that read past the end of the
// empty one (undefined behavior). Indexing is now bounds-guarded; the
// accepted inputs are unchanged.
void DescriptorMatcher::checkMasks( InputArrayOfArrays _masks, int queryDescriptorsCount ) const
{
    std::vector<Mat> masks;
    _masks.getMatVector(masks);

    if( isMaskSupported() && !masks.empty() )
    {
        // Check masks
        size_t imageCount = std::max(trainDescCollection.size(), utrainDescCollection.size() );
        CV_Assert( masks.size() == imageCount );
        for( size_t i = 0; i < imageCount; i++ )
        {
            // Treat an out-of-range slot the same as an empty matrix.
            const bool trainEmpty = i >= trainDescCollection.size() || trainDescCollection[i].empty();
            const bool utrainEmpty = i >= utrainDescCollection.size() || utrainDescCollection[i].empty();

            if( !masks[i].empty() && (!trainEmpty || !utrainEmpty) )
            {
                int rows = trainEmpty ? utrainDescCollection[i].rows : trainDescCollection[i].rows;
                CV_Assert( masks[i].rows == queryDescriptorsCount &&
                           masks[i].cols == rows && masks[i].type() == CV_8UC1 );
            }
        }
    }
}
641
642
// Find the `knn` best matches for every query descriptor against the
// training set supplied via add(). No-op when either side is empty.
// Validates masks and (re)trains before delegating to the backend's
// knnMatchImpl().
void DescriptorMatcher::knnMatch( InputArray queryDescriptors, std::vector<std::vector<DMatch> >& matches, int knn,
        InputArrayOfArrays masks, bool compactResult )
{
    CV_INSTRUMENT_REGION();

    if( empty() || queryDescriptors.empty() )
        return;

    CV_Assert( knn > 0 );

    checkMasks( masks, queryDescriptors.size().height );

    train();
    knnMatchImpl( queryDescriptors, matches, knn, masks, compactResult );
}
657
658
// Find all matches within maxDistance for every query descriptor against
// the training set supplied via add(). Clears `matches` first, returns
// immediately when either side is empty. Validates masks and (re)trains
// before delegating to the backend's radiusMatchImpl().
void DescriptorMatcher::radiusMatch( InputArray queryDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
        InputArrayOfArrays masks, bool compactResult )
{
    CV_INSTRUMENT_REGION();

    matches.clear();
    if( empty() || queryDescriptors.empty() )
        return;

    // A radius at or below epsilon would match nothing meaningful.
    CV_Assert( maxDistance > std::numeric_limits<float>::epsilon() );

    checkMasks( masks, queryDescriptors.size().height );

    train();
    radiusMatchImpl( queryDescriptors, matches, maxDistance, masks, compactResult );
}
674
675
// Deserialization is a no-op by default; stateful subclasses override.
void DescriptorMatcher::read( const FileNode& )
{}
677
678
// Serialization is a no-op by default; stateful subclasses override.
void DescriptorMatcher::write( FileStorage& ) const
{}
680
681
// A (query, train) pair is allowed when no mask was given, or when the
// corresponding mask cell is non-zero.
bool DescriptorMatcher::isPossibleMatch( InputArray _mask, int queryIdx, int trainIdx )
{
    const Mat mask = _mask.getMat();
    if( mask.empty() )
        return true;
    return mask.at<uchar>(queryIdx, trainIdx) != 0;
}
686
687
// A query is fully masked out only when masks were provided AND every one
// of them forbids all of its train descriptors (a per-image empty mask
// forbids nothing, so it prevents the query from being masked out).
bool DescriptorMatcher::isMaskedOut( InputArrayOfArrays _masks, int queryIdx )
{
    std::vector<Mat> masks;
    _masks.getMatVector(masks);

    if( masks.empty() )
        return false;

    size_t blockedCount = 0;
    for( size_t i = 0; i < masks.size(); i++ )
    {
        if( !masks[i].empty() && countNonZero(masks[i].row(queryIdx)) == 0 )
            blockedCount++;
    }

    return blockedCount == masks.size();
}
701
702
703
////////////////////////////////////////////////////// BruteForceMatcher /////////////////////////////////////////////////
704
705
// Construct a brute-force matcher.
// _normType:   distance norm used to compare descriptors.
// _crossCheck: when true, matching keeps only mutually-best pairs
//              (consumed by knnMatchImpl via batchDistance).
BFMatcher::BFMatcher( int _normType, bool _crossCheck )
{
    normType = _normType;
    crossCheck = _crossCheck;
}
710
711
// Factory: reference-counted BFMatcher with the given norm and cross-check setting.
Ptr<BFMatcher> BFMatcher::create(int _normType, bool _crossCheck )
{
    return makePtr<BFMatcher>(_normType, _crossCheck);
}
715
716
// Duplicate this matcher's parameters and, unless emptyTrainData is set,
// deep-copy the host-side training descriptors.
// NOTE(review): utrainDescCollection is not copied here — confirm whether
// UMat training data is meant to survive cloning.
Ptr<DescriptorMatcher> BFMatcher::clone( bool emptyTrainData ) const
{
    Ptr<BFMatcher> matcher = makePtr<BFMatcher>(normType, crossCheck);
    if( emptyTrainData )
        return matcher;

    matcher->trainDescCollection.resize(trainDescCollection.size());
    std::transform( trainDescCollection.begin(), trainDescCollection.end(),
                    matcher->trainDescCollection.begin(), clone_op );
    return matcher;
}
727
728
#ifdef HAVE_OPENCL
729
// Full OpenCL 1-NN pipeline: kernel launch followed by host download and
// conversion. False on any stage failure (caller falls back to CPU).
static bool ocl_match(InputArray query, InputArray _train, std::vector< std::vector<DMatch> > &matches, int dstType)
{
    UMat trainIdx, distance;
    return ocl_matchSingle(query, _train, trainIdx, distance, dstType)
        && ocl_matchDownload(trainIdx, distance, matches);
}
738
739
// Full OpenCL k-NN pipeline. The kernel only produces two neighbours, so
// any k other than 2 is rejected up front.
static bool ocl_knnMatch(InputArray query, InputArray _train, std::vector< std::vector<DMatch> > &matches, int k, int dstType, bool compactResult)
{
    if (k != 2)
        return false;

    UMat trainIdx, distance;
    return ocl_knnMatchSingle(query, _train, trainIdx, distance, dstType)
        && ocl_knnMatchDownload(trainIdx, distance, matches, compactResult);
}
750
#endif
751
752
// Brute-force k-NN matching backend. Tries the OpenCL fast path first
// (single CV_32F train image, no mask); otherwise runs batchDistance() on
// the host, once per train image, packing (imgIdx, trainIdx) into a single
// int with the image index in the bits at/above IMGIDX_SHIFT.
void BFMatcher::knnMatchImpl( InputArray _queryDescriptors, std::vector<std::vector<DMatch> >& matches, int knn,
        InputArrayOfArrays _masks, bool compactResult )
{
    int trainDescType = trainDescCollection.empty() ? utrainDescCollection[0].type() : trainDescCollection[0].type();
    CV_Assert( _queryDescriptors.type() == trainDescType );

    const int IMGIDX_SHIFT = 18;
    const int IMGIDX_ONE = (1 << IMGIDX_SHIFT);

    if( _queryDescriptors.empty() || (trainDescCollection.empty() && utrainDescCollection.empty()))
    {
        matches.clear();
        return;
    }

    std::vector<Mat> masks;
    _masks.getMatVector(masks);

    // Mixed Mat/UMat training data: consolidate everything on the host so
    // a single code path applies below.
    if(!trainDescCollection.empty() && !utrainDescCollection.empty())
    {
        for(int i = 0; i < (int)utrainDescCollection.size(); i++)
        {
            Mat tempMat;
            utrainDescCollection[i].copyTo(tempMat);
            trainDescCollection.push_back(tempMat);
        }
        utrainDescCollection.clear();
    }

#ifdef HAVE_OPENCL
    int trainDescVectorSize = trainDescCollection.empty() ? (int)utrainDescCollection.size() : (int)trainDescCollection.size();
    Size trainDescSize = trainDescCollection.empty() ? utrainDescCollection[0].size() : trainDescCollection[0].size();
    int trainDescOffset = trainDescCollection.empty() ? (int)utrainDescCollection[0].offset : 0;

    // OpenCL fast path: exactly one CV_32F train image, matching widths,
    // zero offsets and an empty mask. Any kernel failure falls through to
    // the CPU implementation below.
    if ( ocl::isOpenCLActivated() && _queryDescriptors.isUMat() && _queryDescriptors.dims()<=2 && trainDescVectorSize == 1 &&
        _queryDescriptors.type() == CV_32FC1 && _queryDescriptors.offset() == 0 && trainDescOffset == 0 &&
        trainDescSize.width == _queryDescriptors.size().width && masks.size() == 1 && masks[0].total() == 0 )
    {
        if(knn == 1)
        {
            if(trainDescCollection.empty())
            {
                if(ocl_match(_queryDescriptors, utrainDescCollection[0], matches, normType))
                {
                    CV_IMPL_ADD(CV_IMPL_OCL);
                    return;
                }
            }
            else
            {
                if(ocl_match(_queryDescriptors, trainDescCollection[0], matches, normType))
                {
                    CV_IMPL_ADD(CV_IMPL_OCL);
                    return;
                }
            }
        }
        else
        {
            if(trainDescCollection.empty())
            {
                if(ocl_knnMatch(_queryDescriptors, utrainDescCollection[0], matches, knn, normType, compactResult) )
                {
                    CV_IMPL_ADD(CV_IMPL_OCL);
                    return;
                }
            }
            else
            {
                if(ocl_knnMatch(_queryDescriptors, trainDescCollection[0], matches, knn, normType, compactResult) )
                {
                    CV_IMPL_ADD(CV_IMPL_OCL);
                    return;
                }
            }
        }
    }
#endif

    // CPU path: download any device-side training data to the host first.
    Mat queryDescriptors = _queryDescriptors.getMat();
    if(trainDescCollection.empty() && !utrainDescCollection.empty())
    {
        for(int i = 0; i < (int)utrainDescCollection.size(); i++)
        {
            Mat tempMat;
            utrainDescCollection[i].copyTo(tempMat);
            trainDescCollection.push_back(tempMat);
        }
        utrainDescCollection.clear();
    }

    matches.reserve(queryDescriptors.rows);

    Mat dist, nidx;

    int iIdx, imgCount = (int)trainDescCollection.size(), update = 0;
    // Hamming (and L1 over bytes) distances are integral; all others float.
    int dtype = normType == NORM_HAMMING || normType == NORM_HAMMING2 ||
        (normType == NORM_L1 && queryDescriptors.type() == CV_8U) ? CV_32S : CV_32F;

    // All packed indices must fit in a signed int.
    CV_Assert( (int64)imgCount*IMGIDX_ONE < INT_MAX );

    // `update` biases each image's row indices by iIdx * IMGIDX_ONE so
    // results from different images remain distinguishable after unpacking.
    for( iIdx = 0; iIdx < imgCount; iIdx++ )
    {
        CV_Assert( trainDescCollection[iIdx].rows < IMGIDX_ONE );
        batchDistance(queryDescriptors, trainDescCollection[iIdx], dist, dtype, nidx,
            normType, knn, masks.empty() ? Mat() : masks[iIdx], update, crossCheck);
        update += IMGIDX_ONE;
    }

    if( dtype == CV_32S )
    {
        Mat temp;
        dist.convertTo(temp, CV_32F);
        dist = temp;
    }

    // Unpack the (imgIdx, trainIdx) codes into DMatch entries per query.
    // A negative index marks the end of valid neighbours for that query.
    for( int qIdx = 0; qIdx < queryDescriptors.rows; qIdx++ )
    {
        const float* distptr = dist.ptr<float>(qIdx);
        const int* nidxptr = nidx.ptr<int>(qIdx);

        matches.push_back( std::vector<DMatch>() );
        std::vector<DMatch>& mq = matches.back();
        mq.reserve(knn);

        for( int k = 0; k < nidx.cols; k++ )
        {
            if( nidxptr[k] < 0 )
                break;
            mq.push_back( DMatch(qIdx, nidxptr[k] & (IMGIDX_ONE - 1),
                nidxptr[k] >> IMGIDX_SHIFT, distptr[k]) );
        }

        if( mq.empty() && compactResult )
            matches.pop_back();
    }
}
889
890
#ifdef HAVE_OPENCL
891
static bool ocl_radiusMatch(InputArray query, InputArray _train, std::vector< std::vector<DMatch> > &matches,
892
float maxDistance, int dstType, bool compactResult)
893
{
894
UMat trainIdx, distance, nMatches;
895
if (!ocl_radiusMatchSingle(query, _train, trainIdx, distance, nMatches, maxDistance, dstType))
896
return false;
897
if (!ocl_radiusMatchDownload(trainIdx, distance, nMatches, matches, compactResult))
898
return false;
899
return true;
900
}
901
#endif
902
903
void BFMatcher::radiusMatchImpl( InputArray _queryDescriptors, std::vector<std::vector<DMatch> >& matches,
904
float maxDistance, InputArrayOfArrays _masks, bool compactResult )
905
{
906
int trainDescType = trainDescCollection.empty() ? utrainDescCollection[0].type() : trainDescCollection[0].type();
907
CV_Assert( _queryDescriptors.type() == trainDescType );
908
909
if( _queryDescriptors.empty() || (trainDescCollection.empty() && utrainDescCollection.empty()))
910
{
911
matches.clear();
912
return;
913
}
914
915
std::vector<Mat> masks;
916
_masks.getMatVector(masks);
917
918
if(!trainDescCollection.empty() && !utrainDescCollection.empty())
919
{
920
for(int i = 0; i < (int)utrainDescCollection.size(); i++)
921
{
922
Mat tempMat;
923
utrainDescCollection[i].copyTo(tempMat);
924
trainDescCollection.push_back(tempMat);
925
}
926
utrainDescCollection.clear();
927
}
928
929
#ifdef HAVE_OPENCL
930
int trainDescVectorSize = trainDescCollection.empty() ? (int)utrainDescCollection.size() : (int)trainDescCollection.size();
931
Size trainDescSize = trainDescCollection.empty() ? utrainDescCollection[0].size() : trainDescCollection[0].size();
932
int trainDescOffset = trainDescCollection.empty() ? (int)utrainDescCollection[0].offset : 0;
933
934
if ( ocl::isOpenCLActivated() && _queryDescriptors.isUMat() && _queryDescriptors.dims()<=2 && trainDescVectorSize == 1 &&
935
_queryDescriptors.type() == CV_32FC1 && _queryDescriptors.offset() == 0 && trainDescOffset == 0 &&
936
trainDescSize.width == _queryDescriptors.size().width && masks.size() == 1 && masks[0].total() == 0 )
937
{
938
if (trainDescCollection.empty())
939
{
940
if(ocl_radiusMatch(_queryDescriptors, utrainDescCollection[0], matches, maxDistance, normType, compactResult) )
941
{
942
CV_IMPL_ADD(CV_IMPL_OCL);
943
return;
944
}
945
}
946
else
947
{
948
if (ocl_radiusMatch(_queryDescriptors, trainDescCollection[0], matches, maxDistance, normType, compactResult) )
949
{
950
CV_IMPL_ADD(CV_IMPL_OCL);
951
return;
952
}
953
}
954
}
955
#endif
956
957
Mat queryDescriptors = _queryDescriptors.getMat();
958
if(trainDescCollection.empty() && !utrainDescCollection.empty())
959
{
960
for(int i = 0; i < (int)utrainDescCollection.size(); i++)
961
{
962
Mat tempMat;
963
utrainDescCollection[i].copyTo(tempMat);
964
trainDescCollection.push_back(tempMat);
965
}
966
utrainDescCollection.clear();
967
}
968
969
matches.resize(queryDescriptors.rows);
970
Mat dist, distf;
971
972
int iIdx, imgCount = (int)trainDescCollection.size();
973
int dtype = normType == NORM_HAMMING || normType == NORM_HAMMING2 ||
974
(normType == NORM_L1 && queryDescriptors.type() == CV_8U) ? CV_32S : CV_32F;
975
976
for( iIdx = 0; iIdx < imgCount; iIdx++ )
977
{
978
batchDistance(queryDescriptors, trainDescCollection[iIdx], dist, dtype, noArray(),
979
normType, 0, masks.empty() ? Mat() : masks[iIdx], 0, false);
980
if( dtype == CV_32S )
981
dist.convertTo(distf, CV_32F);
982
else
983
distf = dist;
984
985
for( int qIdx = 0; qIdx < queryDescriptors.rows; qIdx++ )
986
{
987
const float* distptr = distf.ptr<float>(qIdx);
988
989
std::vector<DMatch>& mq = matches[qIdx];
990
for( int k = 0; k < distf.cols; k++ )
991
{
992
if( distptr[k] <= maxDistance )
993
mq.push_back( DMatch(qIdx, k, iIdx, distptr[k]) );
994
}
995
}
996
}
997
998
int qIdx0 = 0;
999
for( int qIdx = 0; qIdx < queryDescriptors.rows; qIdx++ )
1000
{
1001
if( matches[qIdx].empty() && compactResult )
1002
continue;
1003
1004
if( qIdx0 < qIdx )
1005
std::swap(matches[qIdx], matches[qIdx0]);
1006
1007
std::sort( matches[qIdx0].begin(), matches[qIdx0].end() );
1008
qIdx0++;
1009
}
1010
}

///////////////////////////////////////////////////////////////////////////////////////////////////////

/*
 * Factory function for DescriptorMatcher creating
 */
// Factory: maps a matcher name string to a concrete DescriptorMatcher
// instance. Throws StsBadArg for unrecognized names.
Ptr<DescriptorMatcher> DescriptorMatcher::create( const String& descriptorMatcherType )
{
    Ptr<DescriptorMatcher> dm;
#ifdef HAVE_OPENCV_FLANN
    if( descriptorMatcherType == "FlannBased" )
    {
        dm = makePtr<FlannBasedMatcher>();
    }
    else
#endif
    if( descriptorMatcherType == "BruteForce" ) // L2
    {
        dm = makePtr<BFMatcher>(int(NORM_L2)); // anonymous enums can't be template parameters
    }
    else if( descriptorMatcherType == "BruteForce-SL2" ) // Squared L2
    {
        dm = makePtr<BFMatcher>(int(NORM_L2SQR));
    }
    else if( descriptorMatcherType == "BruteForce-L1" )
    {
        dm = makePtr<BFMatcher>(int(NORM_L1));
    }
    else if( descriptorMatcherType == "BruteForce-Hamming" ||
             descriptorMatcherType == "BruteForce-HammingLUT" )
    {
        dm = makePtr<BFMatcher>(int(NORM_HAMMING));
    }
    else if( descriptorMatcherType == "BruteForce-Hamming(2)" )
    {
        dm = makePtr<BFMatcher>(int(NORM_HAMMING2));
    }
    else
        CV_Error( Error::StsBadArg, "Unknown matcher name" );

    return dm;
}
1053
1054
// Enum-based factory overload: translates the MatcherType enum to the
// corresponding name string and delegates to the string-based create().
Ptr<DescriptorMatcher> DescriptorMatcher::create( const MatcherType& matcherType )
{
    String name;

    switch(matcherType)
    {
#ifdef HAVE_OPENCV_FLANN
    case FLANNBASED:            name = "FlannBased";             break;
#endif
    case BRUTEFORCE:            name = "BruteForce";             break;
    case BRUTEFORCE_L1:         name = "BruteForce-L1";          break;
    case BRUTEFORCE_HAMMING:    name = "BruteForce-Hamming";     break;
    case BRUTEFORCE_HAMMINGLUT: name = "BruteForce-HammingLUT";  break;
    case BRUTEFORCE_SL2:        name = "BruteForce-SL2";         break;
    default:
        CV_Error( Error::StsBadArg, "Specified descriptor matcher type is not supported." );
        break;
    }

    return DescriptorMatcher::create(name);
}

#ifdef HAVE_OPENCV_FLANN

/*
 * Flann based matcher
 */
// Constructs a FLANN-based matcher from the given index/search parameter
// objects. Both pointers must be non-null (asserted); the pointers are
// stored as-is (shared, not deep-copied). No descriptors are added yet
// (addedDescCount starts at 0).
FlannBasedMatcher::FlannBasedMatcher( const Ptr<flann::IndexParams>& _indexParams, const Ptr<flann::SearchParams>& _searchParams )
    : indexParams(_indexParams), searchParams(_searchParams), addedDescCount(0)
{
    CV_Assert( _indexParams );
    CV_Assert( _searchParams );
}
1102
1103
// Factory: returns a FlannBasedMatcher constructed with the constructor's
// default index/search parameters.
Ptr<FlannBasedMatcher> FlannBasedMatcher::create()
{
    return makePtr<FlannBasedMatcher>();
}
1107
1108
// Adds train descriptors (Mat/UMat, single or vector) via the base class,
// and tracks the total number of descriptor rows added so train() can tell
// when the FLANN index is stale.
void FlannBasedMatcher::add( InputArrayOfArrays _descriptors )
{
    DescriptorMatcher::add( _descriptors );

    if( _descriptors.isUMatVector() )
    {
        std::vector<UMat> umats;
        _descriptors.getUMatVector( umats );

        for( const UMat& d : umats )
        {
            addedDescCount += d.rows;
        }
    }
    else if( _descriptors.isUMat() )
    {
        addedDescCount += _descriptors.getUMat().rows;
    }
    else if( _descriptors.isMatVector() )
    {
        std::vector<Mat> mats;
        _descriptors.getMatVector(mats);
        for( const Mat& d : mats )
        {
            addedDescCount += d.rows;
        }
    }
    else if( _descriptors.isMat() )
    {
        addedDescCount += _descriptors.getMat().rows;
    }
    else
    {
        // Unsupported InputArray kind: fail loudly.
        CV_Assert( _descriptors.isUMat() || _descriptors.isUMatVector() || _descriptors.isMat() || _descriptors.isMatVector() );
    }
}
1144
1145
// Resets the matcher completely: clears the train descriptor collections
// (base class), the merged descriptor storage and the built FLANN index,
// and zeroes the row counter that train() compares against
// mergedDescriptors.size() to detect staleness.
void FlannBasedMatcher::clear()
{
    DescriptorMatcher::clear();

    mergedDescriptors.clear();
    flannIndex.release();

    addedDescCount = 0;
}
1154
1155
// (Re)builds the FLANN index when it does not exist yet, or when descriptors
// were added since the last build (mergedDescriptors.size() < addedDescCount).
void FlannBasedMatcher::train()
{
    CV_INSTRUMENT_REGION();

    if( !flannIndex || mergedDescriptors.size() < addedDescCount )
    {
        // FIXIT: Workaround for 'utrainDescCollection' issue (PR #2142)
        // UMat-based train descriptors are mapped into the Mat collection so
        // they can be merged below; the two collections must be mutually
        // exclusive at this point (asserted).
        if (!utrainDescCollection.empty())
        {
            CV_Assert(trainDescCollection.size() == 0);
            for (size_t i = 0; i < utrainDescCollection.size(); ++i)
                trainDescCollection.push_back(utrainDescCollection[i].getMat(ACCESS_READ));
        }
        // Merge all train images into one contiguous descriptor matrix and
        // build the index over it with the configured index parameters.
        mergedDescriptors.set( trainDescCollection );
        flannIndex = makePtr<flann::Index>( mergedDescriptors.getDescriptors(), *indexParams );
    }
}

using namespace cv::flann;

void FlannBasedMatcher::read( const FileNode& fn)
1176
{
1177
if (!indexParams)
1178
indexParams = makePtr<flann::IndexParams>();
1179
1180
FileNode ip = fn["indexParams"];
1181
CV_Assert(ip.type() == FileNode::SEQ);
1182
1183
for(int i = 0; i < (int)ip.size(); ++i)
1184
{
1185
CV_Assert(ip[i].type() == FileNode::MAP);
1186
String _name = (String)ip[i]["name"];
1187
FlannIndexType type = (FlannIndexType)(int)ip[i]["type"];
1188
CV_CheckLE((int)type, (int)LAST_VALUE_FLANN_INDEX_TYPE, "");
1189
1190
switch(type)
1191
{
1192
case FLANN_INDEX_TYPE_8U:
1193
case FLANN_INDEX_TYPE_8S:
1194
case FLANN_INDEX_TYPE_16U:
1195
case FLANN_INDEX_TYPE_16S:
1196
case FLANN_INDEX_TYPE_32S:
1197
indexParams->setInt(_name, (int) ip[i]["value"]);
1198
break;
1199
case FLANN_INDEX_TYPE_32F:
1200
indexParams->setFloat(_name, (float) ip[i]["value"]);
1201
break;
1202
case FLANN_INDEX_TYPE_64F:
1203
indexParams->setDouble(_name, (double) ip[i]["value"]);
1204
break;
1205
case FLANN_INDEX_TYPE_STRING:
1206
indexParams->setString(_name, (String) ip[i]["value"]);
1207
break;
1208
case FLANN_INDEX_TYPE_BOOL:
1209
indexParams->setBool(_name, (int) ip[i]["value"] != 0);
1210
break;
1211
case FLANN_INDEX_TYPE_ALGORITHM:
1212
indexParams->setAlgorithm((int) ip[i]["value"]);
1213
break;
1214
// don't default: - compiler warning is here
1215
};
1216
}
1217
1218
if (!searchParams)
1219
searchParams = makePtr<flann::SearchParams>();
1220
1221
FileNode sp = fn["searchParams"];
1222
CV_Assert(sp.type() == FileNode::SEQ);
1223
1224
for(int i = 0; i < (int)sp.size(); ++i)
1225
{
1226
CV_Assert(sp[i].type() == FileNode::MAP);
1227
String _name = (String)sp[i]["name"];
1228
FlannIndexType type = (FlannIndexType)(int)sp[i]["type"];
1229
CV_CheckLE((int)type, (int)LAST_VALUE_FLANN_INDEX_TYPE, "");
1230
1231
switch(type)
1232
{
1233
case FLANN_INDEX_TYPE_8U:
1234
case FLANN_INDEX_TYPE_8S:
1235
case FLANN_INDEX_TYPE_16U:
1236
case FLANN_INDEX_TYPE_16S:
1237
case FLANN_INDEX_TYPE_32S:
1238
searchParams->setInt(_name, (int) sp[i]["value"]);
1239
break;
1240
case FLANN_INDEX_TYPE_32F:
1241
searchParams->setFloat(_name, (float) ip[i]["value"]);
1242
break;
1243
case FLANN_INDEX_TYPE_64F:
1244
searchParams->setDouble(_name, (double) ip[i]["value"]);
1245
break;
1246
case FLANN_INDEX_TYPE_STRING:
1247
searchParams->setString(_name, (String) ip[i]["value"]);
1248
break;
1249
case FLANN_INDEX_TYPE_BOOL:
1250
searchParams->setBool(_name, (int) ip[i]["value"] != 0);
1251
break;
1252
case FLANN_INDEX_TYPE_ALGORITHM:
1253
searchParams->setAlgorithm((int) ip[i]["value"]);
1254
break;
1255
// don't default: - compiler warning is here
1256
};
1257
}
1258
1259
flannIndex.release();
1260
}
void FlannBasedMatcher::write( FileStorage& fs) const
1263
{
1264
writeFormat(fs);
1265
fs << "indexParams" << "[";
1266
1267
if (indexParams)
1268
{
1269
std::vector<String> names;
1270
std::vector<FlannIndexType> types;
1271
std::vector<String> strValues;
1272
std::vector<double> numValues;
1273
1274
indexParams->getAll(names, types, strValues, numValues);
1275
1276
for(size_t i = 0; i < names.size(); ++i)
1277
{
1278
fs << "{" << "name" << names[i] << "type" << types[i] << "value";
1279
FlannIndexType type = (FlannIndexType)types[i];
1280
if ((int)type < 0 || type > LAST_VALUE_FLANN_INDEX_TYPE)
1281
{
1282
fs << (double)numValues[i];
1283
fs << "typename" << strValues[i];
1284
fs << "}";
1285
continue;
1286
}
1287
switch(type)
1288
{
1289
case FLANN_INDEX_TYPE_8U:
1290
fs << (uchar)numValues[i];
1291
break;
1292
case FLANN_INDEX_TYPE_8S:
1293
fs << (char)numValues[i];
1294
break;
1295
case FLANN_INDEX_TYPE_16U:
1296
fs << (ushort)numValues[i];
1297
break;
1298
case FLANN_INDEX_TYPE_16S:
1299
fs << (short)numValues[i];
1300
break;
1301
case FLANN_INDEX_TYPE_32S:
1302
case FLANN_INDEX_TYPE_BOOL:
1303
case FLANN_INDEX_TYPE_ALGORITHM:
1304
fs << (int)numValues[i];
1305
break;
1306
case FLANN_INDEX_TYPE_32F:
1307
fs << (float)numValues[i];
1308
break;
1309
case FLANN_INDEX_TYPE_64F:
1310
fs << (double)numValues[i];
1311
break;
1312
case FLANN_INDEX_TYPE_STRING:
1313
fs << strValues[i];
1314
break;
1315
// don't default: - compiler warning is here
1316
}
1317
fs << "}";
1318
}
1319
}
1320
1321
fs << "]" << "searchParams" << "[";
1322
1323
if (searchParams)
1324
{
1325
std::vector<String> names;
1326
std::vector<FlannIndexType> types;
1327
std::vector<String> strValues;
1328
std::vector<double> numValues;
1329
1330
searchParams->getAll(names, types, strValues, numValues);
1331
1332
for(size_t i = 0; i < names.size(); ++i)
1333
{
1334
fs << "{" << "name" << names[i] << "type" << types[i] << "value";
1335
FlannIndexType type = (FlannIndexType)types[i];
1336
if ((int)type < 0 || type > LAST_VALUE_FLANN_INDEX_TYPE)
1337
{
1338
fs << (double)numValues[i];
1339
fs << "typename" << strValues[i];
1340
fs << "}";
1341
continue;
1342
}
1343
switch(type)
1344
{
1345
case FLANN_INDEX_TYPE_8U:
1346
fs << (uchar)numValues[i];
1347
break;
1348
case FLANN_INDEX_TYPE_8S:
1349
fs << (char)numValues[i];
1350
break;
1351
case FLANN_INDEX_TYPE_16U:
1352
fs << (ushort)numValues[i];
1353
break;
1354
case FLANN_INDEX_TYPE_16S:
1355
fs << (short)numValues[i];
1356
break;
1357
case FLANN_INDEX_TYPE_32S:
1358
case FLANN_INDEX_TYPE_BOOL:
1359
case FLANN_INDEX_TYPE_ALGORITHM:
1360
fs << (int)numValues[i];
1361
break;
1362
case CV_32F:
1363
fs << (float)numValues[i];
1364
break;
1365
case CV_64F:
1366
fs << (double)numValues[i];
1367
break;
1368
case FLANN_INDEX_TYPE_STRING:
1369
fs << strValues[i];
1370
break;
1371
// don't default: - compiler warning is here
1372
}
1373
fs << "}";
1374
}
1375
}
1376
fs << "]";
1377
}
// Mask-based match filtering is not implemented for the FLANN matcher.
bool FlannBasedMatcher::isMaskSupported() const
{
    return false;
}
// Creates a copy that shares this matcher's parameter objects. Only shallow
// clones (emptyTrainData == true) are supported: flann::Index has no copy
// constructor or clone method, so requesting a deep clone of trained state
// raises StsNotImplemented.
Ptr<DescriptorMatcher> FlannBasedMatcher::clone( bool emptyTrainData ) const
{
    Ptr<FlannBasedMatcher> matcher = makePtr<FlannBasedMatcher>(indexParams, searchParams);
    if( !emptyTrainData )
    {
        CV_Error( Error::StsNotImplemented, "deep clone functionality is not implemented, because "
                  "Flann::Index has not copy constructor or clone method ");
#if 0
        // Dead sketch of the deep-clone path, kept for reference.
        //matcher->flannIndex;
        matcher->addedDescCount = addedDescCount;
        matcher->mergedDescriptors = DescriptorCollection( mergedDescriptors );
        std::transform( trainDescCollection.begin(), trainDescCollection.end(),
                        matcher->trainDescCollection.begin(), clone_op );
#endif
    }
    return matcher;
}
void FlannBasedMatcher::convertToDMatches( const DescriptorCollection& collection, const Mat& indices, const Mat& dists,
1403
std::vector<std::vector<DMatch> >& matches )
1404
{
1405
matches.resize( indices.rows );
1406
for( int i = 0; i < indices.rows; i++ )
1407
{
1408
for( int j = 0; j < indices.cols; j++ )
1409
{
1410
int idx = indices.at<int>(i, j);
1411
if( idx >= 0 )
1412
{
1413
int imgIdx, trainIdx;
1414
collection.getLocalIdx( idx, imgIdx, trainIdx );
1415
float dist = 0;
1416
if (dists.type() == CV_32S)
1417
dist = static_cast<float>( dists.at<int>(i,j) );
1418
else
1419
dist = std::sqrt(dists.at<float>(i,j));
1420
matches[i].push_back( DMatch( i, trainIdx, imgIdx, dist ) );
1421
}
1422
}
1423
}
1424
}
// k-nearest-neighbour matching through the FLANN index. `masks` and
// `compactResult` are accepted for interface compatibility but ignored
// (masks are unsupported, see isMaskSupported()). Requires the index to be
// built (train() called) beforehand.
void FlannBasedMatcher::knnMatchImpl( InputArray _queryDescriptors, std::vector<std::vector<DMatch> >& matches, int knn,
                                      InputArrayOfArrays /*masks*/, bool /*compactResult*/ )
{
    CV_INSTRUMENT_REGION();

    Mat queryDescriptors = _queryDescriptors.getMat();
    // One row of indices/distances per query descriptor, knn columns each.
    Mat indices( queryDescriptors.rows, knn, CV_32SC1 );
    Mat dists( queryDescriptors.rows, knn, CV_32FC1);
    flannIndex->knnSearch( queryDescriptors, indices, dists, knn, *searchParams );

    convertToDMatches( mergedDescriptors, indices, dists, matches );
}
void FlannBasedMatcher::radiusMatchImpl( InputArray _queryDescriptors, std::vector<std::vector<DMatch> >& matches, float maxDistance,
1440
InputArrayOfArrays /*masks*/, bool /*compactResult*/ )
1441
{
1442
CV_INSTRUMENT_REGION();
1443
1444
Mat queryDescriptors = _queryDescriptors.getMat();
1445
const int count = mergedDescriptors.size(); // TODO do count as param?
1446
Mat indices( queryDescriptors.rows, count, CV_32SC1, Scalar::all(-1) );
1447
Mat dists( queryDescriptors.rows, count, CV_32FC1, Scalar::all(-1) );
1448
for( int qIdx = 0; qIdx < queryDescriptors.rows; qIdx++ )
1449
{
1450
Mat queryDescriptorsRow = queryDescriptors.row(qIdx);
1451
Mat indicesRow = indices.row(qIdx);
1452
Mat distsRow = dists.row(qIdx);
1453
flannIndex->radiusSearch( queryDescriptorsRow, indicesRow, distsRow, maxDistance*maxDistance, count, *searchParams );
1454
}
1455
1456
convertToDMatches( mergedDescriptors, indices, dists, matches );
1457
}

#endif

}