Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/calib3d/src/undistort.cpp
16344 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
14
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
15
// Third party copyrights are property of their respective owners.
16
//
17
// Redistribution and use in source and binary forms, with or without modification,
18
// are permitted provided that the following conditions are met:
19
//
20
// * Redistribution's of source code must retain the above copyright notice,
21
// this list of conditions and the following disclaimer.
22
//
23
// * Redistribution's in binary form must reproduce the above copyright notice,
24
// this list of conditions and the following disclaimer in the documentation
25
// and/or other materials provided with the distribution.
26
//
27
// * The name of the copyright holders may not be used to endorse or promote products
28
// derived from this software without specific prior written permission.
29
//
30
// This software is provided by the copyright holders and contributors "as is" and
31
// any express or implied warranties, including, but not limited to, the implied
32
// warranties of merchantability and fitness for a particular purpose are disclaimed.
33
// In no event shall the Intel Corporation or contributors be liable for any direct,
34
// indirect, incidental, special, exemplary, or consequential damages
35
// (including, but not limited to, procurement of substitute goods or services;
36
// loss of use, data, or profits; or business interruption) however caused
37
// and on any theory of liability, whether in contract, strict liability,
38
// or tort (including negligence or otherwise) arising in any way out of
39
// the use of this software, even if advised of the possibility of such damage.
40
//
41
//M*/
42
43
#include "precomp.hpp"
44
#include "distortion_model.hpp"
45
#include "undistort.hpp"
46
47
#include "opencv2/calib3d/calib3d_c.h"
48
49
// Returns a CV_64F copy of the camera matrix to be used as the "new camera
// matrix": either the input matrix unchanged, or (when centerPrincipalPoint
// is set) the same matrix with the principal point moved to the image center.
cv::Mat cv::getDefaultNewCameraMatrix( InputArray _cameraMatrix, Size imgsize,
                                       bool centerPrincipalPoint )
{
    Mat src = _cameraMatrix.getMat();

    // Fast path: already double precision and nothing to modify.
    if( src.type() == CV_64F && !centerPrincipalPoint )
        return src;

    Mat dst;
    src.convertTo(dst, CV_64F);
    if( centerPrincipalPoint )
    {
        double* m = dst.ptr<double>();
        m[2] = (imgsize.width-1)*0.5;   // cx -> horizontal image center
        m[5] = (imgsize.height-1)*0.5;  // cy -> vertical image center
    }
    return dst;
}
65
66
// Parallel row-range worker for cv::initUndistortRectifyMap().
// For each destination pixel (j, i) it applies the inverse rectification
// homography `ir` (row-major 3x3, applied incrementally along the row),
// then the radial/tangential/thin-prism distortion model and the tilt
// matrix, and finally projects with (fx, fy, u0, v0) to get the source
// image coordinates that remap() should sample from.
class initUndistortRectifyMapComputer : public cv::ParallelLoopBody
{
public:
    initUndistortRectifyMapComputer(
        cv::Size _size, cv::Mat &_map1, cv::Mat &_map2, int _m1type,
        const double* _ir, cv::Matx33d &_matTilt,
        double _u0, double _v0, double _fx, double _fy,
        double _k1, double _k2, double _p1, double _p2,
        double _k3, double _k4, double _k5, double _k6,
        double _s1, double _s2, double _s3, double _s4)
      : size(_size),
        map1(_map1),
        map2(_map2),
        m1type(_m1type),
        ir(_ir),
        matTilt(_matTilt),
        u0(_u0),
        v0(_v0),
        fx(_fx),
        fy(_fy),
        k1(_k1),
        k2(_k2),
        p1(_p1),
        p2(_p2),
        k3(_k3),
        k4(_k4),
        k5(_k5),
        k6(_k6),
        s1(_s1),
        s2(_s2),
        s3(_s3),
        s4(_s4) {
#if CV_TRY_AVX2
        // Runtime CPU dispatch: decided once here, used per row in operator().
        useAVX2 = cv::checkHardwareSupport(CV_CPU_AVX2);
#endif
    }

    // Processes destination rows [range.start, range.end).
    void operator()( const cv::Range& range ) const CV_OVERRIDE
    {
        const int begin = range.start;
        const int end = range.end;

        for( int i = begin; i < end; i++ )
        {
            // The same row buffer is viewed as float or as packed short/ushort
            // depending on m1type (see the per-type branches below).
            float* m1f = map1.ptr<float>(i);
            float* m2f = map2.empty() ? 0 : map2.ptr<float>(i);
            short* m1 = (short*)m1f;
            ushort* m2 = (ushort*)m2f;
            // Homogeneous coordinates of the rectified point at column 0 of
            // row i; advanced incrementally by ir[0], ir[3], ir[6] per column.
            double _x = i*ir[1] + ir[2], _y = i*ir[4] + ir[5], _w = i*ir[7] + ir[8];

            int j = 0;

            if (m1type == CV_16SC2)
                CV_Assert(m1 != NULL && m2 != NULL);
            else if (m1type == CV_32FC1)
                CV_Assert(m1f != NULL && m2f != NULL);
            else
                CV_Assert(m1 != NULL);

#if CV_TRY_AVX2
            // Vectorized path computes a prefix of the row; the scalar loop
            // below finishes from column j on.
            if( useAVX2 )
                j = cv::initUndistortRectifyMapLine_AVX(m1f, m2f, m1, m2,
                        matTilt.val, ir, _x, _y, _w, size.width, m1type,
                        k1, k2, k3, k4, k5, k6, p1, p2, s1, s2, s3, s4, u0, v0, fx, fy);
#endif
            for( ; j < size.width; j++, _x += ir[0], _y += ir[3], _w += ir[6] )
            {
                // Normalized (pinhole) coordinates of the rectified point.
                double w = 1./_w, x = _x*w, y = _y*w;
                double x2 = x*x, y2 = y*y;
                double r2 = x2 + y2, _2xy = 2*x*y;
                // Rational radial distortion factor.
                double kr = (1 + ((k3*r2 + k2)*r2 + k1)*r2)/(1 + ((k6*r2 + k5)*r2 + k4)*r2);
                // Radial + tangential (p1, p2) + thin-prism (s1..s4) terms.
                double xd = (x*kr + p1*_2xy + p2*(r2 + 2*x2) + s1*r2+s2*r2*r2);
                double yd = (y*kr + p1*(r2 + 2*y2) + p2*_2xy + s3*r2+s4*r2*r2);
                // Tilted-sensor (Scheimpflug) projection.
                cv::Vec3d vecTilt = matTilt*cv::Vec3d(xd, yd, 1);
                double invProj = vecTilt(2) ? 1./vecTilt(2) : 1;
                double u = fx*invProj*vecTilt(0) + u0;
                double v = fy*invProj*vecTilt(1) + v0;
                if( m1type == CV_16SC2 )
                {
                    // Fixed-point layout used by remap(): m1 holds the integer
                    // part of the coordinates, m2 holds the index into the
                    // INTER_TAB_SIZE x INTER_TAB_SIZE interpolation table
                    // built from the fractional bits.
                    int iu = cv::saturate_cast<int>(u*cv::INTER_TAB_SIZE);
                    int iv = cv::saturate_cast<int>(v*cv::INTER_TAB_SIZE);
                    m1[j*2] = (short)(iu >> cv::INTER_BITS);
                    m1[j*2+1] = (short)(iv >> cv::INTER_BITS);
                    m2[j] = (ushort)((iv & (cv::INTER_TAB_SIZE-1))*cv::INTER_TAB_SIZE + (iu & (cv::INTER_TAB_SIZE-1)));
                }
                else if( m1type == CV_32FC1 )
                {
                    // Two separate single-channel float maps.
                    m1f[j] = (float)u;
                    m2f[j] = (float)v;
                }
                else
                {
                    // CV_32FC2: interleaved (u, v) in map1 only.
                    m1f[j*2] = (float)u;
                    m1f[j*2+1] = (float)v;
                }
            }
        }
    }

private:
    cv::Size size;
    cv::Mat &map1;          // output map(s); written concurrently, disjoint rows
    cv::Mat &map2;
    int m1type;             // CV_16SC2, CV_32FC1 or CV_32FC2
    const double* ir;       // inverse rectification 3x3, row-major; must outlive this body
    cv::Matx33d &matTilt;   // tilted-sensor projection matrix
    double u0;              // principal point
    double v0;
    double fx;              // focal lengths
    double fy;
    double k1;              // radial distortion (numerator)
    double k2;
    double p1;              // tangential distortion
    double p2;
    double k3;
    double k4;              // radial distortion (denominator)
    double k5;
    double k6;
    double s1;              // thin-prism distortion
    double s2;
    double s3;
    double s4;
#if CV_TRY_AVX2
    bool useAVX2;
#endif
};
192
193
// Computes the undistortion+rectification maps for remap().
// For every pixel of the destination (rectified) image the map stores the
// coordinates of the corresponding source pixel, obtained by applying
// (newCameraMatrix*R)^-1 followed by the distortion model and the original
// camera projection. The per-pixel work is done in parallel by
// initUndistortRectifyMapComputer above.
void cv::initUndistortRectifyMap( InputArray _cameraMatrix, InputArray _distCoeffs,
                                  InputArray _matR, InputArray _newCameraMatrix,
                                  Size size, int m1type, OutputArray _map1, OutputArray _map2 )
{
    Mat cameraMatrix = _cameraMatrix.getMat(), distCoeffs = _distCoeffs.getMat();
    Mat matR = _matR.getMat(), newCameraMatrix = _newCameraMatrix.getMat();

    // Default to the compact fixed-point representation.
    if( m1type <= 0 )
        m1type = CV_16SC2;
    CV_Assert( m1type == CV_16SC2 || m1type == CV_32FC1 || m1type == CV_32FC2 );
    _map1.create( size, m1type );
    Mat map1 = _map1.getMat(), map2;
    if( m1type != CV_32FC2 )
    {
        // Second map: interpolation-table indices (16SC2) or the y map (32FC1).
        _map2.create( size, m1type == CV_16SC2 ? CV_16UC1 : CV_32FC1 );
        map2 = _map2.getMat();
    }
    else
        _map2.release();

    Mat_<double> R = Mat_<double>::eye(3, 3);
    Mat_<double> A = Mat_<double>(cameraMatrix), Ar;

    if( !newCameraMatrix.empty() )
        Ar = Mat_<double>(newCameraMatrix);
    else
        Ar = getDefaultNewCameraMatrix( A, size, true );

    if( !matR.empty() )
        R = Mat_<double>(matR);

    // Missing distortion coefficients are treated as all-zero (14 of them).
    if( !distCoeffs.empty() )
        distCoeffs = Mat_<double>(distCoeffs);
    else
    {
        distCoeffs.create(14, 1, CV_64F);
        distCoeffs = 0.;
    }

    CV_Assert( A.size() == Size(3,3) && A.size() == R.size() );
    CV_Assert( Ar.size() == Size(3,3) || Ar.size() == Size(4, 3));
    // Inverse of the rectifying transform; only the left 3x3 of a 3x4
    // projection matrix is used. `ir` points into iR, which stays alive
    // through the parallel_for_ below.
    Mat_<double> iR = (Ar.colRange(0,3)*R).inv(DECOMP_LU);
    const double* ir = &iR(0,0);

    double u0 = A(0, 2), v0 = A(1, 2);
    double fx = A(0, 0), fy = A(1, 1);

    CV_Assert( distCoeffs.size() == Size(1, 4) || distCoeffs.size() == Size(4, 1) ||
               distCoeffs.size() == Size(1, 5) || distCoeffs.size() == Size(5, 1) ||
               distCoeffs.size() == Size(1, 8) || distCoeffs.size() == Size(8, 1) ||
               distCoeffs.size() == Size(1, 12) || distCoeffs.size() == Size(12, 1) ||
               distCoeffs.size() == Size(1, 14) || distCoeffs.size() == Size(14, 1));

    // Make the coefficients contiguous so they can be read via a flat pointer.
    if( distCoeffs.rows != 1 && !distCoeffs.isContinuous() )
        distCoeffs = distCoeffs.t();

    // Coefficient order: k1, k2, p1, p2 [, k3 [, k4, k5, k6 [, s1..s4 [, tauX, tauY]]]].
    // Coefficients beyond the supplied count default to 0.
    const double* const distPtr = distCoeffs.ptr<double>();
    double k1 = distPtr[0];
    double k2 = distPtr[1];
    double p1 = distPtr[2];
    double p2 = distPtr[3];
    double k3 = distCoeffs.cols + distCoeffs.rows - 1 >= 5 ? distPtr[4] : 0.;
    double k4 = distCoeffs.cols + distCoeffs.rows - 1 >= 8 ? distPtr[5] : 0.;
    double k5 = distCoeffs.cols + distCoeffs.rows - 1 >= 8 ? distPtr[6] : 0.;
    double k6 = distCoeffs.cols + distCoeffs.rows - 1 >= 8 ? distPtr[7] : 0.;
    double s1 = distCoeffs.cols + distCoeffs.rows - 1 >= 12 ? distPtr[8] : 0.;
    double s2 = distCoeffs.cols + distCoeffs.rows - 1 >= 12 ? distPtr[9] : 0.;
    double s3 = distCoeffs.cols + distCoeffs.rows - 1 >= 12 ? distPtr[10] : 0.;
    double s4 = distCoeffs.cols + distCoeffs.rows - 1 >= 12 ? distPtr[11] : 0.;
    double tauX = distCoeffs.cols + distCoeffs.rows - 1 >= 14 ? distPtr[12] : 0.;
    double tauY = distCoeffs.cols + distCoeffs.rows - 1 >= 14 ? distPtr[13] : 0.;

    // Matrix for trapezoidal distortion of tilted image sensor
    cv::Matx33d matTilt = cv::Matx33d::eye();
    cv::detail::computeTiltProjectionMatrix(tauX, tauY, &matTilt);

    parallel_for_(Range(0, size.height), initUndistortRectifyMapComputer(
        size, map1, map2, m1type, ir, matTilt, u0, v0,
        fx, fy, k1, k2, p1, p2, k3, k4, k5, k6, s1, s2, s3, s4));
}
273
274
275
// Undistorts an image: builds the undistortion maps stripe by stripe (to
// bound the temporary map memory to roughly 4096 pixels worth of rows) and
// remaps each stripe of the source into the destination.
void cv::undistort( InputArray _src, OutputArray _dst, InputArray _cameraMatrix,
                    InputArray _distCoeffs, InputArray _newCameraMatrix )
{
    CV_INSTRUMENT_REGION();

    Mat src = _src.getMat(), cameraMatrix = _cameraMatrix.getMat();
    Mat distCoeffs = _distCoeffs.getMat(), newCameraMatrix = _newCameraMatrix.getMat();

    _dst.create( src.size(), src.type() );
    Mat dst = _dst.getMat();

    // In-place operation is not supported: remap reads src while writing dst.
    CV_Assert( dst.data != src.data );

    // Stripe height chosen so a stripe holds about 2^12 pixels.
    int stripe_size0 = std::min(std::max(1, (1 << 12) / std::max(src.cols, 1)), src.rows);
    Mat map1(stripe_size0, src.cols, CV_16SC2), map2(stripe_size0, src.cols, CV_16UC1);

    Mat_<double> A, Ar, I = Mat_<double>::eye(3,3);

    cameraMatrix.convertTo(A, CV_64F);
    if( !distCoeffs.empty() )
        distCoeffs = Mat_<double>(distCoeffs);
    else
    {
        // No coefficients supplied: use a zero 5-element vector.
        distCoeffs.create(5, 1, CV_64F);
        distCoeffs = 0.;
    }

    if( !newCameraMatrix.empty() )
        newCameraMatrix.convertTo(Ar, CV_64F);
    else
        A.copyTo(Ar);

    // The stripe trick: shift the new camera matrix's cy by -y so that the
    // map computed for a stripe-sized image addresses rows [y, y+stripe).
    double v0 = Ar(1, 2);
    for( int y = 0; y < src.rows; y += stripe_size0 )
    {
        int stripe_size = std::min( stripe_size0, src.rows - y );
        Ar(1, 2) = v0 - y;
        Mat map1_part = map1.rowRange(0, stripe_size),
            map2_part = map2.rowRange(0, stripe_size),
            dst_part = dst.rowRange(y, y + stripe_size);

        initUndistortRectifyMap( A, distCoeffs, I, Ar, Size(src.cols, stripe_size),
                                 map1_part.type(), map1_part, map2_part );
        remap( src, dst_part, map1_part, map2_part, INTER_LINEAR, BORDER_CONSTANT );
    }
}
321
322
323
// Legacy C API wrapper around cv::undistort().
CV_IMPL void
cvUndistort2( const CvArr* srcarr, CvArr* dstarr, const CvMat* Aarr, const CvMat* dist_coeffs, const CvMat* newAarr )
{
    cv::Mat input = cv::cvarrToMat(srcarr);
    cv::Mat output = cv::cvarrToMat(dstarr);
    cv::Mat output0 = output;   // header kept to the caller's buffer
    cv::Mat camMat = cv::cvarrToMat(Aarr);
    cv::Mat dcoeffs = cv::cvarrToMat(dist_coeffs);
    cv::Mat newCamMat;
    if( newAarr != 0 )
        newCamMat = cv::cvarrToMat(newAarr);

    CV_Assert( input.size() == output.size() && input.type() == output.type() );
    cv::undistort( input, output, camMat, dcoeffs, newCamMat );
}
334
335
336
// Legacy C API wrapper: builds undistortion maps with an identity rectification
// and the original camera matrix as the new camera matrix.
CV_IMPL void cvInitUndistortMap( const CvMat* Aarr, const CvMat* dist_coeffs,
                                 CvArr* mapxarr, CvArr* mapyarr )
{
    cv::Mat camMat = cv::cvarrToMat(Aarr);
    cv::Mat dcoeffs = cv::cvarrToMat(dist_coeffs);
    cv::Mat mapx = cv::cvarrToMat(mapxarr);
    cv::Mat mapy;
    cv::Mat mapx0 = mapx;   // headers kept to verify no reallocation happened
    cv::Mat mapy0;

    if( mapyarr != 0 )
    {
        mapy = cv::cvarrToMat(mapyarr);
        mapy0 = mapy;
    }

    cv::initUndistortRectifyMap( camMat, dcoeffs, cv::Mat(), camMat,
                                 mapx.size(), mapx.type(), mapx, mapy );
    // The maps must have been written into the caller-provided buffers.
    CV_Assert( mapx0.data == mapx.data && mapy0.data == mapy.data );
}
349
350
void
351
cvInitUndistortRectifyMap( const CvMat* Aarr, const CvMat* dist_coeffs,
352
const CvMat *Rarr, const CvMat* ArArr, CvArr* mapxarr, CvArr* mapyarr )
353
{
354
cv::Mat A = cv::cvarrToMat(Aarr), distCoeffs, R, Ar;
355
cv::Mat mapx = cv::cvarrToMat(mapxarr), mapy, mapx0 = mapx, mapy0;
356
357
if( mapyarr )
358
mapy0 = mapy = cv::cvarrToMat(mapyarr);
359
360
if( dist_coeffs )
361
distCoeffs = cv::cvarrToMat(dist_coeffs);
362
if( Rarr )
363
R = cv::cvarrToMat(Rarr);
364
if( ArArr )
365
Ar = cv::cvarrToMat(ArArr);
366
367
cv::initUndistortRectifyMap( A, distCoeffs, R, Ar, mapx.size(), mapx.type(), mapx, mapy );
368
CV_Assert( mapx0.data == mapx.data && mapy0.data == mapy.data );
369
}
370
371
// Core implementation of undistortPoints: for each input pixel it inverts the
// distortion model by fixed-point iteration (optionally with an explicit
// reprojection-error stopping criterion), then applies the rectification R
// and, if given, the new projection P.
// The iteration solves x_u from x_d = distort(x_u) by repeatedly computing
// x_u = (x_d - delta(x_u)) / radial(x_u) starting from x_u = x_d.
static void cvUndistortPointsInternal( const CvMat* _src, CvMat* _dst, const CvMat* _cameraMatrix,
                   const CvMat* _distCoeffs,
                   const CvMat* matR, const CvMat* matP, cv::TermCriteria criteria)
{
    CV_Assert(criteria.isValid());
    // k[] holds up to 14 coefficients: k1,k2,p1,p2,k3,k4,k5,k6,s1..s4,tauX,tauY
    double A[3][3], RR[3][3], k[14]={0,0,0,0,0,0,0,0,0,0,0,0,0,0};
    CvMat matA=cvMat(3, 3, CV_64F, A), _Dk;
    CvMat _RR=cvMat(3, 3, CV_64F, RR);
    cv::Matx33d invMatTilt = cv::Matx33d::eye();
    cv::Matx33d matTilt = cv::Matx33d::eye();

    // src/dst must be 1xN or Nx1, 2-channel float or double, same point count.
    CV_Assert( CV_IS_MAT(_src) && CV_IS_MAT(_dst) &&
        (_src->rows == 1 || _src->cols == 1) &&
        (_dst->rows == 1 || _dst->cols == 1) &&
        _src->cols + _src->rows - 1 == _dst->rows + _dst->cols - 1 &&
        (CV_MAT_TYPE(_src->type) == CV_32FC2 || CV_MAT_TYPE(_src->type) == CV_64FC2) &&
        (CV_MAT_TYPE(_dst->type) == CV_32FC2 || CV_MAT_TYPE(_dst->type) == CV_64FC2));

    CV_Assert( CV_IS_MAT(_cameraMatrix) &&
        _cameraMatrix->rows == 3 && _cameraMatrix->cols == 3 );

    cvConvert( _cameraMatrix, &matA );


    if( _distCoeffs )
    {
        CV_Assert( CV_IS_MAT(_distCoeffs) &&
            (_distCoeffs->rows == 1 || _distCoeffs->cols == 1) &&
            (_distCoeffs->rows*_distCoeffs->cols == 4 ||
             _distCoeffs->rows*_distCoeffs->cols == 5 ||
             _distCoeffs->rows*_distCoeffs->cols == 8 ||
             _distCoeffs->rows*_distCoeffs->cols == 12 ||
             _distCoeffs->rows*_distCoeffs->cols == 14));

        // Read the supplied coefficients into the leading entries of k[];
        // the rest stay zero.
        _Dk = cvMat( _distCoeffs->rows, _distCoeffs->cols,
            CV_MAKETYPE(CV_64F,CV_MAT_CN(_distCoeffs->type)), k);

        cvConvert( _distCoeffs, &_Dk );
        // Tilt matrices are only needed when tauX/tauY are nonzero.
        if (k[12] != 0 || k[13] != 0)
        {
            cv::detail::computeTiltProjectionMatrix<double>(k[12], k[13], NULL, NULL, NULL, &invMatTilt);
            cv::detail::computeTiltProjectionMatrix<double>(k[12], k[13], &matTilt, NULL, NULL);
        }
    }

    if( matR )
    {
        CV_Assert( CV_IS_MAT(matR) && matR->rows == 3 && matR->cols == 3 );
        cvConvert( matR, &_RR );
    }
    else
        cvSetIdentity(&_RR);

    if( matP )
    {
        // Fold the (left 3x3 of the) projection matrix into RR, so the final
        // per-point transform is a single homography.
        double PP[3][3];
        CvMat _P3x3, _PP=cvMat(3, 3, CV_64F, PP);
        CV_Assert( CV_IS_MAT(matP) && matP->rows == 3 && (matP->cols == 3 || matP->cols == 4));
        cvConvert( cvGetCols(matP, &_P3x3, 0, 3), &_PP );
        cvMatMul( &_PP, &_RR, &_RR );
    }

    // Raw data access; only the pointer matching the actual type is used.
    const CvPoint2D32f* srcf = (const CvPoint2D32f*)_src->data.ptr;
    const CvPoint2D64f* srcd = (const CvPoint2D64f*)_src->data.ptr;
    CvPoint2D32f* dstf = (CvPoint2D32f*)_dst->data.ptr;
    CvPoint2D64f* dstd = (CvPoint2D64f*)_dst->data.ptr;
    int stype = CV_MAT_TYPE(_src->type);
    int dtype = CV_MAT_TYPE(_dst->type);
    // Element strides: for Nx1 layouts the step between points is the row step.
    int sstep = _src->rows == 1 ? 1 : _src->step/CV_ELEM_SIZE(stype);
    int dstep = _dst->rows == 1 ? 1 : _dst->step/CV_ELEM_SIZE(dtype);

    double fx = A[0][0];
    double fy = A[1][1];
    double ifx = 1./fx;
    double ify = 1./fy;
    double cx = A[0][2];
    double cy = A[1][2];

    int n = _src->rows + _src->cols - 1;
    for( int i = 0; i < n; i++ )
    {
        double x, y, x0 = 0, y0 = 0, u, v;
        if( stype == CV_32FC2 )
        {
            x = srcf[i*sstep].x;
            y = srcf[i*sstep].y;
        }
        else
        {
            x = srcd[i*sstep].x;
            y = srcd[i*sstep].y;
        }
        // (u, v): original pixel coordinates, kept for error measurement.
        u = x; v = y;
        // Convert to normalized camera coordinates.
        x = (x - cx)*ifx;
        y = (y - cy)*ify;

        if( _distCoeffs ) {
            // compensate tilt distortion
            cv::Vec3d vecUntilt = invMatTilt * cv::Vec3d(x, y, 1);
            double invProj = vecUntilt(2) ? 1./vecUntilt(2) : 1;
            // (x0, y0) is the fixed distorted point; (x, y) is iterated.
            x0 = x = invProj * vecUntilt(0);
            y0 = y = invProj * vecUntilt(1);

            double error = std::numeric_limits<double>::max();
            // compensate distortion iteratively

            for( int j = 0; ; j++ )
            {
                if ((criteria.type & cv::TermCriteria::COUNT) && j >= criteria.maxCount)
                    break;
                if ((criteria.type & cv::TermCriteria::EPS) && error < criteria.epsilon)
                    break;
                double r2 = x*x + y*y;
                // Inverse of the rational radial factor evaluated at the
                // current estimate.
                double icdist = (1 + ((k[7]*r2 + k[6])*r2 + k[5])*r2)/(1 + ((k[4]*r2 + k[1])*r2 + k[0])*r2);
                // Tangential + thin-prism displacement at the current estimate.
                double deltaX = 2*k[2]*x*y + k[3]*(r2 + 2*x*x)+ k[8]*r2+k[9]*r2*r2;
                double deltaY = k[2]*(r2 + 2*y*y) + 2*k[3]*x*y+ k[10]*r2+k[11]*r2*r2;
                x = (x0 - deltaX)*icdist;
                y = (y0 - deltaY)*icdist;

                if(criteria.type & cv::TermCriteria::EPS)
                {
                    // Re-distort the current estimate and measure the pixel
                    // reprojection error against the original input point.
                    double r4, r6, a1, a2, a3, cdist, icdist2;
                    double xd, yd, xd0, yd0;
                    cv::Vec3d vecTilt;

                    r2 = x*x + y*y;
                    r4 = r2*r2;
                    r6 = r4*r2;
                    a1 = 2*x*y;
                    a2 = r2 + 2*x*x;
                    a3 = r2 + 2*y*y;
                    cdist = 1 + k[0]*r2 + k[1]*r4 + k[4]*r6;
                    icdist2 = 1./(1 + k[5]*r2 + k[6]*r4 + k[7]*r6);
                    xd0 = x*cdist*icdist2 + k[2]*a1 + k[3]*a2 + k[8]*r2+k[9]*r4;
                    yd0 = y*cdist*icdist2 + k[2]*a3 + k[3]*a1 + k[10]*r2+k[11]*r4;

                    vecTilt = matTilt*cv::Vec3d(xd0, yd0, 1);
                    invProj = vecTilt(2) ? 1./vecTilt(2) : 1;
                    xd = invProj * vecTilt(0);
                    yd = invProj * vecTilt(1);

                    double x_proj = xd*fx + cx;
                    double y_proj = yd*fy + cy;

                    error = sqrt( pow(x_proj - u, 2) + pow(y_proj - v, 2) );
                }
            }
        }

        // Apply the combined P*R homography to the undistorted point.
        double xx = RR[0][0]*x + RR[0][1]*y + RR[0][2];
        double yy = RR[1][0]*x + RR[1][1]*y + RR[1][2];
        double ww = 1./(RR[2][0]*x + RR[2][1]*y + RR[2][2]);
        x = xx*ww;
        y = yy*ww;

        if( dtype == CV_32FC2 )
        {
            dstf[i*dstep].x = (float)x;
            dstf[i*dstep].y = (float)y;
        }
        else
        {
            dstd[i*dstep].x = x;
            dstd[i*dstep].y = y;
        }
    }
}
538
539
void cvUndistortPoints(const CvMat* _src, CvMat* _dst, const CvMat* _cameraMatrix,
540
const CvMat* _distCoeffs,
541
const CvMat* matR, const CvMat* matP)
542
{
543
cvUndistortPointsInternal(_src, _dst, _cameraMatrix, _distCoeffs, matR, matP,
544
cv::TermCriteria(cv::TermCriteria::COUNT, 5, 0.01));
545
}
546
547
namespace cv {
548
549
// Convenience overload: forwards to the full version with the historical
// default termination criteria (5 iterations).
void undistortPoints(InputArray _src, OutputArray _dst,
                     InputArray _cameraMatrix,
                     InputArray _distCoeffs,
                     InputArray _Rmat,
                     InputArray _Pmat)
{
    const TermCriteria defaultCriteria(TermCriteria::MAX_ITER, 5, 0.01);
    undistortPoints(_src, _dst, _cameraMatrix, _distCoeffs, _Rmat, _Pmat, defaultCriteria);
}
557
558
void undistortPoints(InputArray _src, OutputArray _dst,
559
InputArray _cameraMatrix,
560
InputArray _distCoeffs,
561
InputArray _Rmat,
562
InputArray _Pmat,
563
TermCriteria criteria)
564
{
565
Mat src = _src.getMat(), cameraMatrix = _cameraMatrix.getMat();
566
Mat distCoeffs = _distCoeffs.getMat(), R = _Rmat.getMat(), P = _Pmat.getMat();
567
568
CV_Assert( src.isContinuous() && (src.depth() == CV_32F || src.depth() == CV_64F) &&
569
((src.rows == 1 && src.channels() == 2) || src.cols*src.channels() == 2));
570
571
_dst.create(src.size(), src.type(), -1, true);
572
Mat dst = _dst.getMat();
573
574
CvMat _csrc = cvMat(src), _cdst = cvMat(dst), _ccameraMatrix = cvMat(cameraMatrix);
575
CvMat matR, matP, _cdistCoeffs, *pR=0, *pP=0, *pD=0;
576
if( !R.empty() )
577
pR = &(matR = cvMat(R));
578
if( !P.empty() )
579
pP = &(matP = cvMat(P));
580
if( !distCoeffs.empty() )
581
pD = &(_cdistCoeffs = cvMat(distCoeffs));
582
cvUndistortPointsInternal(&_csrc, &_cdst, &_ccameraMatrix, pD, pR, pP, criteria);
583
}
584
585
// Maps a normalized image point p onto the wide-angle ("spherical")
// projection selected by projType. alpha controls the projection family.
// If J is non-NULL, the 2x2 Jacobian of the mapping (stored row-major in a
// Vec4d) is also computed; it is used by invMapPointSpherical's solver.
static Point2f mapPointSpherical(const Point2f& p, float alpha, Vec4d* J, enum UndistortTypes projType)
{
    double x = p.x, y = p.y;
    double beta = 1 + 2*alpha;
    double v = x*x + y*y + 1, iv = 1/v;
    double u = std::sqrt(beta*v + alpha*alpha);

    // k: isotropic scale applied to (x, y); kv: its derivative w.r.t. v.
    double k = (u - alpha)*iv;
    double kv = (v*beta/u - (u - alpha)*2)*iv*iv;
    double kx = kv*x, ky = kv*y;

    if( projType == PROJ_SPHERICAL_ORTHO )
    {
        if(J)
            *J = Vec4d(kx*x + k, kx*y, ky*x, ky*y + k);
        return Point2f((float)(x*k), (float)(y*k));
    }
    if( projType == PROJ_SPHERICAL_EQRECT )
    {
        // equirectangular
        double iR = 1/(alpha + 1);
        // Clamp into asin's domain before taking the angles.
        double x1 = std::max(std::min(x*k*iR, 1.), -1.);
        double y1 = std::max(std::min(y*k*iR, 1.), -1.);

        if(J)
        {
            // Chain rule through asin; NOTE(review): fx1/fy1 divide by
            // sqrt(1 - x1^2), which is 0 when the clamp above saturates —
            // presumably callers stay inside the open interval; confirm.
            double fx1 = iR/std::sqrt(1 - x1*x1);
            double fy1 = iR/std::sqrt(1 - y1*y1);
            *J = Vec4d(fx1*(kx*x + k), fx1*ky*x, fy1*kx*y, fy1*(ky*y + k));
        }
        return Point2f((float)asin(x1), (float)asin(y1));
    }
    CV_Error(Error::StsBadArg, "Unknown projection type");
}
619
620
621
// Inverts mapPointSpherical numerically with (at most 5) Gauss-Newton
// iterations using the analytic 2x2 Jacobian J: q <- q - (J^T J)^-1 J^T err.
// Returns (-FLT_MAX, -FLT_MAX) when the iteration fails to converge, which
// callers use as an "invalid pixel" marker.
static Point2f invMapPointSpherical(Point2f _p, float alpha, enum UndistortTypes projType)
{
    double eps = 1e-12;   // threshold on the squared residual
    Vec2d p(_p.x, _p.y), q(_p.x, _p.y), err;   // start the iteration at p itself
    Vec4d J;
    int i, maxiter = 5;

    for( i = 0; i < maxiter; i++ )
    {
        Point2f p1 = mapPointSpherical(Point2f((float)q[0], (float)q[1]), alpha, &J, projType);
        err = Vec2d(p1.x, p1.y) - p;
        if( err[0]*err[0] + err[1]*err[1] < eps )
            break;

        // Normal equations: JtJ = J^T J (symmetric 2x2, stored row-major).
        Vec4d JtJ(J[0]*J[0] + J[2]*J[2], J[0]*J[1] + J[2]*J[3],
                  J[0]*J[1] + J[2]*J[3], J[1]*J[1] + J[3]*J[3]);
        double d = JtJ[0]*JtJ[3] - JtJ[1]*JtJ[2];
        d = d ? 1./d : 0;   // singular JtJ degrades to a zero step
        Vec4d iJtJ(JtJ[3]*d, -JtJ[1]*d, -JtJ[2]*d, JtJ[0]*d);
        Vec2d JtErr(J[0]*err[0] + J[2]*err[1], J[1]*err[0] + J[3]*err[1]);

        q -= Vec2d(iJtJ[0]*JtErr[0] + iJtJ[1]*JtErr[1], iJtJ[2]*JtErr[0] + iJtJ[3]*JtErr[1]);
        //Matx22d J(kx*x + k, kx*y, ky*x, ky*y + k);
        //q -= Vec2d((J.t()*J).inv()*(J.t()*err));
    }

    // i == maxiter means the loop never hit the convergence break.
    return i < maxiter ? Point2f((float)q[0], (float)q[1]) : Point2f(-FLT_MAX, -FLT_MAX);
}
649
650
float initWideAngleProjMap(InputArray _cameraMatrix0, InputArray _distCoeffs0,
651
Size imageSize, int destImageWidth, int m1type,
652
OutputArray _map1, OutputArray _map2,
653
enum UndistortTypes projType, double _alpha)
654
{
655
Mat cameraMatrix0 = _cameraMatrix0.getMat(), distCoeffs0 = _distCoeffs0.getMat();
656
double k[14] = {0,0,0,0,0,0,0,0,0,0,0,0,0,0}, M[9]={0,0,0,0,0,0,0,0,0};
657
Mat distCoeffs(distCoeffs0.rows, distCoeffs0.cols, CV_MAKETYPE(CV_64F,distCoeffs0.channels()), k);
658
Mat cameraMatrix(3,3,CV_64F,M);
659
Point2f scenter((float)cameraMatrix.at<double>(0,2), (float)cameraMatrix.at<double>(1,2));
660
Point2f dcenter((destImageWidth-1)*0.5f, 0.f);
661
float xmin = FLT_MAX, xmax = -FLT_MAX, ymin = FLT_MAX, ymax = -FLT_MAX;
662
int N = 9;
663
std::vector<Point2f> uvec(1), vvec(1);
664
Mat I = Mat::eye(3,3,CV_64F);
665
float alpha = (float)_alpha;
666
667
int ndcoeffs = distCoeffs0.cols*distCoeffs0.rows*distCoeffs0.channels();
668
CV_Assert((distCoeffs0.cols == 1 || distCoeffs0.rows == 1) &&
669
(ndcoeffs == 4 || ndcoeffs == 5 || ndcoeffs == 8 || ndcoeffs == 12 || ndcoeffs == 14));
670
CV_Assert(cameraMatrix0.size() == Size(3,3));
671
distCoeffs0.convertTo(distCoeffs,CV_64F);
672
cameraMatrix0.convertTo(cameraMatrix,CV_64F);
673
674
alpha = std::min(alpha, 0.999f);
675
676
for( int i = 0; i < N; i++ )
677
for( int j = 0; j < N; j++ )
678
{
679
Point2f p((float)j*imageSize.width/(N-1), (float)i*imageSize.height/(N-1));
680
uvec[0] = p;
681
undistortPoints(uvec, vvec, cameraMatrix, distCoeffs, I, I);
682
Point2f q = mapPointSpherical(vvec[0], alpha, 0, projType);
683
if( xmin > q.x ) xmin = q.x;
684
if( xmax < q.x ) xmax = q.x;
685
if( ymin > q.y ) ymin = q.y;
686
if( ymax < q.y ) ymax = q.y;
687
}
688
689
float scale = (float)std::min(dcenter.x/fabs(xmax), dcenter.x/fabs(xmin));
690
Size dsize(destImageWidth, cvCeil(std::max(scale*fabs(ymin)*2, scale*fabs(ymax)*2)));
691
dcenter.y = (dsize.height - 1)*0.5f;
692
693
Mat mapxy(dsize, CV_32FC2);
694
double k1 = k[0], k2 = k[1], k3 = k[2], p1 = k[3], p2 = k[4], k4 = k[5], k5 = k[6], k6 = k[7], s1 = k[8], s2 = k[9], s3 = k[10], s4 = k[11];
695
double fx = cameraMatrix.at<double>(0,0), fy = cameraMatrix.at<double>(1,1), cx = scenter.x, cy = scenter.y;
696
cv::Matx33d matTilt;
697
cv::detail::computeTiltProjectionMatrix(k[12], k[13], &matTilt);
698
699
for( int y = 0; y < dsize.height; y++ )
700
{
701
Point2f* mxy = mapxy.ptr<Point2f>(y);
702
for( int x = 0; x < dsize.width; x++ )
703
{
704
Point2f p = (Point2f((float)x, (float)y) - dcenter)*(1.f/scale);
705
Point2f q = invMapPointSpherical(p, alpha, projType);
706
if( q.x <= -FLT_MAX && q.y <= -FLT_MAX )
707
{
708
mxy[x] = Point2f(-1.f, -1.f);
709
continue;
710
}
711
double x2 = q.x*q.x, y2 = q.y*q.y;
712
double r2 = x2 + y2, _2xy = 2*q.x*q.y;
713
double kr = 1 + ((k3*r2 + k2)*r2 + k1)*r2/(1 + ((k6*r2 + k5)*r2 + k4)*r2);
714
double xd = (q.x*kr + p1*_2xy + p2*(r2 + 2*x2) + s1*r2+ s2*r2*r2);
715
double yd = (q.y*kr + p1*(r2 + 2*y2) + p2*_2xy + s3*r2+ s4*r2*r2);
716
cv::Vec3d vecTilt = matTilt*cv::Vec3d(xd, yd, 1);
717
double invProj = vecTilt(2) ? 1./vecTilt(2) : 1;
718
double u = fx*invProj*vecTilt(0) + cx;
719
double v = fy*invProj*vecTilt(1) + cy;
720
721
mxy[x] = Point2f((float)u, (float)v);
722
}
723
}
724
725
if(m1type == CV_32FC2)
726
{
727
_map1.create(mapxy.size(), mapxy.type());
728
Mat map1 = _map1.getMat();
729
mapxy.copyTo(map1);
730
_map2.release();
731
}
732
else
733
convertMaps(mapxy, Mat(), _map1, _map2, m1type, false);
734
735
return scale;
736
}
737
738
} // namespace
739
/* End of file */
740
741