Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/stitching/src/warpers.cpp
16337 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
14
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
15
// Third party copyrights are property of their respective owners.
16
//
17
// Redistribution and use in source and binary forms, with or without modification,
18
// are permitted provided that the following conditions are met:
19
//
20
// * Redistribution's of source code must retain the above copyright notice,
21
// this list of conditions and the following disclaimer.
22
//
23
// * Redistribution's in binary form must reproduce the above copyright notice,
24
// this list of conditions and the following disclaimer in the documentation
25
// and/or other materials provided with the distribution.
26
//
27
// * The name of the copyright holders may not be used to endorse or promote products
28
// derived from this software without specific prior written permission.
29
//
30
// This software is provided by the copyright holders and contributors "as is" and
31
// any express or implied warranties, including, but not limited to, the implied
32
// warranties of merchantability and fitness for a particular purpose are disclaimed.
33
// In no event shall the Intel Corporation or contributors be liable for any direct,
34
// indirect, incidental, special, exemplary, or consequential damages
35
// (including, but not limited to, procurement of substitute goods or services;
36
// loss of use, data, or profits; or business interruption) however caused
37
// and on any theory of liability, whether in contract, strict liability,
38
// or tort (including negligence or otherwise) arising in any way out of
39
// the use of this software, even if advised of the possibility of such damage.
40
//
41
//M*/
42
43
#include "precomp.hpp"
44
#include "opencl_kernels_stitching.hpp"
45
46
namespace cv {
47
namespace detail {
48
49
/** Cache camera parameters and their frequently used matrix products.
 *
 *  Flattens K, R^-1, R*K^-1, K*R^-1 and the translation T into plain
 *  float arrays so the per-pixel mapForward/mapBackward code can avoid
 *  Mat element access.  K and R must be 3x3 CV_32F; T must be a 3x1 or
 *  1x3 CV_32F vector.
 */
void ProjectorBase::setCameraParams(InputArray _K, InputArray _R, InputArray _T)
{
    Mat K = _K.getMat(), R = _R.getMat(), T = _T.getMat();

    CV_Assert(K.size() == Size(3, 3) && K.type() == CV_32F);
    CV_Assert(R.size() == Size(3, 3) && R.type() == CV_32F);
    CV_Assert((T.size() == Size(1, 3) || T.size() == Size(3, 1)) && T.type() == CV_32F);

    Mat_<float> K_(K);
    Mat_<float> Rinv = R.t();          // for a rotation, transpose == inverse
    Mat_<float> R_Kinv = R * K.inv();
    Mat_<float> K_Rinv = K * Rinv;

    for (int i = 0; i < 3; ++i)
    {
        for (int j = 0; j < 3; ++j)
        {
            const int idx = 3 * i + j;
            k[idx] = K_(i, j);
            rinv[idx] = Rinv(i, j);
            r_kinv[idx] = R_Kinv(i, j);
            k_rinv[idx] = K_Rinv(i, j);
        }
    }

    // reshape(0, 3) views T as a 3x1 column regardless of input orientation
    Mat_<float> T_(T.reshape(0, 3));
    for (int i = 0; i < 3; ++i)
        t[i] = T_(i, 0);
}
80
81
82
/// Project a single source-image point into plane-warped coordinates
/// using the full camera pose (intrinsics K, rotation R, translation T).
Point2f PlaneWarper::warpPoint(const Point2f &pt, InputArray K, InputArray R, InputArray T)
{
    projector_.setCameraParams(K, R, T);
    Point2f result;
    projector_.mapForward(pt.x, pt.y, result.x, result.y);
    return result;
}
89
90
/// Rotation-only overload: delegates with a zero translation vector.
Point2f PlaneWarper::warpPoint(const Point2f &pt, InputArray K, InputArray R)
{
    Mat_<float> zeroT = Mat_<float>::zeros(3, 1);
    return warpPoint(pt, K, R, zeroT);
}
96
97
/// Rotation-only overload: builds the remap tables assuming zero translation.
Rect PlaneWarper::buildMaps(Size src_size, InputArray K, InputArray R, OutputArray xmap, OutputArray ymap)
{
    Mat zeroT = Mat::zeros(3, 1, CV_32FC1);
    return buildMaps(src_size, K, R, zeroT, xmap, ymap);
}
101
102
/** Build backward-projection maps for remap() from a full camera pose.
 *
 *  Determines the destination ROI from the projected source corners,
 *  then fills xmap/ymap with the source coordinate each destination
 *  pixel maps back to.  An OpenCL kernel is attempted first (when
 *  available); on any failure the maps are filled on the CPU.
 *
 *  @return the destination ROI spanning [dst_tl, dst_br).
 */
Rect PlaneWarper::buildMaps(Size src_size, InputArray K, InputArray R, InputArray T, OutputArray _xmap, OutputArray _ymap)
{
    projector_.setCameraParams(K, R, T);

    Point dst_tl, dst_br;
    detectResultRoi(src_size, dst_tl, dst_br);

    Size dsize(dst_br.x - dst_tl.x + 1, dst_br.y - dst_tl.y + 1);
    _xmap.create(dsize, CV_32FC1);
    _ymap.create(dsize, CV_32FC1);

#ifdef HAVE_OPENCL
    if (ocl::isOpenCLActivated())
    {
        ocl::Kernel oclk("buildWarpPlaneMaps", ocl::stitching::warpers_oclsrc);
        if (!oclk.empty())
        {
            int rowsPerWI = ocl::Device::getDefault().isIntel() ? 4 : 1;
            Mat k_rinv(1, 9, CV_32FC1, projector_.k_rinv), t(1, 3, CV_32FC1, projector_.t);
            UMat uxmap = _xmap.getUMat(), uymap = _ymap.getUMat(),
                 uk_rinv = k_rinv.getUMat(ACCESS_READ), ut = t.getUMat(ACCESS_READ);

            oclk.args(ocl::KernelArg::WriteOnlyNoSize(uxmap), ocl::KernelArg::WriteOnly(uymap),
                      ocl::KernelArg::PtrReadOnly(uk_rinv), ocl::KernelArg::PtrReadOnly(ut),
                      dst_tl.x, dst_tl.y, 1/projector_.scale, rowsPerWI);

            size_t globalsize[2] = { (size_t)dsize.width, ((size_t)dsize.height + rowsPerWI - 1) / rowsPerWI };
            if (oclk.run(2, globalsize, NULL, true))
            {
                CV_IMPL_ADD(CV_IMPL_OCL);
                return Rect(dst_tl, dst_br);
            }
        }
    }
#endif

    // CPU fallback: map every destination pixel back into the source image.
    Mat xmap = _xmap.getMat(), ymap = _ymap.getMat();

    for (int dv = dst_tl.y; dv <= dst_br.y; ++dv)
    {
        float *xrow = xmap.ptr<float>(dv - dst_tl.y);
        float *yrow = ymap.ptr<float>(dv - dst_tl.y);
        for (int du = dst_tl.x; du <= dst_br.x; ++du)
        {
            float sx, sy;
            projector_.mapBackward(static_cast<float>(du), static_cast<float>(dv), sx, sy);
            xrow[du - dst_tl.x] = sx;
            yrow[du - dst_tl.x] = sy;
        }
    }

    return Rect(dst_tl, dst_br);
}
153
154
155
/** Warp an image onto the plane using the full camera pose.
 *  Builds the backward maps, allocates dst one pixel larger than the
 *  ROI in each dimension, and remaps.  Returns the ROI's top-left
 *  corner in the warped coordinate frame.
 */
Point PlaneWarper::warp(InputArray src, InputArray K, InputArray R, InputArray T, int interp_mode, int border_mode,
                        OutputArray dst)
{
    UMat xm, ym;
    Rect roi = buildMaps(src.size(), K, R, T, xm, ym);

    dst.create(roi.height + 1, roi.width + 1, src.type());
    remap(src, dst, xm, ym, interp_mode, border_mode);

    return roi.tl();
}
166
167
/// Rotation-only overload: warps with a zero translation vector.
Point PlaneWarper::warp(InputArray src, InputArray K, InputArray R,
                        int interp_mode, int border_mode, OutputArray dst)
{
    Mat_<float> zeroT = Mat_<float>::zeros(3, 1);
    return warp(src, K, R, zeroT, interp_mode, border_mode, dst);
}
174
175
/// Compute the bounding rectangle of the warped image (full pose),
/// without building any maps.  The bottom-right is made exclusive.
Rect PlaneWarper::warpRoi(Size src_size, InputArray K, InputArray R, InputArray T)
{
    projector_.setCameraParams(K, R, T);

    Point tl, br;
    detectResultRoi(src_size, tl, br);

    return Rect(tl, Point(br.x + 1, br.y + 1));
}
184
185
/// Rotation-only overload: ROI with a zero translation vector.
Rect PlaneWarper::warpRoi(Size src_size, InputArray K, InputArray R)
{
    Mat_<float> zeroT = Mat_<float>::zeros(3, 1);
    return warpRoi(src_size, K, R, zeroT);
}
191
192
193
/** Find the destination bounding box by forward-projecting the four
 *  source-image corners and taking the min/max of the results.
 *  Bounds are truncated to int, so dst_tl/dst_br are inclusive.
 */
void PlaneWarper::detectResultRoi(Size src_size, Point &dst_tl, Point &dst_br)
{
    const float w = static_cast<float>(src_size.width - 1);
    const float h = static_cast<float>(src_size.height - 1);
    const float corners[4][2] = { {0.f, 0.f}, {0.f, h}, {w, 0.f}, {w, h} };

    float tl_uf = std::numeric_limits<float>::max();
    float tl_vf = std::numeric_limits<float>::max();
    float br_uf = -std::numeric_limits<float>::max();
    float br_vf = -std::numeric_limits<float>::max();

    for (int i = 0; i < 4; ++i)
    {
        float u, v;
        projector_.mapForward(corners[i][0], corners[i][1], u, v);
        tl_uf = std::min(tl_uf, u); tl_vf = std::min(tl_vf, v);
        br_uf = std::max(br_uf, u); br_vf = std::max(br_vf, v);
    }

    dst_tl.x = static_cast<int>(tl_uf);
    dst_tl.y = static_cast<int>(tl_vf);
    dst_br.x = static_cast<int>(br_uf);
    dst_br.y = static_cast<int>(br_vf);
}
223
224
225
/// Warp a point under the affine transform H by decomposing it into
/// rotation/shear R and translation T, then reusing the plane warper.
Point2f AffineWarper::warpPoint(const Point2f &pt, InputArray K, InputArray H)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::warpPoint(pt, K, R, T);
}
231
232
233
/// Build remap tables for the affine transform H via R/T decomposition.
Rect AffineWarper::buildMaps(Size src_size, InputArray K, InputArray H, OutputArray xmap, OutputArray ymap)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::buildMaps(src_size, K, R, T, xmap, ymap);
}
239
240
241
/// Warp an image under the affine transform H via R/T decomposition.
Point AffineWarper::warp(InputArray src, InputArray K, InputArray H,
                         int interp_mode, int border_mode, OutputArray dst)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::warp(src, K, R, T, interp_mode, border_mode, dst);
}
248
249
250
/// Bounding box of the affine-warped image via R/T decomposition.
Rect AffineWarper::warpRoi(Size src_size, InputArray K, InputArray H)
{
    Mat R, T;
    getRTfromHomogeneous(H, R, T);
    return PlaneWarper::warpRoi(src_size, K, R, T);
}
256
257
258
/** Split a 3x3 CV_32F affine matrix H into a "rotation" part R and a
 *  translation vector T that fit the plane warper's conventions.
 *
 *  The last column of H is moved into T and zeroed in R; R is then
 *  transposed and T remapped (T = -R * T) to compensate, since the
 *  plane warper applies the transform in the opposite direction.
 */
void AffineWarper::getRTfromHomogeneous(InputArray H_, Mat &R, Mat &T)
{
    Mat H = H_.getMat();
    CV_Assert(H.size() == Size(3, 3) && H.type() == CV_32F);

    R = H.clone();
    T = Mat::zeros(3, 1, CV_32F);

    // extract the translation column from the affine matrix
    T.at<float>(0, 0) = R.at<float>(0, 2);
    T.at<float>(1, 0) = R.at<float>(1, 2);
    R.at<float>(0, 2) = 0.f;
    R.at<float>(1, 2) = 0.f;

    // compensate the transform so it fits the plane warper
    R = R.t();
    T = (R * T) * -1;
}
275
276
277
/** Compute the destination ROI for spherical warping.
 *
 *  Starts from the border-based estimate, then checks two special
 *  directions derived from the inverse rotation (the spherical
 *  projection's singular points — presumably the poles).  If such a
 *  point projects inside the source image, the ROI is extended to the
 *  corresponding v extent (pi*scale for one, 0 for the other).
 */
void SphericalWarper::detectResultRoi(Size src_size, Point &dst_tl, Point &dst_br)
{
    detectResultRoiByBorder(src_size, dst_tl, dst_br);

    float tl_uf = static_cast<float>(dst_tl.x);
    float tl_vf = static_cast<float>(dst_tl.y);
    float br_uf = static_cast<float>(dst_br.x);
    float br_vf = static_cast<float>(dst_br.y);

    const float x = projector_.rinv[1];
    const float z = projector_.rinv[7];
    const float y_candidates[2] = { projector_.rinv[4], -projector_.rinv[4] };
    const float v_extents[2] = { static_cast<float>(CV_PI * projector_.scale), 0.f };

    for (int i = 0; i < 2; ++i)
    {
        const float y = y_candidates[i];
        if (y > 0.f)
        {
            float x_ = (projector_.k[0] * x + projector_.k[1] * y) / z + projector_.k[2];
            float y_ = projector_.k[4] * y / z + projector_.k[5];
            if (x_ > 0.f && x_ < src_size.width && y_ > 0.f && y_ < src_size.height)
            {
                tl_uf = std::min(tl_uf, 0.f); tl_vf = std::min(tl_vf, v_extents[i]);
                br_uf = std::max(br_uf, 0.f); br_vf = std::max(br_vf, v_extents[i]);
            }
        }
    }

    dst_tl.x = static_cast<int>(tl_uf);
    dst_tl.y = static_cast<int>(tl_vf);
    dst_br.x = static_cast<int>(br_uf);
    dst_br.y = static_cast<int>(br_vf);
}
319
320
/** Compute the destination ROI for portrait spherical warping.
 *
 *  Same scheme as SphericalWarper::detectResultRoi, but the special
 *  directions are taken from the first column of the inverse rotation
 *  (rinv[0], rinv[3], rinv[6]) to match the portrait orientation.
 */
void SphericalPortraitWarper::detectResultRoi(Size src_size, Point &dst_tl, Point &dst_br)
{
    detectResultRoiByBorder(src_size, dst_tl, dst_br);

    float tl_uf = static_cast<float>(dst_tl.x);
    float tl_vf = static_cast<float>(dst_tl.y);
    float br_uf = static_cast<float>(dst_br.x);
    float br_vf = static_cast<float>(dst_br.y);

    const float x = projector_.rinv[0];
    const float z = projector_.rinv[6];
    const float y_candidates[2] = { projector_.rinv[3], -projector_.rinv[3] };
    const float v_extents[2] = { static_cast<float>(CV_PI * projector_.scale), 0.f };

    for (int i = 0; i < 2; ++i)
    {
        const float y = y_candidates[i];
        if (y > 0.f)
        {
            float x_ = (projector_.k[0] * x + projector_.k[1] * y) / z + projector_.k[2];
            float y_ = projector_.k[4] * y / z + projector_.k[5];
            if (x_ > 0.f && x_ < src_size.width && y_ > 0.f && y_ < src_size.height)
            {
                tl_uf = std::min(tl_uf, 0.f); tl_vf = std::min(tl_vf, v_extents[i]);
                br_uf = std::max(br_uf, 0.f); br_vf = std::max(br_vf, v_extents[i]);
            }
        }
    }

    dst_tl.x = static_cast<int>(tl_uf);
    dst_tl.y = static_cast<int>(tl_vf);
    dst_br.x = static_cast<int>(br_uf);
    dst_br.y = static_cast<int>(br_vf);
}
362
363
/////////////////////////////////////////// SphericalWarper ////////////////////////////////////////
364
365
/** Build backward-projection maps for spherical warping.
 *
 *  Tries the OpenCL kernel first; if OpenCL is inactive, the kernel is
 *  missing, or the run fails, falls through to the generic CPU
 *  implementation in RotationWarperBase.
 */
Rect SphericalWarper::buildMaps(Size src_size, InputArray K, InputArray R, OutputArray xmap, OutputArray ymap)
{
#ifdef HAVE_OPENCL
    if (ocl::isOpenCLActivated())
    {
        ocl::Kernel oclk("buildWarpSphericalMaps", ocl::stitching::warpers_oclsrc);
        if (!oclk.empty())
        {
            int rowsPerWI = ocl::Device::getDefault().isIntel() ? 4 : 1;
            projector_.setCameraParams(K, R);

            Point dst_tl, dst_br;
            detectResultRoi(src_size, dst_tl, dst_br);

            Size dsize(dst_br.x - dst_tl.x + 1, dst_br.y - dst_tl.y + 1);
            xmap.create(dsize, CV_32FC1);
            ymap.create(dsize, CV_32FC1);

            Mat k_rinv(1, 9, CV_32FC1, projector_.k_rinv);
            UMat uxmap = xmap.getUMat(), uymap = ymap.getUMat(), uk_rinv = k_rinv.getUMat(ACCESS_READ);

            oclk.args(ocl::KernelArg::WriteOnlyNoSize(uxmap), ocl::KernelArg::WriteOnly(uymap),
                      ocl::KernelArg::PtrReadOnly(uk_rinv), dst_tl.x, dst_tl.y, 1/projector_.scale, rowsPerWI);

            size_t globalsize[2] = { (size_t)dsize.width, ((size_t)dsize.height + rowsPerWI - 1) / rowsPerWI };
            if (oclk.run(2, globalsize, NULL, true))
            {
                CV_IMPL_ADD(CV_IMPL_OCL);
                return Rect(dst_tl, dst_br);
            }
        }
    }
#endif
    // CPU fallback shared by all rotation warpers.
    return RotationWarperBase<SphericalProjector>::buildMaps(src_size, K, R, xmap, ymap);
}
400
401
/// Warp an image spherically: build the maps, allocate dst one pixel
/// larger than the ROI in each dimension, remap, and return the ROI's
/// top-left corner.
Point SphericalWarper::warp(InputArray src, InputArray K, InputArray R, int interp_mode, int border_mode, OutputArray dst)
{
    UMat xm, ym;
    Rect roi = buildMaps(src.size(), K, R, xm, ym);

    dst.create(roi.height + 1, roi.width + 1, src.type());
    remap(src, dst, xm, ym, interp_mode, border_mode);

    return roi.tl();
}
411
412
/////////////////////////////////////////// CylindricalWarper ////////////////////////////////////////
413
414
/** Build backward-projection maps for cylindrical warping.
 *
 *  Mirrors SphericalWarper::buildMaps: the OpenCL kernel is tried
 *  first, with the generic RotationWarperBase CPU path as fallback.
 */
Rect CylindricalWarper::buildMaps(Size src_size, InputArray K, InputArray R, OutputArray xmap, OutputArray ymap)
{
#ifdef HAVE_OPENCL
    if (ocl::isOpenCLActivated())
    {
        ocl::Kernel oclk("buildWarpCylindricalMaps", ocl::stitching::warpers_oclsrc);
        if (!oclk.empty())
        {
            int rowsPerWI = ocl::Device::getDefault().isIntel() ? 4 : 1;
            projector_.setCameraParams(K, R);

            Point dst_tl, dst_br;
            detectResultRoi(src_size, dst_tl, dst_br);

            Size dsize(dst_br.x - dst_tl.x + 1, dst_br.y - dst_tl.y + 1);
            xmap.create(dsize, CV_32FC1);
            ymap.create(dsize, CV_32FC1);

            Mat k_rinv(1, 9, CV_32FC1, projector_.k_rinv);
            UMat uxmap = xmap.getUMat(), uymap = ymap.getUMat(), uk_rinv = k_rinv.getUMat(ACCESS_READ);

            oclk.args(ocl::KernelArg::WriteOnlyNoSize(uxmap), ocl::KernelArg::WriteOnly(uymap),
                      ocl::KernelArg::PtrReadOnly(uk_rinv), dst_tl.x, dst_tl.y, 1/projector_.scale,
                      rowsPerWI);

            size_t globalsize[2] = { (size_t)dsize.width, ((size_t)dsize.height + rowsPerWI - 1) / rowsPerWI };
            if (oclk.run(2, globalsize, NULL, true))
            {
                CV_IMPL_ADD(CV_IMPL_OCL);
                return Rect(dst_tl, dst_br);
            }
        }
    }
#endif
    // CPU fallback shared by all rotation warpers.
    return RotationWarperBase<CylindricalProjector>::buildMaps(src_size, K, R, xmap, ymap);
}
450
451
/// Warp an image cylindrically: build the maps, allocate dst one pixel
/// larger than the ROI in each dimension, remap, and return the ROI's
/// top-left corner.
Point CylindricalWarper::warp(InputArray src, InputArray K, InputArray R, int interp_mode, int border_mode, OutputArray dst)
{
    UMat xm, ym;
    Rect roi = buildMaps(src.size(), K, R, xm, ym);

    dst.create(roi.height + 1, roi.width + 1, src.type());
    remap(src, dst, xm, ym, interp_mode, border_mode);

    return roi.tl();
}
461
462
} // namespace detail
463
} // namespace cv
464
465