Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/videoio/src/cap_openni2.cpp
16344 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// Intel License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2000, Intel Corporation, all rights reserved.
14
// Third party copyrights are property of their respective owners.
15
//
16
// Redistribution and use in source and binary forms, with or without modification,
17
// are permitted provided that the following conditions are met:
18
//
19
// * Redistribution's of source code must retain the above copyright notice,
20
// this list of conditions and the following disclaimer.
21
//
22
// * Redistribution's in binary form must reproduce the above copyright notice,
23
// this list of conditions and the following disclaimer in the documentation
24
// and/or other materials provided with the distribution.
25
//
26
// * The name of Intel Corporation may not be used to endorse or promote products
27
// derived from this software without specific prior written permission.
28
//
29
// This software is provided by the copyright holders and contributors "as is" and
30
// any express or implied warranties, including, but not limited to, the implied
31
// warranties of merchantability and fitness for a particular purpose are disclaimed.
32
// In no event shall the Intel Corporation or contributors be liable for any direct,
33
// indirect, incidental, special, exemplary, or consequential damages
34
// (including, but not limited to, procurement of substitute goods or services;
35
// loss of use, data, or profits; or business interruption) however caused
36
// and on any theory of liability, whether in contract, strict liability,
37
// or tort (including negligence or otherwise) arising in any way out of
38
// the use of this software, even if advised of the possibility of such damage.
39
//
40
//M*/
41
#include "precomp.hpp"
42
#include "opencv2/core.hpp"
43
#include "opencv2/imgproc.hpp"
44
45
#ifdef HAVE_OPENNI2
46
47
#include <queue>
48
49
#ifndef i386
50
# define i386 0
51
#endif
52
#ifndef __arm__
53
# define __arm__ 0
54
#endif
55
#ifndef _ARC
56
# define _ARC 0
57
#endif
58
#ifndef __APPLE__
59
# define __APPLE__ 0
60
#endif
61
62
#define CV_STREAM_TIMEOUT 2000
63
64
#define CV_DEPTH_STREAM 0
65
#define CV_COLOR_STREAM 1
66
#define CV_IR_STREAM 2
67
#define CV_MAX_NUM_STREAMS 3
68
69
#include "OpenNI.h"
70
#include "PS1080.h"
71
72
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
73
74
// Serializes first-time initialization of the OpenNI2 runtime across capture instances.
static cv::Mutex initOpenNI2Mutex;

// RAII wrapper for the process-wide OpenNI2 library lifetime.
// The first call to init() constructs a function-local static instance,
// which initializes OpenNI; its destructor shuts OpenNI down at process exit.
struct OpenNI2Initializer
{
public:
    static void init()
    {
        // Lock so that concurrent first calls cannot race the construction
        // of the static initializer below.
        cv::AutoLock al(initOpenNI2Mutex);
        static OpenNI2Initializer initializer;
    }

private:
    OpenNI2Initializer()
    {
        // Initialize and configure the context.
        openni::Status status = openni::OpenNI::initialize();
        if (status != openni::STATUS_OK)
        {
            CV_Error(CV_StsError, std::string("Failed to initialize:") + openni::OpenNI::getExtendedError());
        }
    }

    ~OpenNI2Initializer()
    {
        // Releases the OpenNI runtime acquired in the constructor.
        openni::OpenNI::shutdown();
    }
};
101
102
// OpenNI2-based capture backend exposing depth, color and IR streams of
// Kinect/Xtion-class sensors through the legacy CvCapture interface.
class CvCapture_OpenNI2 : public CvCapture
{
public:
    // Device flavour encoded in the tens digit of the capture index.
    enum { DEVICE_DEFAULT=0, DEVICE_MS_KINECT=0, DEVICE_ASUS_XTION=1, DEVICE_MAX=1 };

    // Value written into output maps for pixels with no valid measurement.
    static const int INVALID_PIXEL_VAL = 0;
    static const int INVALID_COORDINATE_VAL = 0;

    // Defaults for the approximate-sync grabber settings (grabber itself is
    // currently disabled — see the commented-out member below).
    static const int DEFAULT_MAX_BUFFER_SIZE = 2;
    static const int DEFAULT_IS_CIRCLE_BUFFER = 0;
    static const int DEFAULT_MAX_TIME_DURATION = 20;

    CvCapture_OpenNI2(int index = 0);
    CvCapture_OpenNI2(const char * filename);
    virtual ~CvCapture_OpenNI2();

    virtual double getProperty(int propIdx) const CV_OVERRIDE;
    virtual bool setProperty(int probIdx, double propVal) CV_OVERRIDE;
    virtual bool grabFrame() CV_OVERRIDE;
    virtual IplImage* retrieveFrame(int outputType) CV_OVERRIDE;

    // True once the device was opened and streams configured successfully.
    bool isOpened() const;

protected:
    // Holds a retrieved cv::Mat together with a cached IplImage header
    // aliasing it, so retrieveFrame() can hand out a stable IplImage*.
    struct OutputMap
    {
    public:
        cv::Mat mat;
        IplImage* getIplImagePtr();
    private:
        IplImage iplHeader;
    };

    // Number of distinct CV_CAP_OPENNI_* output map types served.
    static const int outputMapsTypesCount = 8;

    // 640x480@30fps with a per-stream default pixel format.
    static openni::VideoMode defaultStreamOutputMode(int stream);

    // Common constructor: index >= 0 opens a live device, otherwise filename
    // is treated as a recorded .oni file.
    CvCapture_OpenNI2(int index, const char * filename);

    IplImage* retrieveDepthMap();
    IplImage* retrievePointCloudMap();
    IplImage* retrieveDisparityMap();
    IplImage* retrieveDisparityMap_32F();
    IplImage* retrieveValidDepthMask();
    IplImage* retrieveBGRImage();
    IplImage* retrieveGrayImage();
    IplImage* retrieveIrImage();

    // Creates/starts (toggle=true) or releases (toggle=false) one stream.
    void toggleStream(int stream, bool toggle);
    // Lazily reads baseline/focal length from the depth stream (PS1080 props).
    void readCamerasParams();

    double getDepthGeneratorProperty(int propIdx) const;
    bool setDepthGeneratorProperty(int propIdx, double propVal);
    double getImageGeneratorProperty(int propIdx) const;
    bool setImageGeneratorProperty(int propIdx, double propVal);
    double getIrGeneratorProperty(int propIdx) const;
    bool setIrGeneratorProperty(int propIdx, double propVal);
    double getCommonProperty(int propIdx) const;
    bool setCommonProperty(int propIdx, double propVal);

    // OpenNI context
    openni::Device device;
    bool isContextOpened;

    // Data generators with its metadata; indexed by CV_DEPTH_STREAM /
    // CV_COLOR_STREAM / CV_IR_STREAM.
    std::vector<openni::VideoStream> streams;
    std::vector<openni::VideoFrameRef> streamFrames;
    std::vector<cv::Mat> streamImages;

    int maxBufferSize, maxTimeDuration; // for approx sync
    bool isCircleBuffer;
    //cv::Ptr<ApproximateSyncGrabber> approxSyncGrabber;

    // Cameras settings:
    // TODO find in OpenNI function to convert z->disparity and remove fields "baseline" and depthFocalLength_VGA
    // Distance between IR projector and IR camera (in meters)
    double baseline;
    // Focal length for the IR camera in VGA resolution (in pixels)
    int depthFocalLength_VGA;

    // The value for shadow (occluded pixels)
    int shadowValue;
    // The value for pixels without a valid disparity measurement
    int noSampleValue;

    // One cached output per CV_CAP_OPENNI_* map type.
    std::vector<OutputMap> outputMaps;
};
189
190
// Returns an IplImage header aliasing the current mat, or 0 when no data
// has been stored yet. The header is cached in the object so the returned
// pointer stays valid until the next call.
IplImage* CvCapture_OpenNI2::OutputMap::getIplImagePtr()
{
    if (!mat.empty())
    {
        // Refresh the cached header so it reflects the mat's current shape/data.
        iplHeader = cvIplImage(mat);
        return &iplHeader;
    }
    return 0;
}
198
199
// Reports whether the constructor finished opening the device successfully.
bool CvCapture_OpenNI2::isOpened() const
{
    return isContextOpened;
}
203
204
// Builds the default video mode for a stream: VGA (640x480) at 30 FPS,
// with a pixel format chosen per stream kind. An unknown stream id yields
// a mode with resolution/fps set but the pixel format left untouched.
openni::VideoMode CvCapture_OpenNI2::defaultStreamOutputMode(int stream)
{
    openni::VideoMode mode;
    mode.setResolution(640, 480);
    mode.setFps(30);

    if (stream == CV_DEPTH_STREAM)
        mode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM);
    else if (stream == CV_COLOR_STREAM)
        mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);
    else if (stream == CV_IR_STREAM)
        mode.setPixelFormat(openni::PIXEL_FORMAT_GRAY16);

    return mode;
}
223
224
225
// Live-device constructor: delegates with a null filename so the common
// constructor opens camera `index`.
CvCapture_OpenNI2::CvCapture_OpenNI2(int index) :
    CvCapture_OpenNI2(index, nullptr)
{ }
228
229
// Recorded-file constructor: index -1 signals the common constructor to
// open `filename` (an .oni recording) instead of a live device.
CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename) :
    CvCapture_OpenNI2(-1, filename)
{ }
232
233
// Common constructor. index >= 0 selects a live device (the tens digit of
// the index encodes the device type, the units digit the camera number);
// index < 0 opens `filename` as a recorded .oni file. On success
// isContextOpened becomes true; failures raise via CV_Error.
CvCapture_OpenNI2::CvCapture_OpenNI2(int index, const char * filename) :
    device(),
    isContextOpened(false),
    streams(CV_MAX_NUM_STREAMS),
    streamFrames(CV_MAX_NUM_STREAMS),
    streamImages(CV_MAX_NUM_STREAMS),
    maxBufferSize(DEFAULT_MAX_BUFFER_SIZE),
    maxTimeDuration(DEFAULT_MAX_TIME_DURATION),
    isCircleBuffer(DEFAULT_IS_CIRCLE_BUFFER),
    baseline(0),
    depthFocalLength_VGA(0),
    shadowValue(0),
    noSampleValue(0),
    outputMaps(outputMapsTypesCount)
{
    // Initialize and configure the context.
    OpenNI2Initializer::init();

    const char* deviceURI = openni::ANY_DEVICE;
    bool needColor = true;
    bool needIR = true;
    if (index >= 0)
    {
        // Split the encoded index: tens digit = device type, units = camera.
        int deviceType = DEVICE_DEFAULT;
        if (index >= 10)
        {
            deviceType = index / 10;
            index %= 10;
        }
        // Asus XTION and Occipital Structure Sensor do not have an image generator
        needColor = (deviceType != DEVICE_ASUS_XTION);

        // find appropriate device URI
        openni::Array<openni::DeviceInfo> ldevs;
        if (index > 0)
        {
            openni::OpenNI::enumerateDevices(&ldevs);
            if (index < ldevs.getSize())
                deviceURI = ldevs[index].getUri();
            else
            {
                CV_Error(CV_StsError, "OpenCVKinect2: Device index exceeds the number of available OpenNI devices");
            }
        }
    }
    else
    {
        // Negative index: treat `filename` as the URI of a recording.
        deviceURI = filename;
    }

    openni::Status status;
    status = device.open(deviceURI);
    if (status != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, std::string("OpenCVKinect2: Failed to open device: ") + openni::OpenNI::getExtendedError());
    }

    // Depth is always required; color/IR only where the hardware offers them.
    toggleStream(CV_DEPTH_STREAM, true);
    if (needColor)
        toggleStream(CV_COLOR_STREAM, true);
    if (needIR)
        toggleStream(CV_IR_STREAM, true);

    setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0);

    // default for Kinect2 camera
    setProperty(CV_CAP_PROP_OPENNI2_MIRROR, 0.0);

    isContextOpened = true;
}
303
304
// Releases pending frame references, stops/destroys all streams, then
// closes the device. Frame refs are released first because they pin
// buffers owned by the streams.
CvCapture_OpenNI2::~CvCapture_OpenNI2()
{
    for (size_t i = 0; i < streams.size(); ++i)
    {
        streamFrames[i].release();
        streams[i].stop();
        streams[i].destroy();
    }
    device.close();
}
314
315
// Enables (toggle=true) or disables (toggle=false) one of the depth/color/IR
// streams. Enabling creates the stream, tries to apply the default video
// mode when the sensor supports it, and starts the stream; any failure
// destroys the stream and raises via CV_Error. Disabling is intentionally a
// no-op (see the libfreenect2 note below).
void CvCapture_OpenNI2::toggleStream(int stream, bool toggle)
{
    openni::Status status;

    // for logging
    static const std::string stream_names[CV_MAX_NUM_STREAMS] = {
        "depth",
        "color",
        "IR"
    };

    static const openni::SensorType stream_sensor_types[CV_MAX_NUM_STREAMS] = {
        openni::SENSOR_DEPTH,
        openni::SENSOR_COLOR,
        openni::SENSOR_IR
    };

    if (toggle) // want to open stream
    {
        // already opened
        if (streams[stream].isValid())
            return;

        // open stream
        status = streams[stream].create(device, stream_sensor_types[stream]);
        if (status == openni::STATUS_OK)
        {
            // try to set up default stream mode (if available)
            const openni::Array<openni::VideoMode>& vm = streams[stream].getSensorInfo().getSupportedVideoModes();
            openni::VideoMode dm = defaultStreamOutputMode(stream);
            for (int i = 0; i < vm.getSize(); i++)
            {
                // Only apply the default mode when the sensor advertises an
                // exactly matching mode; otherwise the device default is kept.
                if (vm[i].getPixelFormat() == dm.getPixelFormat() &&
                    vm[i].getResolutionX() == dm.getResolutionX() &&
                    vm[i].getResolutionY() == dm.getResolutionY() &&
                    vm[i].getFps() == dm.getFps())
                {
                    status = streams[stream].setVideoMode(defaultStreamOutputMode(stream));
                    if (status != openni::STATUS_OK)
                    {
                        streams[stream].destroy();
                        CV_Error(CV_StsError, std::string("OpenCVKinect2 : Couldn't set ") +
                                 stream_names[stream] + std::string(" stream output mode: ") +
                                 std::string(openni::OpenNI::getExtendedError()));
                    }
                }
            }

            // start stream
            status = streams[stream].start();
            if (status != openni::STATUS_OK)
            {
                streams[stream].destroy();
                CV_Error(CV_StsError, std::string("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start ") +
                         stream_names[stream] + std::string(" stream: ") +
                         std::string(openni::OpenNI::getExtendedError()));
            }
        }
        else
        {
            CV_Error(CV_StsError, std::string("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find ") +
                     stream_names[stream] + " stream: " +
                     std::string(openni::OpenNI::getExtendedError()));
        }
    }
    else if (streams[stream].isValid()) // want to close stream
    {
        //FIX for libfreenect2
        //which stops the whole device when stopping only one stream

        //streams[stream].stop();
        //streams[stream].destroy();
    }
}
389
390
391
// Reads the PS1080 intrinsic parameters (pixel size, zero-plane distance,
// emitter-to-camera distance) from the depth stream and derives `baseline`
// (mm) and `depthFocalLength_VGA` (pixels) used for disparity conversion.
// Raises via CV_Error if any property read fails.
void CvCapture_OpenNI2::readCamerasParams()
{
    double pixelSize = 0;
    if (streams[CV_DEPTH_STREAM].getProperty<double>(XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE, &pixelSize) != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, "CvCapture_OpenNI2::readCamerasParams : Could not read pixel size!" +
                 std::string(openni::OpenNI::getExtendedError()));
    }

    // pixel size @ VGA = pixel size @ SXGA x 2
    pixelSize *= 2.0; // in mm

    // focal length of IR camera in pixels for VGA resolution
    unsigned long long zeroPlaneDistance; // in mm
    if (streams[CV_DEPTH_STREAM].getProperty(XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE, &zeroPlaneDistance) != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, "CvCapture_OpenNI2::readCamerasParams : Could not read virtual plane distance!" +
                 std::string(openni::OpenNI::getExtendedError()));
    }

    if (streams[CV_DEPTH_STREAM].getProperty<double>(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE, &baseline) != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, "CvCapture_OpenNI2::readCamerasParams : Could not read base line!" +
                 std::string(openni::OpenNI::getExtendedError()));
    }

    // baseline from cm -> mm
    baseline *= 10;

    // focal length from mm -> pixels (valid for 640x480)
    depthFocalLength_VGA = (int)((double)zeroPlaneDistance / (double)pixelSize);
}
423
424
double CvCapture_OpenNI2::getProperty( int propIdx ) const
425
{
426
double propValue = 0;
427
428
if( isOpened() )
429
{
430
int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK;
431
432
if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR )
433
{
434
propValue = getImageGeneratorProperty( purePropIdx );
435
}
436
else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR )
437
{
438
propValue = getDepthGeneratorProperty( purePropIdx );
439
}
440
else if ((propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IR_GENERATOR)
441
{
442
propValue = getIrGeneratorProperty(purePropIdx);
443
}
444
else
445
{
446
propValue = getCommonProperty( purePropIdx );
447
}
448
}
449
450
return propValue;
451
}
452
453
bool CvCapture_OpenNI2::setProperty( int propIdx, double propValue )
454
{
455
bool isSet = false;
456
if( isOpened() )
457
{
458
int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK;
459
460
if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR )
461
{
462
isSet = setImageGeneratorProperty( purePropIdx, propValue );
463
}
464
else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR )
465
{
466
isSet = setDepthGeneratorProperty( purePropIdx, propValue );
467
}
468
else if ((propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IR_GENERATOR)
469
{
470
isSet = setIrGeneratorProperty(purePropIdx, propValue);
471
}
472
else
473
{
474
isSet = setCommonProperty( purePropIdx, propValue );
475
}
476
}
477
478
return isSet;
479
}
480
481
double CvCapture_OpenNI2::getCommonProperty( int propIdx ) const
482
{
483
double propValue = 0;
484
485
switch( propIdx )
486
{
487
case CV_CAP_PROP_FRAME_WIDTH :
488
case CV_CAP_PROP_FRAME_HEIGHT :
489
case CV_CAP_PROP_FPS :
490
case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
491
case CV_CAP_PROP_OPENNI_BASELINE :
492
case CV_CAP_PROP_OPENNI_FOCAL_LENGTH :
493
case CV_CAP_PROP_OPENNI_REGISTRATION :
494
propValue = getDepthGeneratorProperty( propIdx );
495
break;
496
case CV_CAP_PROP_OPENNI2_SYNC :
497
propValue = const_cast<CvCapture_OpenNI2 *>(this)->device.getDepthColorSyncEnabled();
498
break;
499
case CV_CAP_PROP_OPENNI2_MIRROR:
500
{
501
bool isMirroring = false;
502
for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i)
503
isMirroring |= streams[i].getMirroringEnabled();
504
propValue = isMirroring ? 1.0 : 0.0;
505
break;
506
}
507
default :
508
CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for getting.", propIdx) );
509
}
510
511
return propValue;
512
}
513
514
// Handles property writes not tied to a specific generator. Returns true
// if the value was applied; unknown ids raise via CV_Error.
bool CvCapture_OpenNI2::setCommonProperty( int propIdx, double propValue )
{
    bool isSet = false;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI2_MIRROR:
    {
        // Apply mirroring to every active stream; succeed if any accepted it.
        bool mirror = propValue > 0.0 ? true : false;
        for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i)
        {
            if (streams[i].isValid())
                isSet |= streams[i].setMirroringEnabled(mirror) == openni::STATUS_OK;
        }
    }
        break;
    // There is a set of properties that correspond to depth generator by default
    // (is they are pass without particular generator flag).
    case CV_CAP_PROP_OPENNI_REGISTRATION:
        isSet = setDepthGeneratorProperty(propIdx, propValue);
        break;
    case CV_CAP_PROP_OPENNI2_SYNC:
        isSet = device.setDepthColorSyncEnabled(propValue > 0.0) == openni::STATUS_OK;
        break;

    // Recognized but unsupported: report failure instead of raising.
    case CV_CAP_PROP_FRAME_WIDTH:
    case CV_CAP_PROP_FRAME_HEIGHT:
    case CV_CAP_PROP_AUTOFOCUS:
        isSet = false;
        break;

    default:
        CV_Error(CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for setting.", propIdx));
    }

    return isSet;
}
551
552
// Reads a property of the depth stream. Returns 0 when the depth stream is
// not active; unknown ids raise via CV_Error.
double CvCapture_OpenNI2::getDepthGeneratorProperty( int propIdx ) const
{
    double propValue = 0;
    if( !streams[CV_DEPTH_STREAM].isValid() )
        return propValue;

    openni::VideoMode mode;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
        propValue = streams[CV_DEPTH_STREAM].isValid();
        break;
    case CV_CAP_PROP_FRAME_WIDTH :
        propValue = streams[CV_DEPTH_STREAM].getVideoMode().getResolutionX();
        break;
    case CV_CAP_PROP_FRAME_HEIGHT :
        propValue = streams[CV_DEPTH_STREAM].getVideoMode().getResolutionY();
        break;
    case CV_CAP_PROP_FPS :
        mode = streams[CV_DEPTH_STREAM].getVideoMode();
        propValue = mode.getFps();
        break;
    case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
        propValue = streams[CV_DEPTH_STREAM].getMaxPixelValue();
        break;
    case CV_CAP_PROP_OPENNI_BASELINE :
        // Camera intrinsics are read lazily; readCamerasParams mutates
        // cached members, hence the const_cast.
        if(baseline <= 0)
            const_cast<CvCapture_OpenNI2*>(this)->readCamerasParams();
        propValue = baseline;
        break;
    case CV_CAP_PROP_OPENNI_FOCAL_LENGTH :
        if(depthFocalLength_VGA <= 0)
            const_cast<CvCapture_OpenNI2*>(this)->readCamerasParams();
        propValue = (double)depthFocalLength_VGA;
        break;
    case CV_CAP_PROP_OPENNI_REGISTRATION :
        propValue = device.getImageRegistrationMode();
        break;
    case CV_CAP_PROP_POS_MSEC :
        // Timestamp of the last grabbed depth frame (device clock).
        propValue = (double)streamFrames[CV_DEPTH_STREAM].getTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES :
        propValue = streamFrames[CV_DEPTH_STREAM].getFrameIndex();
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for getting.", propIdx) );
    }

    return propValue;
}
603
604
// Writes a property of the depth stream. Supports toggling the stream and
// enabling/disabling depth-to-color registration; unknown ids raise.
bool CvCapture_OpenNI2::setDepthGeneratorProperty( int propIdx, double propValue )
{
    bool isSet = false;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
        if (isContextOpened)
        {
            toggleStream(CV_DEPTH_STREAM, propValue > 0.0);
            isSet = true;
        }
        break;
    case CV_CAP_PROP_OPENNI_REGISTRATION:
        {
            CV_Assert(streams[CV_DEPTH_STREAM].isValid());
            if( propValue != 0.0 ) // "on"
            {
                // if there isn't image generator (i.e. ASUS XtionPro doesn't have it)
                // then the property isn't available
                if ( streams[CV_COLOR_STREAM].isValid() )
                {
                    openni::ImageRegistrationMode mode = propValue != 0.0 ? openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR : openni::IMAGE_REGISTRATION_OFF;
                    if( device.getImageRegistrationMode() != mode )
                    {
                        if (device.isImageRegistrationModeSupported(mode))
                        {
                            openni::Status status = device.setImageRegistrationMode(mode);
                            if( status != openni::STATUS_OK )
                                CV_Error(CV_StsError, std::string("CvCapture_OpenNI2::setDepthGeneratorProperty: ") +
                                         std::string(openni::OpenNI::getExtendedError()));
                            else
                                isSet = true;
                        }
                        else
                            CV_Error(CV_StsError, "CvCapture_OpenNI2::setDepthGeneratorProperty: Unsupported viewpoint.");
                    }
                    else
                        // Already in the requested mode: nothing to do.
                        isSet = true;
                }
            }
            else // "off"
            {
                openni::Status status = device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_OFF);
                if( status != openni::STATUS_OK )
                    CV_Error(CV_StsError, std::string("CvCapture_OpenNI2::setDepthGeneratorProperty: ") +
                             std::string(openni::OpenNI::getExtendedError()));
                else
                    isSet = true;
            }
        }
        break;
    default:
        CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for setting.", propIdx) );
    }

    return isSet;
}
662
663
// Reads a property of the color (image) stream. Returns 0 when the color
// stream is not active; unknown ids raise via CV_Error.
double CvCapture_OpenNI2::getImageGeneratorProperty( int propIdx ) const
{
    double propValue = 0.;
    if( !streams[CV_COLOR_STREAM].isValid() )
        return propValue;

    openni::VideoMode mode;
    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
        propValue = streams[CV_COLOR_STREAM].isValid();
        break;
    case CV_CAP_PROP_FRAME_WIDTH :
        propValue = streams[CV_COLOR_STREAM].getVideoMode().getResolutionX();
        break;
    case CV_CAP_PROP_FRAME_HEIGHT :
        propValue = streams[CV_COLOR_STREAM].getVideoMode().getResolutionY();
        break;
    case CV_CAP_PROP_FPS :
        propValue = streams[CV_COLOR_STREAM].getVideoMode().getFps();
        break;
    case CV_CAP_PROP_POS_MSEC :
        // Timestamp of the last grabbed color frame (device clock).
        propValue = (double)streamFrames[CV_COLOR_STREAM].getTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES :
        propValue = (double)streamFrames[CV_COLOR_STREAM].getFrameIndex();
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.", propIdx) );
    }

    return propValue;
}
696
697
// Writes a property of the color (image) stream. Supports toggling the
// stream and selecting one of the predefined output modes; unknown ids raise.
bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue)
{
    bool isSet = false;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
        if (isContextOpened)
        {
            toggleStream(CV_COLOR_STREAM, propValue > 0.0);
            isSet = true;
        }
        break;
    case CV_CAP_PROP_OPENNI_OUTPUT_MODE :
    {
        if (!streams[CV_COLOR_STREAM].isValid())
            return isSet;
        // Start from the current mode so the pixel format is preserved.
        openni::VideoMode mode = streams[CV_COLOR_STREAM].getVideoMode();

        switch( cvRound(propValue) )
        {
        case CV_CAP_OPENNI_VGA_30HZ :
            mode.setResolution(640,480);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_SXGA_15HZ :
            mode.setResolution(1280, 960);
            mode.setFps(15);
            break;
        case CV_CAP_OPENNI_SXGA_30HZ :
            mode.setResolution(1280, 960);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_QVGA_30HZ :
            mode.setResolution(320, 240);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_QVGA_60HZ :
            mode.setResolution(320, 240);
            mode.setFps(60);
            break;
        default :
            CV_Error( CV_StsBadArg, "Unsupported image generator output mode.");
        }

        openni::Status status = streams[CV_COLOR_STREAM].setVideoMode( mode );
        if( status != openni::STATUS_OK )
            CV_Error(CV_StsError, std::string("CvCapture_OpenNI2::setImageGeneratorProperty: ") +
                     std::string(openni::OpenNI::getExtendedError()));
        else
            isSet = true;
        break;
    }
    default:
        CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for setting.", propIdx) );
    }

    return isSet;
}
756
757
double CvCapture_OpenNI2::getIrGeneratorProperty(int propIdx) const
758
{
759
double propValue = 0.;
760
if (!streams[CV_IR_STREAM].isValid())
761
return propValue;
762
763
openni::VideoMode mode;
764
switch (propIdx)
765
{
766
case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
767
propValue = streams[CV_IR_STREAM].isValid();
768
break;
769
case CV_CAP_PROP_FRAME_WIDTH:
770
propValue = streams[CV_IR_STREAM].getVideoMode().getResolutionX();
771
break;
772
case CV_CAP_PROP_FRAME_HEIGHT:
773
propValue = streams[CV_IR_STREAM].getVideoMode().getResolutionY();
774
break;
775
case CV_CAP_PROP_FPS:
776
propValue = streams[CV_IR_STREAM].getVideoMode().getFps();
777
break;
778
case CV_CAP_PROP_POS_MSEC:
779
propValue = (double)streamFrames[CV_IR_STREAM].getTimestamp();
780
break;
781
case CV_CAP_PROP_POS_FRAMES:
782
propValue = (double)streamFrames[CV_IR_STREAM].getFrameIndex();
783
break;
784
default:
785
CV_Error(CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.", propIdx));
786
}
787
788
return propValue;
789
}
790
791
bool CvCapture_OpenNI2::setIrGeneratorProperty(int propIdx, double propValue)
792
{
793
bool isSet = false;
794
795
switch (propIdx)
796
{
797
case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT:
798
if (isContextOpened)
799
{
800
toggleStream(CV_IR_STREAM, propValue > 0.0);
801
isSet = true;
802
}
803
break;
804
case CV_CAP_PROP_OPENNI_OUTPUT_MODE:
805
{
806
if (!streams[CV_IR_STREAM].isValid())
807
return isSet;
808
openni::VideoMode mode = streams[CV_IR_STREAM].getVideoMode();
809
810
switch (cvRound(propValue))
811
{
812
case CV_CAP_OPENNI_VGA_30HZ:
813
mode.setResolution(640, 480);
814
mode.setFps(30);
815
break;
816
case CV_CAP_OPENNI_SXGA_15HZ:
817
mode.setResolution(1280, 960);
818
mode.setFps(15);
819
break;
820
case CV_CAP_OPENNI_SXGA_30HZ:
821
mode.setResolution(1280, 960);
822
mode.setFps(30);
823
break;
824
case CV_CAP_OPENNI_QVGA_30HZ:
825
mode.setResolution(320, 240);
826
mode.setFps(30);
827
break;
828
case CV_CAP_OPENNI_QVGA_60HZ:
829
mode.setResolution(320, 240);
830
mode.setFps(60);
831
break;
832
default:
833
CV_Error(CV_StsBadArg, "Unsupported image generator output mode.");
834
}
835
836
openni::Status status = streams[CV_IR_STREAM].setVideoMode(mode);
837
if (status != openni::STATUS_OK)
838
CV_Error(CV_StsError, std::string("CvCapture_OpenNI2::setImageGeneratorProperty: ") +
839
std::string(openni::OpenNI::getExtendedError()));
840
else
841
isSet = true;
842
break;
843
}
844
default:
845
CV_Error(CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for setting.", propIdx));
846
}
847
848
return isSet;
849
}
850
851
bool CvCapture_OpenNI2::grabFrame()
852
{
853
if( !isOpened() )
854
return false;
855
856
bool isGrabbed = false;
857
858
int numActiveStreams = 0;
859
openni::VideoStream* streamPtrs[CV_MAX_NUM_STREAMS];
860
for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i) {
861
streamPtrs[numActiveStreams++] = &streams[i];
862
}
863
864
int currentStream;
865
openni::Status status = openni::OpenNI::waitForAnyStream(streamPtrs, numActiveStreams, &currentStream, CV_STREAM_TIMEOUT);
866
if( status != openni::STATUS_OK )
867
return false;
868
869
for (int i = 0; i < CV_MAX_NUM_STREAMS; ++i)
870
{
871
if (streams[i].isValid())
872
streams[i].readFrame(&streamFrames[i]);
873
}
874
isGrabbed = true;
875
876
return isGrabbed;
877
}
878
879
// Wraps the raw 16-bit depth frame in a cv::Mat and zeroes out invalid
// pixels (no-sample, shadow, or already-zero readings).
// NOTE: after create(), depthMap.data is repointed at the frame's buffer,
// so the Mat does NOT own its pixels — it is only valid while
// depthMetaData keeps the frame alive.
inline void getDepthMapFromMetaData(const openni::VideoFrameRef& depthMetaData, cv::Mat& depthMap, int noSampleValue, int shadowValue)
{
    depthMap.create(depthMetaData.getHeight(), depthMetaData.getWidth(), CV_16UC1);
    depthMap.data = (uchar*)depthMetaData.getData();

    // Mark pixels whose reading is a sentinel (no-sample/shadow) or zero.
    cv::Mat badMask = (depthMap == (double)noSampleValue) | (depthMap == (double)shadowValue) | (depthMap == 0);

    // mask the pixels with invalid depth
    depthMap.setTo( cv::Scalar::all( CvCapture_OpenNI2::INVALID_PIXEL_VAL ), badMask );
}
889
890
// Returns the last grabbed depth frame as a CV_16UC1 map (invalid pixels
// zeroed), or 0 when no depth frame is available.
IplImage* CvCapture_OpenNI2::retrieveDepthMap()
{
    if (streamFrames[CV_DEPTH_STREAM].isValid())
    {
        OutputMap& out = outputMaps[CV_CAP_OPENNI_DEPTH_MAP];
        getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], out.mat, noSampleValue, shadowValue);
        return out.getIplImagePtr();
    }
    return 0;
}
899
900
// Converts the last grabbed depth frame to a CV_32FC3 point cloud in meters
// using the device's projective-to-world mapping. Invalid depth pixels get
// INVALID_COORDINATE_VAL in all three coordinates. Returns 0 when no depth
// frame is available.
IplImage* CvCapture_OpenNI2::retrievePointCloudMap()
{
    if( !streamFrames[CV_DEPTH_STREAM].isValid() )
        return 0;

    cv::Mat depthImg;
    getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], depthImg, noSampleValue, shadowValue);

    const int badPoint = INVALID_PIXEL_VAL;
    const float badCoord = INVALID_COORDINATE_VAL;
    int cols = streamFrames[CV_DEPTH_STREAM].getWidth(), rows = streamFrames[CV_DEPTH_STREAM].getHeight();
    cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) );

    float worldX, worldY, worldZ;
    for( int y = 0; y < rows; y++ )
    {
        for (int x = 0; x < cols; x++)
        {
            // Back-project the pixel into camera-space millimeters.
            openni::CoordinateConverter::convertDepthToWorld(streams[CV_DEPTH_STREAM], x, y, depthImg.at<unsigned short>(y, x), &worldX, &worldY, &worldZ);

            if (depthImg.at<unsigned short>(y, x) == badPoint) // not valid
                pointCloud_XYZ.at<cv::Point3f>(y, x) = cv::Point3f(badCoord, badCoord, badCoord);
            else
            {
                pointCloud_XYZ.at<cv::Point3f>(y, x) = cv::Point3f(worldX*0.001f, worldY*0.001f, worldZ*0.001f); // from mm to meters
            }
        }
    }

    outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].mat = pointCloud_XYZ;

    return outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].getIplImagePtr();
}
933
934
// Converts a depth frame (mm) to a CV_32FC1 disparity map using
// disparity = baseline * F / z. Pixels with invalid depth keep
// INVALID_PIXEL_VAL (which also avoids division by the zero sentinel).
static void computeDisparity_32F( const openni::VideoFrameRef& depthMetaData, cv::Mat& disp, double baseline, int F, int noSampleValue, int shadowValue)
{
    cv::Mat depth;
    getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );
    CV_Assert( depth.type() == CV_16UC1 );

    // disparity = baseline * F / z;

    float mult = (float)(baseline /*mm*/ * F /*pixels*/);

    disp.create( depth.size(), CV_32FC1);
    disp = cv::Scalar::all( CvCapture_OpenNI2::INVALID_PIXEL_VAL );
    for( int y = 0; y < disp.rows; y++ )
    {
        for( int x = 0; x < disp.cols; x++ )
        {
            unsigned short curDepth = depth.at<unsigned short>(y,x);
            if( curDepth != CvCapture_OpenNI2::INVALID_PIXEL_VAL )
                disp.at<float>(y,x) = mult / curDepth;
        }
    }
}
956
957
// Returns the last grabbed depth frame converted to an 8-bit disparity map
// (truncating conversion from the 32F disparity), or 0 when unavailable.
IplImage* CvCapture_OpenNI2::retrieveDisparityMap()
{
    if (!streamFrames[CV_DEPTH_STREAM].isValid())
        return 0;

    // Refresh baseline/focal length from the device before converting.
    readCamerasParams();

    cv::Mat disp32;
    computeDisparity_32F(streamFrames[CV_DEPTH_STREAM], disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue);

    disp32.convertTo(outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1);

    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].getIplImagePtr();
}
971
972
// Returns the last grabbed depth frame converted to a CV_32FC1 disparity
// map, or 0 when unavailable.
IplImage* CvCapture_OpenNI2::retrieveDisparityMap_32F()
{
    if (!streamFrames[CV_DEPTH_STREAM].isValid())
        return 0;

    // Refresh baseline/focal length from the device before converting.
    readCamerasParams();

    computeDisparity_32F(streamFrames[CV_DEPTH_STREAM], outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue);

    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr();
}
983
984
// Returns a mask (255 where the depth measurement is valid, 0 elsewhere)
// for the last grabbed depth frame, or 0 when unavailable.
IplImage* CvCapture_OpenNI2::retrieveValidDepthMask()
{
    if (!streamFrames[CV_DEPTH_STREAM].isValid())
        return 0;

    cv::Mat d;
    getDepthMapFromMetaData(streamFrames[CV_DEPTH_STREAM], d, noSampleValue, shadowValue);

    // getDepthMapFromMetaData already collapsed all invalid readings to
    // INVALID_PIXEL_VAL, so a simple inequality yields the validity mask.
    outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = d != CvCapture_OpenNI2::INVALID_PIXEL_VAL;

    return outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].getIplImagePtr();
}
996
997
// Converts an RGB888 color frame into a BGR cv::Mat (OpenCV channel order).
// Raises if the frame is in any other pixel format. The intermediate
// bufferImage aliases the frame's buffer (non-owning); cvtColor copies the
// data out, so bgrImage owns its pixels.
inline void getBGRImageFromMetaData( const openni::VideoFrameRef& imageMetaData, cv::Mat& bgrImage )
{
    cv::Mat bufferImage;
    if( imageMetaData.getVideoMode().getPixelFormat() != openni::PIXEL_FORMAT_RGB888 )
        CV_Error( CV_StsUnsupportedFormat, "Unsupported format of grabbed image." );

    bgrImage.create(imageMetaData.getHeight(), imageMetaData.getWidth(), CV_8UC3);
    bufferImage.create(imageMetaData.getHeight(), imageMetaData.getWidth(), CV_8UC3);
    bufferImage.data = (uchar*)imageMetaData.getData();

    cv::cvtColor(bufferImage, bgrImage, cv::COLOR_RGB2BGR);
}
1009
1010
// Wraps a GRAY8 or GRAY16 frame in a cv::Mat of the matching depth.
// Raises on any other pixel format.
// NOTE: grayImage.data is repointed at the frame's buffer, so the Mat does
// NOT own its pixels — it is only valid while imageMetaData keeps the
// frame alive.
inline void getGrayImageFromMetaData(const openni::VideoFrameRef& imageMetaData, cv::Mat& grayImage)
{
    if (imageMetaData.getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_GRAY8)
    {
        grayImage.create(imageMetaData.getHeight(), imageMetaData.getWidth(), CV_8UC1);
        grayImage.data = (uchar*)imageMetaData.getData();
    }
    else if (imageMetaData.getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_GRAY16)
    {
        grayImage.create(imageMetaData.getHeight(), imageMetaData.getWidth(), CV_16UC1);
        grayImage.data = (uchar*)imageMetaData.getData();
    }
    else
    {
        CV_Error(CV_StsUnsupportedFormat, "Unsupported format of grabbed image.");
    }
}
1027
1028
// Returns the last grabbed color frame converted to BGR, or 0 when no
// color frame is available.
IplImage* CvCapture_OpenNI2::retrieveBGRImage()
{
    if (streamFrames[CV_COLOR_STREAM].isValid())
    {
        OutputMap& out = outputMaps[CV_CAP_OPENNI_BGR_IMAGE];
        getBGRImageFromMetaData(streamFrames[CV_COLOR_STREAM], out.mat);
        return out.getIplImagePtr();
    }
    return 0;
}
1037
1038
// Returns the last grabbed color frame converted to single-channel gray,
// or 0 when no color frame is available. Only RGB888 input is supported.
IplImage* CvCapture_OpenNI2::retrieveGrayImage()
{
    if (!streamFrames[CV_COLOR_STREAM].isValid())
        return 0;

    CV_Assert(streamFrames[CV_COLOR_STREAM].getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_RGB888); // RGB

    // Despite the name, this Mat holds BGR data (getBGRImageFromMetaData
    // converts RGB->BGR), which matches the CV_BGR2GRAY code below.
    cv::Mat rgbImage;
    getBGRImageFromMetaData(streamFrames[CV_COLOR_STREAM], rgbImage);
    cv::cvtColor( rgbImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY );

    return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr();
}
1051
1052
// Returns the last grabbed IR frame as an 8- or 16-bit gray image, or 0
// when no IR frame is available.
IplImage* CvCapture_OpenNI2::retrieveIrImage()
{
    if (!streamFrames[CV_IR_STREAM].isValid())
        return 0;

    getGrayImageFromMetaData(streamFrames[CV_IR_STREAM], outputMaps[CV_CAP_OPENNI_IR_IMAGE].mat);

    return outputMaps[CV_CAP_OPENNI_IR_IMAGE].getIplImagePtr();
}
1061
1062
// Dispatches a retrieve request to the handler for the requested output
// map type. Asserts that outputType is within the supported range and
// returns 0 when the corresponding data is not available.
IplImage* CvCapture_OpenNI2::retrieveFrame( int outputType )
{
    CV_Assert( outputType < outputMapsTypesCount && outputType >= 0);

    switch (outputType)
    {
    case CV_CAP_OPENNI_DEPTH_MAP:
        return retrieveDepthMap();
    case CV_CAP_OPENNI_POINT_CLOUD_MAP:
        return retrievePointCloudMap();
    case CV_CAP_OPENNI_DISPARITY_MAP:
        return retrieveDisparityMap();
    case CV_CAP_OPENNI_DISPARITY_MAP_32F:
        return retrieveDisparityMap_32F();
    case CV_CAP_OPENNI_VALID_DEPTH_MASK:
        return retrieveValidDepthMask();
    case CV_CAP_OPENNI_BGR_IMAGE:
        return retrieveBGRImage();
    case CV_CAP_OPENNI_GRAY_IMAGE:
        return retrieveGrayImage();
    case CV_CAP_OPENNI_IR_IMAGE:
        return retrieveIrImage();
    default:
        return 0;
    }
}
1102
1103
// Factory for a live-device OpenNI2 capture. Returns 0 (and frees the
// partially-constructed object) when the device could not be opened.
CvCapture* cvCreateCameraCapture_OpenNI2( int index )
{
    CvCapture_OpenNI2* capture = new CvCapture_OpenNI2( index );
    if (!capture->isOpened())
    {
        delete capture;
        capture = 0;
    }
    return capture;
}
1113
1114
// Factory for an .oni-recording OpenNI2 capture. Returns 0 (and frees the
// partially-constructed object) when the file could not be opened.
CvCapture* cvCreateFileCapture_OpenNI2( const char* filename )
{
    CvCapture_OpenNI2* capture = new CvCapture_OpenNI2( filename );
    if (!capture->isOpened())
    {
        delete capture;
        capture = 0;
    }
    return capture;
}
1124
1125
#endif
1126
1127