Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
Tetragramm
GitHub Repository: Tetragramm/opencv
Path: blob/master/modules/videoio/src/cap_openni.cpp
16344 views
1
/*M///////////////////////////////////////////////////////////////////////////////////////
2
//
3
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
4
//
5
// By downloading, copying, installing or using the software you agree to this license.
6
// If you do not agree to this license, do not download, install,
7
// copy or use the software.
8
//
9
//
10
// Intel License Agreement
11
// For Open Source Computer Vision Library
12
//
13
// Copyright (C) 2000, Intel Corporation, all rights reserved.
14
// Third party copyrights are property of their respective owners.
15
//
16
// Redistribution and use in source and binary forms, with or without modification,
17
// are permitted provided that the following conditions are met:
18
//
19
// * Redistribution's of source code must retain the above copyright notice,
20
// this list of conditions and the following disclaimer.
21
//
22
// * Redistribution's in binary form must reproduce the above copyright notice,
23
// this list of conditions and the following disclaimer in the documentation
24
// and/or other materials provided with the distribution.
25
//
26
// * The name of Intel Corporation may not be used to endorse or promote products
27
// derived from this software without specific prior written permission.
28
//
29
// This software is provided by the copyright holders and contributors "as is" and
30
// any express or implied warranties, including, but not limited to, the implied
31
// warranties of merchantability and fitness for a particular purpose are disclaimed.
32
// In no event shall the Intel Corporation or contributors be liable for any direct,
33
// indirect, incidental, special, exemplary, or consequential damages
34
// (including, but not limited to, procurement of substitute goods or services;
35
// loss of use, data, or profits; or business interruption) however caused
36
// and on any theory of liability, whether in contract, strict liability,
37
// or tort (including negligence or otherwise) arising in any way out of
38
// the use of this software, even if advised of the possibility of such damage.
39
//
40
//M*/
41
#include "precomp.hpp"
42
#include "opencv2/core.hpp"
43
#include "opencv2/imgproc.hpp"
44
45
#ifdef HAVE_OPENNI
46
47
#include <queue>
48
49
#ifndef i386
50
# define i386 0
51
#endif
52
#ifndef __arm__
53
# define __arm__ 0
54
#endif
55
#ifndef _ARC
56
# define _ARC 0
57
#endif
58
#ifndef __APPLE__
59
# define __APPLE__ 0
60
#endif
61
62
#include "XnCppWrapper.h"
63
64
// Default OpenNI XML configuration script, fed to xn::Context::RunXmlScript()
// when a device is opened by index. It registers the public PrimeSense
// license key, disables console/file logging, and declares one image and one
// depth production node, both VGA (640x480) at 30 FPS with mirroring off.
// NOTE: this is a runtime string consumed by OpenNI — do not reformat it.
const cv::String XMLConfig =
"<OpenNI>"
        "<Licenses>"
        "<License vendor=\"PrimeSense\" key=\"0KOIk2JeIBYClPWVnMoRKn5cdY4=\"/>"
        "</Licenses>"
        "<Log writeToConsole=\"false\" writeToFile=\"false\">"
                "<LogLevel value=\"3\"/>"
                "<Masks>"
                        "<Mask name=\"ALL\" on=\"true\"/>"
                "</Masks>"
                "<Dumps>"
                "</Dumps>"
        "</Log>"
        "<ProductionNodes>"
                "<Node type=\"Image\" name=\"Image1\" stopOnError=\"false\">"
                        "<Configuration>"
                                "<MapOutputMode xRes=\"640\" yRes=\"480\" FPS=\"30\"/>"
                                "<Mirror on=\"false\"/>"
                        "</Configuration>"
                "</Node> "
                "<Node type=\"Depth\" name=\"Depth1\">"
                        "<Configuration>"
                                "<MapOutputMode xRes=\"640\" yRes=\"480\" FPS=\"30\"/>"
                                "<Mirror on=\"false\"/>"
                        "</Configuration>"
                "</Node>"
        "</ProductionNodes>"
"</OpenNI>\n";
92
93
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
94
// Pairs depth and image frames from two OpenNI generators by timestamp.
// A depth/image pair is accepted when the two timestamps differ by at most
// maxTimeDuration; otherwise the older of the two frames is discarded and
// pumping continues. Used when CV_CAP_PROP_OPENNI_APPROX_FRAME_SYNC is on.
class ApproximateSyncGrabber
{
public:
    // The grabber stores references to the caller-owned context/generators;
    // both generators must already be valid.
    ApproximateSyncGrabber( xn::Context &_context,
                            xn::DepthGenerator &_depthGenerator,
                            xn::ImageGenerator &_imageGenerator,
                            int _maxBufferSize, bool _isCircleBuffer, int _maxTimeDuration ) :
        context(_context), depthGenerator(_depthGenerator), imageGenerator(_imageGenerator),
        maxBufferSize(_maxBufferSize), isCircleBuffer(_isCircleBuffer), maxTimeDuration(_maxTimeDuration)
    {

        CV_Assert( depthGenerator.IsValid() );
        CV_Assert( imageGenerator.IsValid() );
    }

    // Maximum number of buffered frames per stream; values <= 0 mean unbounded
    // (see ApproximateSynchronizer::isSpinContinue()).
    void setMaxBufferSize( int _maxBufferSize )
    {
        maxBufferSize = _maxBufferSize;
    }
    inline int getMaxBufferSize() const { return maxBufferSize; }

    // NOTE(review): the circle-buffer flag is stored and exposed here but is
    // never consulted by ApproximateSynchronizer below — confirm whether it is
    // intentionally unused with this (non-TBB) synchronizer.
    void setIsCircleBuffer( bool _isCircleBuffer ) { isCircleBuffer = _isCircleBuffer; }
    bool getIsCircleBuffer() const { return isCircleBuffer; }

    // Maximum allowed |depth timestamp - image timestamp| for a pair, in the
    // same (millisecond) units as the 1e-3-scaled difference computed in grab().
    void setMaxTimeDuration( int _maxTimeDuration ) { maxTimeDuration = _maxTimeDuration; }
    int getMaxTimeDuration() const { return maxTimeDuration; }

    // Blocks until a synchronized pair is available, pumping the OpenNI context
    // via spin() as needed, then copies the pair into the output metadata.
    // Requires a preceding start(); always returns true.
    bool grab( xn::DepthMetaData& depthMetaData,
               xn::ImageMetaData& imageMetaData )
    {
        CV_Assert( task );


        while( task->grab(depthMetaData, imageMetaData) == false )
        {
            task->spin();
        }
        return true;

    }

    // Creates (or re-creates) the synchronizer. The actual synchronization work
    // is done lazily inside grab()/spin(), not on a background thread.
    void start()
    {
        CV_Assert( depthGenerator.IsValid() );
        CV_Assert( imageGenerator.IsValid() );
        task.reset( new ApproximateSynchronizer( *this ) );
    }

    // Destroys the synchronizer together with any queued frames.
    void finish()
    {
        task.release();
    }

    // True while a synchronizer exists, i.e. between start() and finish().
    bool isRun() const { return task != 0; }

    xn::Context &context;
    xn::DepthGenerator &depthGenerator;
    xn::ImageGenerator &imageGenerator;

private:
    // Non-copyable: holds references to externally owned OpenNI objects.
    ApproximateSyncGrabber(const ApproximateSyncGrabber&);
    ApproximateSyncGrabber& operator=(const ApproximateSyncGrabber&);

    int maxBufferSize;
    bool isCircleBuffer;
    int maxTimeDuration;

    // Common pumping/pairing logic; subclasses provide the frame queues.
    class ApproximateSynchronizerBase
    {
    public:
        ApproximateSynchronizerBase( ApproximateSyncGrabber& _approxSyncGrabber ) :
            approxSyncGrabber(_approxSyncGrabber), isDepthFilled(false), isImageFilled(false)
        {}

        virtual ~ApproximateSynchronizerBase() {}

        virtual bool isSpinContinue() const = 0;
        virtual void pushDepthMetaData( xn::DepthMetaData& depthMetaData ) = 0;
        virtual void pushImageMetaData( xn::ImageMetaData& imageMetaData ) = 0;
        virtual bool popDepthMetaData( xn::DepthMetaData& depthMetaData ) = 0;
        virtual bool popImageMetaData( xn::ImageMetaData& imageMetaData ) = 0;

        // Pump the OpenNI context until the queues are full enough
        // (isSpinContinue() returns false), pushing every new frame seen.
        void spin()
        {
            while(isSpinContinue() == true)
            {
                XnStatus status = approxSyncGrabber.context.WaitAnyUpdateAll();
                if( status != XN_STATUS_OK )
                    continue;

                // The `depth`/`image` members are reused as scratch metadata
                // (locals were deliberately not used here):
                //xn::DepthMetaData depth;
                //xn::ImageMetaData image;
                approxSyncGrabber.depthGenerator.GetMetaData(depth);
                approxSyncGrabber.imageGenerator.GetMetaData(image);

                if( depth.Data() && depth.IsDataNew() )
                    pushDepthMetaData( depth );

                if( image.Data() && image.IsDataNew() )
                    pushImageMetaData( image );
            }
        }

        // Tries to form a timestamp-matched pair from the queues.
        // Returns false when either queue runs dry before a match is found
        // (caller then calls spin() and retries).
        virtual bool grab( xn::DepthMetaData& depthMetaData,
                           xn::ImageMetaData& imageMetaData )
        {
            for(;;)
            {
                if( !isDepthFilled )
                    isDepthFilled = popDepthMetaData(depth);
                if( !isImageFilled )
                    isImageFilled = popImageMetaData(image);

                if( !isDepthFilled || !isImageFilled )
                    break;

                // Timestamp difference in milliseconds (timestamps are in us).
                double timeDiff = 1e-3 * std::abs(static_cast<double>(depth.Timestamp()) - static_cast<double>(image.Timestamp()));

                if( timeDiff <= approxSyncGrabber.maxTimeDuration )
                {
                    depthMetaData.InitFrom(depth);
                    imageMetaData.InitFrom(image);
                    isDepthFilled = isImageFilled = false;
                    return true;
                }
                else
                {
                    // Pair too far apart: drop the older frame and retry.
                    if( depth.Timestamp() < image.Timestamp() )
                        isDepthFilled = false;
                    else
                        isImageFilled = false;
                }
            }

            return false;
        }

    protected:
        ApproximateSyncGrabber& approxSyncGrabber;
        xn::DepthMetaData depth;
        xn::ImageMetaData image;
        // Whether `depth`/`image` currently hold an unconsumed candidate frame.
        bool isDepthFilled;
        bool isImageFilled;
    };

    // If there isn't TBB the synchronization will be executed in the main thread.
    class ApproximateSynchronizer: public ApproximateSynchronizerBase
    {
    public:
        ApproximateSynchronizer( ApproximateSyncGrabber& _approxSyncGrabber ) :
            ApproximateSynchronizerBase(_approxSyncGrabber)
        {}

        // Keep pumping while both queues are below maxBufferSize
        // (or always, when maxBufferSize <= 0).
        virtual bool isSpinContinue() const CV_OVERRIDE
        {
            int maxBufferSize = approxSyncGrabber.getMaxBufferSize();
            return (maxBufferSize <= 0) || (static_cast<int>(depthQueue.size()) < maxBufferSize &&
                                            static_cast<int>(imageQueue.size()) < maxBufferSize); // "<" to may push
        }

        virtual inline void pushDepthMetaData( xn::DepthMetaData& depthMetaData ) CV_OVERRIDE
        {
            // Deep-copy: the source metadata is reused by the caller.
            cv::Ptr<xn::DepthMetaData> depthPtr = cv::makePtr<xn::DepthMetaData>();
            depthPtr->CopyFrom(depthMetaData);
            depthQueue.push(depthPtr);
        }
        virtual inline void pushImageMetaData( xn::ImageMetaData& imageMetaData ) CV_OVERRIDE
        {
            cv::Ptr<xn::ImageMetaData> imagePtr = cv::makePtr<xn::ImageMetaData>();
            imagePtr->CopyFrom(imageMetaData);
            imageQueue.push(imagePtr);
        }
        virtual inline bool popDepthMetaData( xn::DepthMetaData& depthMetaData ) CV_OVERRIDE
        {
            if( depthQueue.empty() )
                return false;

            depthMetaData.CopyFrom(*depthQueue.front());
            depthQueue.pop();
            return true;
        }
        virtual inline bool popImageMetaData( xn::ImageMetaData& imageMetaData ) CV_OVERRIDE
        {
            if( imageQueue.empty() )
                return false;

            imageMetaData.CopyFrom(*imageQueue.front());
            imageQueue.pop();
            return true;
        }

    private:
        // FIFO buffers of deep-copied frames awaiting pairing.
        std::queue<cv::Ptr<xn::DepthMetaData> > depthQueue;
        std::queue<cv::Ptr<xn::ImageMetaData> > imageQueue;
    };

    // Null until start(); owns the active synchronizer.
    cv::Ptr<ApproximateSynchronizer> task;
};
292
293
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
294
// CvCapture backend for OpenNI 1.x depth sensors (MS Kinect, ASUS Xtion).
// Wraps an xn::Context with one depth generator and an optional image
// generator, and converts their output into the CV_CAP_OPENNI_* output maps.
class CvCapture_OpenNI : public CvCapture
{
public:
    // Device families selectable via index/10 in the int constructor.
    enum { DEVICE_DEFAULT=0, DEVICE_MS_KINECT=0, DEVICE_ASUS_XTION=1, DEVICE_MAX=1 };

    // Value written into depth/point-cloud outputs for invalid measurements.
    static const int INVALID_PIXEL_VAL = 0;
    static const int INVALID_COORDINATE_VAL = 0;

    // Defaults for the approximate-sync grabber parameters.
    static const int DEFAULT_MAX_BUFFER_SIZE = 2;
    static const int DEFAULT_IS_CIRCLE_BUFFER = 0;
    static const int DEFAULT_MAX_TIME_DURATION = 20;

    // Open a live device by index, or a recorded .oni file by name.
    CvCapture_OpenNI(int index=0);
    CvCapture_OpenNI(const char * filename);
    virtual ~CvCapture_OpenNI();

    virtual double getProperty(int propIdx) const CV_OVERRIDE;
    virtual bool setProperty(int probIdx, double propVal) CV_OVERRIDE;
    virtual bool grabFrame() CV_OVERRIDE;
    virtual IplImage* retrieveFrame(int outputType) CV_OVERRIDE;

    // True when construction fully succeeded.
    bool isOpened() const;

protected:
    // A cv::Mat plus a persistent IplImage header over it, so retrieve*()
    // can hand out IplImage* without copying or allocating.
    struct OutputMap
    {
    public:
        cv::Mat mat;
        IplImage* getIplImagePtr();
    private:
        IplImage iplHeader;
    };

    // Number of CV_CAP_OPENNI_* output map slots (depth, point cloud, etc.).
    static const int outputMapsTypesCount = 7;

    static XnMapOutputMode defaultMapOutputMode();

    IplImage* retrieveDepthMap();
    IplImage* retrievePointCloudMap();
    IplImage* retrieveDisparityMap();
    IplImage* retrieveDisparityMap_32F();
    IplImage* retrieveValidDepthMask();
    IplImage* retrieveBGRImage();
    IplImage* retrieveGrayImage();

    // Reads baseline, focal length, shadow/no-sample values from the device.
    bool readCamerasParams();

    double getDepthGeneratorProperty(int propIdx) const;
    bool setDepthGeneratorProperty(int propIdx, double propVal);
    double getImageGeneratorProperty(int propIdx) const;
    bool setImageGeneratorProperty(int propIdx, double propVal);
    double getCommonProperty(int propIdx) const;
    bool setCommonProperty(int propIdx, double propVal);

    // OpenNI context
    xn::Context context;
    bool isContextOpened;

    xn::ProductionNode productionNode;

    // Data generators with its metadata
    xn::DepthGenerator depthGenerator;
    xn::DepthMetaData depthMetaData;

    xn::ImageGenerator imageGenerator;
    xn::ImageMetaData imageMetaData;

    int maxBufferSize, maxTimeDuration; // for approx sync
    bool isCircleBuffer;
    cv::Ptr<ApproximateSyncGrabber> approxSyncGrabber;

    // Cameras settings:
    // TODO find in OpenNI function to convert z->disparity and remove fields "baseline" and depthFocalLength_VGA
    // Distance between IR projector and IR camera (in meters)
    // NOTE(review): readCamerasParams() actually stores millimeters here
    // (cm * 10) — the "meters" above looks stale; confirm against callers.
    XnDouble baseline;
    // Focal length for the IR camera in VGA resolution (in pixels)
    XnUInt64 depthFocalLength_VGA;

    // The value for shadow (occluded pixels)
    XnUInt64 shadowValue;
    // The value for pixels without a valid disparity measurement
    XnUInt64 noSampleValue;

    std::vector<OutputMap> outputMaps;
};
379
380
// Returns an IplImage header wrapping the stored cv::Mat (no data copy), or 0
// when the map has not been filled yet. The header is a member, so the
// returned pointer stays valid as long as this OutputMap (and its mat) lives.
IplImage* CvCapture_OpenNI::OutputMap::getIplImagePtr()
{
    if( mat.empty() )
        return 0;

    iplHeader = cvIplImage(mat);
    return &iplHeader;
}
388
389
// True when a constructor completed the whole OpenNI setup sequence;
// every early-return failure path leaves this false.
bool CvCapture_OpenNI::isOpened() const
{
    return isContextOpened;
}
393
394
// The output mode applied to generators at startup: VGA resolution, 30 FPS.
XnMapOutputMode CvCapture_OpenNI::defaultMapOutputMode()
{
    XnMapOutputMode vgaMode;
    vgaMode.nFPS  = 30;
    vgaMode.nXRes = XN_VGA_X_RES;
    vgaMode.nYRes = XN_VGA_Y_RES;
    return vgaMode;
}
402
403
// Opens a live sensor. `index` encodes both device type and device number:
// for index >= 10, deviceType = index / 10 (see the DEVICE_* enum) and the
// device number is index % 10. Any failure prints to stderr and returns,
// leaving isContextOpened == false (checked later via isOpened()).
CvCapture_OpenNI::CvCapture_OpenNI( int index )
{
    int deviceType = DEVICE_DEFAULT;
    XnStatus status;

    isContextOpened = false;
    maxBufferSize = DEFAULT_MAX_BUFFER_SIZE;
    isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER;
    maxTimeDuration = DEFAULT_MAX_TIME_DURATION;

    // Split the combined index into device type + device number.
    if( index >= 10 )
    {
        deviceType = index / 10;
        index %= 10;
    }

    if( deviceType > DEVICE_MAX )
        return;

    // Initialize and configure the context.
    status = context.Init();
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to initialize the context: %s\n", xnGetStatusString(status));
        return;
    }

    // Find devices
    xn::NodeInfoList devicesList;
    status = context.EnumerateProductionTrees( XN_NODE_TYPE_DEVICE, NULL, devicesList, 0 );
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate production trees: %s\n", xnGetStatusString(status));
        return;
    }

    // Chose device according to index
    xn::NodeInfoList::Iterator it = devicesList.Begin();
    for( int i = 0; i < index && it!=devicesList.End(); ++i ) it++;
    if ( it == devicesList.End() )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed device with index %d\n", index);
        return;
    }

    xn::NodeInfo deviceNode = *it;
    status = context.CreateProductionTree( deviceNode, productionNode );
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create production tree: %s\n", xnGetStatusString(status));
        return;
    }

    // Apply the built-in XML config (license, logging, VGA@30 image+depth nodes).
    xn::ScriptNode scriptNode;
    status = context.RunXmlScript( XMLConfig.c_str(), scriptNode );
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to run xml script: %s\n", xnGetStatusString(status));
        return;
    }

    // Associate generators with context.
    // enumerate the nodes to find if depth generator is present
    xn::NodeInfoList depthList;
    status = context.EnumerateExistingNodes( depthList, XN_NODE_TYPE_DEPTH );
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate depth generators: %s\n", xnGetStatusString(status));
        return;
    }
    if( depthList.IsEmpty() )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : The device doesn't have depth generator. Such devices aren't supported now.\n");
        return;
    }
    status = depthGenerator.Create( context );
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create depth generator: %s\n", xnGetStatusString(status));
        return;
    }

    // enumerate the nodes to find if image generator is present
    xn::NodeInfoList imageList;
    status = context.EnumerateExistingNodes( imageList, XN_NODE_TYPE_IMAGE );
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate image generators: %s\n", xnGetStatusString(status));
        return;
    }

    // The image generator is optional (e.g. some Xtion models lack RGB).
    if( !imageList.IsEmpty() )
    {
        status = imageGenerator.Create( context );
        if( status != XN_STATUS_OK )
        {
            fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create image generator: %s\n", xnGetStatusString(status));
            return;
        }
    }

    // Set map output mode.
    if( depthGenerator.IsValid() )
    {
        CV_DbgAssert( depthGenerator.SetMapOutputMode(defaultMapOutputMode()) == XN_STATUS_OK ); // xn::DepthGenerator supports VGA only! (Jan 2011)
    }
    if( imageGenerator.IsValid() )
    {
        CV_DbgAssert( imageGenerator.SetMapOutputMode(defaultMapOutputMode()) == XN_STATUS_OK );
    }

    if( deviceType == DEVICE_ASUS_XTION )
    {
        //ps/asus specific
        imageGenerator.SetIntProperty("InputFormat", 1 /*XN_IO_IMAGE_FORMAT_YUV422*/);
        imageGenerator.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
        depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/);
    }

    // Start generating data.
    status = context.StartGeneratingAll();
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to start generating OpenNI data: %s\n", xnGetStatusString(status));
        return;
    }

    if( !readCamerasParams() )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Could not read cameras parameters\n");
        return;
    }

    outputMaps.resize( outputMapsTypesCount );

    isContextOpened = true;

    // Align depth to the image viewpoint by default.
    setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0);
}
542
543
// Opens a recorded OpenNI (.oni) file instead of a live device. The depth and
// image generators are looked up from the recording rather than created; any
// failure prints to stderr and leaves isContextOpened == false.
CvCapture_OpenNI::CvCapture_OpenNI(const char * filename)
{
    XnStatus status;

    isContextOpened = false;
    maxBufferSize = DEFAULT_MAX_BUFFER_SIZE;
    isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER;
    maxTimeDuration = DEFAULT_MAX_TIME_DURATION;

    // Initialize and configure the context.
    status = context.Init();
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to initialize the context: %s\n", xnGetStatusString(status));
        return;
    }

    // Open file
    status = context.OpenFileRecording( filename, productionNode );
    if( status != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to open input file (%s): %s\n", filename, xnGetStatusString(status));
        return;
    }

    // Bind to whatever nodes the recording provides; the image node may be
    // absent, in which case imageGenerator stays invalid.
    context.FindExistingNode( XN_NODE_TYPE_DEPTH, depthGenerator );
    context.FindExistingNode( XN_NODE_TYPE_IMAGE, imageGenerator );

    if( !readCamerasParams() )
    {
        fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Could not read cameras parameters\n");
        return;
    }

    outputMaps.resize( outputMapsTypesCount );

    isContextOpened = true;
}
581
582
CvCapture_OpenNI::~CvCapture_OpenNI()
{
    // Stop all generators before tearing the context down; Release() drops
    // the reference acquired by context.Init()/OpenFileRecording().
    context.StopGeneratingAll();
    context.Release();
}
587
588
// Queries the depth generator for the intrinsic/calibration values needed by
// the disparity conversions: baseline (mm), VGA focal length (pixels), and
// the device's shadow / no-sample sentinel depth values.
// Returns false (with a message on stderr) if any property is unavailable.
bool CvCapture_OpenNI::readCamerasParams()
{
    XnDouble pixelSize = 0;
    // "ZPPS": zero-plane pixel size, reported at SXGA resolution.
    if( depthGenerator.GetRealProperty( "ZPPS", pixelSize ) != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read pixel size!\n");
        return false;
    }

    // pixel size @ VGA = pixel size @ SXGA x 2
    pixelSize *= 2.0; // in mm

    // focal length of IR camera in pixels for VGA resolution
    XnUInt64 zeroPlanDistance; // in mm
    if( depthGenerator.GetIntProperty( "ZPD", zeroPlanDistance ) != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read virtual plane distance!\n");
        return false;
    }

    // "LDDIS": distance between the IR projector and IR camera, in cm.
    if( depthGenerator.GetRealProperty( "LDDIS", baseline ) != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read base line!\n");
        return false;
    }

    // baseline from cm -> mm
    baseline *= 10;

    // focal length from mm -> pixels (valid for 640x480)
    depthFocalLength_VGA = (XnUInt64)((double)zeroPlanDistance / (double)pixelSize);

    // Depth value the device writes for occluded (shadowed) pixels.
    if( depthGenerator.GetIntProperty( "ShadowValue", shadowValue ) != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read property \"ShadowValue\"!\n");
        return false;
    }

    // Depth value the device writes when no disparity sample was measured.
    if( depthGenerator.GetIntProperty("NoSampleValue", noSampleValue ) != XN_STATUS_OK )
    {
        fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read property \"NoSampleValue\"!\n");
        return false;
    }

    return true;
}
634
635
double CvCapture_OpenNI::getProperty( int propIdx ) const
636
{
637
double propValue = 0;
638
639
if( isOpened() )
640
{
641
int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK;
642
643
if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR )
644
{
645
propValue = getImageGeneratorProperty( purePropIdx );
646
}
647
else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR )
648
{
649
propValue = getDepthGeneratorProperty( purePropIdx );
650
}
651
else
652
{
653
propValue = getCommonProperty( purePropIdx );
654
}
655
}
656
657
return propValue;
658
}
659
660
bool CvCapture_OpenNI::setProperty( int propIdx, double propValue )
661
{
662
bool isSet = false;
663
if( isOpened() )
664
{
665
int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK;
666
667
if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR )
668
{
669
isSet = setImageGeneratorProperty( purePropIdx, propValue );
670
}
671
else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR )
672
{
673
isSet = setDepthGeneratorProperty( purePropIdx, propValue );
674
}
675
else
676
{
677
isSet = setCommonProperty( purePropIdx, propValue );
678
}
679
}
680
681
return isSet;
682
}
683
684
// Handles property queries that carry no generator flag: the frame-geometry
// ones are forwarded to the depth generator, the approx-sync ones answered
// from this object's own state. Unknown indices raise CV_StsBadArg.
double CvCapture_OpenNI::getCommonProperty( int propIdx ) const
{
    double propValue = 0;

    switch( propIdx )
    {
    // There is a set of properties that correspond to the depth generator by
    // default (i.e. they are passed without a particular generator flag).
    // Two reasons for this:
    // 1) We can assume that the depth generator is the main one for a depth sensor.
    // 2) In the initial versions of the OpenNI integration into OpenCV the value of
    //    flag CV_CAP_OPENNI_DEPTH_GENERATOR was 0 (it isn't zero now).
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT :
    case CV_CAP_PROP_FRAME_WIDTH :
    case CV_CAP_PROP_FRAME_HEIGHT :
    case CV_CAP_PROP_FPS :
    case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
    case CV_CAP_PROP_OPENNI_BASELINE :
    case CV_CAP_PROP_OPENNI_FOCAL_LENGTH :
    case CV_CAP_PROP_OPENNI_REGISTRATION :
        propValue = getDepthGeneratorProperty( propIdx );
        break;
    case CV_CAP_PROP_OPENNI_APPROX_FRAME_SYNC :
        // 1 only while the approximate-sync grabber exists AND is running.
        propValue = !approxSyncGrabber.empty() && approxSyncGrabber->isRun() ? 1. : 0.;
        break;
    case CV_CAP_PROP_OPENNI_MAX_BUFFER_SIZE :
        propValue = maxBufferSize;
        break;
    case CV_CAP_PROP_OPENNI_CIRCLE_BUFFER :
        propValue = isCircleBuffer ? 1. : 0.;
        break;
    case CV_CAP_PROP_OPENNI_MAX_TIME_DURATION :
        propValue = maxTimeDuration;
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for getting.\n", propIdx) );
    }

    return propValue;
}
723
724
bool CvCapture_OpenNI::setCommonProperty( int propIdx, double propValue )
725
{
726
bool isSet = false;
727
728
switch( propIdx )
729
{
730
// There is a set of properties that correspond to depth generator by default
731
// (is they are pass without particular generator flag).
732
case CV_CAP_PROP_OPENNI_REGISTRATION:
733
isSet = setDepthGeneratorProperty( propIdx, propValue );
734
break;
735
case CV_CAP_PROP_OPENNI_APPROX_FRAME_SYNC :
736
if( propValue && depthGenerator.IsValid() && imageGenerator.IsValid() )
737
{
738
// start synchronization
739
if( approxSyncGrabber.empty() )
740
{
741
approxSyncGrabber.reset(new ApproximateSyncGrabber( context, depthGenerator, imageGenerator, maxBufferSize, isCircleBuffer, maxTimeDuration ));
742
}
743
else
744
{
745
approxSyncGrabber->finish();
746
747
// update params
748
approxSyncGrabber->setMaxBufferSize(maxBufferSize);
749
approxSyncGrabber->setIsCircleBuffer(isCircleBuffer);
750
approxSyncGrabber->setMaxTimeDuration(maxTimeDuration);
751
}
752
approxSyncGrabber->start();
753
}
754
else if( !propValue && !approxSyncGrabber.empty() )
755
{
756
// finish synchronization
757
approxSyncGrabber->finish();
758
}
759
break;
760
case CV_CAP_PROP_OPENNI_MAX_BUFFER_SIZE :
761
maxBufferSize = cvRound(propValue);
762
if( !approxSyncGrabber.empty() )
763
approxSyncGrabber->setMaxBufferSize(maxBufferSize);
764
break;
765
case CV_CAP_PROP_OPENNI_CIRCLE_BUFFER :
766
if( !approxSyncGrabber.empty() )
767
approxSyncGrabber->setIsCircleBuffer(isCircleBuffer);
768
break;
769
case CV_CAP_PROP_OPENNI_MAX_TIME_DURATION :
770
maxTimeDuration = cvRound(propValue);
771
if( !approxSyncGrabber.empty() )
772
approxSyncGrabber->setMaxTimeDuration(maxTimeDuration);
773
break;
774
default:
775
CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for setting.\n", propIdx) );
776
}
777
778
return isSet;
779
}
780
781
// Answers depth-generator property queries. Returns 0 for everything when the
// depth generator is invalid; unknown indices raise CV_StsBadArg.
double CvCapture_OpenNI::getDepthGeneratorProperty( int propIdx ) const
{
    double propValue = 0;
    if( !depthGenerator.IsValid() )
        return propValue;

    XnMapOutputMode mode;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT :
        CV_DbgAssert( depthGenerator.IsValid() );
        propValue = 1.;
        break;
    case CV_CAP_PROP_FRAME_WIDTH :
        if( depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nXRes;
        break;
    case CV_CAP_PROP_FRAME_HEIGHT :
        if( depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nYRes;
        break;
    case CV_CAP_PROP_FPS :
        if( depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nFPS;
        break;
    case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
        propValue = depthGenerator.GetDeviceMaxDepth();
        break;
    case CV_CAP_PROP_OPENNI_BASELINE :
        // In mm, as converted by readCamerasParams().
        propValue = baseline;
        break;
    case CV_CAP_PROP_OPENNI_FOCAL_LENGTH :
        propValue = (double)depthFocalLength_VGA;
        break;
    case CV_CAP_PROP_OPENNI_REGISTRATION :
        // 1 when the depth viewpoint is aligned to the image generator.
        // const_cast is needed because IsViewPointAs() takes a non-const ref.
        propValue = depthGenerator.GetAlternativeViewPointCap().IsViewPointAs(const_cast<CvCapture_OpenNI *>(this)->imageGenerator) ? 1.0 : 0.0;
        break;
    case CV_CAP_PROP_POS_MSEC :
        propValue = (double)depthGenerator.GetTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES :
        propValue = depthGenerator.GetFrameID();
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) );
    }

    return propValue;
}
831
832
// Sets depth-generator properties. Only CV_CAP_PROP_OPENNI_REGISTRATION is
// supported: a non-zero value aligns the depth viewpoint to the image
// generator (when present and supported), zero resets the viewpoint.
// Returns true only when the device accepted the change (or it already held).
bool CvCapture_OpenNI::setDepthGeneratorProperty( int propIdx, double propValue )
{
    bool isSet = false;

    CV_Assert( depthGenerator.IsValid() );

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_REGISTRATION:
        {
            if( propValue != 0.0 ) // "on"
            {
                // if there isn't image generator (i.e. ASUS XtionPro doesn't have it)
                // then the property isn't available
                if( imageGenerator.IsValid() )
                {
                    if( !depthGenerator.GetAlternativeViewPointCap().IsViewPointAs(imageGenerator) )
                    {
                        if( depthGenerator.GetAlternativeViewPointCap().IsViewPointSupported(imageGenerator) )
                        {
                            XnStatus status = depthGenerator.GetAlternativeViewPointCap().SetViewPoint(imageGenerator);
                            if( status != XN_STATUS_OK )
                                fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : %s\n", xnGetStatusString(status));
                            else
                                isSet = true;
                        }
                        else
                            fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : Unsupported viewpoint.\n");
                    }
                    else
                        // Already registered to the image viewpoint: nothing to do.
                        isSet = true;
                }
            }
            else // "off"
            {
                XnStatus status = depthGenerator.GetAlternativeViewPointCap().ResetViewPoint();
                if( status != XN_STATUS_OK )
                    fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : %s\n", xnGetStatusString(status));
                else
                    isSet = true;
            }
        }
        break;
    default:
        CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for setting.\n", propIdx) );
    }

    return isSet;
}
881
882
// Answers image-generator property queries. Returns 0 for everything when the
// image generator is invalid (device without RGB); unknown indices raise
// CV_StsBadArg.
double CvCapture_OpenNI::getImageGeneratorProperty( int propIdx ) const
{
    double propValue = 0.;
    if( !imageGenerator.IsValid() )
        return propValue;

    XnMapOutputMode mode;
    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT :
        CV_DbgAssert( imageGenerator.IsValid() );
        propValue = 1.;
        break;
    case CV_CAP_PROP_FRAME_WIDTH :
        if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nXRes;
        break;
    case CV_CAP_PROP_FRAME_HEIGHT :
        if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nYRes;
        break;
    case CV_CAP_PROP_FPS :
        if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nFPS;
        break;
    case CV_CAP_PROP_POS_MSEC :
        propValue = (double)imageGenerator.GetTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES :
        propValue = (double)imageGenerator.GetFrameID();
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) );
    }

    return propValue;
}
919
920
// Sets image-generator properties. Only CV_CAP_PROP_OPENNI_OUTPUT_MODE is
// supported; propValue selects one of the CV_CAP_OPENNI_*HZ mode constants.
// Returns false when the image generator is invalid or the device rejects
// the mode; unsupported mode/property values raise CV_StsBadArg.
bool CvCapture_OpenNI::setImageGeneratorProperty( int propIdx, double propValue )
{
    bool isSet = false;
    if( !imageGenerator.IsValid() )
        return isSet;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_OUTPUT_MODE :
    {
        XnMapOutputMode mode;

        // Translate the OpenCV mode constant into an OpenNI resolution + FPS.
        switch( cvRound(propValue) )
        {
        case CV_CAP_OPENNI_VGA_30HZ :
            mode.nXRes = XN_VGA_X_RES;
            mode.nYRes = XN_VGA_Y_RES;
            mode.nFPS = 30;
            break;
        case CV_CAP_OPENNI_SXGA_15HZ :
            mode.nXRes = XN_SXGA_X_RES;
            mode.nYRes = XN_SXGA_Y_RES;
            mode.nFPS = 15;
            break;
        case CV_CAP_OPENNI_SXGA_30HZ :
            mode.nXRes = XN_SXGA_X_RES;
            mode.nYRes = XN_SXGA_Y_RES;
            mode.nFPS = 30;
            break;
        case CV_CAP_OPENNI_QVGA_30HZ :
            mode.nXRes = XN_QVGA_X_RES;
            mode.nYRes = XN_QVGA_Y_RES;
            mode.nFPS = 30;
            break;
        case CV_CAP_OPENNI_QVGA_60HZ :
            mode.nXRes = XN_QVGA_X_RES;
            mode.nYRes = XN_QVGA_Y_RES;
            mode.nFPS = 60;
            break;
        default :
            CV_Error( CV_StsBadArg, "Unsupported image generator output mode.\n");
        }

        XnStatus status = imageGenerator.SetMapOutputMode( mode );
        if( status != XN_STATUS_OK )
            fprintf(stderr, "CvCapture_OpenNI::setImageGeneratorProperty : %s\n", xnGetStatusString(status));
        else
            isSet = true;
        break;
    }
    default:
        CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for setting.\n", propIdx) );
    }

    return isSet;
}
976
977
bool CvCapture_OpenNI::grabFrame()
978
{
979
if( !isOpened() )
980
return false;
981
982
bool isGrabbed = false;
983
if( !approxSyncGrabber.empty() && approxSyncGrabber->isRun() )
984
{
985
isGrabbed = approxSyncGrabber->grab( depthMetaData, imageMetaData );
986
}
987
else
988
{
989
XnStatus status = context.WaitAndUpdateAll();
990
if( status != XN_STATUS_OK )
991
return false;
992
993
if( depthGenerator.IsValid() )
994
depthGenerator.GetMetaData( depthMetaData );
995
if( imageGenerator.IsValid() )
996
imageGenerator.GetMetaData( imageMetaData );
997
isGrabbed = true;
998
}
999
1000
return isGrabbed;
1001
}
1002
1003
// Copies the raw 16-bit depth buffer out of OpenNI metadata into depthMap
// (CV_16UC1) and overwrites every invalid reading — the device's no-sample
// and shadow sentinel values as well as plain zeros — with
// CvCapture_OpenNI::INVALID_PIXEL_VAL.
inline void getDepthMapFromMetaData( const xn::DepthMetaData& depthMetaData, cv::Mat& depthMap, XnUInt64 noSampleValue, XnUInt64 shadowValue )
{
    int cols = depthMetaData.XRes();
    int rows = depthMetaData.YRes();

    depthMap.create( rows, cols, CV_16UC1 );

    const XnDepthPixel* pDepthMap = depthMetaData.Data();

    // CV_Assert( sizeof(unsigned short) == sizeof(XnDepthPixel) );
    memcpy( depthMap.data, pDepthMap, cols*rows*sizeof(XnDepthPixel) );

    // Per-pixel mask of the three kinds of invalid measurements.
    cv::Mat badMask = (depthMap == (double)noSampleValue) | (depthMap == (double)shadowValue) | (depthMap == 0);

    // mask the pixels with invalid depth
    depthMap.setTo( cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL ), badMask );
}
1020
1021
// Returns the latest depth map (CV_16UC1) as an IplImage, or 0 when no
// depth data has been grabbed yet.
IplImage* CvCapture_OpenNI::retrieveDepthMap()
{
    if( depthMetaData.Data() == 0 )
        return 0;

    getDepthMapFromMetaData( depthMetaData, outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue );
    return outputMaps[CV_CAP_OPENNI_DEPTH_MAP].getIplImagePtr();
}
1030
1031
// Converts the latest depth frame into a CV_32FC3 point cloud in meters
// (OpenNI reports millimeters). Pixels with invalid depth become points of
// INVALID_COORDINATE_VAL. Returns 0 when no depth data has been grabbed yet.
IplImage* CvCapture_OpenNI::retrievePointCloudMap()
{
    if( depthMetaData.Data() == 0 )
        return 0;

    cv::Mat depth;
    getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );

    const int badPoint = INVALID_PIXEL_VAL;
    const float badCoord = INVALID_COORDINATE_VAL;
    const int cols = depthMetaData.XRes(), rows = depthMetaData.YRes();
    cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) );

    // Fill projective coordinates (pixel x, pixel y, raw depth) for every pixel.
    std::vector<XnPoint3D> proj(cols*rows);
    std::vector<XnPoint3D> real(cols*rows);
    for( int y = 0; y < rows; y++ )
    {
        for( int x = 0; x < cols; x++ )
        {
            XnPoint3D& p = proj[y*cols + x];
            p.X = (float)x;
            p.Y = (float)y;
            p.Z = depth.at<unsigned short>(y, x);
        }
    }

    // Convert the whole frame to real-world coordinates in a single call.
    depthGenerator.ConvertProjectiveToRealWorld(cols*rows, &proj.front(), &real.front());

    for( int y = 0; y < rows; y++ )
    {
        for( int x = 0; x < cols; x++ )
        {
            // Check for invalid measurements
            if( depth.at<unsigned short>(y, x) == badPoint ) // not valid
            {
                pointCloud_XYZ.at<cv::Point3f>(y,x) = cv::Point3f( badCoord, badCoord, badCoord );
            }
            else
            {
                const XnPoint3D& r = real[y*cols + x];
                pointCloud_XYZ.at<cv::Point3f>(y,x) = cv::Point3f( r.X*0.001f, r.Y*0.001f, r.Z*0.001f ); // from mm to meters
            }
        }
    }

    outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].mat = pointCloud_XYZ;
    return outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].getIplImagePtr();
}
1077
1078
// Computes a CV_32FC1 disparity image from depth metadata via
// disparity = baseline * F / z. Pixels whose depth is invalid keep the
// initial CvCapture_OpenNI::INVALID_PIXEL_VAL value.
static void computeDisparity_32F( const xn::DepthMetaData& depthMetaData, cv::Mat& disp, XnDouble baseline, XnUInt64 F,
                                  XnUInt64 noSampleValue, XnUInt64 shadowValue )
{
    cv::Mat depth;
    getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );
    CV_Assert( depth.type() == CV_16UC1 );

    // disparity = baseline * F / z;

    const float mult = (float)(baseline /*mm*/ * F /*pixels*/);

    disp.create( depth.size(), CV_32FC1);
    disp = cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL );
    for( int y = 0; y < disp.rows; y++ )
    {
        for( int x = 0; x < disp.cols; x++ )
        {
            const unsigned short z = depth.at<unsigned short>(y,x);
            if( z != CvCapture_OpenNI::INVALID_PIXEL_VAL )
                disp.at<float>(y,x) = mult / z;
        }
    }
}
1102
1103
// Returns an 8-bit disparity map derived from the latest depth frame,
// or 0 when no depth data has been grabbed yet.
IplImage* CvCapture_OpenNI::retrieveDisparityMap()
{
    if( depthMetaData.Data() == 0 )
        return 0;

    cv::Mat disp32;
    computeDisparity_32F( depthMetaData, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue );

    // The CV_CAP_OPENNI_DISPARITY_MAP output is published as 8-bit.
    disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 );
    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].getIplImagePtr();
}
1115
1116
// Returns the 32-bit-float disparity map for the latest depth frame,
// or 0 when no depth data has been grabbed yet.
IplImage* CvCapture_OpenNI::retrieveDisparityMap_32F()
{
    if( depthMetaData.Data() == 0 )
        return 0;

    computeDisparity_32F( depthMetaData, outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue );
    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr();
}
1125
1126
// Returns a mask that is non-zero wherever the latest depth frame holds a
// valid measurement, or 0 when no depth data has been grabbed yet.
IplImage* CvCapture_OpenNI::retrieveValidDepthMask()
{
    if( depthMetaData.Data() == 0 )
        return 0;

    cv::Mat depth;
    getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );

    // Invalid pixels were already collapsed to INVALID_PIXEL_VAL above.
    outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = depth != CvCapture_OpenNI::INVALID_PIXEL_VAL;
    return outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].getIplImagePtr();
}
1138
1139
// Converts RGB24 image metadata into a BGR cv::Mat (CV_8UC3).
// Raises CV_StsUnsupportedFormat when the grabbed pixel format is not RGB24.
inline void getBGRImageFromMetaData( const xn::ImageMetaData& imageMetaData, cv::Mat& bgrImage )
{
    if( imageMetaData.PixelFormat() != XN_PIXEL_FORMAT_RGB24 )
        CV_Error( CV_StsUnsupportedFormat, "Unsupported format of grabbed image\n" );

    cv::Mat rgbImage( imageMetaData.YRes(), imageMetaData.XRes(), CV_8UC3 );

    // XnRGB24Pixel is three packed bytes, matching CV_8UC3, so a flat copy works.
    memcpy( rgbImage.data, imageMetaData.RGB24Data(), rgbImage.total()*sizeof(XnRGB24Pixel) );
    cv::cvtColor( rgbImage, bgrImage, CV_RGB2BGR );
}
1151
1152
// Returns the latest color frame as a BGR IplImage, or 0 when no image
// data has been grabbed yet.
IplImage* CvCapture_OpenNI::retrieveBGRImage()
{
    if( imageMetaData.Data() == 0 )
        return 0;

    getBGRImageFromMetaData( imageMetaData, outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat );
    return outputMaps[CV_CAP_OPENNI_BGR_IMAGE].getIplImagePtr();
}
1161
1162
// Returns the latest color frame converted to grayscale, or 0 when no
// image data has been grabbed yet.
IplImage* CvCapture_OpenNI::retrieveGrayImage()
{
    if( imageMetaData.Data() == 0 )
        return 0;

    CV_Assert( imageMetaData.BytesPerPixel() == 3 ); // RGB

    // getBGRImageFromMetaData yields BGR, hence the CV_BGR2GRAY conversion.
    cv::Mat bgrImage;
    getBGRImageFromMetaData( imageMetaData, bgrImage );
    cv::cvtColor( bgrImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY );

    return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr();
}
1175
1176
// Dispatches to the retrieve* method matching outputType and returns its
// IplImage, or 0 for an in-range type with no handler.
IplImage* CvCapture_OpenNI::retrieveFrame( int outputType )
{
    CV_Assert( outputType < outputMapsTypesCount && outputType >= 0);

    switch( outputType )
    {
    case CV_CAP_OPENNI_DEPTH_MAP:
        return retrieveDepthMap();
    case CV_CAP_OPENNI_POINT_CLOUD_MAP:
        return retrievePointCloudMap();
    case CV_CAP_OPENNI_DISPARITY_MAP:
        return retrieveDisparityMap();
    case CV_CAP_OPENNI_DISPARITY_MAP_32F:
        return retrieveDisparityMap_32F();
    case CV_CAP_OPENNI_VALID_DEPTH_MASK:
        return retrieveValidDepthMask();
    case CV_CAP_OPENNI_BGR_IMAGE:
        return retrieveBGRImage();
    case CV_CAP_OPENNI_GRAY_IMAGE:
        return retrieveGrayImage();
    default:
        return 0;
    }
}
1212
1213
1214
// Factory: creates an OpenNI capture for the camera at `index`.
// Returns 0 (after freeing the object) when the device fails to open.
CvCapture* cvCreateCameraCapture_OpenNI( int index )
{
    CvCapture_OpenNI* capture = new CvCapture_OpenNI( index );
    if( !capture->isOpened() )
    {
        delete capture;
        capture = 0;
    }
    return capture;
}
1224
1225
// Factory: creates an OpenNI capture that replays a recorded file.
// Returns 0 (after freeing the object) when the file fails to open.
CvCapture* cvCreateFileCapture_OpenNI( const char* filename )
{
    CvCapture_OpenNI* capture = new CvCapture_OpenNI( filename );
    if( !capture->isOpened() )
    {
        delete capture;
        capture = 0;
    }
    return capture;
}
1235
1236
#endif
1237
1238