1/*M/////////////////////////////////////////////////////////////////////////////////////// 2// 3// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING. 4// 5// By downloading, copying, installing or using the software you agree to this license. 6// If you do not agree to this license, do not download, install, 7// copy or use the software. 8// 9// 10// Intel License Agreement 11// For Open Source Computer Vision Library 12// 13// Copyright (C) 2000, Intel Corporation, all rights reserved. 14// Third party copyrights are property of their respective owners. 15// 16// Redistribution and use in source and binary forms, with or without modification, 17// are permitted provided that the following conditions are met: 18// 19// * Redistribution's of source code must retain the above copyright notice, 20// this list of conditions and the following disclaimer. 21// 22// * Redistribution's in binary form must reproduce the above copyright notice, 23// this list of conditions and the following disclaimer in the documentation 24// and/or other materials provided with the distribution. 25// 26// * The name of Intel Corporation may not be used to endorse or promote products 27// derived from this software without specific prior written permission. 28// 29// This software is provided by the copyright holders and contributors "as is" and 30// any express or implied warranties, including, but not limited to, the implied 31// warranties of merchantability and fitness for a particular purpose are disclaimed. 
32// In no event shall the Intel Corporation or contributors be liable for any direct, 33// indirect, incidental, special, exemplary, or consequential damages 34// (including, but not limited to, procurement of substitute goods or services; 35// loss of use, data, or profits; or business interruption) however caused 36// and on any theory of liability, whether in contract, strict liability, 37// or tort (including negligence or otherwise) arising in any way out of 38// the use of this software, even if advised of the possibility of such damage. 39// 40//M*/ 41#include "precomp.hpp" 42#include "opencv2/core.hpp" 43#include "opencv2/imgproc.hpp" 44 45#ifdef HAVE_OPENNI 46 47#if defined TBB_INTERFACE_VERSION && TBB_INTERFACE_VERSION < 5000 48# undef HAVE_TBB 49#endif 50 51#include <queue> 52 53#ifndef i386 54# define i386 0 55#endif 56#ifndef __arm__ 57# define __arm__ 0 58#endif 59#ifndef _ARC 60# define _ARC 0 61#endif 62#ifndef __APPLE__ 63# define __APPLE__ 0 64#endif 65 66#include "XnCppWrapper.h" 67 68const cv::String XMLConfig = 69"<OpenNI>" 70 "<Licenses>" 71 "<License vendor=\"PrimeSense\" key=\"0KOIk2JeIBYClPWVnMoRKn5cdY4=\"/>" 72 "</Licenses>" 73 "<Log writeToConsole=\"false\" writeToFile=\"false\">" 74 "<LogLevel value=\"3\"/>" 75 "<Masks>" 76 "<Mask name=\"ALL\" on=\"true\"/>" 77 "</Masks>" 78 "<Dumps>" 79 "</Dumps>" 80 "</Log>" 81 "<ProductionNodes>" 82 "<Node type=\"Image\" name=\"Image1\" stopOnError=\"false\">" 83 "<Configuration>" 84 "<MapOutputMode xRes=\"640\" yRes=\"480\" FPS=\"30\"/>" 85 "<Mirror on=\"false\"/>" 86 "</Configuration>" 87 "</Node> " 88 "<Node type=\"Depth\" name=\"Depth1\">" 89 "<Configuration>" 90 "<MapOutputMode xRes=\"640\" yRes=\"480\" FPS=\"30\"/>" 91 "<Mirror on=\"false\"/>" 92 "</Configuration>" 93 "</Node>" 94 "</ProductionNodes>" 95"</OpenNI>\n"; 96 97/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// 98class ApproximateSyncGrabber 
99{ 100public: 101 ApproximateSyncGrabber( xn::Context &_context, 102 xn::DepthGenerator &_depthGenerator, 103 xn::ImageGenerator &_imageGenerator, 104 int _maxBufferSize, bool _isCircleBuffer, int _maxTimeDuration ) : 105 context(_context), depthGenerator(_depthGenerator), imageGenerator(_imageGenerator), 106 maxBufferSize(_maxBufferSize), isCircleBuffer(_isCircleBuffer), maxTimeDuration(_maxTimeDuration) 107 { 108#ifdef HAVE_TBB 109 task = 0; 110#endif 111 112 CV_Assert( depthGenerator.IsValid() ); 113 CV_Assert( imageGenerator.IsValid() ); 114 } 115 116 void setMaxBufferSize( int _maxBufferSize ) 117 { 118 maxBufferSize = _maxBufferSize; 119#ifdef HAVE_TBB 120 task->setMaxBufferSize(); 121#endif 122 } 123 inline int getMaxBufferSize() const { return maxBufferSize; } 124 125 void setIsCircleBuffer( bool _isCircleBuffer ) { isCircleBuffer = _isCircleBuffer; } 126 bool getIsCircleBuffer() const { return isCircleBuffer; } 127 128 void setMaxTimeDuration( int _maxTimeDuration ) { maxTimeDuration = _maxTimeDuration; } 129 int getMaxTimeDuration() const { return maxTimeDuration; } 130 131 bool grab( xn::DepthMetaData& depthMetaData, 132 xn::ImageMetaData& imageMetaData ) 133 { 134 CV_Assert( task ); 135 136 137 while( task->grab(depthMetaData, imageMetaData) == false ) 138 { 139#ifndef HAVE_TBB 140 task->spin(); 141#endif 142 } 143 return true; 144 145 } 146 147 void start() 148 { 149 CV_Assert( depthGenerator.IsValid() ); 150 CV_Assert( imageGenerator.IsValid() ); 151#ifdef HAVE_TBB 152 task = new( tbb::task::allocate_root() ) TBBApproximateSynchronizerTask( *this ); 153 tbb::task::enqueue(*task); 154#else 155 task.reset( new ApproximateSynchronizer( *this ) ); 156#endif 157 } 158 159 void finish() 160 { 161#ifdef HAVE_TBB 162 if( task ) 163 tbb::task::destroy( *task ); 164#else 165 task.release(); 166#endif 167 } 168 169 bool isRun() const { return task != 0; } 170 171 xn::Context &context; 172 xn::DepthGenerator &depthGenerator; 173 xn::ImageGenerator 
&imageGenerator; 174 175private: 176 ApproximateSyncGrabber(const ApproximateSyncGrabber&); 177 ApproximateSyncGrabber& operator=(const ApproximateSyncGrabber&); 178 179 int maxBufferSize; 180 bool isCircleBuffer; 181 int maxTimeDuration; 182 183 class ApproximateSynchronizerBase 184 { 185 public: 186 ApproximateSynchronizerBase( ApproximateSyncGrabber& _approxSyncGrabber ) : 187 approxSyncGrabber(_approxSyncGrabber), isDepthFilled(false), isImageFilled(false) 188 {} 189 190 virtual ~ApproximateSynchronizerBase() {} 191 192 virtual bool isSpinContinue() const = 0; 193 virtual void pushDepthMetaData( xn::DepthMetaData& depthMetaData ) = 0; 194 virtual void pushImageMetaData( xn::ImageMetaData& imageMetaData ) = 0; 195 virtual bool popDepthMetaData( xn::DepthMetaData& depthMetaData ) = 0; 196 virtual bool popImageMetaData( xn::ImageMetaData& imageMetaData ) = 0; 197 198 void spin() 199 { 200 while(isSpinContinue() == true) 201 { 202 XnStatus status = approxSyncGrabber.context.WaitAnyUpdateAll(); 203 if( status != XN_STATUS_OK ) 204 continue; 205 206 //xn::DepthMetaData depth; 207 //xn::ImageMetaData image; 208 approxSyncGrabber.depthGenerator.GetMetaData(depth); 209 approxSyncGrabber.imageGenerator.GetMetaData(image); 210 211 if( depth.Data() && depth.IsDataNew() ) 212 pushDepthMetaData( depth ); 213 214 if( image.Data() && image.IsDataNew() ) 215 pushImageMetaData( image ); 216 } 217 } 218 219 virtual bool grab( xn::DepthMetaData& depthMetaData, 220 xn::ImageMetaData& imageMetaData ) 221 { 222 for(;;) 223 { 224 if( !isDepthFilled ) 225 isDepthFilled = popDepthMetaData(depth); 226 if( !isImageFilled ) 227 isImageFilled = popImageMetaData(image); 228 229 if( !isDepthFilled || !isImageFilled ) 230 break; 231 232 double timeDiff = 1e-3 * std::abs(static_cast<double>(depth.Timestamp()) - static_cast<double>(image.Timestamp())); 233 234 if( timeDiff <= approxSyncGrabber.maxTimeDuration ) 235 { 236 depthMetaData.InitFrom(depth); 237 imageMetaData.InitFrom(image); 238 
isDepthFilled = isImageFilled = false; 239 return true; 240 } 241 else 242 { 243 if( depth.Timestamp() < image.Timestamp() ) 244 isDepthFilled = false; 245 else 246 isImageFilled = false; 247 } 248 } 249 250 return false; 251 } 252 253 protected: 254 ApproximateSyncGrabber& approxSyncGrabber; 255 xn::DepthMetaData depth; 256 xn::ImageMetaData image; 257 bool isDepthFilled; 258 bool isImageFilled; 259 }; 260 261 // If there isn't TBB the synchronization will be executed in the main thread. 262 class ApproximateSynchronizer: public ApproximateSynchronizerBase 263 { 264 public: 265 ApproximateSynchronizer( ApproximateSyncGrabber& _approxSyncGrabber ) : 266 ApproximateSynchronizerBase(_approxSyncGrabber) 267 {} 268 269 virtual bool isSpinContinue() const 270 { 271 int maxBufferSize = approxSyncGrabber.getMaxBufferSize(); 272 return (maxBufferSize <= 0) || (static_cast<int>(depthQueue.size()) < maxBufferSize && 273 static_cast<int>(imageQueue.size()) < maxBufferSize); // "<" to may push 274 } 275 276 virtual inline void pushDepthMetaData( xn::DepthMetaData& depthMetaData ) 277 { 278 cv::Ptr<xn::DepthMetaData> depthPtr = cv::makePtr<xn::DepthMetaData>(); 279 depthPtr->CopyFrom(depthMetaData); 280 depthQueue.push(depthPtr); 281 } 282 virtual inline void pushImageMetaData( xn::ImageMetaData& imageMetaData ) 283 { 284 cv::Ptr<xn::ImageMetaData> imagePtr = cv::makePtr<xn::ImageMetaData>(); 285 imagePtr->CopyFrom(imageMetaData); 286 imageQueue.push(imagePtr); 287 } 288 virtual inline bool popDepthMetaData( xn::DepthMetaData& depthMetaData ) 289 { 290 if( depthQueue.empty() ) 291 return false; 292 293 depthMetaData.CopyFrom(*depthQueue.front()); 294 depthQueue.pop(); 295 return true; 296 } 297 virtual inline bool popImageMetaData( xn::ImageMetaData& imageMetaData ) 298 { 299 if( imageQueue.empty() ) 300 return false; 301 302 imageMetaData.CopyFrom(*imageQueue.front()); 303 imageQueue.pop(); 304 return true; 305 } 306 307 private: 308 std::queue<cv::Ptr<xn::DepthMetaData> > 
depthQueue; 309 std::queue<cv::Ptr<xn::ImageMetaData> > imageQueue; 310 }; 311 312#ifdef HAVE_TBB 313 // If there is TBB the synchronization will be executed in own thread. 314 class TBBApproximateSynchronizer: public ApproximateSynchronizerBase 315 { 316 public: 317 TBBApproximateSynchronizer( ApproximateSyncGrabber& _approxSyncGrabber ) : 318 ApproximateSynchronizerBase(_approxSyncGrabber) 319 { 320 setMaxBufferSize(); 321 } 322 323 void setMaxBufferSize() 324 { 325 int maxBufferSize = approxSyncGrabber.getMaxBufferSize(); 326 if( maxBufferSize >= 0 ) 327 { 328 depthQueue.set_capacity( maxBufferSize ); 329 imageQueue.set_capacity( maxBufferSize ); 330 } 331 } 332 333 virtual inline bool isSpinContinue() const { return true; } 334 335 virtual inline void pushDepthMetaData( xn::DepthMetaData& depthMetaData ) 336 { 337 cv::Ptr<xn::DepthMetaData> depthPtr = cv::makePtr<xn::DepthMetaData>(), tmp; 338 depthPtr->CopyFrom(depthMetaData); 339 340 tbb::mutex mtx; 341 mtx.lock(); 342 if( depthQueue.try_push(depthPtr) == false ) 343 { 344 if( approxSyncGrabber.getIsCircleBuffer() ) 345 { 346 CV_Assert( depthQueue.try_pop(tmp) ); 347 CV_Assert( depthQueue.try_push(depthPtr) ); 348 } 349 } 350 mtx.unlock(); 351 } 352 353 virtual inline void pushImageMetaData( xn::ImageMetaData& imageMetaData ) 354 { 355 cv::Ptr<xn::ImageMetaData> imagePtr = cv::makePtr<xn::ImageMetaData>(), tmp; 356 imagePtr->CopyFrom(imageMetaData); 357 358 tbb::mutex mtx; 359 mtx.lock(); 360 if( imageQueue.try_push(imagePtr) == false ) 361 { 362 if( approxSyncGrabber.getIsCircleBuffer() ) 363 { 364 CV_Assert( imageQueue.try_pop(tmp) ); 365 CV_Assert( imageQueue.try_push(imagePtr) ); 366 } 367 } 368 mtx.unlock(); 369 } 370 371 virtual inline bool popDepthMetaData( xn::DepthMetaData& depthMetaData ) 372 { 373 cv::Ptr<xn::DepthMetaData> depthPtr; 374 bool isPop = depthQueue.try_pop(depthPtr); 375 if( isPop ) 376 depthMetaData.CopyFrom(*depthPtr); 377 return isPop; 378 } 379 virtual inline bool popImageMetaData( 
xn::ImageMetaData& imageMetaData ) 380 { 381 cv::Ptr<xn::ImageMetaData> imagePtr; 382 bool isPop = imageQueue.try_pop(imagePtr); 383 if( isPop ) 384 imageMetaData.CopyFrom(*imagePtr); 385 return isPop; 386 } 387 388 private: 389 tbb::concurrent_bounded_queue<cv::Ptr<xn::DepthMetaData> > depthQueue; 390 tbb::concurrent_bounded_queue<cv::Ptr<xn::ImageMetaData> > imageQueue; 391 }; 392 393 class TBBApproximateSynchronizerTask: public tbb::task 394 { 395 public: 396 TBBApproximateSynchronizerTask( ApproximateSyncGrabber& approxSyncGrabber ) : 397 synchronizer(approxSyncGrabber) 398 {} 399 400 void setMaxBufferSize() 401 { 402 synchronizer.setMaxBufferSize(); 403 } 404 405 bool grab( xn::DepthMetaData& depthMetaData, 406 xn::ImageMetaData& imageMetaData ) 407 { 408 return synchronizer.grab( depthMetaData, imageMetaData ); 409 } 410 411 private: 412 tbb::task* execute() 413 { 414 synchronizer.spin(); 415 return 0; 416 } 417 TBBApproximateSynchronizer synchronizer; 418 }; 419#endif // HAVE_TBB 420 421#ifdef HAVE_TBB 422 TBBApproximateSynchronizerTask* task; 423#else 424 cv::Ptr<ApproximateSynchronizer> task; 425#endif 426}; 427 428/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// 429class CvCapture_OpenNI : public CvCapture 430{ 431public: 432 enum { DEVICE_DEFAULT=0, DEVICE_MS_KINECT=0, DEVICE_ASUS_XTION=1, DEVICE_MAX=1 }; 433 434 static const int INVALID_PIXEL_VAL = 0; 435 static const int INVALID_COORDINATE_VAL = 0; 436 437#ifdef HAVE_TBB 438 static const int DEFAULT_MAX_BUFFER_SIZE = 8; 439#else 440 static const int DEFAULT_MAX_BUFFER_SIZE = 2; 441#endif 442 static const int DEFAULT_IS_CIRCLE_BUFFER = 0; 443 static const int DEFAULT_MAX_TIME_DURATION = 20; 444 445 CvCapture_OpenNI(int index=0); 446 CvCapture_OpenNI(const char * filename); 447 virtual ~CvCapture_OpenNI(); 448 449 virtual double getProperty(int propIdx) const; 450 virtual bool setProperty(int probIdx, double 
propVal); 451 virtual bool grabFrame(); 452 virtual IplImage* retrieveFrame(int outputType); 453 454 bool isOpened() const; 455 456protected: 457 struct OutputMap 458 { 459 public: 460 cv::Mat mat; 461 IplImage* getIplImagePtr(); 462 private: 463 IplImage iplHeader; 464 }; 465 466 static const int outputMapsTypesCount = 7; 467 468 static XnMapOutputMode defaultMapOutputMode(); 469 470 IplImage* retrieveDepthMap(); 471 IplImage* retrievePointCloudMap(); 472 IplImage* retrieveDisparityMap(); 473 IplImage* retrieveDisparityMap_32F(); 474 IplImage* retrieveValidDepthMask(); 475 IplImage* retrieveBGRImage(); 476 IplImage* retrieveGrayImage(); 477 478 bool readCamerasParams(); 479 480 double getDepthGeneratorProperty(int propIdx) const; 481 bool setDepthGeneratorProperty(int propIdx, double propVal); 482 double getImageGeneratorProperty(int propIdx) const; 483 bool setImageGeneratorProperty(int propIdx, double propVal); 484 double getCommonProperty(int propIdx) const; 485 bool setCommonProperty(int propIdx, double propVal); 486 487 // OpenNI context 488 xn::Context context; 489 bool isContextOpened; 490 491 xn::ProductionNode productionNode; 492 493 // Data generators with its metadata 494 xn::DepthGenerator depthGenerator; 495 xn::DepthMetaData depthMetaData; 496 497 xn::ImageGenerator imageGenerator; 498 xn::ImageMetaData imageMetaData; 499 500 int maxBufferSize, maxTimeDuration; // for approx sync 501 bool isCircleBuffer; 502 cv::Ptr<ApproximateSyncGrabber> approxSyncGrabber; 503 504 // Cameras settings: 505 // TODO find in OpenNI function to convert z->disparity and remove fields "baseline" and depthFocalLength_VGA 506 // Distance between IR projector and IR camera (in meters) 507 XnDouble baseline; 508 // Focal length for the IR camera in VGA resolution (in pixels) 509 XnUInt64 depthFocalLength_VGA; 510 511 // The value for shadow (occluded pixels) 512 XnUInt64 shadowValue; 513 // The value for pixels without a valid disparity measurement 514 XnUInt64 noSampleValue; 
515 516 std::vector<OutputMap> outputMaps; 517}; 518 519IplImage* CvCapture_OpenNI::OutputMap::getIplImagePtr() 520{ 521 if( mat.empty() ) 522 return 0; 523 524 iplHeader = IplImage(mat); 525 return &iplHeader; 526} 527 528bool CvCapture_OpenNI::isOpened() const 529{ 530 return isContextOpened; 531} 532 533XnMapOutputMode CvCapture_OpenNI::defaultMapOutputMode() 534{ 535 XnMapOutputMode mode; 536 mode.nXRes = XN_VGA_X_RES; 537 mode.nYRes = XN_VGA_Y_RES; 538 mode.nFPS = 30; 539 return mode; 540} 541 542CvCapture_OpenNI::CvCapture_OpenNI( int index ) 543{ 544 int deviceType = DEVICE_DEFAULT; 545 XnStatus status; 546 547 isContextOpened = false; 548 maxBufferSize = DEFAULT_MAX_BUFFER_SIZE; 549 isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER; 550 maxTimeDuration = DEFAULT_MAX_TIME_DURATION; 551 552 if( index >= 10 ) 553 { 554 deviceType = index / 10; 555 index %= 10; 556 } 557 558 if( deviceType > DEVICE_MAX ) 559 return; 560 561 // Initialize and configure the context. 562 status = context.Init(); 563 if( status != XN_STATUS_OK ) 564 { 565 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to initialize the context: %s\n", xnGetStatusString(status)); 566 return; 567 } 568 569 // Find devices 570 xn::NodeInfoList devicesList; 571 status = context.EnumerateProductionTrees( XN_NODE_TYPE_DEVICE, NULL, devicesList, 0 ); 572 if( status != XN_STATUS_OK ) 573 { 574 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate production trees: %s\n", xnGetStatusString(status)); 575 return; 576 } 577 578 // Chose device according to index 579 xn::NodeInfoList::Iterator it = devicesList.Begin(); 580 for( int i = 0; i < index && it!=devicesList.End(); ++i ) it++; 581 if ( it == devicesList.End() ) 582 { 583 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed device with index %d\n", index); 584 return; 585 } 586 587 xn::NodeInfo deviceNode = *it; 588 status = context.CreateProductionTree( deviceNode, productionNode ); 589 if( status != XN_STATUS_OK ) 590 
{ 591 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create production tree: %s\n", xnGetStatusString(status)); 592 return; 593 } 594 595 xn::ScriptNode scriptNode; 596 status = context.RunXmlScript( XMLConfig.c_str(), scriptNode ); 597 if( status != XN_STATUS_OK ) 598 { 599 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to run xml script: %s\n", xnGetStatusString(status)); 600 return; 601 } 602 603 // Associate generators with context. 604 // enumerate the nodes to find if depth generator is present 605 xn::NodeInfoList depthList; 606 status = context.EnumerateExistingNodes( depthList, XN_NODE_TYPE_DEPTH ); 607 if( status != XN_STATUS_OK ) 608 { 609 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate depth generators: %s\n", xnGetStatusString(status)); 610 return; 611 } 612 if( depthList.IsEmpty() ) 613 { 614 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : The device doesn't have depth generator. Such devices aren't supported now.\n"); 615 return; 616 } 617 status = depthGenerator.Create( context ); 618 if( status != XN_STATUS_OK ) 619 { 620 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create depth generator: %s\n", xnGetStatusString(status)); 621 return; 622 } 623 624 // enumerate the nodes to find if image generator is present 625 xn::NodeInfoList imageList; 626 status = context.EnumerateExistingNodes( imageList, XN_NODE_TYPE_IMAGE ); 627 if( status != XN_STATUS_OK ) 628 { 629 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to enumerate image generators: %s\n", xnGetStatusString(status)); 630 return; 631 } 632 633 if( !imageList.IsEmpty() ) 634 { 635 status = imageGenerator.Create( context ); 636 if( status != XN_STATUS_OK ) 637 { 638 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to create image generator: %s\n", xnGetStatusString(status)); 639 return; 640 } 641 } 642 643 // Set map output mode. 
644 if( depthGenerator.IsValid() ) 645 { 646 CV_DbgAssert( depthGenerator.SetMapOutputMode(defaultMapOutputMode()) == XN_STATUS_OK ); // xn::DepthGenerator supports VGA only! (Jan 2011) 647 } 648 if( imageGenerator.IsValid() ) 649 { 650 CV_DbgAssert( imageGenerator.SetMapOutputMode(defaultMapOutputMode()) == XN_STATUS_OK ); 651 } 652 653 if( deviceType == DEVICE_ASUS_XTION ) 654 { 655 //ps/asus specific 656 imageGenerator.SetIntProperty("InputFormat", 1 /*XN_IO_IMAGE_FORMAT_YUV422*/); 657 imageGenerator.SetPixelFormat(XN_PIXEL_FORMAT_RGB24); 658 depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/); 659 } 660 661 // Start generating data. 662 status = context.StartGeneratingAll(); 663 if( status != XN_STATUS_OK ) 664 { 665 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to start generating OpenNI data: %s\n", xnGetStatusString(status)); 666 return; 667 } 668 669 if( !readCamerasParams() ) 670 { 671 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Could not read cameras parameters\n"); 672 return; 673 } 674 675 outputMaps.resize( outputMapsTypesCount ); 676 677 isContextOpened = true; 678 679 setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0); 680} 681 682CvCapture_OpenNI::CvCapture_OpenNI(const char * filename) 683{ 684 XnStatus status; 685 686 isContextOpened = false; 687 maxBufferSize = DEFAULT_MAX_BUFFER_SIZE; 688 isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER; 689 maxTimeDuration = DEFAULT_MAX_TIME_DURATION; 690 691 // Initialize and configure the context. 
692 status = context.Init(); 693 if( status != XN_STATUS_OK ) 694 { 695 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to initialize the context: %s\n", xnGetStatusString(status)); 696 return; 697 } 698 699 // Open file 700 status = context.OpenFileRecording( filename, productionNode ); 701 if( status != XN_STATUS_OK ) 702 { 703 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Failed to open input file (%s): %s\n", filename, xnGetStatusString(status)); 704 return; 705 } 706 707 context.FindExistingNode( XN_NODE_TYPE_DEPTH, depthGenerator ); 708 context.FindExistingNode( XN_NODE_TYPE_IMAGE, imageGenerator ); 709 710 if( !readCamerasParams() ) 711 { 712 fprintf(stderr, "CvCapture_OpenNI::CvCapture_OpenNI : Could not read cameras parameters\n"); 713 return; 714 } 715 716 outputMaps.resize( outputMapsTypesCount ); 717 718 isContextOpened = true; 719} 720 721CvCapture_OpenNI::~CvCapture_OpenNI() 722{ 723 context.StopGeneratingAll(); 724 context.Release(); 725} 726 727bool CvCapture_OpenNI::readCamerasParams() 728{ 729 XnDouble pixelSize = 0; 730 if( depthGenerator.GetRealProperty( "ZPPS", pixelSize ) != XN_STATUS_OK ) 731 { 732 fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read pixel size!\n"); 733 return false; 734 } 735 736 // pixel size @ VGA = pixel size @ SXGA x 2 737 pixelSize *= 2.0; // in mm 738 739 // focal length of IR camera in pixels for VGA resolution 740 XnUInt64 zeroPlanDistance; // in mm 741 if( depthGenerator.GetIntProperty( "ZPD", zeroPlanDistance ) != XN_STATUS_OK ) 742 { 743 fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read virtual plane distance!\n"); 744 return false; 745 } 746 747 if( depthGenerator.GetRealProperty( "LDDIS", baseline ) != XN_STATUS_OK ) 748 { 749 fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read base line!\n"); 750 return false; 751 } 752 753 // baseline from cm -> mm 754 baseline *= 10; 755 756 // focal length from mm -> pixels (valid for 640x480) 757 
depthFocalLength_VGA = (XnUInt64)((double)zeroPlanDistance / (double)pixelSize); 758 759 if( depthGenerator.GetIntProperty( "ShadowValue", shadowValue ) != XN_STATUS_OK ) 760 { 761 fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read property \"ShadowValue\"!\n"); 762 return false; 763 } 764 765 if( depthGenerator.GetIntProperty("NoSampleValue", noSampleValue ) != XN_STATUS_OK ) 766 { 767 fprintf(stderr, "CvCapture_OpenNI::readCamerasParams : Could not read property \"NoSampleValue\"!\n"); 768 return false; 769 } 770 771 return true; 772} 773 774double CvCapture_OpenNI::getProperty( int propIdx ) const 775{ 776 double propValue = 0; 777 778 if( isOpened() ) 779 { 780 int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK; 781 782 if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR ) 783 { 784 propValue = getImageGeneratorProperty( purePropIdx ); 785 } 786 else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR ) 787 { 788 propValue = getDepthGeneratorProperty( purePropIdx ); 789 } 790 else 791 { 792 propValue = getCommonProperty( purePropIdx ); 793 } 794 } 795 796 return propValue; 797} 798 799bool CvCapture_OpenNI::setProperty( int propIdx, double propValue ) 800{ 801 bool isSet = false; 802 if( isOpened() ) 803 { 804 int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK; 805 806 if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR ) 807 { 808 isSet = setImageGeneratorProperty( purePropIdx, propValue ); 809 } 810 else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR ) 811 { 812 isSet = setDepthGeneratorProperty( purePropIdx, propValue ); 813 } 814 else 815 { 816 isSet = setCommonProperty( purePropIdx, propValue ); 817 } 818 } 819 820 return isSet; 821} 822 823double CvCapture_OpenNI::getCommonProperty( int propIdx ) const 824{ 825 double propValue = 0; 826 827 switch( propIdx ) 828 { 829 // There is a set of properties that 
correspond to depth generator by default 830 // (is they are pass without particular generator flag). Two reasons of this: 831 // 1) We can assume that depth generator is the main one for depth sensor. 832 // 2) In the initial vertions of OpenNI integration to OpenCV the value of 833 // flag CV_CAP_OPENNI_DEPTH_GENERATOR was 0 (it isn't zero now). 834 case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : 835 case CV_CAP_PROP_FRAME_WIDTH : 836 case CV_CAP_PROP_FRAME_HEIGHT : 837 case CV_CAP_PROP_FPS : 838 case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH : 839 case CV_CAP_PROP_OPENNI_BASELINE : 840 case CV_CAP_PROP_OPENNI_FOCAL_LENGTH : 841 case CV_CAP_PROP_OPENNI_REGISTRATION : 842 propValue = getDepthGeneratorProperty( propIdx ); 843 break; 844 case CV_CAP_PROP_OPENNI_APPROX_FRAME_SYNC : 845 propValue = !approxSyncGrabber.empty() && approxSyncGrabber->isRun() ? 1. : 0.; 846 break; 847 case CV_CAP_PROP_OPENNI_MAX_BUFFER_SIZE : 848 propValue = maxBufferSize; 849 break; 850 case CV_CAP_PROP_OPENNI_CIRCLE_BUFFER : 851 propValue = isCircleBuffer ? 1. : 0.; 852 break; 853 case CV_CAP_PROP_OPENNI_MAX_TIME_DURATION : 854 propValue = maxTimeDuration; 855 break; 856 default : 857 CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for getting.\n", propIdx) ); 858 } 859 860 return propValue; 861} 862 863bool CvCapture_OpenNI::setCommonProperty( int propIdx, double propValue ) 864{ 865 bool isSet = false; 866 867 switch( propIdx ) 868 { 869 // There is a set of properties that correspond to depth generator by default 870 // (is they are pass without particular generator flag). 
871 case CV_CAP_PROP_OPENNI_REGISTRATION: 872 isSet = setDepthGeneratorProperty( propIdx, propValue ); 873 break; 874 case CV_CAP_PROP_OPENNI_APPROX_FRAME_SYNC : 875 if( propValue && depthGenerator.IsValid() && imageGenerator.IsValid() ) 876 { 877 // start synchronization 878 if( approxSyncGrabber.empty() ) 879 { 880 approxSyncGrabber.reset(new ApproximateSyncGrabber( context, depthGenerator, imageGenerator, maxBufferSize, isCircleBuffer, maxTimeDuration )); 881 } 882 else 883 { 884 approxSyncGrabber->finish(); 885 886 // update params 887 approxSyncGrabber->setMaxBufferSize(maxBufferSize); 888 approxSyncGrabber->setIsCircleBuffer(isCircleBuffer); 889 approxSyncGrabber->setMaxTimeDuration(maxTimeDuration); 890 } 891 approxSyncGrabber->start(); 892 } 893 else if( !propValue && !approxSyncGrabber.empty() ) 894 { 895 // finish synchronization 896 approxSyncGrabber->finish(); 897 } 898 break; 899 case CV_CAP_PROP_OPENNI_MAX_BUFFER_SIZE : 900 maxBufferSize = cvRound(propValue); 901 if( !approxSyncGrabber.empty() ) 902 approxSyncGrabber->setMaxBufferSize(maxBufferSize); 903 break; 904 case CV_CAP_PROP_OPENNI_CIRCLE_BUFFER : 905 if( !approxSyncGrabber.empty() ) 906 approxSyncGrabber->setIsCircleBuffer(isCircleBuffer); 907 break; 908 case CV_CAP_PROP_OPENNI_MAX_TIME_DURATION : 909 maxTimeDuration = cvRound(propValue); 910 if( !approxSyncGrabber.empty() ) 911 approxSyncGrabber->setMaxTimeDuration(maxTimeDuration); 912 break; 913 default: 914 CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for setting.\n", propIdx) ); 915 } 916 917 return isSet; 918} 919 920double CvCapture_OpenNI::getDepthGeneratorProperty( int propIdx ) const 921{ 922 double propValue = 0; 923 if( !depthGenerator.IsValid() ) 924 return propValue; 925 926 XnMapOutputMode mode; 927 928 switch( propIdx ) 929 { 930 case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT : 931 CV_DbgAssert( depthGenerator.IsValid() ); 932 propValue = 1.; 933 break; 934 case CV_CAP_PROP_FRAME_WIDTH : 935 if( 
depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nXRes;
        break;
    case CV_CAP_PROP_FRAME_HEIGHT :
        if( depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nYRes;
        break;
    case CV_CAP_PROP_FPS :
        if( depthGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nFPS;
        break;
    case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
        propValue = depthGenerator.GetDeviceMaxDepth();
        break;
    case CV_CAP_PROP_OPENNI_BASELINE :
        propValue = baseline;
        break;
    case CV_CAP_PROP_OPENNI_FOCAL_LENGTH :
        propValue = (double)depthFocalLength_VGA;
        break;
    case CV_CAP_PROP_OPENNI_REGISTRATION :
        // IsViewPointAs() takes a non-const generator reference, hence the
        // const_cast on 'this' in this const getter.
        propValue = depthGenerator.GetAlternativeViewPointCap().IsViewPointAs(const_cast<CvCapture_OpenNI *>(this)->imageGenerator) ? 1.0 : 0.0;
        break;
    case CV_CAP_PROP_POS_MSEC :
        propValue = (double)depthGenerator.GetTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES :
        propValue = depthGenerator.GetFrameID();
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) );
    }

    return propValue;
}

// Only CV_CAP_PROP_OPENNI_REGISTRATION is settable: it aligns the depth
// viewpoint with the image generator (on) or resets it (off).
bool CvCapture_OpenNI::setDepthGeneratorProperty( int propIdx, double propValue )
{
    bool isSet = false;

    CV_Assert( depthGenerator.IsValid() );

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_REGISTRATION:
        {
            if( propValue != 0.0 ) // "on"
            {
                // if there isn't image generator (i.e. ASUS XtionPro doesn't have it)
                // then the property isn't avaliable
                if( imageGenerator.IsValid() )
                {
                    if( !depthGenerator.GetAlternativeViewPointCap().IsViewPointAs(imageGenerator) )
                    {
                        if( depthGenerator.GetAlternativeViewPointCap().IsViewPointSupported(imageGenerator) )
                        {
                            XnStatus status = depthGenerator.GetAlternativeViewPointCap().SetViewPoint(imageGenerator);
                            if( status != XN_STATUS_OK )
                                fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : %s\n", xnGetStatusString(status));
                            else
                                isSet = true;
                        }
                        else
                            fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : Unsupported viewpoint.\n");
                    }
                    else
                        isSet = true; // already registered
                }
            }
            else // "off"
            {
                XnStatus status = depthGenerator.GetAlternativeViewPointCap().ResetViewPoint();
                if( status != XN_STATUS_OK )
                    fprintf(stderr, "CvCapture_OpenNI::setDepthGeneratorProperty : %s\n", xnGetStatusString(status));
                else
                    isSet = true;
            }
        }
        break;
    default:
        CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for setting.\n", propIdx) );
    }

    return isSet;
}

double CvCapture_OpenNI::getImageGeneratorProperty( int propIdx ) const
{
    double propValue = 0.;
    if( !imageGenerator.IsValid() )
        return propValue;

    XnMapOutputMode mode;
    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT :
        CV_DbgAssert( imageGenerator.IsValid() );
        propValue = 1.;
        break;
    case CV_CAP_PROP_FRAME_WIDTH :
        if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nXRes;
        break;
    case CV_CAP_PROP_FRAME_HEIGHT :
        if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nYRes;
        break;
    case CV_CAP_PROP_FPS :
        if( imageGenerator.GetMapOutputMode(mode) == XN_STATUS_OK )
            propValue = mode.nFPS;
        break;
    case CV_CAP_PROP_POS_MSEC :
        propValue = (double)imageGenerator.GetTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES :
        propValue = (double)imageGenerator.GetFrameID();
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) );
    }

    return propValue;
}

// Only CV_CAP_PROP_OPENNI_OUTPUT_MODE is settable: maps the OpenCV mode enum
// to an OpenNI XnMapOutputMode and applies it.
bool CvCapture_OpenNI::setImageGeneratorProperty( int propIdx, double propValue )
{
    bool isSet = false;
    if( !imageGenerator.IsValid() )
        return isSet;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_OUTPUT_MODE :
    {
        XnMapOutputMode mode;

        switch( cvRound(propValue) )
        {
        case CV_CAP_OPENNI_VGA_30HZ :
            mode.nXRes = XN_VGA_X_RES;
            mode.nYRes = XN_VGA_Y_RES;
            mode.nFPS = 30;
            break;
        case CV_CAP_OPENNI_SXGA_15HZ :
            mode.nXRes = XN_SXGA_X_RES;
            mode.nYRes = XN_SXGA_Y_RES;
            mode.nFPS = 15;
            break;
        case CV_CAP_OPENNI_SXGA_30HZ :
            mode.nXRes = XN_SXGA_X_RES;
            mode.nYRes = XN_SXGA_Y_RES;
            mode.nFPS = 30;
            break;
        case CV_CAP_OPENNI_QVGA_30HZ :
            mode.nXRes = XN_QVGA_X_RES;
            mode.nYRes = XN_QVGA_Y_RES;
            mode.nFPS = 30;
            break;
        case CV_CAP_OPENNI_QVGA_60HZ :
            mode.nXRes = XN_QVGA_X_RES;
            mode.nYRes = XN_QVGA_Y_RES;
            mode.nFPS = 60;
            break;
        default :
            CV_Error( CV_StsBadArg, "Unsupported image generator output mode.\n");
        }

        XnStatus status = imageGenerator.SetMapOutputMode( mode );
        if( status != XN_STATUS_OK )
            fprintf(stderr, "CvCapture_OpenNI::setImageGeneratorProperty : %s\n", xnGetStatusString(status));
        else
            isSet = true;
        break;
    }
    default:
        CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for setting.\n", propIdx) );
    }

    return isSet;
}

bool CvCapture_OpenNI::grabFrame()
{
    if( !isOpened() )
return false; 1120 1121 bool isGrabbed = false; 1122 if( !approxSyncGrabber.empty() && approxSyncGrabber->isRun() ) 1123 { 1124 isGrabbed = approxSyncGrabber->grab( depthMetaData, imageMetaData ); 1125 } 1126 else 1127 { 1128 XnStatus status = context.WaitAndUpdateAll(); 1129 if( status != XN_STATUS_OK ) 1130 return false; 1131 1132 if( depthGenerator.IsValid() ) 1133 depthGenerator.GetMetaData( depthMetaData ); 1134 if( imageGenerator.IsValid() ) 1135 imageGenerator.GetMetaData( imageMetaData ); 1136 isGrabbed = true; 1137 } 1138 1139 return isGrabbed; 1140} 1141 1142inline void getDepthMapFromMetaData( const xn::DepthMetaData& depthMetaData, cv::Mat& depthMap, XnUInt64 noSampleValue, XnUInt64 shadowValue ) 1143{ 1144 int cols = depthMetaData.XRes(); 1145 int rows = depthMetaData.YRes(); 1146 1147 depthMap.create( rows, cols, CV_16UC1 ); 1148 1149 const XnDepthPixel* pDepthMap = depthMetaData.Data(); 1150 1151 // CV_Assert( sizeof(unsigned short) == sizeof(XnDepthPixel) ); 1152 memcpy( depthMap.data, pDepthMap, cols*rows*sizeof(XnDepthPixel) ); 1153 1154 cv::Mat badMask = (depthMap == (double)noSampleValue) | (depthMap == (double)shadowValue) | (depthMap == 0); 1155 1156 // mask the pixels with invalid depth 1157 depthMap.setTo( cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL ), badMask ); 1158} 1159 1160IplImage* CvCapture_OpenNI::retrieveDepthMap() 1161{ 1162 if( !depthMetaData.Data() ) 1163 return 0; 1164 1165 getDepthMapFromMetaData( depthMetaData, outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue ); 1166 1167 return outputMaps[CV_CAP_OPENNI_DEPTH_MAP].getIplImagePtr(); 1168} 1169 1170IplImage* CvCapture_OpenNI::retrievePointCloudMap() 1171{ 1172 if( !depthMetaData.Data() ) 1173 return 0; 1174 1175 cv::Mat depth; 1176 getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue ); 1177 1178 const int badPoint = INVALID_PIXEL_VAL; 1179 const float badCoord = INVALID_COORDINATE_VAL; 1180 int cols = depthMetaData.XRes(), rows = 
depthMetaData.YRes(); 1181 cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) ); 1182 1183 std::vector<XnPoint3D> proj(cols*rows); 1184 std::vector<XnPoint3D> real(cols*rows); 1185 for( int y = 0; y < rows; y++ ) 1186 { 1187 for( int x = 0; x < cols; x++ ) 1188 { 1189 int ind = y*cols+x; 1190 proj[ind].X = (float)x; 1191 proj[ind].Y = (float)y; 1192 proj[ind].Z = depth.at<unsigned short>(y, x); 1193 } 1194 } 1195 depthGenerator.ConvertProjectiveToRealWorld(cols*rows, &proj.front(), &real.front()); 1196 1197 for( int y = 0; y < rows; y++ ) 1198 { 1199 for( int x = 0; x < cols; x++ ) 1200 { 1201 // Check for invalid measurements 1202 if( depth.at<unsigned short>(y, x) == badPoint ) // not valid 1203 pointCloud_XYZ.at<cv::Point3f>(y,x) = cv::Point3f( badCoord, badCoord, badCoord ); 1204 else 1205 { 1206 int ind = y*cols+x; 1207 pointCloud_XYZ.at<cv::Point3f>(y,x) = cv::Point3f( real[ind].X*0.001f, real[ind].Y*0.001f, real[ind].Z*0.001f); // from mm to meters 1208 } 1209 } 1210 } 1211 1212 outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].mat = pointCloud_XYZ; 1213 1214 return outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].getIplImagePtr(); 1215} 1216 1217static void computeDisparity_32F( const xn::DepthMetaData& depthMetaData, cv::Mat& disp, XnDouble baseline, XnUInt64 F, 1218 XnUInt64 noSampleValue, XnUInt64 shadowValue ) 1219{ 1220 cv::Mat depth; 1221 getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue ); 1222 CV_Assert( depth.type() == CV_16UC1 ); 1223 1224 1225 // disparity = baseline * F / z; 1226 1227 float mult = (float)(baseline /*mm*/ * F /*pixels*/); 1228 1229 disp.create( depth.size(), CV_32FC1); 1230 disp = cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL ); 1231 for( int y = 0; y < disp.rows; y++ ) 1232 { 1233 for( int x = 0; x < disp.cols; x++ ) 1234 { 1235 unsigned short curDepth = depth.at<unsigned short>(y,x); 1236 if( curDepth != CvCapture_OpenNI::INVALID_PIXEL_VAL ) 1237 disp.at<float>(y,x) = mult / curDepth; 1238 } 
1239 } 1240} 1241 1242IplImage* CvCapture_OpenNI::retrieveDisparityMap() 1243{ 1244 if( !depthMetaData.Data() ) 1245 return 0; 1246 1247 cv::Mat disp32; 1248 computeDisparity_32F( depthMetaData, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue ); 1249 1250 disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 ); 1251 1252 return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].getIplImagePtr(); 1253} 1254 1255IplImage* CvCapture_OpenNI::retrieveDisparityMap_32F() 1256{ 1257 if( !depthMetaData.Data() ) 1258 return 0; 1259 1260 computeDisparity_32F( depthMetaData, outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue ); 1261 1262 return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr(); 1263} 1264 1265IplImage* CvCapture_OpenNI::retrieveValidDepthMask() 1266{ 1267 if( !depthMetaData.Data() ) 1268 return 0; 1269 1270 cv::Mat depth; 1271 getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue ); 1272 1273 outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = depth != CvCapture_OpenNI::INVALID_PIXEL_VAL; 1274 1275 return outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].getIplImagePtr(); 1276} 1277 1278inline void getBGRImageFromMetaData( const xn::ImageMetaData& imageMetaData, cv::Mat& bgrImage ) 1279{ 1280 if( imageMetaData.PixelFormat() != XN_PIXEL_FORMAT_RGB24 ) 1281 CV_Error( CV_StsUnsupportedFormat, "Unsupported format of grabbed image\n" ); 1282 1283 cv::Mat rgbImage( imageMetaData.YRes(), imageMetaData.XRes(), CV_8UC3 ); 1284 const XnRGB24Pixel* pRgbImage = imageMetaData.RGB24Data(); 1285 1286 // CV_Assert( 3*sizeof(uchar) == sizeof(XnRGB24Pixel) ); 1287 memcpy( rgbImage.data, pRgbImage, rgbImage.total()*sizeof(XnRGB24Pixel) ); 1288 cv::cvtColor( rgbImage, bgrImage, CV_RGB2BGR ); 1289} 1290 1291IplImage* CvCapture_OpenNI::retrieveBGRImage() 1292{ 1293 if( !imageMetaData.Data() ) 1294 return 0; 1295 1296 getBGRImageFromMetaData( imageMetaData, 
outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat ); 1297 1298 return outputMaps[CV_CAP_OPENNI_BGR_IMAGE].getIplImagePtr(); 1299} 1300 1301IplImage* CvCapture_OpenNI::retrieveGrayImage() 1302{ 1303 if( !imageMetaData.Data() ) 1304 return 0; 1305 1306 CV_Assert( imageMetaData.BytesPerPixel() == 3 ); // RGB 1307 1308 cv::Mat rgbImage; 1309 getBGRImageFromMetaData( imageMetaData, rgbImage ); 1310 cv::cvtColor( rgbImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY ); 1311 1312 return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr(); 1313} 1314 1315IplImage* CvCapture_OpenNI::retrieveFrame( int outputType ) 1316{ 1317 IplImage* image = 0; 1318 CV_Assert( outputType < outputMapsTypesCount && outputType >= 0); 1319 1320 if( outputType == CV_CAP_OPENNI_DEPTH_MAP ) 1321 { 1322 image = retrieveDepthMap(); 1323 } 1324 else if( outputType == CV_CAP_OPENNI_POINT_CLOUD_MAP ) 1325 { 1326 image = retrievePointCloudMap(); 1327 } 1328 else if( outputType == CV_CAP_OPENNI_DISPARITY_MAP ) 1329 { 1330 image = retrieveDisparityMap(); 1331 } 1332 else if( outputType == CV_CAP_OPENNI_DISPARITY_MAP_32F ) 1333 { 1334 image = retrieveDisparityMap_32F(); 1335 } 1336 else if( outputType == CV_CAP_OPENNI_VALID_DEPTH_MASK ) 1337 { 1338 image = retrieveValidDepthMask(); 1339 } 1340 else if( outputType == CV_CAP_OPENNI_BGR_IMAGE ) 1341 { 1342 image = retrieveBGRImage(); 1343 } 1344 else if( outputType == CV_CAP_OPENNI_GRAY_IMAGE ) 1345 { 1346 image = retrieveGrayImage(); 1347 } 1348 1349 return image; 1350} 1351 1352 1353CvCapture* cvCreateCameraCapture_OpenNI( int index ) 1354{ 1355 CvCapture_OpenNI* capture = new CvCapture_OpenNI( index ); 1356 1357 if( capture->isOpened() ) 1358 return capture; 1359 1360 delete capture; 1361 return 0; 1362} 1363 1364CvCapture* cvCreateFileCapture_OpenNI( const char* filename ) 1365{ 1366 CvCapture_OpenNI* capture = new CvCapture_OpenNI( filename ); 1367 1368 if( capture->isOpened() ) 1369 return capture; 1370 1371 delete capture; 1372 return 0; 1373} 1374 
1375#endif 1376