/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                        Intel License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The name of Intel Corporation may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"

#ifdef HAVE_OPENNI2

#if defined TBB_INTERFACE_VERSION && TBB_INTERFACE_VERSION < 5000
# undef HAVE_TBB
#endif

#include <queue>

#ifndef i386
#  define i386 0
#endif
#ifndef __arm__
#  define __arm__ 0
#endif
#ifndef _ARC
#  define _ARC 0
#endif
#ifndef __APPLE__
#  define __APPLE__ 0
#endif

#define CV_STREAM_TIMEOUT 2000

#define CV_DEPTH_STREAM 0
#define CV_COLOR_STREAM 1

#include "OpenNI.h"
#include "PS1080.h"

///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
class CvCapture_OpenNI2 : public CvCapture
{
public:
    enum { DEVICE_DEFAULT=0, DEVICE_MS_KINECT=0, DEVICE_ASUS_XTION=1, DEVICE_MAX=1 };

    static const int INVALID_PIXEL_VAL = 0;
    static const int INVALID_COORDINATE_VAL = 0;

#ifdef HAVE_TBB
    static const int DEFAULT_MAX_BUFFER_SIZE = 8;
#else
    static const int DEFAULT_MAX_BUFFER_SIZE = 2;
#endif
    static const int DEFAULT_IS_CIRCLE_BUFFER = 0;
    static const int DEFAULT_MAX_TIME_DURATION = 20;

    CvCapture_OpenNI2(int index = 0);
    CvCapture_OpenNI2(const char * filename);
    virtual ~CvCapture_OpenNI2();

    virtual double getProperty(int propIdx) const;
    virtual bool setProperty(int propIdx, double propVal);
    virtual bool grabFrame();
    virtual IplImage* retrieveFrame(int outputType);

    bool isOpened() const;

protected:
    struct OutputMap
    {
    public:
        cv::Mat mat;
        IplImage* getIplImagePtr();
    private:
        IplImage iplHeader;
    };

    static const int outputMapsTypesCount = 7;

    static openni::VideoMode defaultColorOutputMode();
    static openni::VideoMode defaultDepthOutputMode();

    IplImage* retrieveDepthMap();
    IplImage* retrievePointCloudMap();
    IplImage* retrieveDisparityMap();
    IplImage* retrieveDisparityMap_32F();
    IplImage* retrieveValidDepthMask();
    IplImage* retrieveBGRImage();
    IplImage* retrieveGrayImage();

    bool readCamerasParams();

    double getDepthGeneratorProperty(int propIdx) const;
    bool setDepthGeneratorProperty(int propIdx, double propVal);
    double getImageGeneratorProperty(int propIdx) const;
    bool setImageGeneratorProperty(int propIdx, double propVal);
    double getCommonProperty(int propIdx) const;
    bool setCommonProperty(int propIdx, double propVal);

    // OpenNI context
    openni::Device device;
    bool isContextOpened;
    openni::Recorder recorder;

    // Data generators and their metadata
    openni::VideoStream depth, color, **streams;
    openni::VideoFrameRef depthFrame, colorFrame;
    cv::Mat depthImage, colorImage;

    int maxBufferSize, maxTimeDuration; // for approx sync
    bool isCircleBuffer;
    //cv::Ptr<ApproximateSyncGrabber> approxSyncGrabber;

    // Camera settings:
    // TODO: find an OpenNI function to convert z->disparity and remove the fields "baseline" and "depthFocalLength_VGA"
    // Distance between the IR projector and the IR camera (stored in millimeters, see readCamerasParams)
    double baseline;
    // Focal length for the IR camera in VGA resolution (in pixels)
    int depthFocalLength_VGA;

    // The value for shadow (occluded pixels)
    int shadowValue;
    // The value for pixels without a valid disparity measurement
    int noSampleValue;

    int currentStream;

    int numStream;
    std::vector<OutputMap> outputMaps;
};
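
// Illustrative usage sketch (kept out of the build with #if 0): how this backend is
// normally reached from user code via the videoio API, assuming OpenCV was built with
// OpenNI2 support. The CV_CAP_* constants are the same ones handled by the property and
// retrieve methods below; the function name is made up for the example.
#if 0
static void openni2_capture_sketch()
{
    cv::VideoCapture capture(CV_CAP_OPENNI2);                 // routed to CvCapture_OpenNI2(index)
    if( !capture.isOpened() )
        return;

    // Generator-specific properties are addressed by adding the generator flag:
    double focalLength = capture.get(CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_FOCAL_LENGTH);
    (void)focalLength;

    cv::Mat depthMap, bgrImage;
    for(;;)
    {
        if( !capture.grab() )                                 // CvCapture_OpenNI2::grabFrame()
            break;
        capture.retrieve(depthMap, CV_CAP_OPENNI_DEPTH_MAP);  // CV_16UC1, depth in mm
        capture.retrieve(bgrImage, CV_CAP_OPENNI_BGR_IMAGE);  // CV_8UC3
    }
}
#endif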

IplImage* CvCapture_OpenNI2::OutputMap::getIplImagePtr()
{
    if( mat.empty() )
        return 0;

    iplHeader = IplImage(mat);
    return &iplHeader;
}

bool CvCapture_OpenNI2::isOpened() const
{
    return isContextOpened;
}

openni::VideoMode CvCapture_OpenNI2::defaultColorOutputMode()
{
    openni::VideoMode mode;
    mode.setResolution(640, 480);
    mode.setFps(30);
    mode.setPixelFormat(openni::PIXEL_FORMAT_RGB888);
    return mode;
}

openni::VideoMode CvCapture_OpenNI2::defaultDepthOutputMode()
{
    openni::VideoMode mode;
    mode.setResolution(640, 480);
    mode.setFps(30);
    mode.setPixelFormat(openni::PIXEL_FORMAT_DEPTH_1_MM);
    return mode;
}

CvCapture_OpenNI2::CvCapture_OpenNI2( int index )
{
    numStream = 2;
    const char* deviceURI = openni::ANY_DEVICE;
    openni::Status status;
    int deviceType = DEVICE_DEFAULT;

    noSampleValue = shadowValue = 0;

    isContextOpened = false;
    streams = NULL;
    maxBufferSize = DEFAULT_MAX_BUFFER_SIZE;
    isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER;
    maxTimeDuration = DEFAULT_MAX_TIME_DURATION;

    if( index >= 10 )
    {
        deviceType = index / 10;
        index %= 10;
    }
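
    // Note: the index packs the device type into its tens digit (index = 10*deviceType + camera),
    // e.g. an index of 10*DEVICE_ASUS_XTION selects the Xtion code path below, while 0..9
    // keeps DEVICE_DEFAULT (Kinect-style) behaviour.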

    // Asus XTION and Occipital Structure Sensor do not have an image generator
    if (deviceType == DEVICE_ASUS_XTION)
        numStream = 1;

    if( deviceType > DEVICE_MAX )
        return;

    // Initialize and configure the context.
    status = openni::OpenNI::initialize();

    if (status != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, cv::format("Failed to initialize: %s", openni::OpenNI::getExtendedError()));
        return;
    }

    status = device.open(deviceURI);
    if( status != openni::STATUS_OK )
    {
        CV_Error(CV_StsError, cv::format("OpenCVKinect: Device open failed: %s\n", openni::OpenNI::getExtendedError()));
        openni::OpenNI::shutdown();
        return;
    }

    //device.setDepthColorSyncEnabled(true);


    status = depth.create(device, openni::SENSOR_DEPTH);
    if (status == openni::STATUS_OK)
    {
        if (depth.isValid())
        {
            CV_Assert(depth.setVideoMode(defaultDepthOutputMode()) == openni::STATUS_OK); // xn::DepthGenerator supports VGA only! (Jan 2011)
        }

        status = depth.start();
        if (status != openni::STATUS_OK)
        {
            CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start depth stream: %s\n", openni::OpenNI::getExtendedError()));
            depth.destroy();
            return;
        }
    }
    else
    {
        CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find depth stream: %s\n", openni::OpenNI::getExtendedError()));
        return;
    }

    streams = new openni::VideoStream*[numStream];
    streams[CV_DEPTH_STREAM] = &depth;

    // create a color object
    status = color.create(device, openni::SENSOR_COLOR);
    if (status == openni::STATUS_OK)
    {
        // Set map output mode.
        if (color.isValid())
        {
            CV_Assert(color.setVideoMode(defaultColorOutputMode()) == openni::STATUS_OK);
        }
        status = color.start();
        if (status != openni::STATUS_OK)
        {
            CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't start color stream: %s\n", openni::OpenNI::getExtendedError()));
            color.destroy();
            return;
        }
        streams[CV_COLOR_STREAM] = &color;
    }
    else if (numStream == 2)
    {
        CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find color stream: %s\n", openni::OpenNI::getExtendedError()));
        return;
    }

    if( !readCamerasParams() )
    {
        CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read camera parameters\n"));
        return;
    }

//    if( deviceType == DEVICE_ASUS_XTION )
//    {
//        //ps/asus specific
//        imageGenerator.SetIntProperty("InputFormat", 1 /*XN_IO_IMAGE_FORMAT_YUV422*/);
//        imageGenerator.SetPixelFormat(XN_PIXEL_FORMAT_RGB24);
//        depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/);
//    }


    outputMaps.resize( outputMapsTypesCount );

    isContextOpened = true;

    setProperty(CV_CAP_PROP_OPENNI_REGISTRATION, 1.0);
}

CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename)
{
    numStream = 2;
    openni::Status status;

    noSampleValue = shadowValue = 0;

    isContextOpened = false;
    streams = NULL;
    maxBufferSize = DEFAULT_MAX_BUFFER_SIZE;
    isCircleBuffer = DEFAULT_IS_CIRCLE_BUFFER;
    maxTimeDuration = DEFAULT_MAX_TIME_DURATION;

    // Initialize and configure the context.
    status = openni::OpenNI::initialize();

    if (status != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, cv::format("Failed to initialize: %s", openni::OpenNI::getExtendedError()));
        return;
    }

    // Open file
    status = device.open(filename);
    if( status != openni::STATUS_OK )
    {
        CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Failed to open input file (%s): %s\n", filename, openni::OpenNI::getExtendedError()));
        return;
    }

    if( !readCamerasParams() )
    {
        CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read camera parameters\n"));
        return;
    }

    outputMaps.resize( outputMapsTypesCount );

    isContextOpened = true;
}

CvCapture_OpenNI2::~CvCapture_OpenNI2()
{
    this->depthFrame.release();
    this->colorFrame.release();
    this->depth.stop();
    this->color.stop();
    if (streams)
        delete[] streams; // allocated in the camera constructor
    openni::OpenNI::shutdown();
}

bool CvCapture_OpenNI2::readCamerasParams()
{
    double pixelSize = 0;
    if (depth.getProperty<double>(XN_STREAM_PROPERTY_ZERO_PLANE_PIXEL_SIZE, &pixelSize) != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read pixel size!\n"));
        return false;
    }

    // pixel size @ VGA = pixel size @ SXGA x 2
    pixelSize *= 2.0; // in mm

    // focal length of IR camera in pixels for VGA resolution
    int zeroPlaneDistance; // in mm
    if (depth.getProperty(XN_STREAM_PROPERTY_ZERO_PLANE_DISTANCE, &zeroPlaneDistance) != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read zero plane distance!\n"));
        return false;
    }

    if (depth.getProperty<double>(XN_STREAM_PROPERTY_EMITTER_DCMOS_DISTANCE, &baseline) != openni::STATUS_OK)
    {
        CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::readCamerasParams : Could not read baseline!\n"));
        return false;
    }

    // baseline from cm -> mm
    baseline *= 10;

    // focal length from mm -> pixels (valid for 640x480)
    depthFocalLength_VGA = (int)((double)zeroPlaneDistance / (double)pixelSize);

    return true;
}
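
// Worked example of the arithmetic above (illustrative only, using typical
// PrimeSense/Kinect figures rather than values read from a specific device):
// a zero plane distance of ~120 mm and a zero plane pixel size of ~0.1042 mm at SXGA
// give a VGA pixel size of ~0.2084 mm, hence depthFocalLength_VGA ~ 120 / 0.2084 ~ 575
// pixels; an emitter-to-camera distance of 7.5 cm becomes a baseline of 75 mm.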

double CvCapture_OpenNI2::getProperty( int propIdx ) const
{
    double propValue = 0;

    if( isOpened() )
    {
        int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK;

        if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR )
        {
            propValue = getImageGeneratorProperty( purePropIdx );
        }
        else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR )
        {
            propValue = getDepthGeneratorProperty( purePropIdx );
        }
        else
        {
            propValue = getCommonProperty( purePropIdx );
        }
    }

    return propValue;
}

bool CvCapture_OpenNI2::setProperty( int propIdx, double propValue )
{
    bool isSet = false;
    if( isOpened() )
    {
        int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK;

        if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR )
        {
            isSet = setImageGeneratorProperty( purePropIdx, propValue );
        }
        else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR )
        {
            isSet = setDepthGeneratorProperty( purePropIdx, propValue );
        }
        else
        {
            isSet = setCommonProperty( purePropIdx, propValue );
        }
    }

    return isSet;
}

double CvCapture_OpenNI2::getCommonProperty( int propIdx ) const
{
    double propValue = 0;

    switch( propIdx )
    {
    // There is a set of properties that correspond to the depth generator by default
    // (if they are passed without a particular generator flag). Two reasons for this:
    // 1) We can assume that the depth generator is the main one for a depth sensor.
    // 2) In the initial versions of the OpenNI integration into OpenCV the value of
    //    the flag CV_CAP_OPENNI_DEPTH_GENERATOR was 0 (it isn't zero now).
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT :
    case CV_CAP_PROP_FRAME_WIDTH :
    case CV_CAP_PROP_FRAME_HEIGHT :
    case CV_CAP_PROP_FPS :
    case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
    case CV_CAP_PROP_OPENNI_BASELINE :
    case CV_CAP_PROP_OPENNI_FOCAL_LENGTH :
    case CV_CAP_PROP_OPENNI_REGISTRATION :
        propValue = getDepthGeneratorProperty( propIdx );
        break;
    case CV_CAP_PROP_OPENNI2_SYNC :
        propValue = const_cast<CvCapture_OpenNI2 *>(this)->device.getDepthColorSyncEnabled();
        break;
    case CV_CAP_PROP_OPENNI2_MIRROR:
    {
        bool isMirroring = color.getMirroringEnabled() && depth.getMirroringEnabled();
        propValue = isMirroring ? 1.0 : 0.0;
        break;
    }
    default :
        CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for getting.\n", propIdx) );
    }

    return propValue;
}

bool CvCapture_OpenNI2::setCommonProperty( int propIdx, double propValue )
{
    bool isSet = false;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI2_MIRROR:
    {
        bool mirror = propValue > 0.0;
        bool colorSet = color.setMirroringEnabled(mirror) == openni::STATUS_OK;
        bool depthSet = depth.setMirroringEnabled(mirror) == openni::STATUS_OK;
        isSet = colorSet && depthSet;
    }
        break;
    // There is a set of properties that correspond to the depth generator by default
    // (if they are passed without a particular generator flag).
    case CV_CAP_PROP_OPENNI_REGISTRATION:
        isSet = setDepthGeneratorProperty( propIdx, propValue );
        break;
    case CV_CAP_PROP_OPENNI2_SYNC:
        isSet = device.setDepthColorSyncEnabled(propValue > 0.0) == openni::STATUS_OK;
        break;
    default:
        CV_Error( CV_StsBadArg, cv::format("Such parameter (propIdx=%d) isn't supported for setting.\n", propIdx) );
    }

    return isSet;
}

double CvCapture_OpenNI2::getDepthGeneratorProperty( int propIdx ) const
{
    double propValue = 0;
    if( !depth.isValid() )
        return propValue;

    openni::VideoMode mode;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT :
        CV_DbgAssert(depth.isValid());
        propValue = 1.;
        break;
    case CV_CAP_PROP_FRAME_WIDTH :
        propValue = depth.getVideoMode().getResolutionX();
        break;
    case CV_CAP_PROP_FRAME_HEIGHT :
        propValue = depth.getVideoMode().getResolutionY();
        break;
    case CV_CAP_PROP_FPS :
        mode = depth.getVideoMode();
        propValue = mode.getFps();
        break;
    case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
        propValue = depth.getMaxPixelValue();
        break;
    case CV_CAP_PROP_OPENNI_BASELINE :
        propValue = baseline;
        break;
    case CV_CAP_PROP_OPENNI_FOCAL_LENGTH :
        propValue = (double)depthFocalLength_VGA;
        break;
    case CV_CAP_PROP_OPENNI_REGISTRATION :
        propValue = device.getImageRegistrationMode();
        break;
    case CV_CAP_PROP_POS_MSEC :
        propValue = (double)depthFrame.getTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES :
        propValue = depthFrame.getFrameIndex();
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) );
    }

    return propValue;
}

bool CvCapture_OpenNI2::setDepthGeneratorProperty( int propIdx, double propValue )
{
    bool isSet = false;

    CV_Assert( depth.isValid() );

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_REGISTRATION:
        {
            if( propValue != 0.0 ) // "on"
            {
                // if there is no image generator (e.g. ASUS XtionPro doesn't have one)
                // then the property isn't available
                if ( color.isValid() )
                {
                    openni::ImageRegistrationMode mode = openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR;
                    if( device.getImageRegistrationMode() != mode )
                    {
                        if (device.isImageRegistrationModeSupported(mode))
                        {
                            openni::Status status = device.setImageRegistrationMode(mode);
                            if( status != openni::STATUS_OK )
                                CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setDepthGeneratorProperty : %s\n", openni::OpenNI::getExtendedError()));
                            else
                                isSet = true;
                        }
                        else
                            CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setDepthGeneratorProperty : Unsupported viewpoint.\n"));
                    }
                    else
                        isSet = true;
                }
            }
            else // "off"
            {
                openni::Status status = device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_OFF);
                if( status != openni::STATUS_OK )
                    CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setDepthGeneratorProperty : %s\n", openni::OpenNI::getExtendedError()));
                else
                    isSet = true;
            }
        }
        break;
    default:
        CV_Error( CV_StsBadArg, cv::format("Depth generator does not support such parameter (propIdx=%d) for setting.\n", propIdx) );
    }

    return isSet;
}

double CvCapture_OpenNI2::getImageGeneratorProperty( int propIdx ) const
{
    double propValue = 0.;
    if( !color.isValid() )
        return propValue;

    openni::VideoMode mode;
    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_GENERATOR_PRESENT :
        CV_DbgAssert( color.isValid() );
        propValue = 1.;
        break;
    case CV_CAP_PROP_FRAME_WIDTH :
        propValue = color.getVideoMode().getResolutionX();
        break;
    case CV_CAP_PROP_FRAME_HEIGHT :
        propValue = color.getVideoMode().getResolutionY();
        break;
    case CV_CAP_PROP_FPS :
        propValue = color.getVideoMode().getFps();
        break;
    case CV_CAP_PROP_POS_MSEC :
        propValue = (double)colorFrame.getTimestamp();
        break;
    case CV_CAP_PROP_POS_FRAMES :
        propValue = (double)colorFrame.getFrameIndex();
        break;
    default :
        CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for getting.\n", propIdx) );
    }

    return propValue;
}

bool CvCapture_OpenNI2::setImageGeneratorProperty(int propIdx, double propValue)
{
    bool isSet = false;
    if( !color.isValid() )
        return isSet;

    switch( propIdx )
    {
    case CV_CAP_PROP_OPENNI_OUTPUT_MODE :
    {
        openni::VideoMode mode = color.getVideoMode();

        switch( cvRound(propValue) )
        {
        case CV_CAP_OPENNI_VGA_30HZ :
            mode.setResolution(640,480);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_SXGA_15HZ :
            mode.setResolution(1280, 960);
            mode.setFps(15);
            break;
        case CV_CAP_OPENNI_SXGA_30HZ :
            mode.setResolution(1280, 960);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_QVGA_30HZ :
            mode.setResolution(320, 240);
            mode.setFps(30);
            break;
        case CV_CAP_OPENNI_QVGA_60HZ :
            mode.setResolution(320, 240);
            mode.setFps(60);
            break;
        default :
            CV_Error( CV_StsBadArg, "Unsupported image generator output mode.\n");
        }

        openni::Status status = color.setVideoMode( mode );
        if( status != openni::STATUS_OK )
            CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::setImageGeneratorProperty : %s\n", openni::OpenNI::getExtendedError()));
        else
            isSet = true;
        break;
    }
    default:
        CV_Error( CV_StsBadArg, cv::format("Image generator does not support such parameter (propIdx=%d) for setting.\n", propIdx) );
    }

    return isSet;
}

bool CvCapture_OpenNI2::grabFrame()
{
    if( !isOpened() )
        return false;

    bool isGrabbed = false;

    openni::Status status = openni::OpenNI::waitForAnyStream(streams, numStream, &currentStream, CV_STREAM_TIMEOUT);
    if( status != openni::STATUS_OK )
        return false;

    if( depth.isValid() )
        depth.readFrame(&depthFrame);
    if (color.isValid())
        color.readFrame(&colorFrame);
    isGrabbed = true;

    return isGrabbed;
}

inline void getDepthMapFromMetaData(const openni::VideoFrameRef& depthMetaData, cv::Mat& depthMap, int noSampleValue, int shadowValue)
{
    depthMap.create(depthMetaData.getHeight(), depthMetaData.getWidth(), CV_16UC1);
    depthMap.data = (uchar*)depthMetaData.getData(); // point the Mat header at the OpenNI frame buffer (no copy is made)

    cv::Mat badMask = (depthMap == (double)noSampleValue) | (depthMap == (double)shadowValue) | (depthMap == 0);

    // mask the pixels with invalid depth
    depthMap.setTo( cv::Scalar::all( CvCapture_OpenNI2::INVALID_PIXEL_VAL ), badMask );
}

IplImage* CvCapture_OpenNI2::retrieveDepthMap()
{
    if( !depth.isValid() )
        return 0;

    getDepthMapFromMetaData( depthFrame, outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue );

    return outputMaps[CV_CAP_OPENNI_DEPTH_MAP].getIplImagePtr();
}

IplImage* CvCapture_OpenNI2::retrievePointCloudMap()
{
    if( !depthFrame.isValid() )
        return 0;

    cv::Mat depthImg;
    getDepthMapFromMetaData(depthFrame, depthImg, noSampleValue, shadowValue);

    const int badPoint = INVALID_PIXEL_VAL;
    const float badCoord = INVALID_COORDINATE_VAL;
    int cols = depthFrame.getWidth(), rows = depthFrame.getHeight();
    cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) );

    float worldX, worldY, worldZ;
    for( int y = 0; y < rows; y++ )
    {
        for (int x = 0; x < cols; x++)
        {
            openni::CoordinateConverter::convertDepthToWorld(depth, x, y, depthImg.at<unsigned short>(y, x), &worldX, &worldY, &worldZ);

            if (depthImg.at<unsigned short>(y, x) == badPoint) // not valid
                pointCloud_XYZ.at<cv::Point3f>(y, x) = cv::Point3f(badCoord, badCoord, badCoord);
            else
            {
                pointCloud_XYZ.at<cv::Point3f>(y, x) = cv::Point3f(worldX*0.001f, worldY*0.001f, worldZ*0.001f); // from mm to meters
            }
        }
    }

    outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].mat = pointCloud_XYZ;

    return outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].getIplImagePtr();
}

static void computeDisparity_32F( const openni::VideoFrameRef& depthMetaData, cv::Mat& disp, double baseline, int F, int noSampleValue, int shadowValue)
{
    cv::Mat depth;
    getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );
    CV_Assert( depth.type() == CV_16UC1 );

    // disparity = baseline * F / z;

    float mult = (float)(baseline /*mm*/ * F /*pixels*/);

    disp.create( depth.size(), CV_32FC1);
    disp = cv::Scalar::all( CvCapture_OpenNI2::INVALID_PIXEL_VAL );
    for( int y = 0; y < disp.rows; y++ )
    {
        for( int x = 0; x < disp.cols; x++ )
        {
            unsigned short curDepth = depth.at<unsigned short>(y,x);
            if( curDepth != CvCapture_OpenNI2::INVALID_PIXEL_VAL )
                disp.at<float>(y,x) = mult / curDepth;
        }
    }
}

IplImage* CvCapture_OpenNI2::retrieveDisparityMap()
{
    if (!depthFrame.isValid())
        return 0;

    cv::Mat disp32;
    computeDisparity_32F(depthFrame, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue);

    disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 );

    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].getIplImagePtr();
}

IplImage* CvCapture_OpenNI2::retrieveDisparityMap_32F()
{
    if (!depthFrame.isValid())
        return 0;

    computeDisparity_32F(depthFrame, outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue);

    return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr();
}

IplImage* CvCapture_OpenNI2::retrieveValidDepthMask()
{
    if (!depthFrame.isValid())
        return 0;

    cv::Mat d;
    getDepthMapFromMetaData(depthFrame, d, noSampleValue, shadowValue);

    outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = d != CvCapture_OpenNI2::INVALID_PIXEL_VAL;

    return outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].getIplImagePtr();
}

inline void getBGRImageFromMetaData( const openni::VideoFrameRef& imageMetaData, cv::Mat& bgrImage )
{
    cv::Mat bufferImage;
    if( imageMetaData.getVideoMode().getPixelFormat() != openni::PIXEL_FORMAT_RGB888 )
        CV_Error( CV_StsUnsupportedFormat, "Unsupported format of grabbed image\n" );

    bgrImage.create(imageMetaData.getHeight(), imageMetaData.getWidth(), CV_8UC3);
    bufferImage.create(imageMetaData.getHeight(), imageMetaData.getWidth(), CV_8UC3);
    bufferImage.data = (uchar*)imageMetaData.getData();

    cv::cvtColor(bufferImage, bgrImage, cv::COLOR_RGB2BGR);
}

IplImage* CvCapture_OpenNI2::retrieveBGRImage()
{
    if( !color.isValid() )
        return 0;

    getBGRImageFromMetaData( colorFrame, outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat );

    return outputMaps[CV_CAP_OPENNI_BGR_IMAGE].getIplImagePtr();
}

IplImage* CvCapture_OpenNI2::retrieveGrayImage()
{
    if (!colorFrame.isValid())
        return 0;

    CV_Assert(colorFrame.getVideoMode().getPixelFormat() == openni::PIXEL_FORMAT_RGB888); // RGB

    cv::Mat bgrImage;
    getBGRImageFromMetaData(colorFrame, bgrImage);
    cv::cvtColor( bgrImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, cv::COLOR_BGR2GRAY );

    return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr();
}

IplImage* CvCapture_OpenNI2::retrieveFrame( int outputType )
{
    IplImage* image = 0;
    CV_Assert( outputType < outputMapsTypesCount && outputType >= 0);

    if( outputType == CV_CAP_OPENNI_DEPTH_MAP )
    {
        image = retrieveDepthMap();
    }
    else if( outputType == CV_CAP_OPENNI_POINT_CLOUD_MAP )
    {
        image = retrievePointCloudMap();
    }
    else if( outputType == CV_CAP_OPENNI_DISPARITY_MAP )
    {
        image = retrieveDisparityMap();
    }
    else if( outputType == CV_CAP_OPENNI_DISPARITY_MAP_32F )
    {
        image = retrieveDisparityMap_32F();
    }
    else if( outputType == CV_CAP_OPENNI_VALID_DEPTH_MASK )
    {
        image = retrieveValidDepthMask();
    }
    else if( outputType == CV_CAP_OPENNI_BGR_IMAGE )
    {
        image = retrieveBGRImage();
    }
    else if( outputType == CV_CAP_OPENNI_GRAY_IMAGE )
    {
        image = retrieveGrayImage();
    }

    return image;
}

CvCapture* cvCreateCameraCapture_OpenNI( int index )
{
    CvCapture_OpenNI2* capture = new CvCapture_OpenNI2( index );

    if( capture->isOpened() )
        return capture;

    delete capture;
    return 0;
}

CvCapture* cvCreateFileCapture_OpenNI( const char* filename )
{
    CvCapture_OpenNI2* capture = new CvCapture_OpenNI2( filename );

    if( capture->isOpened() )
        return capture;

    delete capture;
    return 0;
}
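
// Illustrative sketch (kept out of the build with #if 0): opening a recorded .oni file
// through the C factory above. The file name is hypothetical; the retrieve channel is
// the same CV_CAP_OPENNI_DEPTH_MAP constant dispatched in retrieveFrame().
#if 0
static void openni2_file_capture_sketch()
{
    CvCapture* capture = cvCreateFileCapture_OpenNI("recording.oni");
    if( !capture )
        return;

    while( cvGrabFrame(capture) )
    {
        IplImage* depthMap = cvRetrieveFrame(capture, CV_CAP_OPENNI_DEPTH_MAP); // CV_16UC1 header
        (void)depthMap;
    }
    cvReleaseCapture(&capture);
}
#endif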

#endif