1/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/*
18 * Contains code that is used to capture video frames from a camera device
19 * on Mac. This code uses QTKit API to work with camera devices, and requires
20 * Mac OS at least 10.5
21 */
22
23#import <Cocoa/Cocoa.h>
24#import <QTKit/QTKit.h>
25#import <CoreAudio/CoreAudio.h>
26#include "android/camera/camera-capture.h"
27#include "android/camera/camera-format-converters.h"
28
29#define  E(...)    derror(__VA_ARGS__)
30#define  W(...)    dwarning(__VA_ARGS__)
31#define  D(...)    VERBOSE_PRINT(camera,__VA_ARGS__)
32
33/*******************************************************************************
34 *                     Helper routines
35 ******************************************************************************/
36
37/* Converts internal QT pixel format to a FOURCC value. */
38static uint32_t
39_QTtoFOURCC(uint32_t qt_pix_format)
40{
41  switch (qt_pix_format) {
42    case kCVPixelFormatType_24RGB:
43      return V4L2_PIX_FMT_RGB24;
44
45    case kCVPixelFormatType_24BGR:
46      return V4L2_PIX_FMT_BGR32;
47
48    case kCVPixelFormatType_32ARGB:
49    case kCVPixelFormatType_32RGBA:
50      return V4L2_PIX_FMT_RGB32;
51
52    case kCVPixelFormatType_32BGRA:
53    case kCVPixelFormatType_32ABGR:
54      return V4L2_PIX_FMT_BGR32;
55
56    case kCVPixelFormatType_422YpCbCr8:
57      return V4L2_PIX_FMT_UYVY;
58
59    case kCVPixelFormatType_420YpCbCr8Planar:
60      return V4L2_PIX_FMT_YVU420;
61
62    case 'yuvs':  // kCVPixelFormatType_422YpCbCr8_yuvs - undeclared?
63      return V4L2_PIX_FMT_YUYV;
64
65    default:
66      E("Unrecognized pixel format '%.4s'", (const char*)&qt_pix_format);
67      return 0;
68  }
69}
70
71/*******************************************************************************
72 *                     MacCamera implementation
73 ******************************************************************************/
74
75/* Encapsulates a camera device on MacOS */
76@interface MacCamera : NSObject {
77    /* Capture session. */
78    QTCaptureSession*             capture_session;
79    /* Camera capture device. */
80    QTCaptureDevice*              capture_device;
81    /* Input device registered with the capture session. */
82    QTCaptureDeviceInput*         input_device;
83    /* Output device registered with the capture session. */
84    QTCaptureVideoPreviewOutput*  output_device;
85    /* Current framebuffer. */
86    CVImageBufferRef              current_frame;
87    /* Desired frame width */
88    int                           desired_width;
89    /* Desired frame height */
90    int                           desired_height;
91}
92
93/* Initializes MacCamera instance.
94 * Return:
95 *  Pointer to initialized instance on success, or nil on failure.
96 */
97- (MacCamera*)init;
98
99/* Undoes 'init' */
100- (void)free;
101
102/* Starts capturing video frames.
103 * Param:
104 *  width, height - Requested dimensions for the captured video frames.
105 * Return:
106 *  0 on success, or !=0 on failure.
107 */
108- (int)start_capturing:(int)width:(int)height;
109
110/* Captures a frame from the camera device.
111 * Param:
112 *  framebuffers - Array of framebuffers where to read the frame. Size of this
113 *      array is defined by the 'fbs_num' parameter. Note that the caller must
114 *      make sure that buffers are large enough to contain entire frame captured
115 *      from the device.
116 *  fbs_num - Number of entries in the 'framebuffers' array.
117 * Return:
118 *  0 on success, or non-zero value on failure. There is a special vaule 1
119 *  returned from this routine which indicates that frames are not yet available
120 *  in the device. The client should respond to this value by repeating the
121 *  read, rather than reporting an error.
122 */
123- (int)read_frame:(ClientFrameBuffer*)framebuffers:(int)fbs_num:(float)r_scale:(float)g_scale:(float)b_scale:(float)exp_comp;
124
125@end
126
127@implementation MacCamera
128
129- (MacCamera*)init
130{
131    NSError *error;
132    BOOL success;
133
134    /* Obtain the capture device, make sure it's not used by another
135     * application, and open it. */
136    capture_device =
137        [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
138    if (capture_device == nil) {
139        E("There are no available video devices found.");
140        [self release];
141        return nil;
142    }
143    if ([capture_device isInUseByAnotherApplication]) {
144        E("Default camera device is in use by another application.");
145        [capture_device release];
146        capture_device = nil;
147        [self release];
148        return nil;
149    }
150    success = [capture_device open:&error];
151    if (!success) {
152        E("Unable to open camera device: '%s'",
153          [[error localizedDescription] UTF8String]);
154        [self free];
155        [self release];
156        return nil;
157    }
158
159    /* Create capture session. */
160    capture_session = [[QTCaptureSession alloc] init];
161    if (capture_session == nil) {
162        E("Unable to create capure session.");
163        [self free];
164        [self release];
165        return nil;
166    }
167
168    /* Create an input device and register it with the capture session. */
169    input_device = [[QTCaptureDeviceInput alloc] initWithDevice:capture_device];
170    success = [capture_session addInput:input_device error:&error];
171    if (!success) {
172        E("Unable to initialize input device: '%s'",
173          [[error localizedDescription] UTF8String]);
174        [input_device release];
175        input_device = nil;
176        [self free];
177        [self release];
178        return nil;
179    }
180
181    /* Create an output device and register it with the capture session. */
182    output_device = [[QTCaptureVideoPreviewOutput alloc] init];
183    success = [capture_session addOutput:output_device error:&error];
184    if (!success) {
185        E("Unable to initialize output device: '%s'",
186          [[error localizedDescription] UTF8String]);
187        [output_device release];
188        output_device = nil;
189        [self free];
190        [self release];
191        return nil;
192    }
193    [output_device setDelegate:self];
194
195    return self;
196}
197
198- (void)free
199{
200    /* Uninitialize capture session. */
201    if (capture_session != nil) {
202        /* Make sure that capturing is stopped. */
203        if ([capture_session isRunning]) {
204            [capture_session stopRunning];
205        }
206        /* Detach input and output devices from the session. */
207        if (input_device != nil) {
208            [capture_session removeInput:input_device];
209            [input_device release];
210            input_device = nil;
211        }
212        if (output_device != nil) {
213            [capture_session removeOutput:output_device];
214            [output_device release];
215            output_device = nil;
216        }
217        /* Destroy capture session. */
218        [capture_session release];
219        capture_session = nil;
220    }
221
222    /* Uninitialize capture device. */
223    if (capture_device != nil) {
224        /* Make sure device is not opened. */
225        if ([capture_device isOpen]) {
226            [capture_device close];
227        }
228        [capture_device release];
229        capture_device = nil;
230    }
231
232    /* Release current framebuffer. */
233    if (current_frame != nil) {
234       CVBufferRelease(current_frame);
235       current_frame = nil;
236    }
237}
238
239- (int)start_capturing:(int)width:(int)height
240{
241  if (![capture_session isRunning]) {
242        /* Set desired frame dimensions. */
243        desired_width = width;
244        desired_height = height;
245        [output_device setPixelBufferAttributes:
246          [NSDictionary dictionaryWithObjectsAndKeys:
247              [NSNumber numberWithInt: width], kCVPixelBufferWidthKey,
248              [NSNumber numberWithInt: height], kCVPixelBufferHeightKey,
249              nil]];
250        [capture_session startRunning];
251        return 0;
252  } else if (width == desired_width && height == desired_height) {
253      W("%s: Already capturing %dx%d frames",
254        __FUNCTION__, desired_width, desired_height);
255      return -1;
256  } else {
257      E("%s: Already capturing %dx%d frames. Requested frame dimensions are %dx%d",
258        __FUNCTION__, desired_width, desired_height, width, height);
259      return -1;
260  }
261}
262
263- (int)read_frame:(ClientFrameBuffer*)framebuffers:(int)fbs_num:(float)r_scale:(float)g_scale:(float)b_scale:(float)exp_comp
264{
265    int res = -1;
266
267    /* Frames are pushed by QT in another thread.
268     * So we need a protection here. */
269    @synchronized (self)
270    {
271        if (current_frame != nil) {
272            /* Collect frame info. */
273            const uint32_t pixel_format =
274                _QTtoFOURCC(CVPixelBufferGetPixelFormatType(current_frame));
275            const int frame_width = CVPixelBufferGetWidth(current_frame);
276            const int frame_height = CVPixelBufferGetHeight(current_frame);
277            const size_t frame_size =
278                CVPixelBufferGetBytesPerRow(current_frame) * frame_height;
279
280            /* Get framebuffer pointer. */
281            CVPixelBufferLockBaseAddress(current_frame, 0);
282            const void* pixels = CVPixelBufferGetBaseAddress(current_frame);
283            if (pixels != nil) {
284                /* Convert framebuffer. */
285                res = convert_frame(pixels, pixel_format, frame_size,
286                                    frame_width, frame_height,
287                                    framebuffers, fbs_num,
288                                    r_scale, g_scale, b_scale, exp_comp);
289            } else {
290                E("%s: Unable to obtain framebuffer", __FUNCTION__);
291                res = -1;
292            }
293            CVPixelBufferUnlockBaseAddress(current_frame, 0);
294        } else {
295            /* First frame didn't come in just yet. Let the caller repeat. */
296            res = 1;
297        }
298    }
299
300    return res;
301}
302
303- (void)captureOutput:(QTCaptureOutput*) captureOutput
304                      didOutputVideoFrame:(CVImageBufferRef)videoFrame
305                      withSampleBuffer:(QTSampleBuffer*) sampleBuffer
306                      fromConnection:(QTCaptureConnection*) connection
307{
308    CVImageBufferRef to_release;
309    CVBufferRetain(videoFrame);
310
311    /* Frames are pulled by the client in another thread.
312     * So we need a protection here. */
313    @synchronized (self)
314    {
315        to_release = current_frame;
316        current_frame = videoFrame;
317    }
318    CVBufferRelease(to_release);
319}
320
321@end
322
323/*******************************************************************************
324 *                     CameraDevice routines
325 ******************************************************************************/
326
327typedef struct MacCameraDevice MacCameraDevice;
328/* MacOS-specific camera device descriptor. */
329struct MacCameraDevice {
330    /* Common camera device descriptor. */
331    CameraDevice  header;
332    /* Actual camera device object. */
333    MacCamera*    device;
334};
335
336/* Allocates an instance of MacCameraDevice structure.
337 * Return:
338 *  Allocated instance of MacCameraDevice structure. Note that this routine
339 *  also sets 'opaque' field in the 'header' structure to point back to the
340 *  containing MacCameraDevice instance.
341 */
342static MacCameraDevice*
343_camera_device_alloc(void)
344{
345    MacCameraDevice* cd = (MacCameraDevice*)malloc(sizeof(MacCameraDevice));
346    if (cd != NULL) {
347        memset(cd, 0, sizeof(MacCameraDevice));
348        cd->header.opaque = cd;
349    } else {
350        E("%s: Unable to allocate MacCameraDevice instance", __FUNCTION__);
351    }
352    return cd;
353}
354
355/* Uninitializes and frees MacCameraDevice descriptor.
356 * Note that upon return from this routine memory allocated for the descriptor
357 * will be freed.
358 */
359static void
360_camera_device_free(MacCameraDevice* cd)
361{
362    if (cd != NULL) {
363        if (cd->device != NULL) {
364            [cd->device free];
365            [cd->device release];
366            cd->device = nil;
367        }
368        AFREE(cd);
369    } else {
370        W("%s: No descriptor", __FUNCTION__);
371    }
372}
373
374/* Resets camera device after capturing.
375 * Since new capture request may require different frame dimensions we must
376 * reset frame info cached in the capture window. The only way to do that would
377 * be closing, and reopening it again. */
378static void
379_camera_device_reset(MacCameraDevice* cd)
380{
381    if (cd != NULL && cd->device) {
382        [cd->device free];
383        cd->device = [cd->device init];
384    }
385}
386
387/*******************************************************************************
388 *                     CameraDevice API
389 ******************************************************************************/
390
391CameraDevice*
392camera_device_open(const char* name, int inp_channel)
393{
394    MacCameraDevice* mcd;
395
396    mcd = _camera_device_alloc();
397    if (mcd == NULL) {
398        E("%s: Unable to allocate MacCameraDevice instance", __FUNCTION__);
399        return NULL;
400    }
401    mcd->device = [[MacCamera alloc] init];
402    if (mcd->device == nil) {
403        E("%s: Unable to initialize camera device.", __FUNCTION__);
404        return NULL;
405    }
406    return &mcd->header;
407}
408
409int
410camera_device_start_capturing(CameraDevice* cd,
411                              uint32_t pixel_format,
412                              int frame_width,
413                              int frame_height)
414{
415    MacCameraDevice* mcd;
416
417    /* Sanity checks. */
418    if (cd == NULL || cd->opaque == NULL) {
419        E("%s: Invalid camera device descriptor", __FUNCTION__);
420        return -1;
421    }
422    mcd = (MacCameraDevice*)cd->opaque;
423    if (mcd->device == nil) {
424        E("%s: Camera device is not opened", __FUNCTION__);
425        return -1;
426    }
427
428    return [mcd->device start_capturing:frame_width:frame_height];
429}
430
431int
432camera_device_stop_capturing(CameraDevice* cd)
433{
434    MacCameraDevice* mcd;
435
436    /* Sanity checks. */
437    if (cd == NULL || cd->opaque == NULL) {
438        E("%s: Invalid camera device descriptor", __FUNCTION__);
439        return -1;
440    }
441    mcd = (MacCameraDevice*)cd->opaque;
442    if (mcd->device == nil) {
443        E("%s: Camera device is not opened", __FUNCTION__);
444        return -1;
445    }
446
447    /* Reset capture settings, so next call to capture can set its own. */
448    _camera_device_reset(mcd);
449
450    return 0;
451}
452
453int
454camera_device_read_frame(CameraDevice* cd,
455                         ClientFrameBuffer* framebuffers,
456                         int fbs_num,
457                         float r_scale,
458                         float g_scale,
459                         float b_scale,
460                         float exp_comp)
461{
462    MacCameraDevice* mcd;
463
464    /* Sanity checks. */
465    if (cd == NULL || cd->opaque == NULL) {
466        E("%s: Invalid camera device descriptor", __FUNCTION__);
467        return -1;
468    }
469    mcd = (MacCameraDevice*)cd->opaque;
470    if (mcd->device == nil) {
471        E("%s: Camera device is not opened", __FUNCTION__);
472        return -1;
473    }
474
475    return [mcd->device read_frame:framebuffers:fbs_num:r_scale:g_scale:b_scale:exp_comp];
476}
477
478void
479camera_device_close(CameraDevice* cd)
480{
481    /* Sanity checks. */
482    if (cd == NULL || cd->opaque == NULL) {
483        E("%s: Invalid camera device descriptor", __FUNCTION__);
484    } else {
485        _camera_device_free((MacCameraDevice*)cd->opaque);
486    }
487}
488
489int
490enumerate_camera_devices(CameraInfo* cis, int max)
491{
492/* Array containing emulated webcam frame dimensions.
493 * QT API provides device independent frame dimensions, by scaling frames
494 * received from the device to whatever dimensions were requested for the
495 * output device. So, we can just use a small set of frame dimensions to
496 * emulate.
497 */
498static const CameraFrameDim _emulate_dims[] =
499{
500  /* Emulates 640x480 frame. */
501  {640, 480},
502  /* Emulates 352x288 frame (required by camera framework). */
503  {352, 288},
504  /* Emulates 320x240 frame (required by camera framework). */
505  {320, 240},
506  /* Emulates 176x144 frame (required by camera framework). */
507  {176, 144}
508};
509
510    /* Obtain default video device. QT API doesn't really provide a reliable
511     * way to identify camera devices. There is a QTCaptureDevice::uniqueId
512     * method that supposedly does that, but in some cases it just doesn't
513     * work. Until we figure out a reliable device identification, we will
514     * stick to using only one (default) camera for emulation. */
515    QTCaptureDevice* video_dev =
516        [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
517    if (video_dev == nil) {
518        D("No web cameras are connected to the host.");
519        return 0;
520    }
521
522    /* Obtain pixel format for the device. */
523    NSArray* pix_formats = [video_dev formatDescriptions];
524    if (pix_formats == nil || [pix_formats count] == 0) {
525        E("Unable to obtain pixel format for the default camera device.");
526        [video_dev release];
527        return 0;
528    }
529    const uint32_t qt_pix_format = [[pix_formats objectAtIndex:0] formatType];
530    [pix_formats release];
531
532    /* Obtain FOURCC pixel format for the device. */
533    cis[0].pixel_format = _QTtoFOURCC(qt_pix_format);
534    if (cis[0].pixel_format == 0) {
535        /* Unsupported pixel format. */
536        E("Pixel format '%.4s' reported by the camera device is unsupported",
537          (const char*)&qt_pix_format);
538        [video_dev release];
539        return 0;
540    }
541
542    /* Initialize camera info structure. */
543    cis[0].frame_sizes = (CameraFrameDim*)malloc(sizeof(_emulate_dims));
544    if (cis[0].frame_sizes != NULL) {
545        cis[0].frame_sizes_num = sizeof(_emulate_dims) / sizeof(*_emulate_dims);
546        memcpy(cis[0].frame_sizes, _emulate_dims, sizeof(_emulate_dims));
547        cis[0].device_name = ASTRDUP("webcam0");
548        cis[0].inp_channel = 0;
549        cis[0].display_name = ASTRDUP("webcam0");
550        cis[0].in_use = 0;
551        [video_dev release];
552        return 1;
553    } else {
554        E("Unable to allocate memory for camera information.");
555        [video_dev release];
556        return 0;
557    }
558}
559