V4LCameraAdapter.cpp revision 8c4175849a833e6466c16f05c5b68fbe6d0006c5
1/*
2 * Copyright (C) Texas Instruments - http://www.ti.com/
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17/**
18* @file V4LCameraAdapter.cpp
19*
20* This file maps the Camera Hardware Interface to V4L2.
21*
22*/
23
24
25#include "V4LCameraAdapter.h"
26#include "CameraHal.h"
27#include "TICameraParameters.h"
28#include "DebugUtils.h"
29#include <signal.h>
30#include <stdio.h>
31#include <stdlib.h>
32#include <string.h>
33#include <fcntl.h>
34#include <unistd.h>
35#include <errno.h>
36#include <sys/ioctl.h>
37#include <sys/mman.h>
38#include <sys/select.h>
39#include <linux/videodev.h>
40
41#include <ui/GraphicBuffer.h>
42#include <ui/GraphicBufferMapper.h>
43
44#include <cutils/properties.h>
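// Branch-prediction hint: tells the compiler the wrapped expression is expected to be false.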
45#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
46static int mDebugFps = 0;
47
48#define Q16_OFFSET 16
49
50#define HERE(Msg) {CAMHAL_LOGEB("--=== %s===--\n", Msg);}
51
52namespace Ti {
53namespace Camera {
54
55//Number of frames between framerate recalculations; also reused below as the fixed 30 fps preview rate.
56#define FPS_PERIOD 30
57
58//define this macro to save first few raw frames when starting the preview.
59//#define SAVE_RAW_FRAMES 1
60//#define DUMP_CAPTURE_FRAME 1
61//#define PPM_PER_FRAME_CONVERSION 1
62
63//Function prototypes
64static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
65static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
66static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height );
67
68android::Mutex gV4LAdapterLock;
69char device[15];
70
71
72/*--------------------Camera Adapter Class STARTS here-----------------------------*/
73
74/*--------------------V4L wrapper functions -------------------------------*/
75status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
76    status_t ret = NO_ERROR;
77    errno = 0;
78
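    // Retry the ioctl when it is interrupted by a signal (EINTR) so that
    // callers only see genuine driver errors.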
79    do {
80        ret = ioctl (fd, req, argp);
81    }while (-1 == ret && EINTR == errno);
82
83    return ret;
84}
85
86status_t V4LCameraAdapter::v4lInitMmap(int& count) {
87    status_t ret = NO_ERROR;
88
89    int width, height;
90    mParams.getPreviewSize(&width, &height);
91    jpeg_with_dht_buffer_size = (width * height / 2) + jpgdecoder.readDHTSize();
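    // Reserve room for one MJPEG frame plus the standard Huffman (DHT) tables
    // that appendDHT() inserts before the frame is decoded in previewThread().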
92
93    //First allocate adapter internal buffers at V4L level for USB Cam
94    //These are the buffers from which we will copy the data into overlay buffers
95    /* Check if camera can handle NB_BUFFER buffers */
96    mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
97    mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
98    mVideoInfo->rb.count = count;
99
100    ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
101    if (ret < 0) {
102        CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
103        return ret;
104    }
105
106    count = mVideoInfo->rb.count;
107    for (int i = 0; i < count; i++) {
108
109        memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
110
111        mVideoInfo->buf.index = i;
112        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
113        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
114
115        ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYBUF, &mVideoInfo->buf);
116        if (ret < 0) {
117            CAMHAL_LOGEB("Unable to query buffer (%s)", strerror(errno));
118            return ret;
119        }
120
121        mVideoInfo->mem[i] = mmap (NULL,
122               mVideoInfo->buf.length,
123               PROT_READ | PROT_WRITE,
124               MAP_SHARED,
125               mCameraHandle,
126               mVideoInfo->buf.m.offset);
127
128        CAMHAL_LOGVB(" mVideoInfo->mem[%d]=%p ; mVideoInfo->buf.length = %d", i, mVideoInfo->mem[i], mVideoInfo->buf.length);
129        if (mVideoInfo->mem[i] == MAP_FAILED) {
130            CAMHAL_LOGEB("Unable to map buffer [%d]. (%s)", i, strerror(errno));
131            return -1;
132        }
133
134        if (jpeg_with_dht_buffer[i] != NULL){
135            free(jpeg_with_dht_buffer[i]);
136            jpeg_with_dht_buffer[i] = NULL;
137        }
138        jpeg_with_dht_buffer[i] = (unsigned char *)malloc(jpeg_with_dht_buffer_size);
139    }
140    return ret;
141}
142
143status_t V4LCameraAdapter::v4lInitUsrPtr(int& count) {
144    status_t ret = NO_ERROR;
145
146    mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
147    mVideoInfo->rb.memory = V4L2_MEMORY_USERPTR;
148    mVideoInfo->rb.count = count;
149
150    ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
151    if (ret < 0) {
152        CAMHAL_LOGEB("VIDIOC_REQBUFS failed for USERPTR: %s", strerror(errno));
153        return ret;
154    }
155
156    count = mVideoInfo->rb.count;
157    return ret;
158}
159
160status_t V4LCameraAdapter::v4lStartStreaming () {
161    status_t ret = NO_ERROR;
162    enum v4l2_buf_type bufType;
163
164    if (!mVideoInfo->isStreaming) {
165        bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
166
167        ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMON, &bufType);
168        if (ret < 0) {
169            CAMHAL_LOGEB("StartStreaming: Unable to start capture: %s", strerror(errno));
170            return ret;
171        }
172        mVideoInfo->isStreaming = true;
173    }
174    return ret;
175}
176
177status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
178    status_t ret = NO_ERROR;
179    enum v4l2_buf_type bufType;
180
181    if (mVideoInfo->isStreaming) {
182        bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
183
184        ret = v4lIoctl (mCameraHandle, VIDIOC_STREAMOFF, &bufType);
185        if (ret < 0) {
186            CAMHAL_LOGEB("StopStreaming: Unable to stop capture: %s", strerror(errno));
187            goto EXIT;
188        }
189        mVideoInfo->isStreaming = false;
190
191        /* Unmap buffers */
192        mVideoInfo->buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
193        mVideoInfo->buf.memory = V4L2_MEMORY_MMAP;
194        for (int i = 0; i < nBufferCount; i++) {
195            if (munmap(mVideoInfo->mem[i], mVideoInfo->buf.length) < 0) {
196                CAMHAL_LOGEA("munmap() failed");
197            }
198        }
199
200        //free the memory allocated during REQBUFS, by setting the count=0
201        mVideoInfo->rb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
202        mVideoInfo->rb.memory = V4L2_MEMORY_MMAP;
203        mVideoInfo->rb.count = 0;
204
205        ret = v4lIoctl(mCameraHandle, VIDIOC_REQBUFS, &mVideoInfo->rb);
206        if (ret < 0) {
207            CAMHAL_LOGEB("VIDIOC_REQBUFS failed: %s", strerror(errno));
208            goto EXIT;
209        }
210    }
211EXIT:
212    return ret;
213}
214
215status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
216    status_t ret = NO_ERROR;
217
218    mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
219    ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
220    if (ret < 0) {
221        CAMHAL_LOGEB("VIDIOC_G_FMT Failed: %s", strerror(errno));
222    }
223
224    mVideoInfo->width = width;
225    mVideoInfo->height = height;
226    mVideoInfo->framesizeIn = (width * height << 1);
227    mVideoInfo->formatIn = pix_format;
228
229    mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
230    mVideoInfo->format.fmt.pix.width = width;
231    mVideoInfo->format.fmt.pix.height = height;
232    mVideoInfo->format.fmt.pix.pixelformat = pix_format;
233
234    ret = v4lIoctl(mCameraHandle, VIDIOC_S_FMT, &mVideoInfo->format);
235    if (ret < 0) {
236        CAMHAL_LOGEB("VIDIOC_S_FMT Failed: %s", strerror(errno));
237        return ret;
238    }
239    v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
240    CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
241    return ret;
242}
243
244status_t V4LCameraAdapter::restartPreview ()
245{
246    status_t ret = NO_ERROR;
247    int width = 0;
248    int height = 0;
249    struct v4l2_streamparm streamParams;
250
251    //configure for preview size and pixel format.
252    mParams.getPreviewSize(&width, &height);
253
254    ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
255    if (ret < 0) {
256        CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
257        goto EXIT;
258    }
259
260    ret = v4lInitMmap(mPreviewBufferCount);
261    if (ret < 0) {
262        CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
263        goto EXIT;
264    }
265
266    //set frame rate
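    // timeperframe is numerator/denominator seconds per frame, so 1/FPS_PERIOD
    // requests a fixed 30 fps capture rate from the driver.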
267    streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
268    streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
269    streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
270    streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
271    streamParams.parm.capture.timeperframe.numerator= 1;
272    ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
273    if (ret < 0) {
274        CAMHAL_LOGEB("VIDIOC_S_PARM Failed: %s", strerror(errno));
275        goto EXIT;
276    }
277
278    for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
279
280        v4l2_buffer buf;
281        buf.index = i;
282        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
283        buf.memory = V4L2_MEMORY_MMAP;
284
285        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
286        if (ret < 0) {
287            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
288            goto EXIT;
289        }
290        nQueued++;
291    }
292
293    ret = v4lStartStreaming();
294    CAMHAL_LOGDA("Ready for preview....");
295EXIT:
296    return ret;
297}
298
299/*--------------------Camera Adapter Functions-----------------------------*/
300status_t V4LCameraAdapter::initialize(CameraProperties::Properties* caps)
301{
302    char value[PROPERTY_VALUE_MAX];
303
304    LOG_FUNCTION_NAME;
305    property_get("debug.camera.showfps", value, "0");
306    mDebugFps = atoi(value);
307
308    int ret = NO_ERROR;
309
310    // Allocate memory for video info structure
311    mVideoInfo = (struct VideoInfo *) calloc (1, sizeof (struct VideoInfo));
312    if(!mVideoInfo) {
313        ret = NO_MEMORY;
314        goto EXIT;
315    }
316
317    if ((mCameraHandle = open(device, O_RDWR | O_NONBLOCK) ) == -1) {
318        CAMHAL_LOGEB("Error while opening handle to V4L2 Camera: %s", strerror(errno));
319        ret = BAD_VALUE;
320        goto EXIT;
321    }
322
323    ret = v4lIoctl (mCameraHandle, VIDIOC_QUERYCAP, &mVideoInfo->cap);
324    if (ret < 0) {
325        CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
326        ret = BAD_VALUE;
327        goto EXIT;
328    }
329
330    if ((mVideoInfo->cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
331        CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
332        ret = BAD_VALUE;
333        goto EXIT;
334    }
335
336    if (!(mVideoInfo->cap.capabilities & V4L2_CAP_STREAMING)) {
337        CAMHAL_LOGEA("Error while adapter initialization: Capture device does not support streaming i/o");
338        ret = BAD_VALUE;
339        goto EXIT;
340    }
341
342    // Initialize flags
343    mPreviewing = false;
344    mVideoInfo->isStreaming = false;
345    mRecording = false;
346    mCapturing = false;
347EXIT:
348    LOG_FUNCTION_NAME_EXIT;
349    return ret;
350}
351
352status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::FrameType frameType)
353{
354    status_t ret = NO_ERROR;
355    int idx = -1;
356    LOG_FUNCTION_NAME;
357
358    if ( frameType == CameraFrame::IMAGE_FRAME) { //(1 > mCapturedFrames)
359        // Signal end of image capture
360        if ( NULL != mEndImageCaptureCallback) {
361            CAMHAL_LOGDB("===========Signal End Image Capture==========");
362            mEndImageCaptureCallback(mEndCaptureData);
363        }
364        goto EXIT;
365    }
366    if ( !mVideoInfo->isStreaming ) {
367        goto EXIT;
368    }
369
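    // Look up which V4L buffer index corresponds to this camera buffer so the
    // same driver buffer can be queued back with VIDIOC_QBUF.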
370    for (int xx = 0; xx < NB_BUFFER; xx++){
371        if (mPreviewBufs[xx] == frameBuf){
372            idx = xx;
373            break;
374        }
375    }
376    if ((idx < 0) || (idx >= NB_BUFFER)) {
377        CAMHAL_LOGEB("Frame buffer not found in preview buffer list (idx = %d)", idx);
378        goto EXIT;
379    }
384
385    v4l2_buffer buf;
386    buf.index = idx;
387    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
388    buf.memory = V4L2_MEMORY_MMAP;
389
390    ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
391    if (ret < 0) {
392       CAMHAL_LOGEA("VIDIOC_QBUF Failed");
393       goto EXIT;
394    }
395     nQueued++;
396EXIT:
397    LOG_FUNCTION_NAME_EXIT;
398    return ret;
399
400}
401
402status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params)
403{
404    status_t ret = NO_ERROR;
405    int width, height;
406    struct v4l2_streamparm streamParams;
407
408    LOG_FUNCTION_NAME;
409
410    if(!mPreviewing && !mCapturing) {
411        params.getPreviewSize(&width, &height);
412        CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
413
414        ret = v4lSetFormat( width, height, DEFAULT_PIXEL_FORMAT);
415        if (ret < 0) {
416            CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
417            goto EXIT;
418        }
419        //set frame rate
420        // The frame rate is currently fixed at 30 FPS
421        streamParams.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
422        streamParams.parm.capture.capability = V4L2_CAP_TIMEPERFRAME;
423        streamParams.parm.capture.capturemode = V4L2_MODE_HIGHQUALITY;
424        streamParams.parm.capture.timeperframe.denominator = FPS_PERIOD;
425        streamParams.parm.capture.timeperframe.numerator= 1;
426        ret = v4lIoctl(mCameraHandle, VIDIOC_S_PARM, &streamParams);
427        if (ret < 0) {
428            CAMHAL_LOGEB(" VIDIOC_S_PARM Failed: %s", strerror(errno));
429            goto EXIT;
430        }
431        int actualFps = streamParams.parm.capture.timeperframe.denominator / streamParams.parm.capture.timeperframe.numerator;
432        CAMHAL_LOGDB("Actual FPS set is : %d.", actualFps);
433    }
434
435    // Update the current parameter set
436    mParams = params;
437
438EXIT:
439    LOG_FUNCTION_NAME_EXIT;
440    return ret;
441}
442
443
444void V4LCameraAdapter::getParameters(android::CameraParameters& params)
445{
446    LOG_FUNCTION_NAME;
447
448    // Return the current parameter set
449    params = mParams;
450
451    LOG_FUNCTION_NAME_EXIT;
452}
453
454
455///API to give the buffers to Adapter
456status_t V4LCameraAdapter::useBuffers(CameraMode mode, CameraBuffer *bufArr, int num, size_t length, unsigned int queueable)
457{
458    status_t ret = NO_ERROR;
459
460    LOG_FUNCTION_NAME;
461
462    android::AutoMutex lock(mLock);
463
464    switch(mode)
465        {
466        case CAMERA_PREVIEW:
467            mPreviewBufferCountQueueable = queueable;
468            ret = UseBuffersPreview(bufArr, num);
469            break;
470
471        case CAMERA_IMAGE_CAPTURE:
472            mCaptureBufferCountQueueable = queueable;
473            ret = UseBuffersCapture(bufArr, num);
474            break;
475
476        case CAMERA_VIDEO:
477            //@warn Video capture is not fully supported yet
478            mPreviewBufferCountQueueable = queueable;
479            ret = UseBuffersPreview(bufArr, num);
480            break;
481
482        case CAMERA_MEASUREMENT:
483            break;
484
485        default:
486            break;
487        }
488
489    LOG_FUNCTION_NAME_EXIT;
490
491    return ret;
492}
493
494status_t V4LCameraAdapter::UseBuffersCapture(CameraBuffer *bufArr, int num) {
495    int ret = NO_ERROR;
496
497    LOG_FUNCTION_NAME;
498    if(NULL == bufArr) {
499        ret = BAD_VALUE;
500        goto EXIT;
501    }
502
503    for (int i = 0; i < num; i++) {
504        //Associate each Camera internal buffer with the one from Overlay
505        mCaptureBufs.add(&bufArr[i], i);
506        CAMHAL_LOGDB("capture- buff [%d] = 0x%x ",i, mCaptureBufs.keyAt(i));
507    }
508
509    mCaptureBuffersAvailable.clear();
510    for (int i = 0; i < mCaptureBufferCountQueueable; i++ ) {
511        mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 0);
512    }
513
514    // initial ref count for undequeued buffers is 1 since buffer provider
515    // is still holding on to it
516    for (int i = mCaptureBufferCountQueueable; i < num; i++ ) {
517        mCaptureBuffersAvailable.add(&mCaptureBuffers[i], 1);
518    }
519
520    // Update the capture buffer count
521    mCaptureBufferCount = num;
522EXIT:
523    LOG_FUNCTION_NAME_EXIT;
524    return ret;
525
526}
527
528status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
529{
530    int ret = NO_ERROR;
531    LOG_FUNCTION_NAME;
532
533    if(NULL == bufArr) {
534        ret = BAD_VALUE;
535        goto EXIT;
536    }
537
538    ret = v4lInitMmap(num);
539    if (ret == NO_ERROR) {
540        for (int i = 0; i < num; i++) {
541            //Associate each Camera internal buffer with the one from Overlay
542            mPreviewBufs[i] = &bufArr[i];
543            CAMHAL_LOGDB("Preview- buff [%d] = 0x%x ",i, mPreviewBufs[i]);
544        }
545
546        // Update the preview buffer count
547        mPreviewBufferCount = num;
548    }
549EXIT:
550    LOG_FUNCTION_NAME_EXIT;
551    return ret;
552}
553
554status_t V4LCameraAdapter::takePicture() {
555    status_t ret = NO_ERROR;
556    int width = 0;
557    int height = 0;
558    size_t yuv422i_buff_size = 0;
559    int index = 0;
560    char *fp = NULL;
561    CameraBuffer *buffer = NULL;
562    CameraFrame frame;
563
564    LOG_FUNCTION_NAME;
565
566    android::AutoMutex lock(mCaptureBufsLock);
567
568    if(mCapturing) {
569        CAMHAL_LOGEA("Already Capture in Progress...");
570        ret = BAD_VALUE;
571        goto EXIT;
572    }
573
574    mCapturing = true;
575    mPreviewing = false;
576
577    // Stop preview streaming
578    ret = v4lStopStreaming(mPreviewBufferCount);
579    if (ret < 0 ) {
580        CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
581        goto EXIT;
582    }
583
584    //configure for capture image size and pixel format.
585    mParams.getPictureSize(&width, &height);
586    CAMHAL_LOGDB("Image Capture Size WxH = %dx%d",width,height);
587    yuv422i_buff_size = width * height * 2;
588
589    ret = v4lSetFormat (width, height, DEFAULT_CAPTURE_FORMAT);
590    if (ret < 0) {
591        CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
592        goto EXIT;
593    }
594
595    ret = v4lInitMmap(mCaptureBufferCount);
596    if (ret < 0) {
597        CAMHAL_LOGEB("v4lInitMmap Failed: %s", strerror(errno));
598        goto EXIT;
599    }
600
601    for (int i = 0; i < mCaptureBufferCountQueueable; i++) {
602
603       v4l2_buffer buf;
604       buf.index = i;
605       buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
606       buf.memory = V4L2_MEMORY_MMAP;
607
608       ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
609       if (ret < 0) {
610           CAMHAL_LOGEA("VIDIOC_QBUF Failed");
611           ret = BAD_VALUE;
612           goto EXIT;
613       }
614       nQueued++;
615    }
616
617    ret = v4lStartStreaming();
618    if (ret < 0) {
619        CAMHAL_LOGEB("v4lStartStreaming Failed: %s", strerror(errno));
620        goto EXIT;
621    }
622
623    CAMHAL_LOGDA("Streaming started for Image Capture");
624
625    //get the frame and send to encode as JPG
626    int filledLen;
627    fp = this->GetFrame(index, filledLen);
628    if(!fp) {
629        CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
630        ret = BAD_VALUE;
631        goto EXIT;
632    }
633
634    CAMHAL_LOGDA("::Capture Frame received from V4L::");
635    buffer = mCaptureBufs.keyAt(index);
636    CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d", index, buffer->opaque, yuv422i_buff_size);
637
638    //copy the yuv422i data to the image buffer.
639    memcpy(buffer->opaque, fp, yuv422i_buff_size);
640
641#ifdef DUMP_CAPTURE_FRAME
642    //dump the YUV422 buffer in to a file
643    //a folder should have been created at /data/misc/camera/raw/
644    {
645        int fd =-1;
646        fd = open("/data/misc/camera/raw/captured_yuv422i_dump.yuv", O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
647        if(fd < 0) {
648            CAMHAL_LOGEB("Unable to open file: %s",  strerror(fd));
649        }
650        else {
651            write(fd, fp, yuv422i_buff_size );
652            close(fd);
653            CAMHAL_LOGDB("::Captured Frame dumped at /data/misc/camera/raw/captured_yuv422i_dump.yuv::");
654        }
655    }
656#endif
657
658    CAMHAL_LOGDA("::sending capture frame to encoder::");
659    frame.mFrameType = CameraFrame::IMAGE_FRAME;
660    frame.mBuffer = buffer;
661    frame.mLength = yuv422i_buff_size;
662    frame.mWidth = width;
663    frame.mHeight = height;
664    frame.mAlignment = width*2;
665    frame.mOffset = 0;
666    frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
667    frame.mFrameMask = (unsigned int)CameraFrame::IMAGE_FRAME;
668    frame.mQuirks |= CameraFrame::ENCODE_RAW_YUV422I_TO_JPEG;
669    frame.mQuirks |= CameraFrame::FORMAT_YUV422I_YUYV;
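    // The quirks tell the downstream subscriber that this buffer carries raw
    // YUV422I (YUYV) data that still has to be JPEG encoded.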
670
671    ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
672    if (ret != NO_ERROR) {
673        CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
674    } else {
675        ret = sendFrameToSubscribers(&frame);
676    }
677
678    // Stop streaming after image capture
679    ret = v4lStopStreaming(mCaptureBufferCount);
680    if (ret < 0 ) {
681        CAMHAL_LOGEB("v4lStopStreaming Failed: %s", strerror(errno));
682        goto EXIT;
683    }
684
685    ret = restartPreview();
686EXIT:
687    LOG_FUNCTION_NAME_EXIT;
688    return ret;
689}
690
691status_t V4LCameraAdapter::stopImageCapture()
692{
693    status_t ret = NO_ERROR;
694    LOG_FUNCTION_NAME;
695
696    //Release image buffers
697    if ( NULL != mReleaseImageBuffersCallback ) {
698        mReleaseImageBuffersCallback(mReleaseData);
699    }
700    mCaptureBufs.clear();
701
702    mCapturing = false;
703    mPreviewing = true;
704    LOG_FUNCTION_NAME_EXIT;
705    return ret;
706}
707
708status_t V4LCameraAdapter::autoFocus()
709{
710    status_t ret = NO_ERROR;
711    LOG_FUNCTION_NAME;
712
713    //autoFocus is not implemented. Just return.
714    LOG_FUNCTION_NAME_EXIT;
715    return ret;
716}
717
718status_t V4LCameraAdapter::startPreview()
719{
720    status_t ret = NO_ERROR;
721
722    LOG_FUNCTION_NAME;
723    android::AutoMutex lock(mPreviewBufsLock);
724
725    if(mPreviewing) {
726        ret = BAD_VALUE;
727        goto EXIT;
728    }
729
730    for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
731
732        v4l2_buffer buf;
733        buf.index = i;
734        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
735        buf.memory = V4L2_MEMORY_MMAP;
736
737        ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
738        if (ret < 0) {
739            CAMHAL_LOGEA("VIDIOC_QBUF Failed");
740            goto EXIT;
741        }
742        nQueued++;
743    }
744
745    ret = v4lStartStreaming();
746
747    // Create and start preview thread for receiving buffers from V4L Camera
748    if(!mCapturing) {
749        mPreviewThread = new PreviewThread(this);
750        CAMHAL_LOGDA("Created preview thread");
751    }
752
753    //Update the flag to indicate we are previewing
754    mPreviewing = true;
755    mCapturing = false;
756
757EXIT:
758    LOG_FUNCTION_NAME_EXIT;
759    return ret;
760}
761
762status_t V4LCameraAdapter::stopPreview()
763{
764    enum v4l2_buf_type bufType;
765    int ret = NO_ERROR;
766
767    LOG_FUNCTION_NAME;
768    android::AutoMutex lock(mStopPreviewLock);
769
770    if(!mPreviewing) {
771        return NO_INIT;
772    }
773    mPreviewing = false;
774
775    ret = v4lStopStreaming(mPreviewBufferCount);
776    if (ret < 0) {
777        CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
778    }
779
780    nQueued = 0;
781    nDequeued = 0;
782    mFramesWithEncoder = 0;
783
784    mPreviewThread->requestExitAndWait();
785    mPreviewThread.clear();
786
787    LOG_FUNCTION_NAME_EXIT;
788    return ret;
789}
790
791char * V4LCameraAdapter::GetFrame(int &index, int &filledLen)
792{
793    int ret = NO_ERROR;
794    LOG_FUNCTION_NAME;
795
796    v4l2_buffer buf;
797    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
798    buf.memory = V4L2_MEMORY_MMAP;
799
800    /* DQ */
801    // Some V4L drivers, notably uvc, protect each incoming call with
802    // a driver-wide mutex.  If we use poll() or blocking VIDIOC_DQBUF ioctl
803    // here then we sometimes would run into a deadlock on the VIDIOC_QBUF ioctl.
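    // The device is opened with O_NONBLOCK, so VIDIOC_DQBUF returns -1/EAGAIN
    // until a filled buffer is available; keep retrying and bail out if
    // streaming is stopped from another thread.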
804    while(true) {
805      if(!mVideoInfo->isStreaming) {
806        return NULL;
807      }
808
809      ret = v4lIoctl(mCameraHandle, VIDIOC_DQBUF, &buf);
810      if((ret == 0) || (errno != EAGAIN)) {
811        break;
812      }
813    }
814
815    if (ret < 0) {
816        CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
817        return NULL;
818    }
819    nDequeued++;
820
821    index = buf.index;
822    filledLen = buf.bytesused;
823
824    LOG_FUNCTION_NAME_EXIT;
825    return (char *)mVideoInfo->mem[buf.index];
826}
827
828//API to get the frame size required to be allocated. This size is used to override the size passed
829//by camera service when VSTAB/VNF is turned ON for example
830status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
831{
832    status_t ret = NO_ERROR;
833    LOG_FUNCTION_NAME;
834
835    // Just return the current preview size, nothing more to do here.
836    mParams.getPreviewSize(( int * ) &width,
837                           ( int * ) &height);
838
839    LOG_FUNCTION_NAME_EXIT;
840
841    return ret;
842}
843
844status_t V4LCameraAdapter::getFrameDataSize(size_t &dataFrameSize, size_t bufferCount)
845{
846    // We don't support meta data, so simply return
847    return NO_ERROR;
848}
849
850status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t bufferCount)
851{
852    int width = 0;
853    int height = 0;
854    int bytesPerPixel = 2; // for YUV422i; default pixel format
855
856    LOG_FUNCTION_NAME;
857
858    mParams.getPictureSize( &width, &height );
859    frame.mLength = width * height * bytesPerPixel;
860    frame.mWidth = width;
861    frame.mHeight = height;
862    frame.mAlignment = width * bytesPerPixel;
863
864    CAMHAL_LOGDB("Picture size: W x H = %u x %u (size=%u bytes, alignment=%u bytes)",
865                 frame.mWidth, frame.mHeight, frame.mLength, frame.mAlignment);
866    LOG_FUNCTION_NAME_EXIT;
867    return NO_ERROR;
868}
869
870static void debugShowFPS()
871{
872    static int mFrameCount = 0;
873    static int mLastFrameCount = 0;
874    static nsecs_t mLastFpsTime = 0;
875    static float mFps = 0;
876    if(mDebugFps) {
877        mFrameCount++;
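        // Recompute the average once every 32 frames (0x1F mask).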
878        if (!(mFrameCount & 0x1F)) {
879            nsecs_t now = systemTime();
880            nsecs_t diff = now - mLastFpsTime;
881            mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
882            mLastFpsTime = now;
883            mLastFrameCount = mFrameCount;
884            CAMHAL_LOGD("Camera %d Frames, %f FPS", mFrameCount, mFps);
885        }
886    }
887}
888
889status_t V4LCameraAdapter::recalculateFPS()
890{
891    float currentFPS;
892
893    mFrameCount++;
894
895    if ( ( mFrameCount % FPS_PERIOD ) == 0 )
896        {
897        nsecs_t now = systemTime();
898        nsecs_t diff = now - mLastFPSTime;
899        currentFPS =  ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
900        mLastFPSTime = now;
901        mLastFrameCount = mFrameCount;
902
903        if ( 1 == mIter )
904            {
905            mFPS = currentFPS;
906            }
907        else
908            {
909            //cumulative moving average
910            mFPS = mLastFPS + (currentFPS - mLastFPS)/mIter;
911            }
912
913        mLastFPS = mFPS;
914        mIter++;
915        }
916
917    return NO_ERROR;
918}
919
920void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
921{
922    LOG_FUNCTION_NAME;
923
924    LOG_FUNCTION_NAME_EXIT;
925}
926
927
928V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
929{
930    LOG_FUNCTION_NAME;
931
932    // Only basic member initialization here; the device is set up in initialize().
933    mFramesWithEncoder = 0;
934    jpeg_with_dht_buffer_size = 0;
935    for (int i = 0; i < NB_BUFFER; i++) jpeg_with_dht_buffer[i] = NULL;
936
937    LOG_FUNCTION_NAME_EXIT;
938}
939
940V4LCameraAdapter::~V4LCameraAdapter()
941{
942    LOG_FUNCTION_NAME;
943
944    // Close the camera handle and free the video info structure
945    close(mCameraHandle);
946
947    if (mVideoInfo)
948      {
949        free(mVideoInfo);
950        mVideoInfo = NULL;
951      }
952
953    for (int i = 0; i < NB_BUFFER; i++) {
954        if (jpeg_with_dht_buffer[i] != NULL){
955            free(jpeg_with_dht_buffer[i]);
956            jpeg_with_dht_buffer[i] = NULL;
957        }
958    }
959
960    LOG_FUNCTION_NAME_EXIT;
961}
962
963static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size ) {
964    //convert YUV422I yuyv to uyvy format.
965    uint32_t *bf = (uint32_t*)src;
966    uint32_t *dst = (uint32_t*)dest;
967
968    LOG_FUNCTION_NAME;
969
970    if (!src || !dest) {
971        return;
972    }
973
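    // Each 32-bit word holds two pixels as Y0 U Y1 V; swapping adjacent bytes
    // pairwise produces U Y0 V Y1, i.e. the UYVY ordering.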
974    for(size_t i = 0; i < size; i = i+4)
975    {
976        dst[0] = ((bf[0] & 0x00FF00FF) << 8) | ((bf[0] & 0xFF00FF00) >> 8);
977        bf++;
978        dst++;
979    }
980
981    LOG_FUNCTION_NAME_EXIT;
982}
983
984static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height ) {
985    //convert YUV422I to YUV420 NV12 format and copies directly to preview buffers (Tiler memory).
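    // YUYV keeps luma at even byte offsets and chroma at odd offsets. The Y
    // samples of every row and the UV samples of every second row are copied
    // into the two NV12 planes of the stride-aligned destination buffer.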
986    int stride = 4096;
987    unsigned char *bf = src;
988    unsigned char *dst_y = dest;
989    unsigned char *dst_uv = dest + ( height * stride);
990#ifdef PPM_PER_FRAME_CONVERSION
991    static int frameCount = 0;
992    static nsecs_t ppm_diff = 0;
993    nsecs_t ppm_start  = systemTime();
994#endif
995
996    LOG_FUNCTION_NAME;
997
998    if (width % 16 ) {
999        for(int i = 0; i < height; i++) {
1000            for(int j = 0; j < width; j++) {
1001                *dst_y = *bf;
1002                dst_y++;
1003                bf = bf + 2;
1004            }
1005            dst_y += (stride - width);
1006        }
1007
1008        bf = src;
1009        bf++;  //UV sample
1010        for(int i = 0; i < height/2; i++) {
1011            for(int j=0; j<width; j++) {
1012                *dst_uv = *bf;
1013                dst_uv++;
1014                bf = bf + 2;
1015            }
1016            bf = bf + width*2;
1017            dst_uv = dst_uv + (stride - width);
1018        }
1019    } else {
1020        //neon conversion
1021        for(int i = 0; i < height; i++) {
1022            int n = width;
1023            int skip = i & 0x1;       // skip uv elements for the odd rows
1024            asm volatile (
1025                "   pld [%[src], %[src_stride], lsl #2]                         \n\t"
1026                "   cmp %[n], #16                                               \n\t"
1027                "   blt 5f                                                      \n\t"
1028                "0: @ 16 pixel copy                                             \n\t"
1029                "   vld2.8  {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv..      \n\t"
1030                "                                @ now q0 = y q1 = uv           \n\t"
1031                "   vst1.32   {d0,d1}, [%[dst_y]]!                              \n\t"
1032                "   cmp    %[skip], #0                                          \n\t"
1033                "   bne 1f                                                      \n\t"
1034                "   vst1.32  {d2,d3},[%[dst_uv]]!                               \n\t"
1035                "1: @ skip odd rows for UV                                      \n\t"
1036                "   sub %[n], %[n], #16                                         \n\t"
1037                "   cmp %[n], #16                                               \n\t"
1038                "   bge 0b                                                      \n\t"
1039                "5: @ end                                                       \n\t"
1040#ifdef NEEDS_ARM_ERRATA_754319_754320
1041                "   vmov s0,s0  @ add noop for errata item                      \n\t"
1042#endif
1043                : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
1044                : [src_stride] "r" (width), [skip] "r" (skip)
1045                : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
1046            );
1047            dst_y = dst_y + (stride - width);
1048            if (skip == 0) {
1049                dst_uv = dst_uv + (stride - width);
1050            }
1051        } //end of for()
1052    }
1053
1054#ifdef PPM_PER_FRAME_CONVERSION
1055    ppm_diff += (systemTime() - ppm_start);
1056    frameCount++;
1057
1058    if (frameCount >= 30) {
1059        ppm_diff = ppm_diff / frameCount;
1060        LOGD("PPM: YUV422i to NV12 Conversion(%d x %d): %llu us ( %llu ms )", width, height,
1061                ns2us(ppm_diff), ns2ms(ppm_diff) );
1062        ppm_diff = 0;
1063        frameCount = 0;
1064    }
1065#endif
1066
1067    LOG_FUNCTION_NAME_EXIT;
1068}
1069
1070static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height ) {
1071    //convert YUV422I to YUV420 NV12 format.
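    // Same packing as the Tiler variant above, but written to a contiguous
    // NV12 buffer (used here only for the raw frame dump).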
1072    unsigned char *bf = src;
1073    unsigned char *dst_y = dest;
1074    unsigned char *dst_uv = dest + (width * height);
1075
1076    LOG_FUNCTION_NAME;
1077
1078    if (width % 16 ) {
1079        for(int i = 0; i < height; i++) {
1080            for(int j = 0; j < width; j++) {
1081                *dst_y = *bf;
1082                dst_y++;
1083                bf = bf + 2;
1084            }
1085        }
1086
1087        bf = src;
1088        bf++;  //UV sample
1089        for(int i = 0; i < height/2; i++) {
1090            for(int j=0; j<width; j++) {
1091                *dst_uv = *bf;
1092                dst_uv++;
1093                bf = bf + 2;
1094            }
1095            bf = bf + width*2;
1096        }
1097    } else {
1098        //neon conversion
1099        for(int i = 0; i < height; i++) {
1100            int n = width;
1101            int skip = i & 0x1;       // skip uv elements for the odd rows
1102            asm volatile (
1103                "   pld [%[src], %[src_stride], lsl #2]                         \n\t"
1104                "   cmp %[n], #16                                               \n\t"
1105                "   blt 5f                                                      \n\t"
1106                "0: @ 16 pixel copy                                             \n\t"
1107                "   vld2.8  {q0, q1} , [%[src]]! @ q0 = yyyy.. q1 = uvuv..      \n\t"
1108                "                                @ now q0 = y q1 = uv           \n\t"
1109                "   vst1.32   {d0,d1}, [%[dst_y]]!                              \n\t"
1110                "   cmp    %[skip], #0                                          \n\t"
1111                "   bne 1f                                                      \n\t"
1112                "   vst1.32  {d2,d3},[%[dst_uv]]!                               \n\t"
1113                "1: @ skip odd rows for UV                                      \n\t"
1114                "   sub %[n], %[n], #16                                         \n\t"
1115                "   cmp %[n], #16                                               \n\t"
1116                "   bge 0b                                                      \n\t"
1117                "5: @ end                                                       \n\t"
1118#ifdef NEEDS_ARM_ERRATA_754319_754320
1119                "   vmov s0,s0  @ add noop for errata item                      \n\t"
1120#endif
1121                : [dst_y] "+r" (dst_y), [dst_uv] "+r" (dst_uv), [src] "+r" (src), [n] "+r" (n)
1122                : [src_stride] "r" (width), [skip] "r" (skip)
1123                : "cc", "memory", "q0", "q1", "q2", "d0", "d1", "d2", "d3"
1124            );
1125        }
1126    }
1127
1128    LOG_FUNCTION_NAME_EXIT;
1129}
1130
1131#ifdef SAVE_RAW_FRAMES
1132void saveFile(unsigned char* buff, int buff_size) {
1133    static int      counter = 1;
1134    int             fd = -1;
1135    char            fn[256];
1136
1137    LOG_FUNCTION_NAME;
1138    if (counter > 3) {
1139        return;
1140    }
1141    //dump nv12 buffer
1142    counter++;
1143    sprintf(fn, "/data/misc/camera/raw/nv12_dump_%03d.yuv", counter);
1144    CAMHAL_LOGEB("Dumping nv12 frame to a file : %s.", fn);
1145
1146    fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
1147    if(fd < 0) {
1148        CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
1149        return;
1150    }
1151
1152    write(fd, buff, buff_size );
1153    close(fd);
1154
1155    LOG_FUNCTION_NAME_EXIT;
1156}
1157#endif
1158
1159/* Preview Thread */
1160// ---------------------------------------------------------------------------
1161
1162int V4LCameraAdapter::previewThread()
1163{
1164    status_t ret = NO_ERROR;
1165    int width, height;
1166    CameraFrame frame;
1167    void *y_uv[2];
1168    int index = 0;
1169    int filledLen = 0;
1170    int stride = 4096;
1171    char *fp = NULL;
1172
1173    mParams.getPreviewSize(&width, &height);
1174    android::Mutex::Autolock lock(mSubscriberLock);
1175
1176    if (mPreviewing) {
1177
1178        fp = this->GetFrame(index, filledLen);
1179        if(!fp) {
1180            ret = BAD_VALUE;
1181            goto EXIT;
1182        }
1183
1184        CameraBuffer *buffer = mPreviewBufs[index];
1185        CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
1186        if (!lframe) {
1187            ret = BAD_VALUE;
1188            goto EXIT;
1189        }
1190
1191        debugShowFPS();
1192
1193        if ( mFrameSubscribers.size() == 0 ) {
1194            ret = BAD_VALUE;
1195            goto EXIT;
1196        }
1197
1198        if ( DEFAULT_PIXEL_FORMAT == V4L2_PIX_FMT_MJPEG ) {
1199            /*
1200            MJPEG frames do not include the Huffman tables. MJPEG compressors use standard tables,
1201            and they are not included in the stream to decrease the bandwidth. Therefore, the
1202            Huffman table must be concatenated onto the start of a motion JPEG image to form a
1203            valid still JPEG image.
1204            */
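            // appendDHT() writes a self-contained JPEG into jpeg_with_dht_buffer[index],
            // which is then decoded directly into the preview buffer below.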
1205            int final_jpg_sz = jpgdecoder.appendDHT((unsigned char*)fp, filledLen,
1206                                    jpeg_with_dht_buffer[index], jpeg_with_dht_buffer_size);
1207            if (!jpgdecoder.decode(jpeg_with_dht_buffer[index], final_jpg_sz, (unsigned char*)lframe->mYuv[0], 4096)) {
1208                CAMHAL_LOGEA("Error while decoding JPEG");
1209            }
1210        }
1211        else if ( DEFAULT_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV )
1212        {
1213            y_uv[0] = (void*) lframe->mYuv[0];
1214            //y_uv[1] = (void*) lframe->mYuv[1];
1215            //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);
1216            convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
1217            CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );
1218
1219#ifdef SAVE_RAW_FRAMES
1220            unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
1221            //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
1222            convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
1223            saveFile( nv12_buff, ((width*height)*3/2) );
1224            free (nv12_buff);
1225#endif
1226        }
1227
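        // Fill in the preview frame descriptor; the NV12 output occupies
        // width*height*3/2 bytes (full Y plane plus half-height interleaved UV).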
1228        frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
1229        frame.mBuffer = buffer;
1230        frame.mLength = width*height*3/2;
1231        frame.mAlignment = stride;
1232        frame.mOffset = 0;
1233        frame.mTimestamp = systemTime(SYSTEM_TIME_MONOTONIC);
1234        frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;
1235
1236        if (mRecording)
1237        {
1238            frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
1239            mFramesWithEncoder++;
1240        }
1241
1242        ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
1243        if (ret != NO_ERROR) {
1244            CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
1245        } else {
1246            ret = sendFrameToSubscribers(&frame);
1247        }
1248    }
1249EXIT:
1250
1251    return ret;
1252}
1253
1254//scan for video devices
1255void detectVideoDevice(char** video_device_list, int& num_device) {
1256    char dir_path[20];
1257    char* filename;
1258    char** dev_list = video_device_list;
1259    DIR *d;
1260    struct dirent *dir;
1261    int index = 0;
1262
1263    strcpy(dir_path, DEVICE_PATH);
1264    d = opendir(dir_path);
1265    if(d) {
1266        //read each entry in the /dev/ and find if there is videox entry.
1267        while ((dir = readdir(d)) != NULL) {
1268            filename = dir->d_name;
1269            if (strncmp(filename, DEVICE_NAME, 5) == 0) {
1270                strcpy(dev_list[index],DEVICE_PATH);
1271                strncat(dev_list[index],filename,sizeof(DEVICE_NAME));
1272                index++;
1273            }
1274       } //end of while()
1275       closedir(d);
1276       num_device = index;
1277
1278       for(int i=0; i<index; i++){
1279           CAMHAL_LOGDB("Video device list::dev_list[%d]= %s",i,dev_list[i]);
1280       }
1281    }
1282}
1283
1284extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
1285{
1286    CameraAdapter *adapter = NULL;
1287    android::AutoMutex lock(gV4LAdapterLock);
1288
1289    LOG_FUNCTION_NAME;
1290
1291    adapter = new V4LCameraAdapter(sensor_index);
1292    if ( adapter ) {
1293        CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
1294    } else {
1295        CAMHAL_LOGEA("V4L Camera adapter create failed for sensor index = %d!",sensor_index);
1296    }
1297
1298    LOG_FUNCTION_NAME_EXIT;
1299
1300    return adapter;
1301}
1302
1303extern "C" status_t V4LCameraAdapter_Capabilities(
1304        CameraProperties::Properties * const properties_array,
1305        const int starting_camera, const int max_camera, int & supportedCameras)
1306{
1307    status_t ret = NO_ERROR;
1308    struct v4l2_capability cap;
1309    int tempHandle = -1;
1310    int num_cameras_supported = 0;
1311    char device_list[5][15];
1312    char* video_device_list[5];
1313    int num_v4l_devices = 0;
1314    int sensorId = 0;
1315    CameraProperties::Properties* properties = NULL;
1316
1317    LOG_FUNCTION_NAME;
1318
1319    supportedCameras = 0;
1320    memset((void*)&cap, 0, sizeof(v4l2_capability));
1321
1322    if (!properties_array) {
1323        CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
1324        LOG_FUNCTION_NAME_EXIT;
1325        return BAD_VALUE;
1326    }
1327
1328    for (int i = 0; i < 5; i++) {
1329        video_device_list[i] = device_list[i];
1330    }
1331    //look for the connected video devices
1332    detectVideoDevice(video_device_list, num_v4l_devices);
1333
1334    for (int i = 0; i < num_v4l_devices; i++) {
1335        if ( (starting_camera + num_cameras_supported) < max_camera) {
1336            sensorId = starting_camera + num_cameras_supported;
1337
1338            CAMHAL_LOGDB("Opening device[%d] = %s..",i, video_device_list[i]);
1339            if ((tempHandle = open(video_device_list[i], O_RDWR)) == -1) {
1340                CAMHAL_LOGEB("Error while opening handle to V4L2 Camera(%s): %s",video_device_list[i], strerror(errno));
1341                continue;
1342            }
1343
1344            ret = ioctl (tempHandle, VIDIOC_QUERYCAP, &cap);
1345            if (ret < 0) {
1346                CAMHAL_LOGEA("Error when querying the capabilities of the V4L Camera");
1347                close(tempHandle);
1348                continue;
1349            }
1350
1351            //check for video capture devices
1352            if ((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
1353                CAMHAL_LOGEA("Error while adapter initialization: video capture not supported.");
1354                close(tempHandle);
1355                continue;
1356            }
1357
1358            strcpy(device, video_device_list[i]);
1359            properties = properties_array + starting_camera + num_cameras_supported;
1360
1361            //fetch capabilities for this camera
1362            ret = V4LCameraAdapter::getCaps( sensorId, properties, tempHandle );
1363            if (ret < 0) {
1364                CAMHAL_LOGEA("Error while getting capabilities.");
1365                close(tempHandle);
1366                continue;
1367            }
1368
1369            num_cameras_supported++;
1370
1371        }
1372        //For now exit this loop once a valid video capture device is found.
1373    //TODO: find all V4L capture devices and query their capabilities
1374        break;
1375    }//end of for() loop
1376
1377    supportedCameras = num_cameras_supported;
1378    CAMHAL_LOGDB("Number of V4L cameras detected =%d", num_cameras_supported);
1379
1380EXIT:
1381    LOG_FUNCTION_NAME_EXIT;
1382    close(tempHandle);
1383    return NO_ERROR;
1384}
1385
1386} // namespace Camera
1387} // namespace Ti
1388
1389
1390/*--------------------Camera Adapter Class ENDS here-----------------------------*/
1391
1392