ExynosCameraHWInterface2.cpp revision 66422b5e6af560e5a19cb75f8b0c1037d28e0993
1/*
2**
3** Copyright 2008, The Android Open Source Project
4** Copyright 2012, Samsung Electronics Co. LTD
5**
6** Licensed under the Apache License, Version 2.0 (the "License");
7** you may not use this file except in compliance with the License.
8** You may obtain a copy of the License at
9**
10**     http://www.apache.org/licenses/LICENSE-2.0
11**
12** Unless required by applicable law or agreed to in writing, software
13** distributed under the License is distributed on an "AS IS" BASIS,
14** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15** See the License for the specific language governing permissions and
16** limitations under the License.
17*/
18
19/*!
20 * \file      ExynosCameraHWInterface2.cpp
21 * \brief     source file for Android Camera API 2.0 HAL
22 * \author    Sungjoong Kang(sj3.kang@samsung.com)
23 * \date      2012/07/10
24 *
25 * <b>Revision History: </b>
26 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27 *   Initial Release
28 *
29 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30 *   2nd Release
31 *
32 */
33
34//#define LOG_NDEBUG 0
35#define LOG_TAG "ExynosCameraHAL2"
36#include <utils/Log.h>
37
38#include "ExynosCameraHWInterface2.h"
39#include "exynos_format.h"
40
41namespace android {
42
43void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
44{
45    int nw;
46    int cnt = 0;
47    uint32_t written = 0;
48
49    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
50    int fd = open(fname, O_RDWR | O_CREAT, 0644);
51    if (fd < 0) {
52        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
53        return;
54    }
55
56    ALOGV("writing %d bytes to file [%s]", size, fname);
57    while (written < size) {
58        nw = ::write(fd, buf + written, size - written);
59        if (nw < 0) {
60            ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
61            break;
62        }
63        written += nw;
64        cnt++;
65    }
66    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
67    ::close(fd);
68}
69
70int get_pixel_depth(uint32_t fmt)
71{
72    int depth = 0;
73
74    switch (fmt) {
75    case V4L2_PIX_FMT_JPEG:
76        depth = 8;
77        break;
78
79    case V4L2_PIX_FMT_NV12:
80    case V4L2_PIX_FMT_NV21:
81    case V4L2_PIX_FMT_YUV420:
82    case V4L2_PIX_FMT_YVU420M:
83    case V4L2_PIX_FMT_NV12M:
84    case V4L2_PIX_FMT_NV12MT:
85        depth = 12;
86        break;
87
88    case V4L2_PIX_FMT_RGB565:
89    case V4L2_PIX_FMT_YUYV:
90    case V4L2_PIX_FMT_YVYU:
91    case V4L2_PIX_FMT_UYVY:
92    case V4L2_PIX_FMT_VYUY:
93    case V4L2_PIX_FMT_NV16:
94    case V4L2_PIX_FMT_NV61:
95    case V4L2_PIX_FMT_YUV422P:
96    case V4L2_PIX_FMT_SBGGR10:
97    case V4L2_PIX_FMT_SBGGR12:
98    case V4L2_PIX_FMT_SBGGR16:
99        depth = 16;
100        break;
101
102    case V4L2_PIX_FMT_RGB32:
103        depth = 32;
104        break;
105    default:
106        ALOGE("Get depth failed(format : %d)", fmt);
107        break;
108    }
109
110    return depth;
111}
112
113int cam_int_s_fmt(node_info_t *node)
114{
115    struct v4l2_format v4l2_fmt;
116    unsigned int framesize;
117    int ret;
118
119    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
120
121    v4l2_fmt.type = node->type;
122    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
123
124    if (node->planes >= 1) {
125        v4l2_fmt.fmt.pix_mp.width       = node->width;
126        v4l2_fmt.fmt.pix_mp.height      = node->height;
127        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
128        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
129    } else {
130        ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
131    }
132
133    /* Set up for capture */
134    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
135
136    if (ret < 0)
137        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
138
139
140    return ret;
141}
142
143int cam_int_reqbufs(node_info_t *node)
144{
145    struct v4l2_requestbuffers req;
146    int ret;
147
148    req.count = node->buffers;
149    req.type = node->type;
150    req.memory = node->memory;
151
152    ret = exynos_v4l2_reqbufs(node->fd, &req);
153
154    if (ret < 0)
155        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
156
157    return req.count;
158}
159
160int cam_int_qbuf(node_info_t *node, int index)
161{
162    struct v4l2_buffer v4l2_buf;
163    struct v4l2_plane planes[VIDEO_MAX_PLANES];
164    int i;
165    int ret = 0;
166
167    v4l2_buf.m.planes   = planes;
168    v4l2_buf.type       = node->type;
169    v4l2_buf.memory     = node->memory;
170    v4l2_buf.index      = index;
171    v4l2_buf.length     = node->planes;
172
173    for(i = 0; i < node->planes; i++){
174        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
175        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
176    }
177
178    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
179
180    if (ret < 0)
181        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
182
183    return ret;
184}
185
186int cam_int_streamon(node_info_t *node)
187{
188    enum v4l2_buf_type type = node->type;
189    int ret;
190
191
192    ret = exynos_v4l2_streamon(node->fd, type);
193
194    if (ret < 0)
195        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
196
197    ALOGV("On streaming I/O... ... fd(%d)", node->fd);
198
199    return ret;
200}
201
202int cam_int_streamoff(node_info_t *node)
203{
204    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
205    int ret;
206
207
208    ALOGV("Off streaming I/O... fd(%d)", node->fd);
209    ret = exynos_v4l2_streamoff(node->fd, type);
210
211    if (ret < 0)
212        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
213
214    return ret;
215}
216
217int isp_int_streamoff(node_info_t *node)
218{
219    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
220    int ret;
221
222    ALOGV("Off streaming I/O... fd(%d)", node->fd);
223    ret = exynos_v4l2_streamoff(node->fd, type);
224
225    if (ret < 0)
226        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
227
228    return ret;
229}
230
231int cam_int_dqbuf(node_info_t *node)
232{
233    struct v4l2_buffer v4l2_buf;
234    struct v4l2_plane planes[VIDEO_MAX_PLANES];
235    int ret;
236
237    v4l2_buf.type       = node->type;
238    v4l2_buf.memory     = node->memory;
239    v4l2_buf.m.planes   = planes;
240    v4l2_buf.length     = node->planes;
241
242    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
243    if (ret < 0)
244        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
245
246    return v4l2_buf.index;
247}
248
249int cam_int_dqbuf(node_info_t *node, int num_plane)
250{
251    struct v4l2_buffer v4l2_buf;
252    struct v4l2_plane planes[VIDEO_MAX_PLANES];
253    int ret;
254
255    v4l2_buf.type       = node->type;
256    v4l2_buf.memory     = node->memory;
257    v4l2_buf.m.planes   = planes;
258    v4l2_buf.length     = num_plane;
259
260    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
261    if (ret < 0)
262        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
263
264    return v4l2_buf.index;
265}
266
267int cam_int_s_input(node_info_t *node, int index)
268{
269    int ret;
270
271    ret = exynos_v4l2_s_input(node->fd, index);
272    if (ret < 0)
273        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
274
275    return ret;
276}
277
278
279gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
280
281RequestManager::RequestManager(SignalDrivenThread* main_thread):
282    m_lastAeMode(0),
283    m_lastAaMode(0),
284    m_lastAwbMode(0),
285#ifdef VDIS_ENABLE
286    m_vdisBubbleEn(false),
287#endif
288    m_lastAeComp(0),
289    m_frameIndex(-1)
290{
291    m_metadataConverter = new MetadataConverter;
292    m_mainThread = main_thread;
293    ResetEntry();
294    m_sensorPipelineSkipCnt = 0;
295    return;
296}
297
298RequestManager::~RequestManager()
299{
300    ALOGV("%s", __FUNCTION__);
301    if (m_metadataConverter != NULL) {
302        delete m_metadataConverter;
303        m_metadataConverter = NULL;
304    }
305
306    releaseSensorQ();
307    return;
308}
309
310void RequestManager::ResetEntry()
311{
312    Mutex::Autolock lock(m_requestMutex);
313    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
314        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
315        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
316    }
317    m_numOfEntries = 0;
318    m_entryInsertionIndex = -1;
319    m_entryProcessingIndex = -1;
320    m_entryFrameOutputIndex = -1;
321}
322
323int RequestManager::GetNumEntries()
324{
325    return m_numOfEntries;
326}
327
328void RequestManager::SetDefaultParameters(int cropX)
329{
330    m_cropX = cropX;
331}
332
333bool RequestManager::IsRequestQueueFull()
334{
335    Mutex::Autolock lock(m_requestMutex);
336    if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
337        return true;
338    else
339        return false;
340}
341
342void RequestManager::RegisterRequest(camera_metadata_t * new_request)
343{
344    ALOGV("DEBUG(%s):", __FUNCTION__);
345
346    Mutex::Autolock lock(m_requestMutex);
347
348    request_manager_entry * newEntry = NULL;
349    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
350    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );
351
352
353    newEntry = &(entries[newInsertionIndex]);
354
355    if (newEntry->status!=EMPTY) {
356        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
357        return;
358    }
359    newEntry->status = REGISTERED;
360    newEntry->original_request = new_request;
361    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
362    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
363    newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];
364
365    m_numOfEntries++;
366    m_entryInsertionIndex = newInsertionIndex;
367
368
369    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
370    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
371}
372
373void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
374{
375    ALOGV("DEBUG(%s):", __FUNCTION__);
376    int frame_index;
377    request_manager_entry * currentEntry;
378
379    Mutex::Autolock lock(m_requestMutex);
380
381    frame_index = GetFrameIndex();
382    currentEntry =  &(entries[frame_index]);
383    if (currentEntry->status != METADONE) {
384        CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
385                       m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
386        return;
387    }
388    if (deregistered_request)  *deregistered_request = currentEntry->original_request;
389
390    currentEntry->status = EMPTY;
391    currentEntry->original_request = NULL;
392    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
393    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
394    currentEntry->output_stream_count = 0;
395    m_numOfEntries--;
396    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
397     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
398
399    return;
400}
401
402bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
403                camera_metadata_t ** prepared_frame, int afState)
404{
405    ALOGV("DEBUG(%s):", __FUNCTION__);
406    Mutex::Autolock lock(m_requestMutex);
407    status_t res = NO_ERROR;
408    int tempFrameOutputIndex = GetFrameIndex();
409    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
410    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
411        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
412
413    if (currentEntry->status != METADONE) {
414        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
415
416        return false;
417    }
418    m_entryFrameOutputIndex = tempFrameOutputIndex;
419    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 20, 500); //estimated
420    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
421    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
422                m_tempFrameMetadata);
423    if (res!=NO_ERROR) {
424        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
425        return false;
426    }
427    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
428    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
429    *prepared_frame = m_tempFrameMetadata;
430    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
431        currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
432    // Dump();
433    return true;
434}
435
436int RequestManager::MarkProcessingRequest(ExynosBuffer* buf, int *afMode)
437{
438    struct camera2_shot_ext * shot_ext;
439    struct camera2_shot_ext * request_shot;
440    int targetStreamIndex = 0;
441    request_manager_entry * newEntry = NULL;
442    static int count = 0;
443
444    Mutex::Autolock lock(m_requestMutex);
445    if (m_numOfEntries == 0)  {
446        CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
447        return -1;
448    }
449
450    if ((m_entryProcessingIndex == m_entryInsertionIndex)
451        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
452        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
453         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
454        return -1;
455    }
456
457    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
458    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
459
460    newEntry = &(entries[newProcessingIndex]);
461    request_shot = &(newEntry->internal_shot);
462    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
463    if (newEntry->status != REGISTERED) {
464        CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
465        for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
466                CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
467        }
468        return -1;
469    }
470
471    newEntry->status = REQUESTED;
472
473    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
474
475    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
476    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
477    shot_ext->request_sensor = 1;
478    shot_ext->dis_bypass = 1;
479    shot_ext->dnr_bypass = 1;
480    shot_ext->fd_bypass = 1;
481    shot_ext->setfile = 0;
482
483    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
484    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
485    if (targetStreamIndex & MASK_OUTPUT_SCP)
486        shot_ext->request_scp = 1;
487
488    if (targetStreamIndex & MASK_OUTPUT_SCC)
489        shot_ext->request_scc = 1;
490
491    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
492        shot_ext->fd_bypass = 0;
493
494    if (count == 0){
495        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
496    } else
497        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
498
499    count++;
500    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
501    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
502    shot_ext->shot.magicNumber = 0x23456789;
503    shot_ext->shot.ctl.sensor.exposureTime = 0;
504    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
505    shot_ext->shot.ctl.sensor.sensitivity = 0;
506
507
508    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
509    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
510    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
511
512    m_entryProcessingIndex = newProcessingIndex;
513    return newProcessingIndex;
514}
515
516void RequestManager::NotifyStreamOutput(int frameCnt)
517{
518    int index;
519
520    Mutex::Autolock lock(m_requestMutex);
521    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
522
523    index = FindEntryIndexByFrameCnt(frameCnt);
524    if (index == -1) {
525        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
526        return;
527    }
528    ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt,   entries[index].output_stream_count);
529
530    entries[index].output_stream_count--;  //TODO : match stream id also
531    CheckCompleted(index);
532}
533
534void RequestManager::CheckCompleted(int index)
535{
536    if((entries[index].status == METADONE) && (entries[index].output_stream_count <= 0)){
537        ALOGV("send SIGNAL_MAIN_STREAM_OUTPUT_DONE(index:%d)(frameCnt:%d)",
538                index, entries[index].internal_shot.shot.ctl.request.frameCount );
539        SetFrameIndex(index);
540        m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
541    }
542}
543
544void RequestManager::SetFrameIndex(int index)
545{
546    m_frameIndex = index;
547}
548
549int RequestManager::GetFrameIndex()
550{
551    return m_frameIndex;
552}
553
554void  RequestManager::pushSensorQ(int index)
555{
556    Mutex::Autolock lock(m_requestMutex);
557    m_sensorQ.push_back(index);
558}
559
560int RequestManager::popSensorQ()
561{
562   List<int>::iterator sensor_token;
563   int index;
564
565    Mutex::Autolock lock(m_requestMutex);
566
567    if(m_sensorQ.size() == 0)
568        return -1;
569
570    sensor_token = m_sensorQ.begin()++;
571    index = *sensor_token;
572    m_sensorQ.erase(sensor_token);
573
574    return (index);
575}
576
577void RequestManager::releaseSensorQ()
578{
579    List<int>::iterator r;
580
581    Mutex::Autolock lock(m_requestMutex);
582    ALOGV("(%d)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
583
584    while(m_sensorQ.size() > 0){
585        r  = m_sensorQ.begin()++;
586        m_sensorQ.erase(r);
587    }
588    return;
589}
590
591void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
592{
593    int index;
594    struct camera2_shot_ext * request_shot;
595    nsecs_t timeStamp;
596    int i;
597
598    Mutex::Autolock lock(m_requestMutex);
599    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
600
601    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
602        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
603            && (entries[i].status == CAPTURED)){
604            entries[i].status =METADONE;
605            break;
606        }
607    }
608
609    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
610        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
611        return;
612    }
613
614    request_manager_entry * newEntry = &(entries[i]);
615    request_shot = &(newEntry->internal_shot);
616
617    timeStamp = request_shot->shot.dm.sensor.timeStamp;
618    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
619    request_shot->shot.dm.sensor.timeStamp = timeStamp;
620    m_lastTimeStamp = timeStamp;
621    CheckCompleted(i);
622}
623
624void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
625{
626    int index, targetStreamIndex;
627    struct camera2_shot_ext * request_shot;
628
629    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
630    if (frameCnt < 0)
631        return;
632
633    index = FindEntryIndexByFrameCnt(frameCnt);
634    if (index == -1) {
635        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
636        return;
637    }
638
639    request_manager_entry * newEntry = &(entries[index]);
640    request_shot = &(newEntry->internal_shot);
641    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
642    shot_ext->shot.ctl.request.frameCount = frameCnt;
643    shot_ext->request_sensor = 1;
644    shot_ext->dis_bypass = 1;
645    shot_ext->dnr_bypass = 1;
646    shot_ext->fd_bypass = 1;
647    shot_ext->setfile = 0;
648
649    shot_ext->request_scc = 0;
650    shot_ext->request_scp = 0;
651
652    shot_ext->isReprocessing = request_shot->isReprocessing;
653    shot_ext->reprocessInput = request_shot->reprocessInput;
654    shot_ext->shot.ctl.request.outputStreams[0] = 0;
655
656    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
657    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
658    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
659
660    // mapping flash UI mode from aeMode
661    if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
662        ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
663        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
664    }
665    // mapping awb UI mode form awbMode
666    ctl_info->awb.i_awbMode = request_shot->shot.ctl.aa.awbMode;
667
668    // Apply ae/awb lock or unlock
669    if (request_shot->ae_lock == AEMODE_LOCK_ON)
670            request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
671    if (request_shot->awb_lock == AWBMODE_LOCK_ON)
672            request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
673
674    // This is for pre-capture metering
675    if (ctl_info->ae.m_aeCnt >= IS_COMMAND_EXECUTION) {
676        if (ctl_info->ae.m_aeCnt == IS_COMMAND_CLEAR) {
677            ALOGV("(%s) [Capture] m_aeCnt :  CLEAR -> NONE", __FUNCTION__);
678            ctl_info->ae.m_aeCnt = IS_COMMAND_NONE;
679        } else {
680            ctl_info->ae.m_aeCnt = IS_COMMAND_CLEAR;
681            ALOGV("(%s) [Capture] m_aeCnt :  EXECUTION -> CLEAR", __FUNCTION__);
682        }
683    }
684    if (ctl_info->awb.m_awbCnt >= IS_COMMAND_EXECUTION) {
685        if (ctl_info->awb.m_awbCnt == IS_COMMAND_CLEAR)
686            ctl_info->awb.m_awbCnt = IS_COMMAND_NONE;
687        else
688            ctl_info->awb.m_awbCnt = IS_COMMAND_CLEAR;
689    }
690
691    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
692        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
693    }
694    else {
695        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
696        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
697    }
698    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
699        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
700    }
701    else {
702        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
703        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
704    }
705    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
706        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
707    }
708    else {
709        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
710        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
711    }
712    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
713        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
714    }
715    else {
716        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
717        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
718    }
719
720#ifdef VDIS_ENABLE
721    if (request_shot->shot.ctl.aa.videoStabilizationMode) {
722        m_vdisBubbleEn = true;
723        shot_ext->dis_bypass = 0;
724    } else {
725        m_vdisBubbleEn = false;
726        shot_ext->dis_bypass = 1;
727    }
728#endif
729
730    shot_ext->shot.ctl.aa.afTrigger = 0;
731
732    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
733    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
734    if (targetStreamIndex & MASK_OUTPUT_SCP)
735        shot_ext->request_scp = 1;
736
737    if (targetStreamIndex & MASK_OUTPUT_SCC)
738        shot_ext->request_scc = 1;
739
740    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
741        shot_ext->fd_bypass = 0;
742
743    if (targetStreamIndex & STREAM_MASK_RECORD) {
744        shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
745        shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
746    }
747
748    ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
749    (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
750    (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
751    (int)(shot_ext->shot.ctl.aa.afMode));
752}
753
754#ifdef VDIS_ENABLE
755bool    RequestManager::IsVdisEnable(void)
756{
757        return m_vdisBubbleEn;
758}
759#endif
760
761int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
762{
763    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
764        if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
765            return i;
766    }
767    return -1;
768}
769
770void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
771{
772    int index = FindEntryIndexByFrameCnt(frameCnt);
773    if (index == -1) {
774        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
775        return;
776    }
777
778    request_manager_entry * currentEntry = &(entries[index]);
779    currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
780    ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
781        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
782}
783
784
785nsecs_t  RequestManager::GetTimestampByFrameCnt(int frameCnt)
786{
787    int index = FindEntryIndexByFrameCnt(frameCnt);
788    if (index == -1) {
789        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
790        return m_lastTimeStamp;
791    }
792    else
793        return GetTimestamp(index);
794}
795
796nsecs_t  RequestManager::GetTimestamp(int index)
797{
798    Mutex::Autolock lock(m_requestMutex);
799    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
800        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
801        return 0;
802    }
803
804    request_manager_entry * currentEntry = &(entries[index]);
805    nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
806    if (frameTime == 0) {
807        ALOGV("DEBUG(%s): timestamp null,  returning saved value", __FUNCTION__);
808        frameTime = m_lastTimeStamp;
809    }
810    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
811    return frameTime;
812}
813
814int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
815{
816    int i;
817
818    if (m_numOfEntries == 0) {
819        CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
820        return -1;
821    }
822
823    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
824        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
825            continue;
826
827        if (entries[i].status == REQUESTED) {
828            entries[i].status = CAPTURED;
829            return entries[i].internal_shot.shot.ctl.request.frameCount;
830        }
831        CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
832
833    }
834    CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
835
836    return -1;
837}
838
839void     RequestManager::SetInitialSkip(int count)
840{
841    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
842    if (count > m_sensorPipelineSkipCnt)
843        m_sensorPipelineSkipCnt = count;
844}
845
846int     RequestManager::GetSkipCnt()
847{
848    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
849    if (m_sensorPipelineSkipCnt == 0)
850        return m_sensorPipelineSkipCnt;
851    else
852        return --m_sensorPipelineSkipCnt;
853}
854
855void RequestManager::Dump(void)
856{
857    int i = 0;
858    request_manager_entry * currentEntry;
859    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
860    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
861
862    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
863        currentEntry =  &(entries[i]);
864        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
865        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
866            currentEntry->output_stream_count,
867            currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
868    }
869}
870
871int     RequestManager::GetNextIndex(int index)
872{
873    index++;
874    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
875        index = 0;
876
877    return index;
878}
879
880ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
881            m_requestQueueOps(NULL),
882            m_frameQueueOps(NULL),
883            m_callbackCookie(NULL),
884            m_numOfRemainingReqInSvc(0),
885            m_isRequestQueuePending(false),
886            m_isRequestQueueNull(true),
887            m_isIspStarted(false),
888            m_ionCameraClient(0),
889            m_zoomRatio(1),
890            m_scp_closing(false),
891            m_scp_closed(false),
892            m_afState(HAL_AFSTATE_INACTIVE),
893            m_afMode(NO_CHANGE),
894            m_afMode2(NO_CHANGE),
895#ifdef VDIS_ENABLE
896            m_vdisBubbleCnt(0),
897            m_vdisDupFrame(0),
898#endif
899            m_IsAfModeUpdateRequired(false),
900            m_IsAfTriggerRequired(false),
901            m_IsAfLockRequired(false),
902            m_wideAspect(false),
903            m_scpOutputSignalCnt(0),
904            m_scpOutputImageCnt(0),
905            m_afTriggerId(0),
906            m_afPendingTriggerId(0),
907            m_afModeWaitingCnt(0),
908            m_halDevice(dev),
909            m_nightCaptureCnt(0),
910            m_cameraId(cameraId),
911            m_thumbNailW(160),
912            m_thumbNailH(120)
913{
914    ALOGV("DEBUG(%s):", __FUNCTION__);
915    int ret = 0;
916    int res = 0;
917
918    m_exynosPictureCSC = NULL;
919    m_exynosVideoCSC = NULL;
920
921    if (!m_grallocHal) {
922        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
923        if (ret)
924            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
925    }
926
927    m_camera2 = camera;
928    m_ionCameraClient = createIonClient(m_ionCameraClient);
929    if(m_ionCameraClient == 0)
930        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
931
932
933    m_BayerManager = new BayerBufManager();
934    m_mainThread    = new MainThread(this);
935    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
936    *openInvalid = InitializeISPChain();
937    if (*openInvalid < 0) {
938        // clean process
939        // 1. close video nodes
940        // SCP
941        res = exynos_v4l2_close(m_camera_info.scp.fd);
942        if (res != NO_ERROR ) {
943            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
944        }
945        // SCC
946        res = exynos_v4l2_close(m_camera_info.capture.fd);
947        if (res != NO_ERROR ) {
948            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
949        }
950        // Sensor
951        res = exynos_v4l2_close(m_camera_info.sensor.fd);
952        if (res != NO_ERROR ) {
953            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
954        }
955        // ISP
956        res = exynos_v4l2_close(m_camera_info.isp.fd);
957        if (res != NO_ERROR ) {
958            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
959        }
960    } else {
961        m_sensorThread  = new SensorThread(this);
962        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
963        ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);
964
965        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
966            m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
967        CSC_METHOD cscMethod = CSC_METHOD_HW;
968        m_exynosPictureCSC = csc_init(cscMethod);
969        if (m_exynosPictureCSC == NULL)
970            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
971        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
972
973        m_exynosVideoCSC = csc_init(cscMethod);
974        if (m_exynosVideoCSC == NULL)
975            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
976        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
977
978        m_setExifFixedAttribute();
979
980        // contol information clear
981        // flash
982        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
983        m_ctlInfo.flash.m_afFlashDoneFlg= false;
984        m_ctlInfo.flash.m_flashEnableFlg = false;
985        m_ctlInfo.flash.m_flashFrameCount = 0;
986        m_ctlInfo.flash.m_flashCnt = 0;
987        m_ctlInfo.flash.m_flashTimeOut = 0;
988        m_ctlInfo.flash.m_flashDecisionResult = false;
989        m_ctlInfo.flash.m_flashTorchMode = false;
990        m_ctlInfo.flash.m_precaptureState = 0;
991        m_ctlInfo.flash.m_precaptureTriggerId = 0;
992        //ae
993        m_ctlInfo.ae.m_aeCnt = IS_COMMAND_NONE;
994        // awb
995        m_ctlInfo.awb.i_awbMode = AA_AWBMODE_OFF;
996        m_ctlInfo.awb.m_awbCnt = IS_COMMAND_NONE;
997    }
998}
999
1000ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
1001{
1002    ALOGV("%s: ENTER", __FUNCTION__);
1003    this->release();
1004    ALOGV("%s: EXIT", __FUNCTION__);
1005}
1006
1007void ExynosCameraHWInterface2::release()
1008{
1009    int i, res;
1010    CAM_LOGD("%s: ENTER", __func__);
1011
1012    if (m_streamThreads[1] != NULL) {
1013        m_streamThreads[1]->release();
1014        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
1015    }
1016
1017    if (m_streamThreads[0] != NULL) {
1018        m_streamThreads[0]->release();
1019        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
1020    }
1021
1022    if (m_sensorThread != NULL) {
1023        m_sensorThread->release();
1024    }
1025
1026    if (m_mainThread != NULL) {
1027        m_mainThread->release();
1028    }
1029
1030    if (m_exynosPictureCSC)
1031        csc_deinit(m_exynosPictureCSC);
1032    m_exynosPictureCSC = NULL;
1033
1034    if (m_exynosVideoCSC)
1035        csc_deinit(m_exynosVideoCSC);
1036    m_exynosVideoCSC = NULL;
1037
1038    if (m_streamThreads[1] != NULL) {
1039        while (!m_streamThreads[1]->IsTerminated())
1040        {
1041            CAM_LOGD("Waiting for stream thread 1 is tetminated");
1042            usleep(100000);
1043        }
1044        m_streamThreads[1] = NULL;
1045    }
1046
1047    if (m_streamThreads[0] != NULL) {
1048        while (!m_streamThreads[0]->IsTerminated())
1049        {
1050            CAM_LOGD("Waiting for stream thread 0 is tetminated");
1051            usleep(100000);
1052        }
1053        m_streamThreads[0] = NULL;
1054    }
1055
1056    if (m_sensorThread != NULL) {
1057        while (!m_sensorThread->IsTerminated())
1058        {
1059            CAM_LOGD("Waiting for sensor thread is tetminated");
1060            usleep(100000);
1061        }
1062        m_sensorThread = NULL;
1063    }
1064
1065    if (m_mainThread != NULL) {
1066        while (!m_mainThread->IsTerminated())
1067        {
1068            CAM_LOGD("Waiting for main thread is tetminated");
1069            usleep(100000);
1070        }
1071        m_mainThread = NULL;
1072    }
1073
1074    if (m_requestManager != NULL) {
1075        delete m_requestManager;
1076        m_requestManager = NULL;
1077    }
1078
1079    if (m_BayerManager != NULL) {
1080        delete m_BayerManager;
1081        m_BayerManager = NULL;
1082    }
1083    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
1084        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1085
1086    for (i = 0; i < NUM_SCC_BUFFERS; i++)
1087        freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1088
1089    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
1090    res = exynos_v4l2_close(m_camera_info.sensor.fd);
1091    if (res != NO_ERROR ) {
1092        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1093    }
1094
1095    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
1096    res = exynos_v4l2_close(m_camera_info.isp.fd);
1097    if (res != NO_ERROR ) {
1098        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1099    }
1100
1101    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
1102    res = exynos_v4l2_close(m_camera_info.capture.fd);
1103    if (res != NO_ERROR ) {
1104        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1105    }
1106
1107    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
1108    res = exynos_v4l2_close(m_camera_info.scp.fd);
1109    if (res != NO_ERROR ) {
1110        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1111    }
1112    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
1113    deleteIonClient(m_ionCameraClient);
1114
1115    ALOGV("%s: EXIT", __func__);
1116}
1117
1118int ExynosCameraHWInterface2::InitializeISPChain()
1119{
1120    char node_name[30];
1121    int fd = 0;
1122    int i;
1123    int ret = 0;
1124
1125    /* Open Sensor */
1126    memset(&node_name, 0x00, sizeof(char[30]));
1127    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1128    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1129
1130    if (fd < 0) {
1131        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1132    }
1133    else {
1134        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1135    }
1136    m_camera_info.sensor.fd = fd;
1137
1138    /* Open ISP */
1139    memset(&node_name, 0x00, sizeof(char[30]));
1140    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1141    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1142
1143    if (fd < 0) {
1144        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1145    }
1146    else {
1147        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1148    }
1149    m_camera_info.isp.fd = fd;
1150
1151    /* Open ScalerC */
1152    memset(&node_name, 0x00, sizeof(char[30]));
1153    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1154    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1155
1156    if (fd < 0) {
1157        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1158    }
1159    else {
1160        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1161    }
1162    m_camera_info.capture.fd = fd;
1163
1164    /* Open ScalerP */
1165    memset(&node_name, 0x00, sizeof(char[30]));
1166    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1167    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1168    if (fd < 0) {
1169        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1170    }
1171    else {
1172        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1173    }
1174    m_camera_info.scp.fd = fd;
1175
1176    if(m_cameraId == 0)
1177        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1178    else
1179        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1180
1181    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1182    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1183    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1184
1185    m_camera_info.dummy_shot.dis_bypass = 1;
1186    m_camera_info.dummy_shot.dnr_bypass = 1;
1187    m_camera_info.dummy_shot.fd_bypass = 1;
1188
1189    /*sensor setting*/
1190    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1191    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1192    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1193
1194    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1195    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1196
1197    /*request setting*/
1198    m_camera_info.dummy_shot.request_sensor = 1;
1199    m_camera_info.dummy_shot.request_scc = 0;
1200    m_camera_info.dummy_shot.request_scp = 0;
1201    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1202
1203    m_camera_info.sensor.width = m_camera2->getSensorRawW();
1204    m_camera_info.sensor.height = m_camera2->getSensorRawH();
1205
1206    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1207    m_camera_info.sensor.planes = 2;
1208    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1209    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1210    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1211
1212    for(i = 0; i < m_camera_info.sensor.buffers; i++){
1213        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1214        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1215        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
1216        allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
1217    }
1218
1219    m_camera_info.isp.width = m_camera_info.sensor.width;
1220    m_camera_info.isp.height = m_camera_info.sensor.height;
1221    m_camera_info.isp.format = m_camera_info.sensor.format;
1222    m_camera_info.isp.planes = m_camera_info.sensor.planes;
1223    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1224    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1225    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1226
1227    for(i = 0; i < m_camera_info.isp.buffers; i++){
1228        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1229        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1230        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1231        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1232        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1233        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1234        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
1235    };
1236
1237    /* init ISP */
1238    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1239    if (ret < 0) {
1240        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
1241        return false;
1242    }
1243    cam_int_s_fmt(&(m_camera_info.isp));
1244    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1245    cam_int_reqbufs(&(m_camera_info.isp));
1246    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1247    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1248
1249    /* init Sensor */
1250    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1251    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1252    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1253        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1254    }
1255    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1256    cam_int_reqbufs(&(m_camera_info.sensor));
1257    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1258    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1259        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1260        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1261        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1262        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1263                sizeof(struct camera2_shot_ext));
1264    }
1265
1266    for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1267        cam_int_qbuf(&(m_camera_info.sensor), i);
1268
1269    for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1270        m_requestManager->pushSensorQ(i);
1271
1272    ALOGV("== stream_on :: sensor");
1273    cam_int_streamon(&(m_camera_info.sensor));
1274    m_camera_info.sensor.status = true;
1275
1276    /* init Capture */
1277    m_camera_info.capture.width = m_camera2->getSensorW();
1278    m_camera_info.capture.height = m_camera2->getSensorH();
1279    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1280#ifdef ENABLE_FRAME_SYNC
1281    m_camera_info.capture.planes = 2;
1282#else
1283    m_camera_info.capture.planes = 1;
1284#endif
1285    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1286    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1287    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1288
1289    m_camera_info.capture.status = false;
1290
1291    return true;
1292}
1293
1294void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
1295{
1296    ALOGV("(%s)", __FUNCTION__);
1297    StreamThread *AllocatedStream;
1298    stream_parameters_t newParameters;
1299    uint32_t format_actual;
1300    int numRegisteredStream = 0;
1301
1302
1303    if (!threadExists) {
1304        m_streamThreads[1]  = new StreamThread(this, 1);
1305    }
1306    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1307    if (!threadExists)
1308        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1309    AllocatedStream->m_index        = 1;
1310
1311    format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1312
1313    newParameters.width             = m_camera2->getSensorW();
1314    newParameters.height            = m_camera2->getSensorH();
1315    newParameters.format            = format_actual;
1316    newParameters.streamOps         = NULL;
1317    newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
1318#ifdef ENABLE_FRAME_SYNC
1319    newParameters.planes            = 2;
1320#else
1321    newParameters.planes            = 1;
1322#endif
1323
1324    newParameters.numSvcBufsInHal   = 0;
1325
1326    newParameters.node              = &m_camera_info.capture;
1327
1328    AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
1329    AllocatedStream->m_numRegisteredStream = numRegisteredStream;
1330    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1331
1332    for (int i = 0; i < m_camera_info.capture.buffers; i++){
1333        if (!threadExists) {
1334            initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
1335            m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1336#ifdef ENABLE_FRAME_SYNC
1337            m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
1338            allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
1339#else
1340            allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1341#endif
1342        }
1343    }
1344    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
1345    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1346    cam_int_s_fmt(newParameters.node);
1347    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1348    cam_int_reqbufs(newParameters.node);
1349    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1350
1351    for (int i = 0; i < newParameters.node->buffers; i++) {
1352        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1353        cam_int_qbuf(newParameters.node, i);
1354        newParameters.svcBufStatus[i] = ON_DRIVER;
1355    }
1356
1357    ALOGV("== stream_on :: capture");
1358    if (cam_int_streamon(newParameters.node) < 0) {
1359        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1360    } else {
1361        m_camera_info.capture.status = true;
1362    }
1363
1364    AllocatedStream->setParameter(&newParameters);
1365    AllocatedStream->m_activated    = true;
1366    AllocatedStream->m_isBufferInit = true;
1367}
1368
1369void ExynosCameraHWInterface2::StartISP()
1370{
1371    ALOGV("== stream_on :: isp");
1372    cam_int_streamon(&(m_camera_info.isp));
1373    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1374}
1375
1376int ExynosCameraHWInterface2::getCameraId() const
1377{
1378    return m_cameraId;
1379}
1380
1381int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1382{
1383    ALOGV("DEBUG(%s):", __FUNCTION__);
1384    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1385            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1386        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1387        return 0;
1388    }
1389    else {
1390        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
1391        return 1;
1392    }
1393}
1394
1395int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1396{
1397    int i = 0;
1398
1399    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1400    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1401        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
1402        return 0;
1403    }
1404    m_isRequestQueueNull = false;
1405    if (m_requestManager->GetNumEntries() == 0)
1406        m_requestManager->SetInitialSkip(5);
1407
1408    if (m_isIspStarted == false) {
1409        /* isp */
1410        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1411        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1412        cam_int_s_fmt(&(m_camera_info.isp));
1413        cam_int_reqbufs(&(m_camera_info.isp));
1414
1415        /* sensor */
1416        if (m_camera_info.sensor.status == false) {
1417            cam_int_s_fmt(&(m_camera_info.sensor));
1418            cam_int_reqbufs(&(m_camera_info.sensor));
1419
1420            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1421                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1422                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1423                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1424                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1425                        sizeof(struct camera2_shot_ext));
1426            }
1427            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1428                cam_int_qbuf(&(m_camera_info.sensor), i);
1429
1430            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1431                m_requestManager->pushSensorQ(i);
1432            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
1433            cam_int_streamon(&(m_camera_info.sensor));
1434            m_camera_info.sensor.status = true;
1435        }
1436    }
1437    if (!(m_streamThreads[1].get())) {
1438        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
1439        StartSCCThread(false);
1440    } else {
1441        if (m_streamThreads[1]->m_activated ==  false) {
1442            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1443            StartSCCThread(true);
1444        } else {
1445            if (m_camera_info.capture.status == false) {
1446                m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1447                cam_int_s_fmt(&(m_camera_info.capture));
1448                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1449                cam_int_reqbufs(&(m_camera_info.capture));
1450                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1451
1452                for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1453                    ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1454                    cam_int_qbuf(&(m_camera_info.capture), i);
1455                }
1456                ALOGV("== stream_on :: capture");
1457                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1458                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1459                } else {
1460                    m_camera_info.capture.status = true;
1461                }
1462            }
1463        }
1464    }
1465    if (m_isIspStarted == false) {
1466        StartISP();
1467        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1468        m_requestManager->SetInitialSkip(5);
1469        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1470        m_isIspStarted = true;
1471    }
1472    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1473    return 0;
1474}
1475
1476int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1477{
1478    ALOGV("DEBUG(%s):", __FUNCTION__);
1479    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1480            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1481        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1482        return 0;
1483    }
1484    else {
1485        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1486        return 1;
1487    }
1488}
1489
1490int ExynosCameraHWInterface2::getInProgressCount()
1491{
1492    int inProgressCount = m_requestManager->GetNumEntries();
1493    ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
1494    return inProgressCount;
1495}
1496
1497int ExynosCameraHWInterface2::flushCapturesInProgress()
1498{
1499    return 0;
1500}
1501
1502int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1503{
1504    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1505
1506    if (request == NULL) return BAD_VALUE;
1507    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1508        return BAD_VALUE;
1509    }
1510    status_t res;
1511    // Pass 1, calculate size and allocate
1512    res = m_camera2->constructDefaultRequest(request_template,
1513            request,
1514            true);
1515    if (res != OK) {
1516        return res;
1517    }
1518    // Pass 2, build request
1519    res = m_camera2->constructDefaultRequest(request_template,
1520            request,
1521            false);
1522    if (res != OK) {
1523        ALOGE("Unable to populate new request for template %d",
1524                request_template);
1525    }
1526
1527    return res;
1528}
1529
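/*
 * Stream allocation overview (a summary of the branches below):
 *  - An opaque/implementation-defined format at a supported preview resolution maps
 *    to STREAM_ID_PREVIEW on the SCP node (direct stream on m_streamThreads[0]); if
 *    that stream is already active, the request becomes the STREAM_ID_RECORD substream.
 *  - The same opaque format at full sensor resolution maps to STREAM_ID_ZSL on the
 *    capture (SCC) node via m_streamThreads[1].
 *  - HAL_PIXEL_FORMAT_BLOB maps to the STREAM_ID_JPEG substream of stream thread 1,
 *    starting or restarting the SCC thread when necessary.
 *  - YCrCb_420_SP / YV12 map to the STREAM_ID_PRVCB preview-callback substream.
 * m_wideAspect is currently derived from a hard-coded list of wide resolutions.
 */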
1530int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1531                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1532{
1533    ALOGV("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1534    bool useDirectOutput = false;
1535    StreamThread *AllocatedStream;
1536    stream_parameters_t newParameters;
1537    substream_parameters_t *subParameters;
1538    StreamThread *parentStream;
1539    status_t res;
1540    int allocCase = 0;
1541
1542    if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
1543            m_camera2->isSupportedResolution(width, height)) {
1544        if (!(m_streamThreads[0].get())) {
1545            ALOGV("DEBUG(%s): stream 0 does not exist", __FUNCTION__);
1546            allocCase = 0;
1547        }
1548        else {
1549            if ((m_streamThreads[0].get())->m_activated == true) {
1550                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1551                allocCase = 1;
1552            }
1553            else {
1554                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1555                allocCase = 2;
1556            }
1557        }
1558
1559        // TODO : instead of this hard-coded size list, calculate the aspect ratio and select based on it.
1560        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1561                    || (width == 720 && height == 480) || (width == 1440 && height == 960)
1562                    || (width == 1344 && height == 896)) {
1563            m_wideAspect = true;
1564        } else {
1565            m_wideAspect = false;
1566        }
1567        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1568
1569        if (allocCase == 0 || allocCase == 2) {
1570            *stream_id = STREAM_ID_PREVIEW;
1571
1572            m_streamThreads[0]  = new StreamThread(this, *stream_id);
1573
1574            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1575            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1576
1577            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1578            *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1579            *max_buffers                        = 6;
1580
1581            newParameters.width                 = width;
1582            newParameters.height                = height;
1583            newParameters.format                = *format_actual;
1584            newParameters.streamOps             = stream_ops;
1585            newParameters.usage                 = *usage;
1586            newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
1587            newParameters.numOwnSvcBuffers      = *max_buffers;
1588            newParameters.planes                = NUM_PLANES(*format_actual);
1589            newParameters.metaPlanes            = 1;
1590            newParameters.numSvcBufsInHal       = 0;
1591            newParameters.minUndequedBuffer     = 4;
1592
1593            newParameters.node                  = &m_camera_info.scp;
1594            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1595            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1596
1597            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1598            AllocatedStream->m_index            = 0;
1599            AllocatedStream->setParameter(&newParameters);
1600            AllocatedStream->m_activated = true;
1601            AllocatedStream->m_numRegisteredStream = 1;
1602            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1603            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1604            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1605            if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1606                AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1607            if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1608                AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1609            return 0;
1610        } else if (allocCase == 1) {
1611            *stream_id = STREAM_ID_RECORD;
1612
1613            subParameters = &m_subStreams[STREAM_ID_RECORD];
1614            memset(subParameters, 0, sizeof(substream_parameters_t));
1615
1616            parentStream = (StreamThread*)(m_streamThreads[0].get());
1617            if (!parentStream) {
1618                return 1;
1619            }
1620
1621            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1622            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1623            *max_buffers = 6;
1624
1625            subParameters->type         = SUBSTREAM_TYPE_RECORD;
1626            subParameters->width        = width;
1627            subParameters->height       = height;
1628            subParameters->format       = *format_actual;
1629            subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1630            subParameters->streamOps     = stream_ops;
1631            subParameters->usage         = *usage;
1632            subParameters->numOwnSvcBuffers = *max_buffers;
1633            subParameters->numSvcBufsInHal  = 0;
1634            subParameters->needBufferInit    = false;
1635            subParameters->minUndequedBuffer = 2;
1636
1637            res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1638            if (res != NO_ERROR) {
1639                ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1640                return 1;
1641            }
1642            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1643            ALOGV("(%s): Enabling Record", __FUNCTION__);
1644            return 0;
1645        }
1646    }
1647    else if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED|| format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)
1648            && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) {
1649
1650        if (!(m_streamThreads[1].get())) {
1651            ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
1652            useDirectOutput = true;
1653//            createThread = true;
1654        }
1655        else {
1656            ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1657//            useDirectOutput = true;
1658//            createThread = false;
1659        }
1660        if (useDirectOutput) {
1661            *stream_id = STREAM_ID_ZSL;
1662
1663            /*if (createThread)*/ {
1664                m_streamThreads[1]  = new StreamThread(this, *stream_id);
1665            }
1666            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1667            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1668
1672            *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1673            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1674            *max_buffers = 6;
1675
1676            newParameters.width                 = width;
1677            newParameters.height                = height;
1678            newParameters.format                = *format_actual;
1679            newParameters.streamOps             = stream_ops;
1680            newParameters.usage                 = *usage;
1681            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1682            newParameters.numOwnSvcBuffers      = *max_buffers;
1683            newParameters.planes                = NUM_PLANES(*format_actual);
1684            newParameters.metaPlanes            = 1;
1685
1686            newParameters.numSvcBufsInHal       = 0;
1687            newParameters.minUndequedBuffer     = 4;
1688
1689            newParameters.node                  = &m_camera_info.capture;
1690            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1691            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1692
1693            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1694            AllocatedStream->m_index            = 1;
1695            AllocatedStream->setParameter(&newParameters);
1696            AllocatedStream->m_activated = true;
1697            AllocatedStream->m_numRegisteredStream++;
1698            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1699            return 0;
1700        }
1701    }
1702    else if (format == HAL_PIXEL_FORMAT_BLOB
1703            && m_camera2->isSupportedJpegResolution(width, height)) {
1704        *stream_id = STREAM_ID_JPEG;
1705
1706        subParameters = &m_subStreams[*stream_id];
1707        memset(subParameters, 0, sizeof(substream_parameters_t));
1708
1709        if (!(m_streamThreads[1].get())) {
1710            ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
1711            StartSCCThread(false);
1712        }
1713        else if (m_streamThreads[1]->m_activated ==  false) {
1714            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1715            StartSCCThread(true);
1716        }
1717        parentStream = (StreamThread*)(m_streamThreads[1].get());
1718
1719        *format_actual = HAL_PIXEL_FORMAT_BLOB;
1720        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1721        *max_buffers = 4;
1722
1723        subParameters->type          = SUBSTREAM_TYPE_JPEG;
1724        subParameters->width         = width;
1725        subParameters->height        = height;
1726        subParameters->format        = *format_actual;
1727        subParameters->svcPlanes     = 1;
1728        subParameters->streamOps     = stream_ops;
1729        subParameters->usage         = *usage;
1730        subParameters->numOwnSvcBuffers = *max_buffers;
1731        subParameters->numSvcBufsInHal  = 0;
1732        subParameters->needBufferInit    = false;
1733        subParameters->minUndequedBuffer = 2;
1734
1735        res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1736        if (res != NO_ERROR) {
1737            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1738            return 1;
1739        }
1740        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1741        ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1742        return 0;
1743    }
1744    else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1745        *stream_id = STREAM_ID_PRVCB;
1746
1747        subParameters = &m_subStreams[STREAM_ID_PRVCB];
1748        memset(subParameters, 0, sizeof(substream_parameters_t));
1749
1750        parentStream = (StreamThread*)(m_streamThreads[0].get());
1751        if (!parentStream) {
1752            return 1;
1753        }
1754
1755        *format_actual = format;
1756        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1757        *max_buffers = 6;
1758
1759        subParameters->type         = SUBSTREAM_TYPE_PRVCB;
1760        subParameters->width        = width;
1761        subParameters->height       = height;
1762        subParameters->format       = *format_actual;
1763        subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1764        subParameters->streamOps     = stream_ops;
1765        subParameters->usage         = *usage;
1766        subParameters->numOwnSvcBuffers = *max_buffers;
1767        subParameters->numSvcBufsInHal  = 0;
1768        subParameters->needBufferInit    = false;
1769        subParameters->minUndequedBuffer = 2;
1770
1771        if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1772            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1773            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1774        }
1775        else {
1776            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1777            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
1778        }
1779
1780        res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
1781        if (res != NO_ERROR) {
1782            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1783            return 1;
1784        }
1785        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1786        ALOGV("(%s): Enabling previewcb", __FUNCTION__);
1787        return 0;
1788    }
1789    ALOGE("ERR(%s): unsupported pixel format (0x%x)", __FUNCTION__, format);
1790    return 1;
1791}
1792
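/*
 * Buffer registration is split by stream type (as implemented below):
 *  - For the substreams (JPEG / RECORD / PRVCB) the service buffers are only locked
 *    through gralloc so their fds and virtual addresses can be recorded; no V4L2
 *    queuing happens here.
 *  - For the direct streams (PREVIEW / ZSL) the backing V4L2 node is programmed
 *    (s_input / s_fmt / reqbufs), every plane is ion-mapped, and the first
 *    numHwBuffers buffers are queued to the driver; when ENABLE_FRAME_SYNC is
 *    defined an extra metadata plane is allocated and appended to each queued buffer.
 *    Remaining buffers are marked ON_SERVICE, and the node is finally streamed on.
 */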
1793int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1794        int num_buffers, buffer_handle_t *registeringBuffers)
1795{
1796    int                     i,j;
1797    void                    *virtAddr[3];
1798    int                     plane_index = 0;
1799    StreamThread *          targetStream;
1800    stream_parameters_t     *targetStreamParms;
1801    node_info_t             *currentNode;
1802
1803    struct v4l2_buffer v4l2_buf;
1804    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1805
1806    CAM_LOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1807        stream_id, num_buffers, (uint32_t)registeringBuffers);
1808
1809    if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
1810        targetStream = m_streamThreads[0].get();
1811        targetStreamParms = &(m_streamThreads[0]->m_parameters);
1812
1813    }
1814    else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
1815        substream_parameters_t  *targetParms;
1816        targetParms = &m_subStreams[stream_id];
1817
1818        targetParms->numSvcBuffers = num_buffers;
1819
1820        for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
1821            ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
1822                stream_id, i, (uint32_t)(registeringBuffers[i]));
1823            if (m_grallocHal) {
1824                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1825                       targetParms->usage, 0, 0,
1826                       targetParms->width, targetParms->height, virtAddr) != 0) {
1827                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1828                }
1829                else {
1830                    ExynosBuffer currentBuf;
1831                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1832                    if (targetParms->svcPlanes == 1) {
1833                        currentBuf.fd.extFd[0] = priv_handle->fd;
1834                        currentBuf.size.extS[0] = priv_handle->size;
1835                        currentBuf.size.extS[1] = 0;
1836                        currentBuf.size.extS[2] = 0;
1837                    } else if (targetParms->svcPlanes == 2) {
1838                        currentBuf.fd.extFd[0] = priv_handle->fd;
1839                        currentBuf.fd.extFd[1] = priv_handle->fd1;
1840
1841                    } else if (targetParms->svcPlanes == 3) {
1842                        currentBuf.fd.extFd[0] = priv_handle->fd;
1843                        currentBuf.fd.extFd[1] = priv_handle->fd1;
1844                        currentBuf.fd.extFd[2] = priv_handle->fd2;
1845                    }
1846                    for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
1847                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1848                        CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
1849                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
1850                             (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
1851                    }
1852                    targetParms->svcBufStatus[i]  = ON_SERVICE;
1853                    targetParms->svcBuffers[i]    = currentBuf;
1854                    targetParms->svcBufHandle[i]  = registeringBuffers[i];
1855                }
1856            }
1857        }
1858        targetParms->needBufferInit = true;
1859        return 0;
1860    }
1861    else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
1862        targetStream = m_streamThreads[1].get();
1863        targetStreamParms = &(m_streamThreads[1]->m_parameters);
1864    }
1865    else {
1866        ALOGE("ERR(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
1867        return 1;
1868    }
1869
1870    if (targetStream->streamType == STREAM_TYPE_DIRECT) {
1871        if (num_buffers < targetStreamParms->numHwBuffers) {
1872            ALOGE("ERR(%s): insufficient number of buffers registered (%d) < (%d)",
1873                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
1874            return 1;
1875        }
1876    }
1877    CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) planes(%d)",
1878            __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
1879            targetStreamParms->height, targetStreamParms->planes);
1880    targetStreamParms->numSvcBuffers = num_buffers;
1881    currentNode = targetStreamParms->node;
1882    currentNode->width      = targetStreamParms->width;
1883    currentNode->height     = targetStreamParms->height;
1884    currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
1885    currentNode->planes     = targetStreamParms->planes;
1886    currentNode->buffers    = targetStreamParms->numHwBuffers;
1887    cam_int_s_input(currentNode, m_camera_info.sensor_id);
1888    cam_int_s_fmt(currentNode);
1889    cam_int_reqbufs(currentNode);
1890    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
1891        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1892            i, (uint32_t)(registeringBuffers[i]));
1893                v4l2_buf.m.planes   = planes;
1894                v4l2_buf.type       = currentNode->type;
1895                v4l2_buf.memory     = currentNode->memory;
1896                v4l2_buf.index      = i;
1897                v4l2_buf.length     = currentNode->planes;
1898
1899                ExynosBuffer currentBuf;
1900                ExynosBuffer metaBuf;
1901                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1902
1903                m_getAlignedYUVSize(currentNode->format,
1904                    currentNode->width, currentNode->height, &currentBuf);
1905
1906                ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
1907                if (currentNode->planes == 1) {
1908                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1909                    currentBuf.fd.extFd[0] = priv_handle->fd;
1910                    currentBuf.size.extS[0] = priv_handle->size;
1911                    currentBuf.size.extS[1] = 0;
1912                    currentBuf.size.extS[2] = 0;
1913                } else if (currentNode->planes == 2) {
1914                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1915                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
1916                    currentBuf.fd.extFd[0] = priv_handle->fd;
1917                    currentBuf.fd.extFd[1] = priv_handle->fd1;
1918
1919                } else if (currentNode->planes == 3) {
1920                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1921                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
1922                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
1923                    currentBuf.fd.extFd[0] = priv_handle->fd;
1924                    currentBuf.fd.extFd[2] = priv_handle->fd1;
1925                    currentBuf.fd.extFd[1] = priv_handle->fd2;
1926                }
1927
1928                for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
1929                    currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
1930                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
1931                    CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
1932                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
1933                         (unsigned int)currentBuf.virt.extP[plane_index],
1934                         v4l2_buf.m.planes[plane_index].length);
1935                }
1936
1937                if (i < currentNode->buffers) {
1938
1939
1940#ifdef ENABLE_FRAME_SYNC
1941                    /* add plane for metadata*/
1942                    metaBuf.size.extS[0] = 4*1024;
1943                    allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
1944
1945                    v4l2_buf.length += targetStreamParms->metaPlanes;
1946                    v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
1947                    v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
1948
1949                    ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1950#endif
1951                    if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1952                        ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
1953                            __FUNCTION__, stream_id, currentNode->fd);
1954                    }
1955                    ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
1956                            __FUNCTION__, stream_id, currentNode->fd);
1957                    targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1958                }
1959                else {
1960                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1961                }
1962
1963                targetStreamParms->svcBuffers[i]       = currentBuf;
1964                targetStreamParms->metaBuffers[i] = metaBuf;
1965                targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
1966            }
1967
1968    ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
1969    cam_int_streamon(targetStreamParms->node);
1970    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
1971    currentNode->status = true;
1972    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
1973
1974    return 0;
1975}
1976
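/*
 * Stream release ordering (as implemented below): substreams are simply detached
 * from their parent and any temporary buffers (resize / preview-callback) freed.
 * Releasing a direct stream decrements its registered-stream count; once stream
 * thread 1 has no registered streams it is deactivated, and releasing the main SCP
 * stream additionally terminates stream thread 0 and the sensor thread, streams off
 * the capture node and, for indirect capture, returns its buffers with reqbufs(0).
 */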
1977int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
1978{
1979    StreamThread *targetStream;
1980    status_t res = NO_ERROR;
1981    CAM_LOGV("DEBUG(%s):stream id %d", __FUNCTION__, stream_id);
1982    bool releasingScpMain = false;
1983
1984    if (stream_id == STREAM_ID_PREVIEW) {
1985        targetStream = (StreamThread*)(m_streamThreads[0].get());
1986        targetStream->m_numRegisteredStream--;
1987        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
1988        releasingScpMain = true;
1989        for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
1990            for (int j = 0; j < targetStream->m_parameters.planes; j++) {
1991                ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
1992                                targetStream->m_parameters.svcBuffers[i].size.extS[j]);
1993                CAM_LOGD("DBG(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
1994                              targetStream->m_parameters.svcBuffers[i].fd.extFd[j], targetStream->m_parameters.svcBuffers[i].virt.extP[j]);
1995            }
1996        }
1997    } else if (stream_id == STREAM_ID_JPEG) {
1998        targetStream = (StreamThread*)(m_streamThreads[1].get());
1999        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2000        if (m_resizeBuf.size.s != 0) {
2001            freeCameraMemory(&m_resizeBuf, 1);
2002        }
2003        if (targetStream)
2004            res = targetStream->detachSubStream(stream_id);
2005        if (res != NO_ERROR) {
2006            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2007            return 1;
2008        }
2009        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2010        return 0;
2011    } else if (stream_id == STREAM_ID_RECORD) {
2012        targetStream = (StreamThread*)(m_streamThreads[0].get());
2013        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2014        if (targetStream)
2015            res = targetStream->detachSubStream(stream_id);
2016        else
2017            return 0;
2018    } else if (stream_id == STREAM_ID_PRVCB) {
2019        targetStream = (StreamThread*)(m_streamThreads[0].get());
2020        if (m_previewCbBuf.size.s != 0) {
2021            freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
2022        }
2023        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2024        if (targetStream)
2025            res = targetStream->detachSubStream(stream_id);
2026        else
2027            return 0;
2028    } else if (stream_id == STREAM_ID_ZSL) {
2029        targetStream = (StreamThread*)(m_streamThreads[1].get());
2030        targetStream->m_numRegisteredStream--;
2031        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2032    } else {
2033        ALOGE("ERR(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2034        return 1;
2035    }
2036
2037    if (m_streamThreads[1] != NULL && m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2038        ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2039        targetStream = (StreamThread*)(m_streamThreads[1].get());
2040        targetStream->m_releasing = true;
2041        do {
2042            CAM_LOGD("stream thread release %d", __LINE__);
2043            targetStream->release();
2044            usleep(33000);
2045        } while (targetStream->m_releasing);
2046    }
2047
2048    if (releasingScpMain || (m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2049        ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2050        targetStream = (StreamThread*)(m_streamThreads[0].get());
2051        targetStream->m_releasing = true;
2052        do {
2053            ALOGD("stream thread release %d", __LINE__);
2054            targetStream->release();
2055            usleep(33000);
2056        } while (targetStream->m_releasing);
2057        targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2058
2059        if (targetStream != NULL) {
2060            while (!targetStream->IsTerminated())
2061            {
2062                ALOGD("Waiting for stream thread to terminate");
2063                usleep(10000);
2064            }
2065            m_streamThreads[0] = NULL;
2066        }
2067
2068        if (m_sensorThread != NULL) {
2069            m_sensorThread->release();
2070            while (!m_sensorThread->IsTerminated()){
2071                ALOGD("Waiting for sensor thread to terminate");
2072                usleep(10000);
2073            }
2074        }
2075        else {
2076            ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
2077        }
2078
2079        if (m_camera_info.capture.status == true) {
2080            if (cam_int_streamoff(&(m_camera_info.capture)) < 0) {
2081                ALOGE("ERR(%s): capture stream off fail", __FUNCTION__);
2082            } else {
2083                m_camera_info.capture.status = false;
2084            }
2085            ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
2086            if (m_streamThreads[1]->streamType == STREAM_TYPE_INDIRECT) {
2087                m_camera_info.capture.buffers = 0;
2088                ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
2089                cam_int_reqbufs(&(m_camera_info.capture));
2090                ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
2091            }
2092        }
2093        m_isIspStarted = false;
2094    }
2095    ALOGV("(%s): END", __FUNCTION__);
2096    return 0;
2097}
2098
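/*
 * Reprocess stream support is minimal in this HAL: the generic
 * allocateReprocessStream() entry point is effectively a stub, while
 * allocateReprocessStreamFromStream() only records the reprocess stream id, the
 * stream_in ops and the associated output stream for later use;
 * releaseReprocessStream() clears that state again.
 */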
2099int ExynosCameraHWInterface2::allocateReprocessStream(
2100    uint32_t width, uint32_t height, uint32_t format,
2101    const camera2_stream_in_ops_t *reprocess_stream_ops,
2102    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
2103{
2104    ALOGV("DEBUG(%s):", __FUNCTION__);
2105    return 0;
2106}
2107
2108int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2109            uint32_t output_stream_id,
2110            const camera2_stream_in_ops_t *reprocess_stream_ops,
2111            // outputs
2112            uint32_t *stream_id)
2113{
2114    ALOGV("DEBUG(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2115    *stream_id = STREAM_ID_JPEG_REPROCESS;
2116
2117    m_reprocessStreamId = *stream_id;
2118    m_reprocessOps = reprocess_stream_ops;
2119    m_reprocessOutputStreamId = output_stream_id;
2120    return 0;
2121}
2122
2123int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2124{
2125    ALOGV("DEBUG(%s):", __FUNCTION__);
2126    if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2127        m_reprocessStreamId = 0;
2128        m_reprocessOps = NULL;
2129        m_reprocessOutputStreamId = 0;
2130        return 0;
2131    }
2132    return 1;
2133}
2134
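/*
 * triggerAction() dispatches the camera2 trigger ids to the AF / precapture state
 * machines: CAMERA2_TRIGGER_AUTOFOCUS -> OnAfTriggerStart(ext1),
 * CAMERA2_TRIGGER_CANCEL_AUTOFOCUS -> OnAfCancel(ext1), and
 * CAMERA2_TRIGGER_PRECAPTURE_METERING -> OnPrecaptureMeteringTriggerStart(ext1).
 * ext1 (logged as the id) is forwarded to each handler.
 */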
2135int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2136{
2137    ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2138
2139    switch (trigger_id) {
2140    case CAMERA2_TRIGGER_AUTOFOCUS:
2141        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2142        OnAfTriggerStart(ext1);
2143        break;
2144
2145    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2146        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2147        OnAfCancel(ext1);
2148        break;
2149    case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2150        ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2151        OnPrecaptureMeteringTriggerStart(ext1);
2152        break;
2153    default:
2154        break;
2155    }
2156    return 0;
2157}
2158
2159int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2160{
2161    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2162    m_notifyCb = notify_cb;
2163    m_callbackCookie = user;
2164    return 0;
2165}
2166
2167int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2168{
2169    ALOGV("DEBUG(%s):", __FUNCTION__);
2170    return 0;
2171}
2172
2173int ExynosCameraHWInterface2::dump(int fd)
2174{
2175    ALOGV("DEBUG(%s):", __FUNCTION__);
2176    return 0;
2177}
2178
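/*
 * m_getAlignedYUVSize() fills in the per-plane sizes for the given V4L2 color
 * format. Worked example (assuming ALIGN(x, a) rounds x up to a multiple of a):
 * for V4L2_PIX_FMT_NV12M at 1920x1080,
 *   extS[0] = ALIGN(1920,16) * ALIGN(1080,16) = 1920 * 1088 = 2088960 bytes (Y)
 *   extS[1] = ALIGN(2088960 / 2, 256)         = 1044480 bytes (interleaved CbCr)
 *   extS[2] = 0
 */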
2179void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2180{
2181    switch (colorFormat) {
2182    // 1p
2183    case V4L2_PIX_FMT_RGB565 :
2184    case V4L2_PIX_FMT_YUYV :
2185    case V4L2_PIX_FMT_UYVY :
2186    case V4L2_PIX_FMT_VYUY :
2187    case V4L2_PIX_FMT_YVYU :
2188        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2189        buf->size.extS[1] = 0;
2190        buf->size.extS[2] = 0;
2191        break;
2192    // 2p
2193    case V4L2_PIX_FMT_NV12 :
2194    case V4L2_PIX_FMT_NV12T :
2195    case V4L2_PIX_FMT_NV21 :
2196        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2197        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2198        buf->size.extS[2] = 0;
2199        break;
2200    case V4L2_PIX_FMT_NV12M :
2201    case V4L2_PIX_FMT_NV12MT_16X16 :
2202    case V4L2_PIX_FMT_NV21M:
2203        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2204        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2205        buf->size.extS[2] = 0;
2206        break;
2207    case V4L2_PIX_FMT_NV16 :
2208    case V4L2_PIX_FMT_NV61 :
2209        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2210        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2211        buf->size.extS[2] = 0;
2212        break;
2213     // 3p
2214    case V4L2_PIX_FMT_YUV420 :
2215    case V4L2_PIX_FMT_YVU420 :
2216        buf->size.extS[0] = (w * h);
2217        buf->size.extS[1] = (w * h) >> 2;
2218        buf->size.extS[2] = (w * h) >> 2;
2219        break;
2220    case V4L2_PIX_FMT_YUV420M:
2221    case V4L2_PIX_FMT_YVU420M :
2222    case V4L2_PIX_FMT_YUV422P :
2223        buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
2224        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2225        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2226        break;
2227    default:
2228        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2229        return;
2231    }
2232}
2233
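/*
 * m_getRatioSize() computes a centered crop of the source that matches the
 * destination aspect ratio, then shrinks it further for zoom (zoomLevel =
 * (zoom + 10) / 10) and aligns the width/height/offsets to even values.
 * Example: src 1920x1080, dst 640x480, zoom 0 -> the 4:3 destination is narrower
 * than the 16:9 source, so the width is shrunk: crop_w = 1080 * (640/480) = 1440,
 * crop_h = 1080, crop_x = (1920 - 1440) / 2 = 240, crop_y = 0.
 */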
2234bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2235                                             int  dst_w,  int   dst_h,
2236                                             int *crop_x, int *crop_y,
2237                                             int *crop_w, int *crop_h,
2238                                             int zoom)
2239{
2240    *crop_w = src_w;
2241    *crop_h = src_h;
2242
2243    if (   src_w != dst_w
2244        || src_h != dst_h) {
2245        float src_ratio = 1.0f;
2246        float dst_ratio = 1.0f;
2247
2248        // ex : 1024 / 768
2249        src_ratio = (float)src_w / (float)src_h;
2250
2251        // ex : 352  / 288
2252        dst_ratio = (float)dst_w / (float)dst_h;
2253
2254        if (dst_w * dst_h < src_w * src_h) {
2255            if (dst_ratio <= src_ratio) {
2256                // shrink w
2257                *crop_w = src_h * dst_ratio;
2258                *crop_h = src_h;
2259            } else {
2260                // shrink h
2261                *crop_w = src_w;
2262                *crop_h = src_w / dst_ratio;
2263            }
2264        } else {
2265            if (dst_ratio <= src_ratio) {
2266                // shrink w
2267                *crop_w = src_h * dst_ratio;
2268                *crop_h = src_h;
2269            } else {
2270                // shrink h
2271                *crop_w = src_w;
2272                *crop_h = src_w / dst_ratio;
2273            }
2274        }
2275    }
2276
2277    if (zoom != 0) {
2278        float zoomLevel = ((float)zoom + 10.0) / 10.0;
2279        *crop_w = (int)((float)*crop_w / zoomLevel);
2280        *crop_h = (int)((float)*crop_h / zoomLevel);
2281    }
2282
2283    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2284    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2285    if (w_align != 0) {
2286        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2287            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
2288            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2289        }
2290        else
2291            *crop_w -= w_align;
2292    }
2293
2294    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2295    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2296    if (h_align != 0) {
2297        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2298            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
2299            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2300        }
2301        else
2302            *crop_h -= h_align;
2303    }
2304
2305    *crop_x = (src_w - *crop_w) >> 1;
2306    *crop_y = (src_h - *crop_h) >> 1;
2307
2308    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2309        *crop_x -= 1;
2310
2311    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2312        *crop_y -= 1;
2313
2314    return true;
2315}
2316
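/*
 * BayerBufManager tracks the life cycle of the NUM_BAYER_BUFFERS bayer buffers as a
 * simple ring: BAYER_ON_HAL_EMPTY -> (MarkSensorEnqueue) BAYER_ON_SENSOR ->
 * (MarkSensorDequeue) BAYER_ON_HAL_FILLED -> (MarkIspEnqueue) BAYER_ON_ISP ->
 * (MarkIspDequeue) BAYER_ON_HAL_EMPTY. The enqueue/dequeue head indices advance
 * circularly through GetNextIndex(), and the numOn* counters always sum to
 * NUM_BAYER_BUFFERS.
 */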
2317BayerBufManager::BayerBufManager()
2318{
2319    ALOGV("DEBUG(%s): ", __FUNCTION__);
2320    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2321        entries[i].status = BAYER_ON_HAL_EMPTY;
2322        entries[i].reqFrameCnt = 0;
2323    }
2324    sensorEnqueueHead = 0;
2325    sensorDequeueHead = 0;
2326    ispEnqueueHead = 0;
2327    ispDequeueHead = 0;
2328    numOnSensor = 0;
2329    numOnIsp = 0;
2330    numOnHalFilled = 0;
2331    numOnHalEmpty = NUM_BAYER_BUFFERS;
2332}
2333
2334BayerBufManager::~BayerBufManager()
2335{
2336    ALOGV("%s", __FUNCTION__);
2337}
2338
2339int     BayerBufManager::GetIndexForSensorEnqueue()
2340{
2341    int ret = 0;
2342    if (numOnHalEmpty == 0)
2343        ret = -1;
2344    else
2345        ret = sensorEnqueueHead;
2346    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2347    return ret;
2348}
2349
2350int    BayerBufManager::MarkSensorEnqueue(int index)
2351{
2352    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2353
2354    // sanity check
2355    if (index != sensorEnqueueHead) {
2356        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2357        return -1;
2358    }
2359    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2360        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2361            index, entries[index].status, BAYER_ON_HAL_EMPTY);
2362        return -1;
2363    }
2364
2365    entries[index].status = BAYER_ON_SENSOR;
2366    entries[index].reqFrameCnt = 0;
2367    numOnHalEmpty--;
2368    numOnSensor++;
2369    sensorEnqueueHead = GetNextIndex(index);
2370    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2371        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2372    return 0;
2373}
2374
2375int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
2376{
2377    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2378
2379    if (entries[index].status != BAYER_ON_SENSOR) {
2380        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2381            index, entries[index].status, BAYER_ON_SENSOR);
2382        return -1;
2383    }
2384
2385    entries[index].status = BAYER_ON_HAL_FILLED;
2386    numOnHalFilled++;
2387    numOnSensor--;
2388
2389    return 0;
2390}
2391
2392int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2393{
2394    int ret = 0;
2395    if (numOnHalFilled == 0)
2396        ret = -1;
2397    else {
2398        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2399        ret = ispEnqueueHead;
2400    }
2401    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2402    return ret;
2403}
2404
2405int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2406{
2407    int ret = 0;
2408    if (numOnIsp == 0)
2409        ret = -1;
2410    else {
2411        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2412        ret = ispDequeueHead;
2413    }
2414    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2415    return ret;
2416}
2417
2418int    BayerBufManager::MarkIspEnqueue(int index)
2419{
2420    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2421
2422    // sanity check
2423    if (index != ispEnqueueHead) {
2424        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2425        return -1;
2426    }
2427    if (entries[index].status != BAYER_ON_HAL_FILLED) {
2428        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2429            index, entries[index].status, BAYER_ON_HAL_FILLED);
2430        return -1;
2431    }
2432
2433    entries[index].status = BAYER_ON_ISP;
2434    numOnHalFilled--;
2435    numOnIsp++;
2436    ispEnqueueHead = GetNextIndex(index);
2437    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2438        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2439    return 0;
2440}
2441
2442int    BayerBufManager::MarkIspDequeue(int index)
2443{
2444    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2445
2446    // sanity check
2447    if (index != ispDequeueHead) {
2448        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2449        return -1;
2450    }
2451    if (entries[index].status != BAYER_ON_ISP) {
2452        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2453            index, entries[index].status, BAYER_ON_ISP);
2454        return -1;
2455    }
2456
2457    entries[index].status = BAYER_ON_HAL_EMPTY;
2458    entries[index].reqFrameCnt = 0;
2459    numOnHalEmpty++;
2460    numOnIsp--;
2461    ispDequeueHead = GetNextIndex(index);
2462    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2463        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2464    return 0;
2465}
2466
2467int BayerBufManager::GetNumOnSensor()
2468{
2469    return numOnSensor;
2470}
2471
2472int BayerBufManager::GetNumOnHalFilled()
2473{
2474    return numOnHalFilled;
2475}
2476
2477int BayerBufManager::GetNumOnIsp()
2478{
2479    return numOnIsp;
2480}
2481
2482int     BayerBufManager::GetNextIndex(int index)
2483{
2484    index++;
2485    if (index >= NUM_BAYER_BUFFERS)
2486        index = 0;
2487
2488    return index;
2489}
2490
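/*
 * Main thread signal handling (a summary of the handler below):
 *  - SIGNAL_THREAD_RELEASE: acknowledge and terminate the thread.
 *  - SIGNAL_MAIN_REQ_Q_NOT_EMPTY: dequeue one request from the service request
 *    queue, register it with the request manager, re-signal itself while the
 *    internal queue still has room, and kick the sensor thread.
 *  - SIGNAL_MAIN_STREAM_OUTPUT_DONE: prepare the result metadata for the finished
 *    request, free the original request back to the service, dequeue an output
 *    frame buffer, append the prepared metadata and enqueue it to the frame queue.
 */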
2491void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2492{
2493    camera_metadata_t *currentRequest = NULL;
2494    camera_metadata_t *currentFrame = NULL;
2495    size_t numEntries = 0;
2496    size_t frameSize = 0;
2497    camera_metadata_t * preparedFrame = NULL;
2498    camera_metadata_t *deregisteredRequest = NULL;
2499    uint32_t currentSignal = self->GetProcessingSignal();
2500    MainThread *  selfThread      = ((MainThread*)self);
2501    int res = 0;
2502
2503    int ret;
2504
2505    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2506
2507    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2508        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2509
2510        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2511        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2512        return;
2513    }
2514
2515    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2516        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2517        if (m_requestManager->IsRequestQueueFull()==false) {
2518            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2519            if (NULL == currentRequest) {
2520                ALOGE("ERR(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
2521                m_isRequestQueueNull = true;
2522#ifdef VDIS_ENABLE
2523                if (m_requestManager->IsVdisEnable())
2524                    m_vdisBubbleCnt = 1;
2525#endif
2526            }
2527            else {
2528                m_requestManager->RegisterRequest(currentRequest);
2529
2530                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2531                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2532                if (m_requestManager->IsRequestQueueFull()==false)
2533                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2534
2535                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2536            }
2537        }
2538        else {
2539            m_isRequestQueuePending = true;
2540        }
2541    }
2542
2543    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2544        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2545        /*while (1)*/ {
2546            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2547            if (ret == false)
2548                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2549
2550            m_requestManager->DeregisterRequest(&deregisteredRequest);
2551
2552            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2553            if (ret < 0)
2554                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2555
2556            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2557            if (ret < 0)
2558                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2559
2560            if (currentFrame==NULL) {
2561                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2562            }
2563            else {
2564                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2565            }
2566            res = append_camera_metadata(currentFrame, preparedFrame);
2567            if (res==0) {
2568                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2569                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2570            }
2571            else {
2572                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2573            }
2574        }
2575        if (!m_isRequestQueueNull) {
2576            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2577        }
2578
2579        if (getInProgressCount()>0) {
2580            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2581            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2582        }
2583    }
2584    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2585    return;
2586}
2587
2588void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
2589{
2590    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2591    /* will add */
2592    return;
2593}
2594
2595
2596void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2597{
2598    ALOGD("####  common Section");
2599    ALOGD("####                 magic(%x) ",
2600        shot_ext->shot.magicNumber);
2601    ALOGD("####  ctl Section");
2602    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2603        shot_ext->shot.ctl.request.metadataMode,
2604        shot_ext->shot.ctl.lens.aperture,
2605        shot_ext->shot.ctl.sensor.exposureTime,
2606        shot_ext->shot.ctl.sensor.frameDuration,
2607        shot_ext->shot.ctl.sensor.sensitivity,
2608        shot_ext->shot.ctl.aa.awbMode);
2609
2610    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2611        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2612        shot_ext->shot.ctl.request.outputStreams[0]);
2613
2614    ALOGD("####  DM Section");
2615    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2616        shot_ext->shot.dm.request.metadataMode,
2617        shot_ext->shot.dm.lens.aperture,
2618        shot_ext->shot.dm.sensor.exposureTime,
2619        shot_ext->shot.dm.sensor.frameDuration,
2620        shot_ext->shot.dm.sensor.sensitivity,
2621        shot_ext->shot.dm.sensor.timeStamp,
2622        shot_ext->shot.dm.aa.awbMode,
2623        shot_ext->shot.dm.request.frameCount );
2624}
2625
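/*
 * m_preCaptureSetter() drives the flash pre-capture/capture state machine by
 * patching the per-frame shot controls: the ON/ON_WAIT/ON_DONE states fire the
 * pre-flash and wait for a decision, AUTO_AE_AWB_LOCK and the *_WAIT states lock
 * AE/AWB, and the CAPTURE states suppress SCC/SCP output until the flash is stable,
 * then request exactly one flash-lit capture (CAPTURE_JPEG) before turning the
 * flash off in CAPTURE_END. The listener functions below advance these states from
 * the sensor/ISP dynamic metadata (flashMode, decision, awbMode, firingStable),
 * with m_flashTimeOut as a frame-count safety net.
 */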
2626void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2627{
2628    // Flash
2629    switch (m_ctlInfo.flash.m_flashCnt) {
2630    case IS_FLASH_STATE_ON:
2631        CAM_LOGV("(%s): [Flash] Flash ON for Capture", __FUNCTION__);
2632        if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2633            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2634            m_ctlInfo.flash.m_flashTimeOut = 5;
2635        } else
2636            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2637        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2638        break;
2639    case IS_FLASH_STATE_ON_WAIT:
2640        break;
2641    case IS_FLASH_STATE_ON_DONE:
2642        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2643            // auto transition at pre-capture trigger
2644            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2645        break;
2646    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2647        CAM_LOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK", __FUNCTION__);
2648        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2649        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2650        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2651        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2652        break;
2653    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2654    case IS_FLASH_STATE_AUTO_WAIT:
2655        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2656        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2657        break;
2658    case IS_FLASH_STATE_AUTO_DONE:
2659        CAM_LOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE", __FUNCTION__);
2660        break;
2661    case IS_FLASH_STATE_AUTO_OFF:
2662        CAM_LOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear", __FUNCTION__);
2663        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2664        m_ctlInfo.flash.m_afFlashDoneFlg = false;
2665        m_ctlInfo.flash.m_flashEnableFlg = false;
2666        break;
2667    case IS_FLASH_STATE_CAPTURE:
2668        CAM_LOGV("(%s): [Flash] IS_FLASH_CAPTURE", __FUNCTION__);
2669        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2670        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2671        shot_ext->request_scc = 0;
2672        shot_ext->request_scp = 0;
2673        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2674        break;
2675    case IS_FLASH_STATE_CAPTURE_WAIT:
2676        shot_ext->request_scc = 0;
2677        shot_ext->request_scp = 0;
2678        break;
2679    case IS_FLASH_STATE_CAPTURE_JPEG:
2680        CAM_LOGE("(%s): [Flash] Flash Capture  (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2681        shot_ext->request_scc = 1;
2682        shot_ext->request_scp = 1;
2683        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
2684        break;
2685    case IS_FLASH_STATE_CAPTURE_END:
2686        CAM_LOGV("(%s): [Flash] Flash Capture END", __FUNCTION__);
2687        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2688        shot_ext->request_scc = 0;
2689        shot_ext->request_scp = 0;
2690        m_ctlInfo.flash.m_flashEnableFlg = false;
2691        m_ctlInfo.flash.m_flashCnt = 0;
2692        m_ctlInfo.flash.m_afFlashDoneFlg= false;
2693        break;
2694    default:
2695        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2696    }
2697}
2698
2699void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2700{
2701    // Flash
2702    switch (m_ctlInfo.flash.m_flashCnt) {
2703    case IS_FLASH_STATE_AUTO_WAIT:
2704        if (m_ctlInfo.flash.m_flashDecisionResult) {
2705            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2706                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2707                CAM_LOGV("(%s): [Flash] Listener : AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2708            } else {
2709                CAM_LOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2710            }
2711        } else {
2712            // If the flash was not fired in auto mode, skip the flash auto control sequence
2713            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2714            CAM_LOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
2715        }
2716        break;
2717    }
2718}
2719
2720void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
2721{
2722    // Flash
2723    switch (m_ctlInfo.flash.m_flashCnt) {
2724    case IS_FLASH_STATE_ON_WAIT:
2725        if (shot_ext->shot.dm.flash.decision > 0) {
2726            // store decision result to skip capture sequence
2727            CAM_LOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
2728            if (shot_ext->shot.dm.flash.decision == 2)
2729                m_ctlInfo.flash.m_flashDecisionResult = false;
2730            else
2731                m_ctlInfo.flash.m_flashDecisionResult = true;
2732            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2733        } else {
2734            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2735                CAM_LOGV("(%s): [Flash] Timeout in IS_FLASH_ON, forcing decision to false", __FUNCTION__);
2736                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2737                m_ctlInfo.flash.m_flashDecisionResult = false;
2738            } else {
2739                m_ctlInfo.flash.m_flashTimeOut--;
2740            }
2741        }
2742        break;
2743    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2744        if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
2745            CAM_LOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
2746            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
2747        } else {
2748            CAM_LOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
2749        }
2750        break;
2751    case IS_FLASH_STATE_CAPTURE_WAIT:
2752        if (m_ctlInfo.flash.m_flashDecisionResult) {
2753            if (shot_ext->shot.dm.flash.firingStable) {
2754                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2755            } else {
2756                if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2757                    ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
2758                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2759                } else {
2760                    ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
2761                    m_ctlInfo.flash.m_flashTimeOut--;
2762                }
2763            }
2764        } else {
2765            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2766        }
2767        break;
2768    }
2769}
2770
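/*
 * Sensor thread (the portion shown here): on SIGNAL_THREAD_RELEASE it streams off
 * and releases the sensor and ISP nodes, disables the IS stream and terminates. On
 * SIGNAL_SENSOR_START_REQ_PROCESSING it dequeues a sensor buffer, determines the
 * request frame count it belongs to (with overrides for night capture and the
 * flash capture sequence), registers the timestamp and updates the ISP parameters
 * for that request, services any deferred AF trigger, and derives the zoom crop
 * from the requested scaler cropRegion.
 */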
2771void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
2772{
2773    uint32_t        currentSignal = self->GetProcessingSignal();
2774    SensorThread *  selfThread      = ((SensorThread*)self);
2775    int index;
2776    int index_isp;
2777    status_t res;
2778    nsecs_t frameTime;
2779    int bayersOnSensor = 0, bayersOnIsp = 0;
2780    int j = 0;
2781    bool isCapture = false;
2782    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
2783
2784    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2785        CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2786
2787        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
2788        cam_int_streamoff(&(m_camera_info.sensor));
2789        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
2790
2791        m_camera_info.sensor.buffers = 0;
2792        ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
2793        cam_int_reqbufs(&(m_camera_info.sensor));
2794        ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
2795        m_camera_info.sensor.status = false;
2796
2797        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
2798        isp_int_streamoff(&(m_camera_info.isp));
2799        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
2800
2801        m_camera_info.isp.buffers = 0;
2802        ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
2803        cam_int_reqbufs(&(m_camera_info.isp));
2804        ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
2805
2806        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
2807
2808        m_requestManager->releaseSensorQ();
2809        m_requestManager->ResetEntry();
2810        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2811        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2812        return;
2813    }
2814
2815    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
2816    {
2817        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
2818        int targetStreamIndex = 0, i=0;
2819        int matchedFrameCnt = -1, processingReqIndex;
2820        struct camera2_shot_ext *shot_ext;
2821        struct camera2_shot_ext *shot_ext_capture;
2822        bool triggered = false;
2823        int afMode;
2824
2825        /* dqbuf from sensor */
2826        ALOGV("Sensor DQbuf start");
2827        index = cam_int_dqbuf(&(m_camera_info.sensor));
2828        m_requestManager->pushSensorQ(index);
2829        ALOGV("Sensor DQbuf done(%d)", index);
2830        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2831
2832        if (m_nightCaptureCnt != 0) {
2833            matchedFrameCnt = m_nightCaptureFrameCnt;
2834        } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
2835            matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
2836            ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
2837        } else {
2838            matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
2839        }
2840
2841#ifdef VDIS_ENABLE
2842        if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
2843            matchedFrameCnt = m_vdisDupFrame;
2844        }
2845#endif
2846
2847        if (matchedFrameCnt != -1) {
2848#ifdef VDIS_ENABLE
2849            if (m_vdisBubbleCnt == 0) {
2850                frameTime = systemTime();
2851                m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
2852                m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
2853            }
2854#else
2855            frameTime = systemTime();
2856            m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
2857            m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
2858#endif
2859
2860            if (m_afModeWaitingCnt != 0) {
2861                ALOGV("### Af Trigger pulled, waiting for mode change cnt(%d) ", m_afModeWaitingCnt);
2862                m_afModeWaitingCnt --;
2863                if (m_afModeWaitingCnt == 1) {
2864                    m_afModeWaitingCnt = 0;
2865                    OnAfTrigger(m_afPendingTriggerId);
2866                }
2867            }
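            // Digital zoom (a reading of the math below, not documented in the original): derive the
            // zoom ratio from the requested crop width versus the full sensor width, then rebuild a
            // centered crop region that keeps the preview stream's aspect ratio; the one-pixel nudge
            // keeps 2 * left + width consistent with the sensor width after float-to-int truncation.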
2868            m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
2869            float zoomLeft, zoomTop, zoomWidth, zoomHeight;
2870            int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
2871
2872            m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
2873                           m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
2874                           &crop_x, &crop_y,
2875                           &crop_w, &crop_h,
2876                           0);
2877
2878            if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
2879                zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
2880                zoomHeight = zoomWidth *
2881                        m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
2882            } else {
2883                zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
2884                zoomWidth = zoomHeight *
2885                        m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
2886            }
2887            zoomLeft = (crop_w - zoomWidth) / 2;
2888            zoomTop = (crop_h - zoomHeight) / 2;
2889
2890            int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth };
2891
2892            if (new_cropRegion[0] * 2 + new_cropRegion[2] > (int32_t)m_camera2->getSensorW())
2893                new_cropRegion[2]--;
2894            else if (new_cropRegion[0] * 2 + new_cropRegion[2] < (int32_t)m_camera2->getSensorW())
2895                new_cropRegion[2]++;
2896
2897            shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
2898            shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
2899            shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
2900            if (m_IsAfModeUpdateRequired) {
2901                ALOGE("### AF Mode change(Mode %d) ", m_afMode);
2902                shot_ext->shot.ctl.aa.afMode = m_afMode;
2903                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2904                    ALOGE("### With Automatic trigger for continuous modes");
2905                    m_afState = HAL_AFSTATE_STARTED;
2906                    shot_ext->shot.ctl.aa.afTrigger = 1;
2907                    triggered = true;
2908                }
2909                m_IsAfModeUpdateRequired = false;
2910                // support infinity focus mode
2911                if ((m_afMode == AA_AFMODE_OFF) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
2912                    shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
2913                    shot_ext->shot.ctl.aa.afTrigger = 1;
2914                    triggered = true;
2915                }
2916                if (m_afMode2 != NO_CHANGE) {
2917                    enum aa_afmode tempAfMode = m_afMode2;
2918                    m_afMode2 = NO_CHANGE;
2919                    SetAfMode(tempAfMode);
2920                }
2921            }
2922            else {
2923                shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
2924            }
2925            if (m_IsAfTriggerRequired) {
2926                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
2927                    if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
2928                        // Flash is enabled and start AF
2929                        if (m_afState == HAL_AFSTATE_SCANNING) {
2930                            ALOGE("(%s): restarting trigger ", __FUNCTION__);
2931                        } else {
2932                            if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2933                                ALOGE("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2934                            else
2935                                m_afState = HAL_AFSTATE_STARTED;
2936                        }
2937                        ALOGE("### AF Triggering with mode (%d)", m_afMode);
2938                        shot_ext->shot.ctl.aa.afTrigger = 1;
2939                        shot_ext->shot.ctl.aa.afMode = m_afMode;
2940                        m_IsAfTriggerRequired = false;
2941                    }
2942                } else {
2943                    ALOGE("### AF Triggering with mode (%d)", m_afMode);
2944                    if (m_afState == HAL_AFSTATE_SCANNING) {
2945                        ALOGE("(%s): restarting trigger ", __FUNCTION__);
2946                    } else {
2947                        if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2948                            ALOGE("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2949                        else
2950                            m_afState = HAL_AFSTATE_STARTED;
2951                    }
2952                    shot_ext->shot.ctl.aa.afTrigger = 1;
2953                    shot_ext->shot.ctl.aa.afMode = m_afMode;
2954                    m_IsAfTriggerRequired = false;
2955                }
2956            }
2957            else {
2958                shot_ext->shot.ctl.aa.afTrigger = 0;
2959            }
2960
2961            if (m_wideAspect) {
2962                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
2963                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
2964                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2965            } else {
2966                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
2967            }
2968            if (triggered)
2969                shot_ext->shot.ctl.aa.afTrigger = 1;
2970
2971            // TODO : check collision with AFMode Update
2972            if (m_IsAfLockRequired) {
2973                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
2974                m_IsAfLockRequired = false;
2975            }
2976            ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
2977                index,
2978                shot_ext->shot.ctl.request.frameCount,
2979                shot_ext->request_scp,
2980                shot_ext->request_scc,
2981                shot_ext->dis_bypass, sizeof(camera2_shot));
2982            if (0 == shot_ext->shot.ctl.aa.afRegions[0] && 0 == shot_ext->shot.ctl.aa.afRegions[1]
2983                && 0 == shot_ext->shot.ctl.aa.afRegions[2] && 0 == shot_ext->shot.ctl.aa.afRegions[3]) {
2984                ALOGV("(%s): AF region resetting", __FUNCTION__);
2985                lastAfRegion[0] = 0;
2986                lastAfRegion[1] = 0;
2987                lastAfRegion[2] = 0;
2988                lastAfRegion[3] = 0;
2989            }
2990            else {
2991                if (!(lastAfRegion[0] == shot_ext->shot.ctl.aa.afRegions[0] && lastAfRegion[1] == shot_ext->shot.ctl.aa.afRegions[1]
2992                        && lastAfRegion[2] == shot_ext->shot.ctl.aa.afRegions[2] && lastAfRegion[3] == shot_ext->shot.ctl.aa.afRegions[3])) {
2993                    ALOGE("(%s): AF region changed : triggering", __FUNCTION__);
2994                    shot_ext->shot.ctl.aa.afTrigger = 1;
2995                    shot_ext->shot.ctl.aa.afMode = m_afMode;
2996                    m_afState = HAL_AFSTATE_STARTED;
2997                    lastAfRegion[0] = shot_ext->shot.ctl.aa.afRegions[0];
2998                    lastAfRegion[1] = shot_ext->shot.ctl.aa.afRegions[1];
2999                    lastAfRegion[2] = shot_ext->shot.ctl.aa.afRegions[2];
3000                    lastAfRegion[3] = shot_ext->shot.ctl.aa.afRegions[3];
3001                }
3002                // clear region infos in case of CAF mode
3003                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3004                    shot_ext->shot.ctl.aa.afRegions[0] = lastAfRegion[0] = 0;
3005                    shot_ext->shot.ctl.aa.afRegions[1] = lastAfRegion[1] = 0;
3006                    shot_ext->shot.ctl.aa.afRegions[2] = lastAfRegion[2] = 0;
3007                    shot_ext->shot.ctl.aa.afRegions[3] = lastAfRegion[3] = 0;
3008                }
3009            }
3010            if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
3011                    && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
3012                shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
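            // Night-capture sequence (countdown values inferred from the branches below): a still
            // capture in NIGHT scene mode starts a four-frame countdown (m_nightCaptureCnt 4 -> 1)
            // that lowers the minimum AE fps and only requests the SCC capture output on the last frame.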
3013            if (m_nightCaptureCnt == 0) {
3014                if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
3015                        && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3016                    shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3017                    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3018                    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3019                    m_nightCaptureCnt = 4;
3020                    m_nightCaptureFrameCnt = matchedFrameCnt;
3021                    shot_ext->request_scc = 0;
3022                }
3023            }
3024            else if (m_nightCaptureCnt == 1) {
3025                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3026                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3027                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3028                m_nightCaptureCnt--;
3029                shot_ext->request_scc = 1;
3030            }
3031            else if (m_nightCaptureCnt == 2) {
3032                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3033                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3034                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3035                m_nightCaptureCnt--;
3036                shot_ext->request_scc = 0;
3037            }
3038            else if (m_nightCaptureCnt == 3 || m_nightCaptureCnt == 4) {
3039                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3040                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3041                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3042                m_nightCaptureCnt--;
3043                shot_ext->request_scc = 0;
3044            }
3045
3046            // Flash mode
3047            // Hold the pending request_scc = 1 capture and skip it for now when flash is enabled, so the flash sequence can run first
3048            if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
3049                    && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3050                    && (m_cameraId == 0)) {
3051                if (!m_ctlInfo.flash.m_flashDecisionResult) {
3052                    m_ctlInfo.flash.m_flashEnableFlg = false;
3053                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3054                    m_ctlInfo.flash.m_flashCnt = 0;
3055                } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) || (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3056                    ALOGE("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3057                    shot_ext->request_scc = 0;
3058                    m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3059                    m_ctlInfo.flash.m_flashEnableFlg = true;
3060                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3061                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3062                }
3063            }
3064
3065            // TODO : set torch mode for video recording. need to find proper position.
3066            // m_wideAspect will be changed to a recording hint
3067            if ((shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_SINGLE) && m_wideAspect) {
3068                shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_TORCH;
3069                shot_ext->shot.ctl.flash.firingPower = 10;
3070                m_ctlInfo.flash.m_flashTorchMode = true;
3071            } else if (m_wideAspect){
3072                shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3073                shot_ext->shot.ctl.flash.firingPower = 0;
3074                m_ctlInfo.flash.m_flashTorchMode = false;
3075            } else {
3076                if (m_ctlInfo.flash.m_flashTorchMode) {
3077                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3078                    shot_ext->shot.ctl.flash.firingPower = 0;
3079                    m_ctlInfo.flash.m_flashTorchMode = false;
3080                } else {
3081                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3082                }
3083            }
3084
3085            if (m_ctlInfo.flash.m_flashEnableFlg) {
3086                m_preCaptureListenerSensor(shot_ext);
3087                m_preCaptureSetter(shot_ext);
3088            }
3089
3090            if (shot_ext->isReprocessing) {
3091                ALOGE("(%s): Reprocess request ", __FUNCTION__);
3092                m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3093                shot_ext->request_scp = 0;
3094                shot_ext->request_scc = 0;
3095                m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3096                memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
3097                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3098            }
3099            ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3100            (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3101            (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3102            (int)(shot_ext->shot.ctl.aa.afTrigger));
3103
3104#ifdef VDIS_ENABLE
3105            if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
3106                shot_ext->dis_bypass = 1;
3107                shot_ext->request_scp = 0;
3108                shot_ext->request_scc = 0;
3109                m_vdisBubbleCnt--;
3110                matchedFrameCnt = -1;
3111            } else {
3112                m_vdisDupFrame = matchedFrameCnt;
3113            }
3114#endif
3115
3116            uint32_t current_scp = shot_ext->request_scp;
3117
3118            if (shot_ext->shot.dm.request.frameCount == 0) {
3119                CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3120            }
3121
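            // Queue the sensor buffer, now carrying the updated per-frame controls, to the ISP and
            // wait for the processed result; its embedded metadata holds the dynamic (dm) values for
            // this frame. The 10ms sleep appears intended to pace the qbuf/dqbuf round trip.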
3122            cam_int_qbuf(&(m_camera_info.isp), index);
3123
3124            usleep(10000);
3125
3126            ALOGV("### isp DQBUF start");
3127            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3128
3129            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3130
3131            if (m_ctlInfo.flash.m_flashEnableFlg)
3132                m_preCaptureListenerISP(shot_ext);
3133
3134            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) shot_size(%d)",
3135                index,
3136                shot_ext->shot.ctl.request.frameCount,
3137                shot_ext->request_scp,
3138                shot_ext->request_scc,
3139                shot_ext->dis_bypass, sizeof(camera2_shot));
3140            ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3141                (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3142                (int)(shot_ext->shot.dm.aa.awbMode),
3143                (int)(shot_ext->shot.dm.aa.afMode));
3144
3145            m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3146
3147            if (current_scp) {
3148                ALOGV("send SIGNAL_STREAM_DATA_COMING(return scp : %d)", shot_ext->request_scp);
3149                m_scpOutputSignalCnt++;
3150                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3151            }
3152
3153            if (current_scp != shot_ext->request_scp) {
3154                CAM_LOGW("WARN(%s): scp frame drop1 request_scp(%d to %d)",
3155                                __FUNCTION__, current_scp, shot_ext->request_scp);
3156            }
3157            if (shot_ext->request_scc) {
3158                memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
3159                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3160            }
3161
3162            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3163               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3164            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3165                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3166                m_scp_closed = true;
3167            }
3168            else
3169                m_scp_closed = false;
3170
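            // When face detection ran, scale the reported face rectangles from preview-stream
            // coordinates up to sensor coordinates (multiplying by sensorW/H over the stream size),
            // apparently so they match the coordinate space the framework expects.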
3171            if (!shot_ext->fd_bypass) {
3172                /* FD orientation axis transformation */
3173                for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3174                    if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3175                        shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
3176                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][0])
3177                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3178                    if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3179                        shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
3180                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][1])
3181                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3182                    if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3183                        shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
3184                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][2])
3185                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3186                    if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3187                        shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
3188                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][3])
3189                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3190                }
3191            }
3192            if (m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3193                m_requestManager->ApplyDynamicMetadata(shot_ext);
3194            }
3195            OnAfNotification(shot_ext->shot.dm.aa.afState);
3196            OnPrecaptureMeteringNotification();
3197        }
3198
3199        index = m_requestManager->popSensorQ();
3200        if(index < 0){
3201            ALOGE("sensorQ is empty");
3202            return;
3203        }
3204
3205        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]), &afMode);
3206        if (processingReqIndex != -1)
3207            SetAfMode((enum aa_afmode)afMode);
3208
3209
3210        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3211        if (m_scp_closing || m_scp_closed) {
3212            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3213            shot_ext->request_scc = 0;
3214            shot_ext->request_scp = 0;
3215            shot_ext->request_sensor = 0;
3216        }
3217        cam_int_qbuf(&(m_camera_info.sensor), index);
3218        ALOGV("Sensor Qbuf done(%d)", index);
3219
3220        if (!m_scp_closing
3221            && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3222            ALOGV("make bubble shot: matchedFrameCnt(%d) processingReqIndex(%d)",
3223                                    matchedFrameCnt, processingReqIndex);
3224            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3225        }
3226    }
3227    return;
3228}
3229
3230void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3231{
3232    uint32_t                currentSignal   = self->GetProcessingSignal();
3233    StreamThread *          selfThread      = ((StreamThread*)self);
3234    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3235    node_info_t             *currentNode    = selfStreamParms->node;
3236    substream_parameters_t  *subParms;
3237    buffer_handle_t * buf = NULL;
3238    status_t res;
3239    void *virtAddr[3];
3240    int i, j;
3241    int index;
3242    nsecs_t timestamp;
3243
3244    if (!(selfThread->m_isBufferInit))
3245    {
3246        for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3247            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3248            if (res != NO_ERROR || buf == NULL) {
3249                ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3250                return;
3251            }
3252            ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3253               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3254
3255            index = selfThread->findBufferIndex(buf);
3256            if (index == -1) {
3257                ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3258            }
3259            else {
3260                ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3261                    __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3262                if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3263                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3264                else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3265                    selfStreamParms->svcBufStatus[index] = ON_HAL;
3266                else {
3267                    ALOGV("DBG(%s): buffer status abnormal (%d) "
3268                        , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3269                }
3270                selfStreamParms->numSvcBufsInHal++;
3271            }
3272            selfStreamParms->bufIndex = 0;
3273        }
3274        selfThread->m_isBufferInit = true;
3275    }
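    // For each attached substream still needing initialization, pull all of its service buffers into
    // the HAL once, lock them through gralloc to learn their plane addresses, and match them against
    // the svcBufHandle table; JPEG and preview-callback substreams also get their scratch buffers
    // (m_resizeBuf / m_previewCbBuf) allocated here.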
3276    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3277        if (selfThread->m_attachedSubStreams[i].streamId == -1)
3278            continue;
3279
3280        subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3281        if (subParms->type && subParms->needBufferInit) {
3282            ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3283                __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3284            int checkingIndex = 0;
3285            bool found = false;
3286            for (j = 0 ; j < subParms->numSvcBuffers; j++) {
3287                res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3288                if (res != NO_ERROR || buf == NULL) {
3289                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3290                    return;
3291                }
3292                subParms->numSvcBufsInHal++;
3293                ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3294                   subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3295
3296                if (m_grallocHal->lock(m_grallocHal, *buf,
3297                       subParms->usage, 0, 0,
3298                       subParms->width, subParms->height, virtAddr) != 0) {
3299                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3300                }
3301                else {
3302                      ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3303                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3304                }
3305                found = false;
3306                for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3307                    if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3308                        found = true;
3309                        break;
3310                    }
3311                }
3312                ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3313                if (!found) break;
3314
3315                index = checkingIndex;
3316
3317                if (index == -1) {
3318                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3319                }
3320                else {
3321                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3322                        __FUNCTION__, index, subParms->svcBufStatus[index]);
3323                    if (subParms->svcBufStatus[index]== ON_SERVICE)
3324                        subParms->svcBufStatus[index] = ON_HAL;
3325                    else {
3326                        ALOGV("DBG(%s): buffer status abnormal (%d) "
3327                            , __FUNCTION__, subParms->svcBufStatus[index]);
3328                    }
3329                    if (*buf != subParms->svcBufHandle[index])
3330                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3331                    else
3332                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3333                }
3334                subParms->svcBufIndex = 0;
3335            }
3336            if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3337                m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3338                m_resizeBuf.size.extS[1] = 0;
3339                m_resizeBuf.size.extS[2] = 0;
3340
3341                if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3342                    ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3343                }
3344            }
3345            if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3346                m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3347                subParms->height, &m_previewCbBuf);
3348
3349                if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3350                    ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3351                }
3352            }
3353            subParms->needBufferInit= false;
3354        }
3355    }
3356}
3357
3358void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3359{
3360    StreamThread *          selfThread      = ((StreamThread*)self);
3361    ALOGV("DEBUG(%s): ", __FUNCTION__ );
3362    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3363    selfThread->m_isBufferInit = false;
3364    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3365        selfThread->m_attachedSubStreams[i].streamId    = -1;
3366        selfThread->m_attachedSubStreams[i].priority    = 0;
3367    }
3368    return;
3369}
3370
3371int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3372    int stream_id, nsecs_t frameTimeStamp)
3373{
3374    substream_parameters_t  *subParms = &m_subStreams[stream_id];
3375
3376    switch (stream_id) {
3377
3378    case STREAM_ID_JPEG:
3379        return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3380
3381    case STREAM_ID_RECORD:
3382        return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3383
3384    case STREAM_ID_PRVCB:
3385        return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3386
3387    default:
3388        return 0;
3389    }
3390}
3391void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3392{
3393    uint32_t                currentSignal   = self->GetProcessingSignal();
3394    StreamThread *          selfThread      = ((StreamThread*)self);
3395    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3396    node_info_t             *currentNode    = selfStreamParms->node;
3397    int i = 0;
3398    nsecs_t frameTimeStamp;
3399
3400    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3401        CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3402
3403        if (selfThread->m_isBufferInit) {
3404            ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3405                selfThread->m_index, currentNode->fd);
3406            if (cam_int_streamoff(currentNode) < 0 ) {
3407                ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3408            }
3409            ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3410                    selfThread->m_index, currentNode->fd);
3411            currentNode->buffers = 0;
3412            cam_int_reqbufs(currentNode);
3413            ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3414                    selfThread->m_index, currentNode->fd);
3415        }
3416#ifdef ENABLE_FRAME_SYNC
3417        // free metabuffers
3418        for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3419            if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3420                freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3421                selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3422                selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3423            }
3424#endif
3425        selfThread->m_isBufferInit = false;
3426        selfThread->m_releasing = false;
3427        selfThread->m_activated = false;
3428        ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3429        return;
3430    }
3431    if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3432        status_t    res;
3433        buffer_handle_t * buf = NULL;
3434        bool found = false;
3435        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3436            __FUNCTION__, selfThread->m_index);
3437        res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3438        if (res != NO_ERROR || buf == NULL) {
3439            ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3440            return;
3441        }
3442        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3443        int checkingIndex = 0;
3444        for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3445            if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3446                found = true;
3447                break;
3448            }
3449        }
3450        ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3451            __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3452
3453        if (!found) return;
3454
3455        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3456            if (selfThread->m_attachedSubStreams[i].streamId == -1)
3457                continue;
3458
3459#ifdef ENABLE_FRAME_SYNC
3460            // TODO: check real timestamp
3461            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3462            m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3463#else
3464            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3465#endif
3466            if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3467                m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3468                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3469        }
3470
3471        res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3472        if (res != NO_ERROR) {
3473            ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3474            return;
3475        }
3476        ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
3477            __FUNCTION__,selfThread->m_index);
3478
3479        return;
3480    }
3481    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3482        buffer_handle_t * buf = NULL;
3483        status_t res = 0;
3484        int i, j;
3485        int index;
3486        nsecs_t timestamp;
3487#ifdef ENABLE_FRAME_SYNC
3488        camera2_stream *frame;
3489#endif
3490        int numOfUndqbuf = 0;
3491
3492        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3493
3494        m_streamBufferInit(self);
3495
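        // Dequeue one processed frame from the driver node (the do { } while (0) wrapper runs this
        // block exactly once), route it to every attached substream this request asked for, then
        // either enqueue it to the service or cancel it while frames are still being skipped.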
3496        do {
3497            ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3498                selfThread->m_index, selfThread->streamType);
3499
3500#ifdef ENABLE_FRAME_SYNC
3501            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3502            frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3503            ALOGV("frame count streamthread[%d] : %d", selfThread->m_index, frame->rcount);
3504            frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3505#else
3506            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3507            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3508#endif
3509            ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
3510                selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3511
3512            if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
3513                ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3514                       __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3515            selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3516
3517            for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3518                if (selfThread->m_attachedSubStreams[i].streamId == -1)
3519                    continue;
3520                if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3521#ifdef ENABLE_FRAME_SYNC
3522                    m_requestManager->NotifyStreamOutput(frame->rcount);
3523#endif
3524                    m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3525                        selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3526                }
3527            }
3528
3529#ifdef ENABLE_FRAME_SYNC
3530            m_requestManager->NotifyStreamOutput(frame->rcount);
3531#endif
3532            if (m_requestManager->GetSkipCnt() <= 0) {
3533                if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3534#ifdef ENABLE_FRAME_SYNC
3535                    ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3536#else
3537                    ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3538#endif
3539                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3540                            frameTimeStamp,
3541                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3542                }
3543                else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3544#ifdef ENABLE_FRAME_SYNC
3545                    ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
3546#else
3547                    ALOGV("** SCC output (frameCnt:%d)", m_requestManager->GetFrameIndex());
3548#endif
3549                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3550                                frameTimeStamp,
3551                                &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3552                }
3553                ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3554            }
3555            else {
3556                res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3557                        &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3558                ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3559            }
3560            if (res == 0) {
3561                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3562                selfStreamParms->numSvcBufsInHal--;
3563            }
3564            else {
3565                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3566            }
3567
3568        }
3569        while(0);
3570
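        // Refill: dequeue buffers back from the service until the HAL again holds all but one of its
        // service buffers, queueing each recovered buffer to the driver node (with the extra metadata
        // plane when ENABLE_FRAME_SYNC is defined).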
3571        while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers - 1) {
3572            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3573            if (res != NO_ERROR || buf == NULL) {
3574                ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3575                break;
3576            }
3577            selfStreamParms->numSvcBufsInHal++;
3578            ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3579                selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3580               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3581            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3582
3583            bool found = false;
3584            int checkingIndex = 0;
3585            for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3586                if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3587                    found = true;
3588                    break;
3589                }
3590            }
3591            if (!found) break;
3592            selfStreamParms->bufIndex = checkingIndex;
3593            if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
3594                uint32_t    plane_index = 0;
3595                ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
3596                struct v4l2_buffer v4l2_buf;
3597                struct v4l2_plane  planes[VIDEO_MAX_PLANES];
3598
3599                v4l2_buf.m.planes   = planes;
3600                v4l2_buf.type       = currentNode->type;
3601                v4l2_buf.memory     = currentNode->memory;
3602                v4l2_buf.index      = selfStreamParms->bufIndex;
3603                v4l2_buf.length     = currentNode->planes;
3604
3605                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
3606                v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
3607                v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
3608                for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
3609                    v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
3610                }
3611#ifdef ENABLE_FRAME_SYNC
3612                /* add plane for metadata*/
3613                v4l2_buf.length += selfStreamParms->metaPlanes;
3614                v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
3615                v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
3616#endif
3617                if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
3618                    ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
3619                        __FUNCTION__, selfThread->m_index);
3620                    return;
3621                }
3622                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
3623                ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
3624                    __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
3625            }
3626        }
3627
3628        ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3629    }
3630    return;
3631}
3632
3633void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
3634{
3635    uint32_t                currentSignal   = self->GetProcessingSignal();
3636    StreamThread *          selfThread      = ((StreamThread*)self);
3637    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3638    node_info_t             *currentNode    = selfStreamParms->node;
3639
3640
3641    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3642        CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3643
3644        if (selfThread->m_isBufferInit) {
3645            if (currentNode->fd == m_camera_info.capture.fd) {
3646                if (m_camera_info.capture.status == true) {
3647                    ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
3648                    selfThread->m_index, currentNode->fd);
3649                    if (cam_int_streamoff(currentNode) < 0 ){
3650                        ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3651                    } else {
3652                        m_camera_info.capture.status = false;
3653                    }
3654                }
3655            } else {
3656                ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
3657                selfThread->m_index, currentNode->fd);
3658                if (cam_int_streamoff(currentNode) < 0 ){
3659                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3660                }
3661            }
3662            ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
3663            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
3664                    selfThread->m_index, currentNode->fd);
3665            currentNode->buffers = 0;
3666            cam_int_reqbufs(currentNode);
3667            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
3668                    selfThread->m_index, currentNode->fd);
3669        }
3670
3671        selfThread->m_isBufferInit = false;
3672        selfThread->m_releasing = false;
3673        selfThread->m_activated = false;
3674        ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3675        return;
3676    }
3677
3678    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3679#ifdef ENABLE_FRAME_SYNC
3680        camera2_stream *frame;
3681#endif
3682        nsecs_t frameTimeStamp;
3683
3684        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
3685            __FUNCTION__,selfThread->m_index);
3686
3687        m_streamBufferInit(self);
3688
3689        ALOGD("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
3690        selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3691        ALOGD("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
3692            selfThread->m_index, selfStreamParms->bufIndex);
3693
3694#ifdef ENABLE_FRAME_SYNC
3695        frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
3696        ALOGV("frame count(SCC) : %d",  frame->rcount);
3697        frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3698#else
3699        frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3700#endif
3701
3702        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3703            if (selfThread->m_attachedSubStreams[i].streamId == -1)
3704                continue;
3705            if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3706#ifdef ENABLE_FRAME_SYNC
3707                m_requestManager->NotifyStreamOutput(frame->rcount);
3708#endif
3709                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
3710                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3711            }
3712        }
3713        cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
3714        ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
3715
3716
3717
3718        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
3719            __FUNCTION__, selfThread->m_index);
3720    }
3721
3722
3723    return;
3724}
3725
3726void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
3727{
3728    uint32_t                currentSignal   = self->GetProcessingSignal();
3729    StreamThread *          selfThread      = ((StreamThread*)self);
3730    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3731    node_info_t             *currentNode    = selfStreamParms->node;
3732
3733    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
3734
3735    // Do something in Child thread handler
3736    // Should change function to class that inherited StreamThread class to support dynamic stream allocation
3737    if (selfThread->streamType == STREAM_TYPE_DIRECT) {
3738        m_streamFunc_direct(self);
3739    } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
3740        m_streamFunc_indirect(self);
3741    }
3742
3743    return;
3744}
3745int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
3746{
3747    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
3748    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
3749    status_t    res;
3750    ExynosRect jpegRect;
3751    bool found = false;
3752    int pictureW, pictureH, pictureFramesize = 0;
3753    int pictureFormat;
3754    int cropX, cropY, cropW, cropH = 0;
3755    ExynosBuffer resizeBufInfo;
3756    ExynosRect   m_jpegPictureRect;
3757    buffer_handle_t * buf = NULL;
3758
3759    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
3760    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
3761        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
3762            found = true;
3763            break;
3764        }
3765        subParms->svcBufIndex++;
3766        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
3767            subParms->svcBufIndex = 0;
3768    }
3769    if (!found) {
3770        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
3771        subParms->svcBufIndex++;
3772        return 1;
3773    }
3774
3775    m_jpegPictureRect.w = subParms->width;
3776    m_jpegPictureRect.h = subParms->height;
3777
3778    ALOGV("DEBUG(%s): stream w = %d, h = %d, jpeg w = %d, h = %d",
3779              __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
3780                   m_jpegPictureRect.w, m_jpegPictureRect.h);
3781
3782    m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
3783                   m_jpegPictureRect.w, m_jpegPictureRect.h,
3784                   &cropX, &cropY,
3785                   &pictureW, &pictureH,
3786                   0);
3787    pictureFormat = V4L2_PIX_FMT_YUYV;
3788    pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
3789
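    // JPEG path: crop the YUYV source to the current zoom window, color-convert it into the NV16
    // m_resizeBuf through the picture CSC, then encode that buffer with yuv2Jpeg() into the JPEG
    // substream's service buffer below.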
3790    if (m_exynosPictureCSC) {
3791        float zoom_w = 0, zoom_h = 0;
3792        if (m_zoomRatio == 0)
3793            m_zoomRatio = 1;
3794
3795        if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
3796            zoom_w =  pictureW / m_zoomRatio;
3797            zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
3798        } else {
3799            zoom_h = pictureH / m_zoomRatio;
3800            zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
3801        }
3802        cropX = (pictureW - zoom_w) / 2;
3803        cropY = (pictureH - zoom_h) / 2;
3804        cropW = zoom_w;
3805        cropH = zoom_h;
3806
3807        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
3808              __FUNCTION__, cropX, cropY, cropW, cropH);
3809
3810        csc_set_src_format(m_exynosPictureCSC,
3811                           ALIGN(pictureW, 16), ALIGN(pictureH, 16),
3812                           cropX, cropY, cropW, cropH,
3813                           V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
3814                           0);
3815
3816        csc_set_dst_format(m_exynosPictureCSC,
3817                           m_jpegPictureRect.w, m_jpegPictureRect.h,
3818                           0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
3819                           V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
3820                           0);
3821        for (int i = 0 ; i < 3 ; i++)
3822            ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ",
3823                __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
3824        csc_set_src_buffer(m_exynosPictureCSC,
3825                           (void **)&srcImageBuf->fd.fd);
3826
3827        csc_set_dst_buffer(m_exynosPictureCSC,
3828                           (void **)&m_resizeBuf.fd.fd);
3829        for (int i = 0 ; i < 3 ; i++)
3830            ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
3831                __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
3832
3833        if (csc_convert(m_exynosPictureCSC) != 0)
3834            ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
3835
3836    }
3837    else {
3838        ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
3839    }
3840
3841    resizeBufInfo = m_resizeBuf;
3842
3843    m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
3844
3845    for (int i = 1; i < 3; i++) {
3846        if (m_resizeBuf.size.extS[i] != 0)
3847            m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
3848
3849        ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
3850    }
3851
3852    jpegRect.w = m_jpegPictureRect.w;
3853    jpegRect.h = m_jpegPictureRect.h;
3854    jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
3855
3856    for (int j = 0 ; j < 3 ; j++)
3857        ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
3858            __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
3859            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
3860            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
3861
3862    if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false)
3863        ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
3864
3865    m_resizeBuf = resizeBufInfo;
3866
3867    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
3868
3869    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
3870            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
3871    if (res == 0) {
3872        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
3873        subParms->numSvcBufsInHal--;
3874    }
3875    else {
3876        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
3877    }
3878
3879    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
3880    {
3881        bool found = false;
3882        int checkingIndex = 0;
3883
3884        ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
3885
3886        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3887        if (res != NO_ERROR || buf == NULL) {
3888            ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3889            break;
3890        }
3891        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3892        subParms->numSvcBufsInHal ++;
3893        ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3894           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3895
3896
3897        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3898            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3899                found = true;
3900                break;
3901            }
3902        }
3903        ALOGV("DEBUG(%s): jpeg dequeued_buffer found(%d)", __FUNCTION__, found);
3904
3905        if (!found) {
3906             break;
3907        }
3908
3909        subParms->svcBufIndex = checkingIndex;
3910        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
3911            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
3912        }
3913        else {
3914            ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
3915                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
3916        }
3917    }
3918    return 0;
3919}
3920
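// Produces a video-record buffer from the preview source frame: picks a free
// service buffer, color-converts/scales it through the video CSC, enqueues it
// to the record stream, then dequeues buffers back from the service until the
// minimum-undequeued threshold is satisfied again.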
3921int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
3922{
3923    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
3924    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
3925    status_t    res;
3926    ExynosRect jpegRect;
3927    bool found = false;
3928    int cropX, cropY, cropW, cropH = 0;
3929    buffer_handle_t * buf = NULL;
3930
3931    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
3932    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
3933        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
3934            found = true;
3935            break;
3936        }
3937        subParms->svcBufIndex++;
3938        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
3939            subParms->svcBufIndex = 0;
3940    }
3941    if (!found) {
3942        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
3943        subParms->svcBufIndex++;
3944        return 1;
3945    }
3946
3947    if (m_exynosVideoCSC) {
3948        int videoW = subParms->width, videoH = subParms->height;
3949        int cropX, cropY, cropW, cropH = 0;
3950        int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
3951        m_getRatioSize(previewW, previewH,
3952                       videoW, videoH,
3953                       &cropX, &cropY,
3954                       &cropW, &cropH,
3955                       0);
3956
3957        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
3958                 __FUNCTION__, cropX, cropY, cropW, cropH);
3959
3960        csc_set_src_format(m_exynosVideoCSC,
3961                           previewW, previewH,
3962                           cropX, cropY, cropW, cropH,
3963                           selfStreamParms->format,
3964                           0);
3965
3966        csc_set_dst_format(m_exynosVideoCSC,
3967                           videoW, videoH,
3968                           0, 0, videoW, videoH,
3969                           subParms->format,
3970                           1);
3971
3972        csc_set_src_buffer(m_exynosVideoCSC,
3973                        (void **)&srcImageBuf->fd.fd);
3974
3975        csc_set_dst_buffer(m_exynosVideoCSC,
3976            (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
3977
3978        if (csc_convert(m_exynosVideoCSC) != 0) {
3979            ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
3980        }
3981        else {
3982            ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
3983        }
3984    }
3985    else {
3986        ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
3987    }
3988
3989    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
3990
3991    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
3992            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
3993    if (res == 0) {
3994        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
3995        subParms->numSvcBufsInHal--;
3996    }
3997    else {
3998        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
3999    }
4000
4001    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4002    {
4003        bool found = false;
4004        int checkingIndex = 0;
4005
4006        ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4007
4008        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4009        if (res != NO_ERROR || buf == NULL) {
4010            ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4011            break;
4012        }
4013        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4014        subParms->numSvcBufsInHal ++;
4015        ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4016           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4017
4018        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4019            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4020                found = true;
4021                break;
4022            }
4023        }
4024        ALOGV("DEBUG(%s): record dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4025
4026        if (!found) {
4027             break;
4028        }
4029
4030        subParms->svcBufIndex = checkingIndex;
4031        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4032            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4033        }
4034        else {
4035            ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4036                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4037        }
4038    }
4039    return 0;
4040}
4041
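// Produces a preview-callback buffer: YCrCb_420_SP output is converted through
// the video CSC into m_previewCbBuf and then packed into the service buffer,
// while YV12 output is copied plane by plane using 16-byte-aligned strides.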
4042int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4043{
4044    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4045    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
4046    status_t    res;
4047    bool found = false;
4048    int cropX, cropY, cropW, cropH = 0;
4049    buffer_handle_t * buf = NULL;
4050
4051    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4052    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4053        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4054            found = true;
4055            break;
4056        }
4057        subParms->svcBufIndex++;
4058        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4059            subParms->svcBufIndex = 0;
4060    }
4061    if (!found) {
4062        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4063        subParms->svcBufIndex++;
4064        return 1;
4065    }
4066
4067    if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4068        if (m_exynosVideoCSC) {
4069            int previewCbW = subParms->width, previewCbH = subParms->height;
4070            int cropX, cropY, cropW, cropH = 0;
4071            int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4072            m_getRatioSize(previewW, previewH,
4073                           previewCbW, previewCbH,
4074                           &cropX, &cropY,
4075                           &cropW, &cropH,
4076                           0);
4077
4078            ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4079                     __FUNCTION__, cropX, cropY, cropW, cropH);
4080            csc_set_src_format(m_exynosVideoCSC,
4081                               previewW, previewH,
4082                               cropX, cropY, cropW, cropH,
4083                               selfStreamParms->format,
4084                               0);
4085
4086            csc_set_dst_format(m_exynosVideoCSC,
4087                               previewCbW, previewCbH,
4088                               0, 0, previewCbW, previewCbH,
4089                               subParms->internalFormat,
4090                               1);
4091
4092            csc_set_src_buffer(m_exynosVideoCSC,
4093                        (void **)&srcImageBuf->fd.fd);
4094
4095            csc_set_dst_buffer(m_exynosVideoCSC,
4096                (void **)(&(m_previewCbBuf.fd.fd)));
4097
4098            if (csc_convert(m_exynosVideoCSC) != 0) {
4099                ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4100            }
4101            else {
4102                ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4103            }
4104            if (previewCbW == ALIGN(previewCbW, 16)) {
4105                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4106                    m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4107                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4108                    m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4109            }
4110            else {
4111                // TODO : copy line by line ?
4112            }
4113        }
4114        else {
4115            ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4116        }
4117    }
4118    else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
4119        int previewCbW = subParms->width, previewCbH = subParms->height;
4120        int stride = ALIGN(previewCbW, 16);
4121        int c_stride = ALIGN(stride / 2, 16);
4122        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4123            srcImageBuf->virt.extP[0], stride * previewCbH);
4124        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4125            srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4126        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4127            srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4128    }
4129    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4130
4131    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4132            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4133    if (res == 0) {
4134        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4135        subParms->numSvcBufsInHal--;
4136    }
4137    else {
4138        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4139    }
4140
4141    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4142    {
4143        bool found = false;
4144        int checkingIndex = 0;
4145
4146        ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4147
4148        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4149        if (res != NO_ERROR || buf == NULL) {
4150            ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4151            break;
4152        }
4153        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4154        subParms->numSvcBufsInHal ++;
4155        ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4156           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4157
4158
4159        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4160            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4161                found = true;
4162                break;
4163            }
4164        }
4165        ALOGV("DEBUG(%s): prvcb dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4166
4167        if (!found) {
4168             break;
4169        }
4170
4171        subParms->svcBufIndex = checkingIndex;
4172        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4173            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4174        }
4175        else {
4176            ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4177                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4178        }
4179    }
4180    return 0;
4181}
4182
4183bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4184{
4185    int sizeOfSupportList;
4186
4187    //REAR Camera
4188    if(this->getCameraId() == 0) {
4189        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
4190
4191        for(int i = 0; i < sizeOfSupportList; i++) {
4192            if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4193                return true;
4194        }
4195
4196    }
4197    else {
4198        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
4199
4200        for(int i = 0; i < sizeOfSupportList; i++) {
4201            if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4202                return true;
4203        }
4204    }
4205
4206    return false;
4207}
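// Encodes the YUV source buffer into JPEG with ExynosJpegEncoderForCamera:
// sets quality, output size, color format and (when requested and supported)
// the EXIF thumbnail size, attaches the EXIF attributes, then runs encode().
// Returns true on success; the encoder instance is destroyed on every path.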
4208bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4209                            ExynosBuffer *jpegBuf,
4210                            ExynosRect *rect)
4211{
4212    unsigned char *addr;
4213
4214    ExynosJpegEncoderForCamera jpegEnc;
4215    bool ret = false;
4216    int res = 0;
4217
4218    unsigned int *yuvSize = yuvBuf->size.extS;
4219
4220    if (jpegEnc.create()) {
4221        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4222        goto jpeg_encode_done;
4223    }
4224
4225    if (jpegEnc.setQuality(100)) {
4226        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4227        goto jpeg_encode_done;
4228    }
4229
4230    if (jpegEnc.setSize(rect->w, rect->h)) {
4231        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4232        goto jpeg_encode_done;
4233    }
4234    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4235
4236    if (jpegEnc.setColorFormat(rect->colorFormat)) {
4237        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4238        goto jpeg_encode_done;
4239    }
4240
4241    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4242        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4243        goto jpeg_encode_done;
4244    }
4245
4246    if((m_jpegMetadata.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.ctl.jpeg.thumbnailSize[1] != 0)) {
4247        mExifInfo.enableThumb = true;
4248        if(!m_checkThumbnailSize(m_jpegMetadata.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.ctl.jpeg.thumbnailSize[1])) {
4249            //default value
4250            m_thumbNailW = SUPPORT_THUMBNAIL_REAR_SIZE[0][0];
4251            m_thumbNailH = SUPPORT_THUMBNAIL_REAR_SIZE[0][1];
4252        } else {
4253            m_thumbNailW = m_jpegMetadata.ctl.jpeg.thumbnailSize[0];
4254            m_thumbNailH = m_jpegMetadata.ctl.jpeg.thumbnailSize[1];
4255        }
4256
4257        ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4258
4259    } else {
4260        mExifInfo.enableThumb = false;
4261    }
4262
4263    if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4264        ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4265        goto jpeg_encode_done;
4266    }
4267
4268    ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4269    if (jpegEnc.setThumbnailQuality(50)) {
4270        ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4271        goto jpeg_encode_done;
4272    }
4273
4274    m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4275    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4276    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4277        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4278        goto jpeg_encode_done;
4279    }
4280    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4281        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4282        goto jpeg_encode_done;
4283    }
4284
4285    if (jpegEnc.updateConfig()) {
4286        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4287        goto jpeg_encode_done;
4288    }
4289
4290    if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) != 0) {
4291        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4292        goto jpeg_encode_done;
4293    }
4294
4295    ret = true;
4296
4297jpeg_encode_done:
4298
4299    if (jpegEnc.flagCreate() == true)
4300        jpegEnc.destroy();
4301
4302    return ret;
4303}
4304
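// Starts the precapture metering sequence for trigger 'id'. On the rear
// camera with a flash AE mode a full flash sequence is started (unless an AF
// pre-flash already completed); otherwise AE and AWB lock commands are issued.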
4305void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4306{
4307    m_ctlInfo.flash.m_precaptureTriggerId = id;
4308    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4309        // flash is required
4310        switch (m_ctlInfo.flash.m_flashCnt) {
4311        case IS_FLASH_STATE_AUTO_DONE:
4312            // Flash capture sequence, AF flash was executed before
4313            break;
4314        default:
4315            // Full flash sequence
4316            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4317            m_ctlInfo.flash.m_flashEnableFlg = true;
4318        }
4319    } else {
4320        // Apply AE & AWB lock
4321        ALOGV("[PreCap] Flash OFF mode ");
4322        m_ctlInfo.flash.m_flashEnableFlg = false;
4323        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4324        m_ctlInfo.ae.m_aeCnt = IS_COMMAND_EXECUTION;
4325        m_ctlInfo.awb.m_awbCnt = IS_COMMAND_EXECUTION;
4326    }
4327    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4328}
4329void ExynosCameraHWInterface2::OnAfTriggerStart(int id)
4330{
4331    m_afPendingTriggerId = id;
4332    m_afModeWaitingCnt = 6;
4333}
4334
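// Dispatches an AF trigger to the state machine matching the current AF mode.
// For AUTO/MACRO/OFF on the rear camera an AF pre-flash sequence is started
// first when the AE mode requires flash.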
4335void ExynosCameraHWInterface2::OnAfTrigger(int id)
4336{
4337    m_afTriggerId = id;
4338
4339    switch (m_afMode) {
4340    case AA_AFMODE_AUTO:
4341    case AA_AFMODE_MACRO:
4342    case AA_AFMODE_OFF:
4343        ALOGE("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
4344        // If flash is enabled, the flash operation is executed before triggering AF
4345        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4346                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4347                && (m_cameraId == 0)) {
4348            ALOGE("[Flash] AF Flash start with Mode (%d)", m_afMode);
4349            m_ctlInfo.flash.m_flashEnableFlg = true;
4350            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4351            m_ctlInfo.flash.m_flashDecisionResult = false;
4352            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4353        }
4354        OnAfTriggerAutoMacro(id);
4355        break;
4356    case AA_AFMODE_CONTINUOUS_VIDEO:
4357        ALOGE("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4358        OnAfTriggerCAFVideo(id);
4359        break;
4360    case AA_AFMODE_CONTINUOUS_PICTURE:
4361        ALOGE("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4362        OnAfTriggerCAFPicture(id);
4363        break;
4364
4365    default:
4366        break;
4367    }
4368}
4369
4370void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
4371{
4372    int nextState = NO_TRANSITION;
4373
4374    switch (m_afState) {
4375    case HAL_AFSTATE_INACTIVE:
4376        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4377        m_IsAfTriggerRequired = true;
4378        break;
4379    case HAL_AFSTATE_NEEDS_COMMAND:
4380        nextState = NO_TRANSITION;
4381        break;
4382    case HAL_AFSTATE_STARTED:
4383        nextState = NO_TRANSITION;
4384        break;
4385    case HAL_AFSTATE_SCANNING:
4386        nextState = NO_TRANSITION;
4387        break;
4388    case HAL_AFSTATE_LOCKED:
4389        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4390        m_IsAfTriggerRequired = true;
4391        break;
4392    case HAL_AFSTATE_FAILED:
4393        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4394        m_IsAfTriggerRequired = true;
4395        break;
4396    default:
4397        break;
4398    }
4399    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4400    if (nextState != NO_TRANSITION)
4401        m_afState = nextState;
4402}
4403
4404void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4405{
4406    int nextState = NO_TRANSITION;
4407
4408    switch (m_afState) {
4409    case HAL_AFSTATE_INACTIVE:
4410        nextState = HAL_AFSTATE_FAILED;
4411        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4412        break;
4413    case HAL_AFSTATE_NEEDS_COMMAND:
4414        // not used
4415        break;
4416    case HAL_AFSTATE_STARTED:
4417        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4418        m_AfHwStateFailed = false;
4419        // If flash is enabled, the flash operation is executed before triggering AF
4420        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4421                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4422                && (m_cameraId == 0)) {
4423            ALOGE("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4424            m_ctlInfo.flash.m_flashEnableFlg = true;
4425            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4426            m_ctlInfo.flash.m_flashDecisionResult = false;
4427            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4428        }
4429        break;
4430    case HAL_AFSTATE_SCANNING:
4431        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4432        m_AfHwStateFailed = false;
4433        // If flash is enabled, the flash operation is executed before triggering AF
4434        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4435                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4436                && (m_cameraId == 0)) {
4437            ALOGE("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4438            m_ctlInfo.flash.m_flashEnableFlg = true;
4439            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4440            m_ctlInfo.flash.m_flashDecisionResult = false;
4441            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4442        }
4443        break;
4444    case HAL_AFSTATE_NEEDS_DETERMINATION:
4445        nextState = NO_TRANSITION;
4446        break;
4447    case HAL_AFSTATE_PASSIVE_FOCUSED:
4448        m_IsAfLockRequired = true;
4449        if (m_AfHwStateFailed) {
4450            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4451            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4452            nextState = HAL_AFSTATE_FAILED;
4453        }
4454        else {
4455            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4456            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4457            nextState = HAL_AFSTATE_LOCKED;
4458        }
4459        m_AfHwStateFailed = false;
4460        break;
4461    case HAL_AFSTATE_LOCKED:
4462        nextState = NO_TRANSITION;
4463        break;
4464    case HAL_AFSTATE_FAILED:
4465        nextState = NO_TRANSITION;
4466        break;
4467    default:
4468        break;
4469    }
4470    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4471    if (nextState != NO_TRANSITION)
4472        m_afState = nextState;
4473}
4474
4475
4476void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
4477{
4478    int nextState = NO_TRANSITION;
4479
4480    switch (m_afState) {
4481    case HAL_AFSTATE_INACTIVE:
4482        nextState = HAL_AFSTATE_FAILED;
4483        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4484        break;
4485    case HAL_AFSTATE_NEEDS_COMMAND:
4486        // not used
4487        break;
4488    case HAL_AFSTATE_STARTED:
4489        m_IsAfLockRequired = true;
4490        nextState = HAL_AFSTATE_FAILED;
4491        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4492        break;
4493    case HAL_AFSTATE_SCANNING:
4494        m_IsAfLockRequired = true;
4495        nextState = HAL_AFSTATE_FAILED;
4496        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4497        break;
4498    case HAL_AFSTATE_NEEDS_DETERMINATION:
4499        // not used
4500        break;
4501    case HAL_AFSTATE_PASSIVE_FOCUSED:
4502        m_IsAfLockRequired = true;
4503        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4504        nextState = HAL_AFSTATE_LOCKED;
4505        break;
4506    case HAL_AFSTATE_LOCKED:
4507        nextState = NO_TRANSITION;
4508        break;
4509    case HAL_AFSTATE_FAILED:
4510        nextState = NO_TRANSITION;
4511        break;
4512    default:
4513        break;
4514    }
4515    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4516    if (nextState != NO_TRANSITION)
4517        m_afState = nextState;
4518}
4519
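// Reports precapture metering progress to the framework. In the flash case the
// AE/AWB LOCKED notifications are deferred until the flash sequence reaches
// AUTO_DONE; otherwise they are derived directly from the AE/AWB command state.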
4520void ExynosCameraHWInterface2::OnPrecaptureMeteringNotification()
4521{
4522    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4523        if (m_ctlInfo.flash.m_flashEnableFlg) {
4524            // flash case
4525            switch (m_ctlInfo.flash.m_flashCnt) {
4526            case IS_FLASH_STATE_AUTO_DONE:
4527                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4528                                ANDROID_CONTROL_AE_STATE_LOCKED,
4529                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4530                m_notifyCb(CAMERA2_MSG_AUTOWB,
4531                                ANDROID_CONTROL_AWB_STATE_LOCKED,
4532                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4533                m_ctlInfo.flash.m_precaptureTriggerId = 0;
4534                break;
4535            default:
4536                if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
4537                    ALOGE("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
4538                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
4539                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4540                                    ANDROID_CONTROL_AE_STATE_LOCKED,
4541                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4542                    m_notifyCb(CAMERA2_MSG_AUTOWB,
4543                                    ANDROID_CONTROL_AWB_STATE_LOCKED,
4544                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4545                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
4546                } else {
4547                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4548                                    ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4549                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4550                    m_notifyCb(CAMERA2_MSG_AUTOWB,
4551                                    ANDROID_CONTROL_AWB_STATE_CONVERGED,
4552                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4553                }
4554            }
4555        } else {
4556            // non-flash case
4557            // AE
4558            switch (m_ctlInfo.ae.m_aeCnt) {
4559            case IS_COMMAND_EXECUTION:
4560                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4561                                ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4562                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4563                ALOGE("[PreCap] OnPrecaptureMeteringNotification (ID %d) CAMERA2_MSG_AUTOEXPOSURE, ANDROID_CONTROL_AE_STATE_PRECAPTURE", m_ctlInfo.flash.m_precaptureTriggerId);
4564                break;
4565            case IS_COMMAND_NONE:
4566            case IS_COMMAND_CLEAR:
4567                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4568                                ANDROID_CONTROL_AE_STATE_LOCKED,
4569                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4570                ALOGE("[PreCap] OnPrecaptureMeteringNotification (ID %d) CAMERA2_MSG_AUTOEXPOSURE, ANDROID_CONTROL_AE_STATE_LOCKED", m_ctlInfo.flash.m_precaptureTriggerId);
4571                m_ctlInfo.flash.m_precaptureTriggerId = 0;
4572                break;
4573            }
4574            // AWB
4575            switch (m_ctlInfo.awb.m_awbCnt) {
4576            case IS_COMMAND_EXECUTION:
4577                m_notifyCb(CAMERA2_MSG_AUTOWB,
4578                                ANDROID_CONTROL_AWB_STATE_CONVERGED,
4579                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4580                break;
4581            case IS_COMMAND_NONE:
4582            case IS_COMMAND_CLEAR:
4583                m_notifyCb(CAMERA2_MSG_AUTOWB,
4584                                ANDROID_CONTROL_AWB_STATE_LOCKED,
4585                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4586                break;
4587            }
4588        }
4589    }
4590}
4591
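// Entry point for AF notifications from the ISP: routes the reported
// aa_afstate to the handler for the active AF mode (auto/macro, CAF video,
// CAF picture), each of which drives the HAL-side AF state machine.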
4592void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
4593{
4594    switch (m_afMode) {
4595    case AA_AFMODE_AUTO:
4596    case AA_AFMODE_MACRO:
4597        OnAfNotificationAutoMacro(noti);
4598        break;
4599    case AA_AFMODE_CONTINUOUS_VIDEO:
4600        OnAfNotificationCAFVideo(noti);
4601        break;
4602    case AA_AFMODE_CONTINUOUS_PICTURE:
4603        OnAfNotificationCAFPicture(noti);
4604        break;
4605    case AA_AFMODE_OFF:
4606    default:
4607        break;
4608    }
4609}
4610
4611void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
4612{
4613    int nextState = NO_TRANSITION;
4614    bool bWrongTransition = false;
4615
4616    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
4617        switch (noti) {
4618        case AA_AFSTATE_INACTIVE:
4619        case AA_AFSTATE_ACTIVE_SCAN:
4620        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4621        case AA_AFSTATE_AF_FAILED_FOCUS:
4622        default:
4623            nextState = NO_TRANSITION;
4624            break;
4625        }
4626    }
4627    else if (m_afState == HAL_AFSTATE_STARTED) {
4628        switch (noti) {
4629        case AA_AFSTATE_INACTIVE:
4630            nextState = NO_TRANSITION;
4631            break;
4632        case AA_AFSTATE_ACTIVE_SCAN:
4633            nextState = HAL_AFSTATE_SCANNING;
4634            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
4635            break;
4636        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4637            nextState = NO_TRANSITION;
4638            break;
4639        case AA_AFSTATE_AF_FAILED_FOCUS:
4640            nextState = NO_TRANSITION;
4641            break;
4642        default:
4643            bWrongTransition = true;
4644            break;
4645        }
4646    }
4647    else if (m_afState == HAL_AFSTATE_SCANNING) {
4648        switch (noti) {
4649        case AA_AFSTATE_INACTIVE:
4650            bWrongTransition = true;
4651            break;
4652        case AA_AFSTATE_ACTIVE_SCAN:
4653            nextState = NO_TRANSITION;
4654            break;
4655        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4656            // If flash mode is enabled, execute pre-capture metering after AF
4657            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4658                switch (m_ctlInfo.flash.m_flashCnt) {
4659                case IS_FLASH_STATE_ON_DONE:
4660                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4661                    break;
4662                case IS_FLASH_STATE_AUTO_DONE:
4663                    nextState = HAL_AFSTATE_LOCKED;
4664                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4665                    break;
4666                default:
4667                    nextState = NO_TRANSITION;
4668                }
4669            } else {
4670                nextState = HAL_AFSTATE_LOCKED;
4671                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4672            }
4673            break;
4674        case AA_AFSTATE_AF_FAILED_FOCUS:
4675            // If flash mode is enabled, execute pre-capture metering after AF
4676            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4677                switch (m_ctlInfo.flash.m_flashCnt) {
4678                case IS_FLASH_STATE_ON_DONE:
4679                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4680                    break;
4681                case IS_FLASH_STATE_AUTO_DONE:
4682                    nextState = HAL_AFSTATE_FAILED;
4683                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4684                    break;
4685                default:
4686                    nextState = NO_TRANSITION;
4687                }
4688            } else {
4689                nextState = HAL_AFSTATE_FAILED;
4690                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4691            }
4692            break;
4693        default:
4694            bWrongTransition = true;
4695            break;
4696        }
4697    }
4698    else if (m_afState == HAL_AFSTATE_LOCKED) {
4699        switch (noti) {
4700            case AA_AFSTATE_INACTIVE:
4701            case AA_AFSTATE_ACTIVE_SCAN:
4702                bWrongTransition = true;
4703                break;
4704            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4705                // Flash off if flash mode is available.
4706                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg)
4707                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4708                nextState = NO_TRANSITION;
4709                break;
4710            case AA_AFSTATE_AF_FAILED_FOCUS:
4711            default:
4712                bWrongTransition = true;
4713                break;
4714        }
4715    }
4716    else if (m_afState == HAL_AFSTATE_FAILED) {
4717        switch (noti) {
4718            case AA_AFSTATE_INACTIVE:
4719            case AA_AFSTATE_ACTIVE_SCAN:
4720            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4721                bWrongTransition = true;
4722                break;
4723            case AA_AFSTATE_AF_FAILED_FOCUS:
4724                // Flash off if flash mode is available.
4725                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg)
4726                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4727                nextState = NO_TRANSITION;
4728                break;
4729            default:
4730                bWrongTransition = true;
4731                break;
4732        }
4733    }
4734    if (bWrongTransition) {
4735        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
4736        return;
4737    }
4738    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
4739    if (nextState != NO_TRANSITION)
4740        m_afState = nextState;
4741}
4742
4743void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
4744{
4745    int nextState = NO_TRANSITION;
4746    bool bWrongTransition = false;
4747
4748    if (m_afState == HAL_AFSTATE_INACTIVE) {
4749        switch (noti) {
4750        case AA_AFSTATE_INACTIVE:
4751        case AA_AFSTATE_ACTIVE_SCAN:
4752        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4753        case AA_AFSTATE_AF_FAILED_FOCUS:
4754        default:
4755            nextState = NO_TRANSITION;
4756            break;
4757        }
4758    }
4759    else if (m_afState == HAL_AFSTATE_STARTED) {
4760        switch (noti) {
4761        case AA_AFSTATE_INACTIVE:
4762            nextState = NO_TRANSITION;
4763            break;
4764        case AA_AFSTATE_ACTIVE_SCAN:
4765            nextState = HAL_AFSTATE_SCANNING;
4766            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4767            break;
4768        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4769            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4770            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4771            break;
4772        case AA_AFSTATE_AF_FAILED_FOCUS:
4773            //nextState = HAL_AFSTATE_FAILED;
4774            //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4775            nextState = NO_TRANSITION;
4776            break;
4777        default:
4778            bWrongTransition = true;
4779            break;
4780        }
4781    }
4782    else if (m_afState == HAL_AFSTATE_SCANNING) {
4783        switch (noti) {
4784        case AA_AFSTATE_INACTIVE:
4785            nextState = NO_TRANSITION;
4786            break;
4787        case AA_AFSTATE_ACTIVE_SCAN:
4788            nextState = NO_TRANSITION;
4789            m_AfHwStateFailed = false;
4790            break;
4791        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4792            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4793            m_AfHwStateFailed = false;
4794            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4795            break;
4796        case AA_AFSTATE_AF_FAILED_FOCUS:
4797            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4798            m_AfHwStateFailed = true;
4799            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4800            break;
4801        default:
4802            bWrongTransition = true;
4803            break;
4804        }
4805    }
4806    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
4807        switch (noti) {
4808        case AA_AFSTATE_INACTIVE:
4809            nextState = NO_TRANSITION;
4810            break;
4811        case AA_AFSTATE_ACTIVE_SCAN:
4812            nextState = HAL_AFSTATE_SCANNING;
4813            m_AfHwStateFailed = false;
4814            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4815            break;
4816        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4817            nextState = NO_TRANSITION;
4818            m_AfHwStateFailed = false;
4819            // Flash off if flash mode is available.
4820            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4821                ALOGD("[AF Flash] Off with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4822                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4823            }
4824            break;
4825        case AA_AFSTATE_AF_FAILED_FOCUS:
4826            nextState = NO_TRANSITION;
4827            m_AfHwStateFailed = true;
4828            // Flash off if flash mode is available.
4829            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4830                ALOGD("[AF Flash] Off with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4831                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4832            }
4833            break;
4834        default:
4835            bWrongTransition = true;
4836            break;
4837        }
4838    }
4839    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
4840        switch (noti) {
4841        case AA_AFSTATE_INACTIVE:
4842            nextState = NO_TRANSITION;
4843            break;
4844        case AA_AFSTATE_ACTIVE_SCAN:
4845            nextState = NO_TRANSITION;
4846            break;
4847        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4848            // If flash mode is enabled, execute pre-capture metering after AF
4849            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4850                ALOGD("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4851                switch (m_ctlInfo.flash.m_flashCnt) {
4852                case IS_FLASH_STATE_ON_DONE:
4853                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4854                    break;
4855                case IS_FLASH_STATE_AUTO_DONE:
4856                    m_IsAfLockRequired = true;
4857                    nextState = HAL_AFSTATE_LOCKED;
4858                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4859                    break;
4860                default:
4861                    nextState = NO_TRANSITION;
4862                }
4863            } else {
4864                m_IsAfLockRequired = true;
4865                nextState = HAL_AFSTATE_LOCKED;
4866                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4867            }
4868            break;
4869        case AA_AFSTATE_AF_FAILED_FOCUS:
4870            // If flash mode is enabled, execute pre-capture metering after AF
4871            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4872                ALOGD("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4873                switch (m_ctlInfo.flash.m_flashCnt) {
4874                case IS_FLASH_STATE_ON_DONE:
4875                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4876                    break;
4877                case IS_FLASH_STATE_AUTO_DONE:
4878                    m_IsAfLockRequired = true;
4879                    nextState = HAL_AFSTATE_FAILED;
4880                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4881                    break;
4882                default:
4883                    nextState = NO_TRANSITION;
4884                }
4885            } else {
4886                m_IsAfLockRequired = true;
4887                nextState = HAL_AFSTATE_FAILED;
4888                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4889            }
4890            break;
4891        default:
4892            bWrongTransition = true;
4893            break;
4894        }
4895    }
4896    else if (m_afState == HAL_AFSTATE_LOCKED) {
4897        switch (noti) {
4898            case AA_AFSTATE_INACTIVE:
4899                nextState = NO_TRANSITION;
4900                break;
4901            case AA_AFSTATE_ACTIVE_SCAN:
4902                bWrongTransition = true;
4903                break;
4904            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4905                nextState = NO_TRANSITION;
4906                // Flash off if flash mode is available.
4907                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4908                    ALOGD("[AF Flash] Off with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4909                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4910                }
4911                break;
4912            case AA_AFSTATE_AF_FAILED_FOCUS:
4913            default:
4914                bWrongTransition = true;
4915                break;
4916        }
4917    }
4918    else if (m_afState == HAL_AFSTATE_FAILED) {
4919        switch (noti) {
4920            case AA_AFSTATE_INACTIVE:
4921                bWrongTransition = true;
4922                break;
4923            case AA_AFSTATE_ACTIVE_SCAN:
4924                nextState = HAL_AFSTATE_SCANNING;
4925                break;
4926            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4927                bWrongTransition = true;
4928                break;
4929            case AA_AFSTATE_AF_FAILED_FOCUS:
4930                // Flash off if flash mode is available.
4931                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4932                    ALOGD("[AF Flash] Off with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4933                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4934                }
4935                nextState = NO_TRANSITION;
4936                break;
4937            default:
4938                bWrongTransition = true;
4939                break;
4940        }
4941    }
4942    if (bWrongTransition) {
4943        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
4944        return;
4945    }
4946    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
4947    if (nextState != NO_TRANSITION)
4948        m_afState = nextState;
4949}
4950
4951void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
4952{
4953    int nextState = NO_TRANSITION;
4954    bool bWrongTransition = false;
4955
4956    if (m_afState == HAL_AFSTATE_INACTIVE) {
4957        switch (noti) {
4958        case AA_AFSTATE_INACTIVE:
4959        case AA_AFSTATE_ACTIVE_SCAN:
4960        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4961        case AA_AFSTATE_AF_FAILED_FOCUS:
4962        default:
4963            nextState = NO_TRANSITION;
4964            break;
4965        }
4966    }
4967    else if (m_afState == HAL_AFSTATE_STARTED) {
4968        switch (noti) {
4969        case AA_AFSTATE_INACTIVE:
4970            nextState = NO_TRANSITION;
4971            break;
4972        case AA_AFSTATE_ACTIVE_SCAN:
4973            nextState = HAL_AFSTATE_SCANNING;
4974            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4975            break;
4976        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4977            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4978            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4979            break;
4980        case AA_AFSTATE_AF_FAILED_FOCUS:
4981            nextState = HAL_AFSTATE_FAILED;
4982            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4983            break;
4984        default:
4985            bWrongTransition = true;
4986            break;
4987        }
4988    }
4989    else if (m_afState == HAL_AFSTATE_SCANNING) {
4990        switch (noti) {
4991        case AA_AFSTATE_INACTIVE:
4992            bWrongTransition = true;
4993            break;
4994        case AA_AFSTATE_ACTIVE_SCAN:
4995            nextState = NO_TRANSITION;
4996            break;
4997        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4998            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4999            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5000            break;
5001        case AA_AFSTATE_AF_FAILED_FOCUS:
5002            nextState = NO_TRANSITION;
5003            break;
5004        default:
5005            bWrongTransition = true;
5006            break;
5007        }
5008    }
5009    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5010        switch (noti) {
5011        case AA_AFSTATE_INACTIVE:
5012            bWrongTransition = true;
5013            break;
5014        case AA_AFSTATE_ACTIVE_SCAN:
5015            nextState = HAL_AFSTATE_SCANNING;
5016            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5017            break;
5018        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5019            nextState = NO_TRANSITION;
5020            break;
5021        case AA_AFSTATE_AF_FAILED_FOCUS:
5022            nextState = HAL_AFSTATE_FAILED;
5023            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5024            // TODO : needs NO_TRANSITION ?
5025            break;
5026        default:
5027            bWrongTransition = true;
5028            break;
5029        }
5030    }
5031    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5032        switch (noti) {
5033        case AA_AFSTATE_INACTIVE:
5034            bWrongTransition = true;
5035            break;
5036        case AA_AFSTATE_ACTIVE_SCAN:
5037            nextState = NO_TRANSITION;
5038            break;
5039        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5040            m_IsAfLockRequired = true;
5041            nextState = HAL_AFSTATE_LOCKED;
5042            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5043            break;
5044        case AA_AFSTATE_AF_FAILED_FOCUS:
5045            nextState = HAL_AFSTATE_FAILED;
5046            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5047            break;
5048        default:
5049            bWrongTransition = true;
5050            break;
5051        }
5052    }
5053    else if (m_afState == HAL_AFSTATE_LOCKED) {
5054        switch (noti) {
5055            case AA_AFSTATE_INACTIVE:
5056                nextState = NO_TRANSITION;
5057                break;
5058            case AA_AFSTATE_ACTIVE_SCAN:
5059                bWrongTransition = true;
5060                break;
5061            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5062                nextState = NO_TRANSITION;
5063                break;
5064            case AA_AFSTATE_AF_FAILED_FOCUS:
5065            default:
5066                bWrongTransition = true;
5067                break;
5068        }
5069    }
5070    else if (m_afState == HAL_AFSTATE_FAILED) {
5071        switch (noti) {
5072            case AA_AFSTATE_INACTIVE:
5073            case AA_AFSTATE_ACTIVE_SCAN:
5074            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5075                bWrongTransition = true;
5076                break;
5077            case AA_AFSTATE_AF_FAILED_FOCUS:
5078                nextState = NO_TRANSITION;
5079                break;
5080            default:
5081                bWrongTransition = true;
5082                break;
5083        }
5084    }
5085    if (bWrongTransition) {
5086        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5087        return;
5088    }
5089    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5090    if (nextState != NO_TRANSITION)
5091        m_afState = nextState;
5092}
5093
5094void ExynosCameraHWInterface2::OnAfCancel(int id)
5095{
5096    m_afTriggerId = id;
5097
5098    switch (m_afMode) {
5099    case AA_AFMODE_AUTO:
5100    case AA_AFMODE_MACRO:
5101    case AA_AFMODE_OFF:
5102        OnAfCancelAutoMacro(id);
5103        break;
5104    case AA_AFMODE_CONTINUOUS_VIDEO:
5105        OnAfCancelCAFVideo(id);
5106        break;
5107    case AA_AFMODE_CONTINUOUS_PICTURE:
5108        OnAfCancelCAFPicture(id);
5109        break;
5110    default:
5111        break;
5112    }
5113}
5114
5115void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
5116{
5117    int nextState = NO_TRANSITION;
5118    m_afTriggerId = id;
5119
5120    if (m_ctlInfo.flash.m_flashEnableFlg  && m_ctlInfo.flash.m_afFlashDoneFlg) {
5121        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5122    }
5123    switch (m_afState) {
5124    case HAL_AFSTATE_INACTIVE:
5125        nextState = NO_TRANSITION;
5126        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5127        break;
5128    case HAL_AFSTATE_NEEDS_COMMAND:
5129    case HAL_AFSTATE_STARTED:
5130    case HAL_AFSTATE_SCANNING:
5131    case HAL_AFSTATE_LOCKED:
5132    case HAL_AFSTATE_FAILED:
5133        SetAfMode(AA_AFMODE_OFF);
5134        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5135        nextState = HAL_AFSTATE_INACTIVE;
5136        break;
5137    default:
5138        break;
5139    }
5140    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5141    if (nextState != NO_TRANSITION)
5142        m_afState = nextState;
5143}
5144
5145void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
5146{
5147    int nextState = NO_TRANSITION;
5148    m_afTriggerId = id;
5149
5150    switch (m_afState) {
5151    case HAL_AFSTATE_INACTIVE:
5152        nextState = NO_TRANSITION;
5153        break;
5154    case HAL_AFSTATE_NEEDS_COMMAND:
5155    case HAL_AFSTATE_STARTED:
5156    case HAL_AFSTATE_SCANNING:
5157    case HAL_AFSTATE_LOCKED:
5158    case HAL_AFSTATE_FAILED:
5159    case HAL_AFSTATE_NEEDS_DETERMINATION:
5160    case HAL_AFSTATE_PASSIVE_FOCUSED:
5161        SetAfMode(AA_AFMODE_OFF);
5162        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5163        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5164        nextState = HAL_AFSTATE_INACTIVE;
5165        break;
5166    default:
5167        break;
5168    }
5169    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5170    if (nextState != NO_TRANSITION)
5171        m_afState = nextState;
5172}
5173
5174void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
5175{
5176    int nextState = NO_TRANSITION;
5177    m_afTriggerId = id;
5178
5179    switch (m_afState) {
5180    case HAL_AFSTATE_INACTIVE:
5181        nextState = NO_TRANSITION;
5182        break;
5183    case HAL_AFSTATE_NEEDS_COMMAND:
5184    case HAL_AFSTATE_STARTED:
5185    case HAL_AFSTATE_SCANNING:
5186    case HAL_AFSTATE_LOCKED:
5187    case HAL_AFSTATE_FAILED:
5188    case HAL_AFSTATE_NEEDS_DETERMINATION:
5189    case HAL_AFSTATE_PASSIVE_FOCUSED:
5190        SetAfMode(AA_AFMODE_OFF);
5191        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5192        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5193        nextState = HAL_AFSTATE_INACTIVE;
5194        break;
5195    default:
5196        break;
5197    }
5198    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5199    if (nextState != NO_TRANSITION)
5200        m_afState = nextState;
5201}
5202
5203void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5204{
5205    if (m_serviceAfState != newState || newState == 0)
5206        m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5207    m_serviceAfState = newState;
5208}
5209
5210int ExynosCameraHWInterface2::GetAfStateForService()
5211{
5212   return m_serviceAfState;
5213}
5214
5215void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5216{
5217    if (m_afMode != afMode) {
5218        if (m_IsAfModeUpdateRequired) {
5219            m_afMode2 = afMode;
5220            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5221        }
5222        else {
5223            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5224            m_IsAfModeUpdateRequired = true;
5225            m_afMode = afMode;
5226            if (m_afModeWaitingCnt != 0) {
5227                m_afModeWaitingCnt = 0;
5228                m_afState = HAL_AFSTATE_INACTIVE;
5229                OnAfTrigger(m_afPendingTriggerId);
5230            }
5231        }
5232    }
5233}
5234
5235void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5236{
5237    char property[PROPERTY_VALUE_MAX];
5238
5239    //2 0th IFD TIFF Tags
5240#if 0 // STOPSHIP TODO(aray): remove before launch, but for now don't leak product data
5241    //3 Maker
5242    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5243    strncpy((char *)mExifInfo.maker, property,
5244                sizeof(mExifInfo.maker) - 1);
5245    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5246    //3 Model
5247    property_get("ro.product.model", property, EXIF_DEF_MODEL);
5248    strncpy((char *)mExifInfo.model, property,
5249                sizeof(mExifInfo.model) - 1);
5250    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5251    //3 Software
5252    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5253    strncpy((char *)mExifInfo.software, property,
5254                sizeof(mExifInfo.software) - 1);
5255    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5256#endif
5257
5258    //3 YCbCr Positioning
5259    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5260
5261    //2 0th IFD Exif Private Tags
5262    //3 F Number
5263    mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5264    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5265    //3 Exposure Program
5266    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5267    //3 Exif Version
5268    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5269    //3 Aperture
5270    double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5271    mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5272    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5273    //3 Maximum lens aperture
5274    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5275    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5276    //3 Lens Focal Length
5277    mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5278
5279    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5280    //3 User Comments
5281    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5282    //3 Color Space information
5283    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5284    //3 Exposure Mode
5285    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5286
5287    //2 0th IFD GPS Info Tags
5288    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5289    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5290
5291    //2 1st IFD TIFF Tags
5292    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5293    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5294    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5295    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5296    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5297    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5298}
5299
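// Fills in the per-capture EXIF fields from the request's control (ctl) and dynamic
// (dm) metadata: image size and orientation, capture date/time, exposure time, ISO,
// the derived APEX shutter/brightness values, flash and white-balance flags, scene
// capture type, optional GPS tags, and the thumbnail size for the 1st IFD.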
5300void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5301        camera2_shot *currentEntry)
5302{
5303    camera2_dm *dm = &(currentEntry->dm);
5304    camera2_ctl *ctl = &(currentEntry->ctl);
5305
5306    ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime, dm->aa.isoValue);
5307    if (!ctl->request.frameCount)
5308        return;
5309    //2 0th IFD TIFF Tags
5310    //3 Width
5311    exifInfo->width = rect->w;
5312    //3 Height
5313    exifInfo->height = rect->h;
5314    //3 Orientation
5315    switch (ctl->jpeg.orientation) {
5316    case 90:
5317        exifInfo->orientation = EXIF_ORIENTATION_90;
5318        break;
5319    case 180:
5320        exifInfo->orientation = EXIF_ORIENTATION_180;
5321        break;
5322    case 270:
5323        exifInfo->orientation = EXIF_ORIENTATION_270;
5324        break;
5325    case 0:
5326    default:
5327        exifInfo->orientation = EXIF_ORIENTATION_UP;
5328        break;
5329    }
5330
5331    //3 Date time
5332    time_t rawtime;
5333    struct tm *timeinfo;
5334    time(&rawtime);
5335    timeinfo = localtime(&rawtime);
5336    strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5337
5338    //2 0th IFD Exif Private Tags
5339    //3 Exposure Time
5340    int shutterSpeed = (dm->sensor.exposureTime/1000);
5341
5342    if (shutterSpeed <= 0) {    // also guards the division below against a zero exposure time
5343        shutterSpeed = 100;
5344    }
5345
5346    exifInfo->exposure_time.num = 1;
5347    // x us -> 1/x s
5348    //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5349    exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5350
5351    //3 ISO Speed Rating
5352    exifInfo->iso_speed_rating = dm->aa.isoValue;
5353
5354    uint32_t av, tv, bv, sv, ev;
5355    av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5356    tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5357    sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5358    bv = av + tv - sv;
5359    ev = av + tv;
5360    //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5361    ALOGD("AV=%d, TV=%d, SV=%d", av, tv, sv);
5362
5363    //3 Shutter Speed
5364    exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5365    exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5366    //3 Brightness
5367    exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5368    exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5369    //3 Exposure Bias
5370    if (ctl->aa.sceneMode == AA_SCENE_MODE_BEACH ||
5371        ctl->aa.sceneMode == AA_SCENE_MODE_SNOW) {
5372        exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5373        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5374    } else {
5375        exifInfo->exposure_bias.num = 0;
5376        exifInfo->exposure_bias.den = 0;
5377    }
5378    //3 Metering Mode
5379    /*switch (m_curCameraInfo->metering) {
5380    case METERING_MODE_CENTER:
5381        exifInfo->metering_mode = EXIF_METERING_CENTER;
5382        break;
5383    case METERING_MODE_MATRIX:
5384        exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5385        break;
5386    case METERING_MODE_SPOT:
5387        exifInfo->metering_mode = EXIF_METERING_SPOT;
5388        break;
5389    case METERING_MODE_AVERAGE:
5390    default:
5391        exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5392        break;
5393    }*/
5394    exifInfo->metering_mode = EXIF_METERING_CENTER;
5395
5396    //3 Flash
5397    if (m_ctlInfo.flash.m_flashDecisionResult)
5398        exifInfo->flash = 1;
5399    else
5400        exifInfo->flash = EXIF_DEF_FLASH;
5401
5402    //3 White Balance
5403    if (m_ctlInfo.awb.i_awbMode == AA_AWBMODE_WB_AUTO)
5404        exifInfo->white_balance = EXIF_WB_AUTO;
5405    else
5406        exifInfo->white_balance = EXIF_WB_MANUAL;
5407
5408    //3 Scene Capture Type
5409    switch (ctl->aa.sceneMode) {
5410    case AA_SCENE_MODE_PORTRAIT:
5411        exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5412        break;
5413    case AA_SCENE_MODE_LANDSCAPE:
5414        exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5415        break;
5416    case AA_SCENE_MODE_NIGHT_PORTRAIT:
5417        exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5418        break;
5419    default:
5420        exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5421        break;
5422    }
5423
5424    //2 0th IFD GPS Info Tags
5425    if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5426
5427        if (ctl->jpeg.gpsCoordinates[0] > 0)
5428            strcpy((char *)exifInfo->gps_latitude_ref, "N");
5429        else
5430            strcpy((char *)exifInfo->gps_latitude_ref, "S");
5431
5432        if (ctl->jpeg.gpsCoordinates[1] > 0)
5433            strcpy((char *)exifInfo->gps_longitude_ref, "E");
5434        else
5435            strcpy((char *)exifInfo->gps_longitude_ref, "W");
5436
5437        if (ctl->jpeg.gpsCoordinates[2] > 0)
5438            exifInfo->gps_altitude_ref = 0;
5439        else
5440            exifInfo->gps_altitude_ref = 1;
5441
5442        double latitude = fabs(ctl->jpeg.gpsCoordinates[0] / 10000.0);
5443        double longitude = fabs(ctl->jpeg.gpsCoordinates[1] / 10000.0);
5444        double altitude = fabs(ctl->jpeg.gpsCoordinates[2] / 100.0);
5445
5446        exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5447        exifInfo->gps_latitude[0].den = 1;
5448        exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5449        exifInfo->gps_latitude[1].den = 1;
5450        exifInfo->gps_latitude[2].num = (uint32_t)((((latitude - exifInfo->gps_latitude[0].num) * 60)
5451                                        - exifInfo->gps_latitude[1].num) * 60);
5452        exifInfo->gps_latitude[2].den = 1;
5453
5454        exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5455        exifInfo->gps_longitude[0].den = 1;
5456        exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5457        exifInfo->gps_longitude[1].den = 1;
5458        exifInfo->gps_longitude[2].num = (uint32_t)((((longitude - exifInfo->gps_longitude[0].num) * 60)
5459                                        - exifInfo->gps_longitude[1].num) * 60);
5460        exifInfo->gps_longitude[2].den = 1;
5461
5462        exifInfo->gps_altitude.num = (uint32_t)altitude;
5463        exifInfo->gps_altitude.den = 1;
5464
5465        struct tm tm_data;
5466        long timestamp;
5467        timestamp = (long)ctl->jpeg.gpsTimestamp;
5468        gmtime_r(&timestamp, &tm_data);
5469        exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5470        exifInfo->gps_timestamp[0].den = 1;
5471        exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5472        exifInfo->gps_timestamp[1].den = 1;
5473        exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5474        exifInfo->gps_timestamp[2].den = 1;
5475        snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5476                "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5477
5478        exifInfo->enableGps = true;
5479    } else {
5480        exifInfo->enableGps = false;
5481    }
5482
5483    //2 1st IFD TIFF Tags
5484    exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5485    exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5486}
5487
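// The per-thread release() helpers below only raise SIGNAL_THREAD_RELEASE; the
// corresponding thread loops (defined earlier in this file) are expected to observe
// the signal and exit on their own.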
5488ExynosCameraHWInterface2::MainThread::~MainThread()
5489{
5490    ALOGV("(%s):", __FUNCTION__);
5491}
5492
5493void ExynosCameraHWInterface2::MainThread::release()
5494{
5495    ALOGV("(%s):", __func__);
5496    SetSignal(SIGNAL_THREAD_RELEASE);
5497}
5498
5499ExynosCameraHWInterface2::SensorThread::~SensorThread()
5500{
5501    ALOGV("(%s):", __FUNCTION__);
5502}
5503
5504void ExynosCameraHWInterface2::SensorThread::release()
5505{
5506    ALOGV("(%s):", __func__);
5507    SetSignal(SIGNAL_THREAD_RELEASE);
5508}
5509
5510ExynosCameraHWInterface2::StreamThread::~StreamThread()
5511{
5512    ALOGV("(%s):", __FUNCTION__);
5513}
5514
5515void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
5516{
5517    ALOGV("DEBUG(%s):", __FUNCTION__);
5518    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
5519}
5520
5521void ExynosCameraHWInterface2::StreamThread::release()
5522{
5523    ALOGV("(%s):", __func__);
5524    SetSignal(SIGNAL_THREAD_RELEASE);
5525}
5526
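// Maps a service buffer back to its index in m_parameters, either by the mapped
// virtual address of plane 0 or by its gralloc buffer handle. Returns -1 if the
// buffer does not belong to this stream.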
5527int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5528{
5529    int index;
5530    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5531        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
5532            return index;
5533    }
5534    return -1;
5535}
5536
5537int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
5538{
5539    int index;
5540    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5541        if (m_parameters.svcBufHandle[index] == *bufHandle)
5542            return index;
5543    }
5544    return -1;
5545}
5546
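// Sub-stream bookkeeping: an attached sub-stream id is recorded in the first vacant
// slot of m_attachedSubStreams (up to NUM_MAX_SUBSTREAM entries). Attaching an id
// twice returns BAD_VALUE, a full table returns NO_MEMORY, and m_numRegisteredStream
// tracks how many streams are currently registered.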
5547status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
5548{
5549    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5550    int index, vacantIndex;
5551    bool vacancy = false;
5552
5553    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5554        if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
5555            vacancy = true;
5556            vacantIndex = index;
5557        } else if (m_attachedSubStreams[index].streamId == stream_id) {
5558            return BAD_VALUE;
5559        }
5560    }
5561    if (!vacancy)
5562        return NO_MEMORY;
5563    m_attachedSubStreams[vacantIndex].streamId = stream_id;
5564    m_attachedSubStreams[vacantIndex].priority = priority;
5565    m_numRegisteredStream++;
5566    return NO_ERROR;
5567}
5568
5569status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
5570{
5571    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5572    int index;
5573    bool found = false;
5574
5575    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5576        if (m_attachedSubStreams[index].streamId == stream_id) {
5577            found = true;
5578            break;
5579        }
5580    }
5581    if (!found)
5582        return BAD_VALUE;
5583    m_attachedSubStreams[index].streamId = -1;
5584    m_attachedSubStreams[index].priority = 0;
5585    m_numRegisteredStream--;
5586    return NO_ERROR;
5587}
5588
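// Thin wrappers around libion: createIonClient() lazily creates a client handle
// (returning 0 on failure) and deleteIonClient() destroys a valid handle and
// returns 0.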
5589int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
5590{
5591    if (ionClient == 0) {
5592        ionClient = ion_client_create();
5593        if (ionClient < 0) {
5594            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
5595            return 0;
5596        }
5597    }
5598    return ionClient;
5599}
5600
5601int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
5602{
5603    if (ionClient != 0) {
5604        if (ionClient > 0) {
5605            ion_client_destroy(ionClient);
5606        }
5607        ionClient = 0;
5608    }
5609    return ionClient;
5610}
5611
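// allocCameraMemory()/freeCameraMemory() manage the per-plane ion buffers of an
// ExynosBuffer: each plane with a non-zero size is allocated from ION_HEAP_EXYNOS_MASK
// (optionally cached, one bit per plane in cacheFlag) and mapped into this process;
// on any failure everything allocated so far is released and -1 is returned.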
5612int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
5613{
5614    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
5615}
5616
5617int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
5618{
5619    int ret = 0;
5620    int i = 0;
5621    int flag = 0;
5622
5623    if (ionClient == 0) {
5624        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
5625        return -1;
5626    }
5627
5628    for (i = 0 ; i < iMemoryNum ; i++) {
5629        if (buf->size.extS[i] == 0) {
5630            break;
5631        }
5632        if (1 << i & cacheFlag)
5633            flag = ION_FLAG_CACHED;
5634        else
5635            flag = 0;
5636        buf->fd.extFd[i] = ion_alloc(ionClient, \
5637                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK, flag);
5638        if ((buf->fd.extFd[i] == -1) || (buf->fd.extFd[i] == 0)) {
5639            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
5640            buf->fd.extFd[i] = -1;
5641            freeCameraMemory(buf, iMemoryNum);
5642            return -1;
5643        }
5644
5645        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
5646                                        buf->size.extS[i], 0);
5647        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
5648            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
5649            buf->virt.extP[i] = (char *)MAP_FAILED;
5650            freeCameraMemory(buf, iMemoryNum);
5651            return -1;
5652        }
5653        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
5654    }
5655
5656    return ret;
5657}
5658
5659void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
5660{
5661
5662    int i = 0;
5663    int ret = 0;
5664
5665    for (i = 0 ; i < iMemoryNum ; i++) {
5666        if (buf->fd.extFd[i] != -1) {
5667            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
5668                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
5669                if (ret < 0)
5670                    ALOGE("ERR(%s)", __FUNCTION__);
5671            }
5672            ion_free(buf->fd.extFd[i]);
5673        }
5674        buf->fd.extFd[i] = -1;
5675        buf->virt.extP[i] = (char *)MAP_FAILED;
5676        buf->size.extS[i] = 0;
5677    }
5678}
5679
5680void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
5681{
5682    int i = 0;
5683    for (i = 0 ; i < iMemoryNum ; i++) {
5684        buf->virt.extP[i] = (char *)MAP_FAILED;
5685        buf->fd.extFd[i] = -1;
5686        buf->size.extS[i] = 0;
5687    }
5688}
5689
5690
5691
5692
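// Module-level state: only one camera2_device_t instance exists at a time
// (g_cam2_device), a validity flag gates stream release while the device is being
// torn down, and g_camera2[] caches the per-camera ExynosCamera2 helpers.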
5693static camera2_device_t *g_cam2_device = NULL;
5694static bool g_camera_valid = false;
5695ExynosCamera2 * g_camera2[2] = { NULL, NULL };
5696
5697static int HAL2_camera_device_close(struct hw_device_t* device)
5698{
5699    ALOGV("%s: ENTER", __FUNCTION__);
5700    if (device) {
5701
5702        camera2_device_t *cam_device = (camera2_device_t *)device;
5703        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
5704        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
5705        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
5706        g_cam2_device = NULL;
5707        free(cam_device);
5708        g_camera_valid = false;
5709    }
5710
5711    ALOGV("%s: EXIT", __FUNCTION__);
5712    return 0;
5713}
5714
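// obj() recovers the C++ HAL object stashed in camera2_device_t::priv; the
// HAL2_device_* functions below are the C entry points of the camera2 device ops
// table and simply forward to the corresponding ExynosCameraHWInterface2 methods.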
5715static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
5716{
5717    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
5718}
5719
5720static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
5721            const camera2_request_queue_src_ops_t *request_src_ops)
5722{
5723    ALOGV("DEBUG(%s):", __FUNCTION__);
5724    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
5725}
5726
5727static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
5728{
5729    ALOGV("DEBUG(%s):", __FUNCTION__);
5730    return obj(dev)->notifyRequestQueueNotEmpty();
5731}
5732
5733static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
5734            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
5735{
5736    ALOGV("DEBUG(%s):", __FUNCTION__);
5737    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
5738}
5739
5740static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
5741{
5742    ALOGV("DEBUG(%s):", __FUNCTION__);
5743    return obj(dev)->getInProgressCount();
5744}
5745
5746static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
5747{
5748    ALOGV("DEBUG(%s):", __FUNCTION__);
5749    return obj(dev)->flushCapturesInProgress();
5750}
5751
5752static int HAL2_device_construct_default_request(const struct camera2_device *dev,
5753            int request_template, camera_metadata_t **request)
5754{
5755    ALOGV("DEBUG(%s):", __FUNCTION__);
5756    return obj(dev)->constructDefaultRequest(request_template, request);
5757}
5758
5759static int HAL2_device_allocate_stream(
5760            const struct camera2_device *dev,
5761            // inputs
5762            uint32_t width,
5763            uint32_t height,
5764            int      format,
5765            const camera2_stream_ops_t *stream_ops,
5766            // outputs
5767            uint32_t *stream_id,
5768            uint32_t *format_actual,
5769            uint32_t *usage,
5770            uint32_t *max_buffers)
5771{
5772    ALOGV("(%s): ", __FUNCTION__);
5773    return obj(dev)->allocateStream(width, height, format, stream_ops,
5774                                    stream_id, format_actual, usage, max_buffers);
5775}
5776
5777static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
5778            uint32_t stream_id,
5779            int num_buffers,
5780            buffer_handle_t *buffers)
5781{
5782    ALOGV("DEBUG(%s):", __FUNCTION__);
5783    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
5784}
5785
5786static int HAL2_device_release_stream(
5787        const struct camera2_device *dev,
5788            uint32_t stream_id)
5789{
5790    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
5791    if (!g_camera_valid)
5792        return 0;
5793    return obj(dev)->releaseStream(stream_id);
5794}
5795
5796static int HAL2_device_allocate_reprocess_stream(
5797           const struct camera2_device *dev,
5798            uint32_t width,
5799            uint32_t height,
5800            uint32_t format,
5801            const camera2_stream_in_ops_t *reprocess_stream_ops,
5802            // outputs
5803            uint32_t *stream_id,
5804            uint32_t *consumer_usage,
5805            uint32_t *max_buffers)
5806{
5807    ALOGV("DEBUG(%s):", __FUNCTION__);
5808    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
5809                                    stream_id, consumer_usage, max_buffers);
5810}
5811
5812static int HAL2_device_allocate_reprocess_stream_from_stream(
5813           const struct camera2_device *dev,
5814            uint32_t output_stream_id,
5815            const camera2_stream_in_ops_t *reprocess_stream_ops,
5816            // outputs
5817            uint32_t *stream_id)
5818{
5819    ALOGV("DEBUG(%s):", __FUNCTION__);
5820    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
5821                                    reprocess_stream_ops, stream_id);
5822}
5823
5824static int HAL2_device_release_reprocess_stream(
5825        const struct camera2_device *dev,
5826            uint32_t stream_id)
5827{
5828    ALOGV("DEBUG(%s):", __FUNCTION__);
5829    return obj(dev)->releaseReprocessStream(stream_id);
5830}
5831
5832static int HAL2_device_trigger_action(const struct camera2_device *dev,
5833           uint32_t trigger_id,
5834            int ext1,
5835            int ext2)
5836{
5837    ALOGV("DEBUG(%s):", __FUNCTION__);
5838    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
5839}
5840
5841static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
5842            camera2_notify_callback notify_cb,
5843            void *user)
5844{
5845    ALOGV("DEBUG(%s):", __FUNCTION__);
5846    return obj(dev)->setNotifyCallback(notify_cb, user);
5847}
5848
5849static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
5850            vendor_tag_query_ops_t **ops)
5851{
5852    ALOGV("DEBUG(%s):", __FUNCTION__);
5853    return obj(dev)->getMetadataVendorTagOps(ops);
5854}
5855
5856static int HAL2_device_dump(const struct camera2_device *dev, int fd)
5857{
5858    ALOGV("DEBUG(%s):", __FUNCTION__);
5859    return obj(dev)->dump(fd);
5860}
5861
5862
5863
5864
5865
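// This HAL statically exposes two cameras: id 0 is the back camera and id 1 the
// front camera (see HAL2_getCameraInfo below).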
5866static int HAL2_getNumberOfCameras()
5867{
5868    ALOGV("(%s): returning 2", __FUNCTION__);
5869    return 2;
5870}
5871
5872
5873static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
5874{
5875    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
5876    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
5877
5878    status_t res;
5879
5880    if (cameraId == 0) {
5881        info->facing = CAMERA_FACING_BACK;
5882        if (!g_camera2[0])
5883            g_camera2[0] = new ExynosCamera2(0);
5884    }
5885    else if (cameraId == 1) {
5886        info->facing = CAMERA_FACING_FRONT;
5887        if (!g_camera2[1])
5888            g_camera2[1] = new ExynosCamera2(1);
5889    }
5890    else
5891        return BAD_VALUE;
5892
5893    info->orientation = 0;
5894    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
5895    if (mCameraInfo[cameraId] == NULL) {
5896        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
5897        if (res != OK) {
5898            ALOGE("%s: Unable to allocate static info: %s (%d)",
5899                    __FUNCTION__, strerror(-res), res);
5900            return res;
5901        }
5902        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
5903        if (res != OK) {
5904            ALOGE("%s: Unable to fill in static info: %s (%d)",
5905                    __FUNCTION__, strerror(-res), res);
5906            return res;
5907        }
5908    }
5909    info->static_camera_characteristics = mCameraInfo[cameraId];
5910    return NO_ERROR;
5911}
5912
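// SET_METHOD() expands to a designated initializer (GCC "field : value" extension
// syntax) so each camera2_device_ops entry is wired to its HAL2_device_* wrapper by name.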
5913#define SET_METHOD(m) m : HAL2_device_##m
5914
5915static camera2_device_ops_t camera2_device_ops = {
5916        SET_METHOD(set_request_queue_src_ops),
5917        SET_METHOD(notify_request_queue_not_empty),
5918        SET_METHOD(set_frame_queue_dst_ops),
5919        SET_METHOD(get_in_progress_count),
5920        SET_METHOD(flush_captures_in_progress),
5921        SET_METHOD(construct_default_request),
5922        SET_METHOD(allocate_stream),
5923        SET_METHOD(register_stream_buffers),
5924        SET_METHOD(release_stream),
5925        SET_METHOD(allocate_reprocess_stream),
5926        SET_METHOD(allocate_reprocess_stream_from_stream),
5927        SET_METHOD(release_reprocess_stream),
5928        SET_METHOD(trigger_action),
5929        SET_METHOD(set_notify_callback),
5930        SET_METHOD(get_metadata_vendor_tag_ops),
5931        SET_METHOD(dump),
5932};
5933
5934#undef SET_METHOD
5935
5936
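// Opens camera "id". Only one device instance may exist: if the same camera is
// already open its existing device struct is returned, otherwise the call blocks
// until the previous instance closes. A new camera2_device_t is then allocated, its
// ops table and close hook are wired up, and the ExynosCameraHWInterface2 object is
// created; openInvalid, passed into the constructor, is checked afterwards and a
// zero value is treated as a failed open (-ENODEV).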
5937static int HAL2_camera_device_open(const struct hw_module_t* module,
5938                                  const char *id,
5939                                  struct hw_device_t** device)
5940{
5941    int cameraId = atoi(id);
5942    int openInvalid = 0;
5943
5944    g_camera_valid = false;
5945    ALOGV("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
5946    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
5947        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
5948        return -EINVAL;
5949    }
5950
5951    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
5952    if (g_cam2_device) {
5953        if (obj(g_cam2_device)->getCameraId() == cameraId) {
5954            ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
5955            goto done;
5956        } else {
5957
5958            while (g_cam2_device)
5959                usleep(10000);
5960        }
5961    }
5962
5963    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
5964    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
5965
5966    if (!g_cam2_device)
5967        return -ENOMEM;
5968
5969    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
5970    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
5971    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
5972    g_cam2_device->common.close   = HAL2_camera_device_close;
5973
5974    g_cam2_device->ops = &camera2_device_ops;
5975
5976    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
5977
5978    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
5979    if (!openInvalid) {
5980        ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
5981        return -ENODEV;
5982    }
5983done:
5984    *device = (hw_device_t *)g_cam2_device;
5985    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
5986    g_camera_valid = true;
5987
5988    return 0;
5989}
5990
5991
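// Standard Android HAL module plumbing: camera_module_methods supplies the open()
// hook, and HAL_MODULE_INFO_SYM is the symbol the camera service loads to discover
// this module, its API versions, and the get_number_of_cameras/get_camera_info
// entry points.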
5992static hw_module_methods_t camera_module_methods = {
5993            open : HAL2_camera_device_open
5994};
5995
5996extern "C" {
5997    struct camera_module HAL_MODULE_INFO_SYM = {
5998      common : {
5999          tag                : HARDWARE_MODULE_TAG,
6000          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
6001          hal_api_version    : HARDWARE_HAL_API_VERSION,
6002          id                 : CAMERA_HARDWARE_MODULE_ID,
6003          name               : "Exynos Camera HAL2",
6004          author             : "Samsung Corporation",
6005          methods            : &camera_module_methods,
6006          dso:                NULL,
6007          reserved:           {0},
6008      },
6009      get_number_of_cameras : HAL2_getNumberOfCameras,
6010      get_camera_info       : HAL2_getCameraInfo
6011    };
6012}
6013
6014}; // namespace android
6015