ExynosCameraHWInterface2.cpp revision 10e122bdf2db78378287834b1c0b725fbb426638
1/*
2**
3** Copyright 2008, The Android Open Source Project
4** Copyright 2012, Samsung Electronics Co. LTD
5**
6** Licensed under the Apache License, Version 2.0 (the "License");
7** you may not use this file except in compliance with the License.
8** You may obtain a copy of the License at
9**
10**     http://www.apache.org/licenses/LICENSE-2.0
11**
12** Unless required by applicable law or agreed to in writing, software
13** distributed under the License is distributed on an "AS IS" BASIS,
14** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15** See the License for the specific language governing permissions and
16** limitations under the License.
17*/
18
19/*!
20 * \file      ExynosCameraHWInterface2.cpp
21 * \brief     source file for Android Camera API 2.0 HAL
22 * \author    Sungjoong Kang(sj3.kang@samsung.com)
23 * \date      2012/07/10
24 *
25 * <b>Revision History: </b>
26 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27 *   Initial Release
28 *
29 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30 *   2nd Release
31 *
32 */
33
34//#define LOG_NDEBUG 0
35#define LOG_TAG "ExynosCameraHAL2"
36#include <utils/Log.h>
37
38#include "ExynosCameraHWInterface2.h"
39#include "exynos_format.h"
40
41namespace android {
42
43void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
44{
45    int nw;
46    int cnt = 0;
47    uint32_t written = 0;
48
49    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
50    int fd = open(fname, O_RDWR | O_CREAT, 0644);
51    if (fd < 0) {
52        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
53        return;
54    }
55
56    ALOGV("writing %d bytes to file [%s]", size, fname);
57    while (written < size) {
58        nw = ::write(fd, buf + written, size - written);
59        if (nw < 0) {
60            ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
61            break;
62        }
63        written += nw;
64        cnt++;
65    }
66    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
67    ::close(fd);
68}
69
70int get_pixel_depth(uint32_t fmt)
71{
72    int depth = 0;
73
74    switch (fmt) {
75    case V4L2_PIX_FMT_JPEG:
76        depth = 8;
77        break;
78
79    case V4L2_PIX_FMT_NV12:
80    case V4L2_PIX_FMT_NV21:
81    case V4L2_PIX_FMT_YUV420:
82    case V4L2_PIX_FMT_YVU420M:
83    case V4L2_PIX_FMT_NV12M:
84    case V4L2_PIX_FMT_NV12MT:
85        depth = 12;
86        break;
87
88    case V4L2_PIX_FMT_RGB565:
89    case V4L2_PIX_FMT_YUYV:
90    case V4L2_PIX_FMT_YVYU:
91    case V4L2_PIX_FMT_UYVY:
92    case V4L2_PIX_FMT_VYUY:
93    case V4L2_PIX_FMT_NV16:
94    case V4L2_PIX_FMT_NV61:
95    case V4L2_PIX_FMT_YUV422P:
96    case V4L2_PIX_FMT_SBGGR10:
97    case V4L2_PIX_FMT_SBGGR12:
98    case V4L2_PIX_FMT_SBGGR16:
99        depth = 16;
100        break;
101
102    case V4L2_PIX_FMT_RGB32:
103        depth = 32;
104        break;
105    default:
106        ALOGE("Get depth failed(format : %d)", fmt);
107        break;
108    }
109
110    return depth;
111}
112
113int cam_int_s_fmt(node_info_t *node)
114{
115    struct v4l2_format v4l2_fmt;
116    unsigned int framesize;
117    int ret;
118
119    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
120
121    v4l2_fmt.type = node->type;
122    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
123
124    if (node->planes >= 1) {
125        v4l2_fmt.fmt.pix_mp.width       = node->width;
126        v4l2_fmt.fmt.pix_mp.height      = node->height;
127        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
128        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
129    } else {
130        ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
131    }
132
133    /* Set up for capture */
134    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
135
136    if (ret < 0)
137        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
138
139
140    return ret;
141}
142
143int cam_int_reqbufs(node_info_t *node)
144{
145    struct v4l2_requestbuffers req;
146    int ret;
147
148    req.count = node->buffers;
149    req.type = node->type;
150    req.memory = node->memory;
151
152    ret = exynos_v4l2_reqbufs(node->fd, &req);
153
154    if (ret < 0)
155        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
156
157    return req.count;
158}
159
160int cam_int_qbuf(node_info_t *node, int index)
161{
162    struct v4l2_buffer v4l2_buf;
163    struct v4l2_plane planes[VIDEO_MAX_PLANES];
164    int i;
165    int ret = 0;
166
167    v4l2_buf.m.planes   = planes;
168    v4l2_buf.type       = node->type;
169    v4l2_buf.memory     = node->memory;
170    v4l2_buf.index      = index;
171    v4l2_buf.length     = node->planes;
172
173    for(i = 0; i < node->planes; i++){
174        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
175        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
176    }
177
178    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
179
180    if (ret < 0)
181        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
182
183    return ret;
184}
185
186int cam_int_streamon(node_info_t *node)
187{
188    enum v4l2_buf_type type = node->type;
189    int ret;
190
191
192    ret = exynos_v4l2_streamon(node->fd, type);
193
194    if (ret < 0)
195        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
196
197    ALOGV("On streaming I/O... ... fd(%d)", node->fd);
198
199    return ret;
200}
201
202int cam_int_streamoff(node_info_t *node)
203{
204    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
205    int ret;
206
207
208    ALOGV("Off streaming I/O... fd(%d)", node->fd);
209    ret = exynos_v4l2_streamoff(node->fd, type);
210
211    if (ret < 0)
212        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
213
214    return ret;
215}
216
217int isp_int_streamoff(node_info_t *node)
218{
219    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
220    int ret;
221
222    ALOGV("Off streaming I/O... fd(%d)", node->fd);
223    ret = exynos_v4l2_streamoff(node->fd, type);
224
225    if (ret < 0)
226        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
227
228    return ret;
229}
230
231int cam_int_dqbuf(node_info_t *node)
232{
233    struct v4l2_buffer v4l2_buf;
234    struct v4l2_plane planes[VIDEO_MAX_PLANES];
235    int ret;
236
237    v4l2_buf.type       = node->type;
238    v4l2_buf.memory     = node->memory;
239    v4l2_buf.m.planes   = planes;
240    v4l2_buf.length     = node->planes;
241
242    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
243    if (ret < 0)
244        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
245
246    return v4l2_buf.index;
247}
248
249int cam_int_dqbuf(node_info_t *node, int num_plane)
250{
251    struct v4l2_buffer v4l2_buf;
252    struct v4l2_plane planes[VIDEO_MAX_PLANES];
253    int ret;
254
255    v4l2_buf.type       = node->type;
256    v4l2_buf.memory     = node->memory;
257    v4l2_buf.m.planes   = planes;
258    v4l2_buf.length     = num_plane;
259
260    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
261    if (ret < 0)
262        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
263
264    return v4l2_buf.index;
265}
266
267int cam_int_s_input(node_info_t *node, int index)
268{
269    int ret;
270
271    ret = exynos_v4l2_s_input(node->fd, index);
272    if (ret < 0)
273        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
274
275    return ret;
276}
277
278
279gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
280
281RequestManager::RequestManager(SignalDrivenThread* main_thread):
282    m_lastAeMode(0),
283    m_lastAaMode(0),
284    m_lastAwbMode(0),
285    m_vdisBubbleEn(false),
286    m_lastAeComp(0),
287    m_lastCompletedFrameCnt(-1)
288{
289    m_metadataConverter = new MetadataConverter;
290    m_mainThread = main_thread;
291    ResetEntry();
292    m_sensorPipelineSkipCnt = 0;
293    return;
294}
295
296RequestManager::~RequestManager()
297{
298    ALOGV("%s", __FUNCTION__);
299    if (m_metadataConverter != NULL) {
300        delete m_metadataConverter;
301        m_metadataConverter = NULL;
302    }
303
304    releaseSensorQ();
305    return;
306}
307
308void RequestManager::ResetEntry()
309{
310    Mutex::Autolock lock(m_requestMutex);
311    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
312        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
313        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
314    }
315    m_numOfEntries = 0;
316    m_entryInsertionIndex = -1;
317    m_entryProcessingIndex = -1;
318    m_entryFrameOutputIndex = -1;
319}
320
321int RequestManager::GetNumEntries()
322{
323    return m_numOfEntries;
324}
325
326void RequestManager::SetDefaultParameters(int cropX)
327{
328    m_cropX = cropX;
329}
330
331bool RequestManager::IsRequestQueueFull()
332{
333    Mutex::Autolock lock(m_requestMutex);
334    if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
335        return true;
336    else
337        return false;
338}
339
340void RequestManager::RegisterRequest(camera_metadata_t * new_request)
341{
342    ALOGV("DEBUG(%s):", __FUNCTION__);
343
344    Mutex::Autolock lock(m_requestMutex);
345
346    request_manager_entry * newEntry = NULL;
347    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
348    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );
349
350
351    newEntry = &(entries[newInsertionIndex]);
352
353    if (newEntry->status!=EMPTY) {
354        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
355        return;
356    }
357    newEntry->status = REGISTERED;
358    newEntry->original_request = new_request;
359    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
360    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
361    newEntry->output_stream_count = 0;
362    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
363        newEntry->output_stream_count++;
364
365    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
366        newEntry->output_stream_count++;
367
368    m_numOfEntries++;
369    m_entryInsertionIndex = newInsertionIndex;
370
371
372    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
373    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
374}
375
376void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
377{
378    ALOGV("DEBUG(%s):", __FUNCTION__);
379    int frame_index;
380    request_manager_entry * currentEntry;
381
382    Mutex::Autolock lock(m_requestMutex);
383
384    frame_index = GetCompletedIndex();
385    currentEntry =  &(entries[frame_index]);
386    if (currentEntry->status != COMPLETED) {
387        CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
388                       m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
389        return;
390    }
391    if (deregistered_request)  *deregistered_request = currentEntry->original_request;
392
393    m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
394
395    currentEntry->status = EMPTY;
396    currentEntry->original_request = NULL;
397    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
398    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
399    currentEntry->output_stream_count = 0;
400    m_numOfEntries--;
401    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
402     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
403
404    CheckCompleted(GetNextIndex(frame_index));
405    return;
406}
407
408bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
409                camera_metadata_t ** prepared_frame, int afState)
410{
411    ALOGV("DEBUG(%s):", __FUNCTION__);
412    Mutex::Autolock lock(m_requestMutex);
413    status_t res = NO_ERROR;
414    int tempFrameOutputIndex = GetCompletedIndex();
415    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
416    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
417        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
418
419    if (currentEntry->status != COMPLETED) {
420        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
421
422        return false;
423    }
424    m_entryFrameOutputIndex = tempFrameOutputIndex;
425    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 25, 500); //estimated
426    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
427    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
428                m_tempFrameMetadata);
429    if (res!=NO_ERROR) {
430        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
431        return false;
432    }
433    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
434    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
435    *prepared_frame = m_tempFrameMetadata;
436    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
437        currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
438    // Dump();
439    return true;
440}
441
442int RequestManager::MarkProcessingRequest(ExynosBuffer* buf, int *afMode)
443{
444    struct camera2_shot_ext * shot_ext;
445    struct camera2_shot_ext * request_shot;
446    int targetStreamIndex = 0;
447    request_manager_entry * newEntry = NULL;
448    static int count = 0;
449
450    Mutex::Autolock lock(m_requestMutex);
451    if (m_numOfEntries == 0)  {
452        CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
453        return -1;
454    }
455
456    if ((m_entryProcessingIndex == m_entryInsertionIndex)
457        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
458        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
459         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
460        return -1;
461    }
462
463    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
464    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
465
466    newEntry = &(entries[newProcessingIndex]);
467    request_shot = &(newEntry->internal_shot);
468    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
469    if (newEntry->status != REGISTERED) {
470        CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
471        for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
472                CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
473        }
474        return -1;
475    }
476
477    newEntry->status = REQUESTED;
478
479    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
480
481    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
482    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
483    shot_ext->request_sensor = 1;
484    shot_ext->dis_bypass = 1;
485    shot_ext->dnr_bypass = 1;
486    shot_ext->fd_bypass = 1;
487    shot_ext->setfile = 0;
488
489    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
490    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
491    if (targetStreamIndex & MASK_OUTPUT_SCP)
492        shot_ext->request_scp = 1;
493
494    if (targetStreamIndex & MASK_OUTPUT_SCC)
495        shot_ext->request_scc = 1;
496
497    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
498        shot_ext->fd_bypass = 0;
499
500    if (count == 0){
501        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
502    } else
503        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
504
505    count++;
506    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
507    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
508    shot_ext->shot.magicNumber = 0x23456789;
509    shot_ext->shot.ctl.sensor.exposureTime = 0;
510    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
511    shot_ext->shot.ctl.sensor.sensitivity = 0;
512
513
514    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
515    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
516    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
517
518    m_entryProcessingIndex = newProcessingIndex;
519    return newProcessingIndex;
520}
521
522void RequestManager::NotifyStreamOutput(int frameCnt)
523{
524    int index;
525
526    Mutex::Autolock lock(m_requestMutex);
527    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
528
529    index = FindEntryIndexByFrameCnt(frameCnt);
530    if (index == -1) {
531        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
532        return;
533    }
534    ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt,   entries[index].output_stream_count);
535
536    entries[index].output_stream_count--;  //TODO : match stream id also
537    CheckCompleted(index);
538}
539
540void RequestManager::CheckCompleted(int index)
541{
542    if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
543        && (entries[index].output_stream_count <= 0)){
544        ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
545                index, entries[index].internal_shot.shot.ctl.request.frameCount );
546        entries[index].status = COMPLETED;
547        if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount)
548            m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
549    }
550}
551
552int RequestManager::GetCompletedIndex()
553{
554    return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
555}
556
557void  RequestManager::pushSensorQ(int index)
558{
559    Mutex::Autolock lock(m_requestMutex);
560    m_sensorQ.push_back(index);
561}
562
563int RequestManager::popSensorQ()
564{
565   List<int>::iterator sensor_token;
566   int index;
567
568    Mutex::Autolock lock(m_requestMutex);
569
570    if(m_sensorQ.size() == 0)
571        return -1;
572
573    sensor_token = m_sensorQ.begin()++;
574    index = *sensor_token;
575    m_sensorQ.erase(sensor_token);
576
577    return (index);
578}
579
580void RequestManager::releaseSensorQ()
581{
582    List<int>::iterator r;
583
584    Mutex::Autolock lock(m_requestMutex);
585    ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
586
587    while(m_sensorQ.size() > 0){
588        r  = m_sensorQ.begin()++;
589        m_sensorQ.erase(r);
590    }
591    return;
592}
593
594void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
595{
596    int index;
597    struct camera2_shot_ext * request_shot;
598    nsecs_t timeStamp;
599    int i;
600
601    Mutex::Autolock lock(m_requestMutex);
602    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
603
604    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
605        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
606            && (entries[i].status == CAPTURED)){
607            entries[i].status = METADONE;
608            break;
609        }
610    }
611
612    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
613        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
614        return;
615    }
616
617    request_manager_entry * newEntry = &(entries[i]);
618    request_shot = &(newEntry->internal_shot);
619
620    timeStamp = request_shot->shot.dm.sensor.timeStamp;
621    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
622    request_shot->shot.dm.sensor.timeStamp = timeStamp;
623    m_lastTimeStamp = timeStamp;
624    CheckCompleted(i);
625}
626
627void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
628{
629    int index, targetStreamIndex;
630    struct camera2_shot_ext * request_shot;
631
632    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
633    if (frameCnt < 0)
634        return;
635
636    index = FindEntryIndexByFrameCnt(frameCnt);
637    if (index == -1) {
638        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
639        return;
640    }
641
642    request_manager_entry * newEntry = &(entries[index]);
643    request_shot = &(newEntry->internal_shot);
644    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
645    shot_ext->shot.ctl.request.frameCount = frameCnt;
646    shot_ext->request_sensor = 1;
647    shot_ext->dis_bypass = 1;
648    shot_ext->dnr_bypass = 1;
649    shot_ext->fd_bypass = 1;
650    shot_ext->drc_bypass = 1;
651    shot_ext->setfile = 0;
652
653    shot_ext->request_scc = 0;
654    shot_ext->request_scp = 0;
655
656    shot_ext->isReprocessing = request_shot->isReprocessing;
657    shot_ext->reprocessInput = request_shot->reprocessInput;
658    shot_ext->shot.ctl.request.outputStreams[0] = 0;
659
660    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
661    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
662    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
663
664    // mapping flash UI mode from aeMode
665    if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
666        if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
667            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
668        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
669    }
670    // mapping awb UI mode form awbMode
671    if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
672        ctl_info->awb.i_awbMode = request_shot->shot.ctl.aa.awbMode;
673
674    // Apply ae/awb lock or unlock
675    if (request_shot->ae_lock == AEMODE_LOCK_ON)
676            request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
677    if (request_shot->awb_lock == AWBMODE_LOCK_ON)
678            request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
679
680    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
681        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
682    }
683    else {
684        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
685        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
686    }
687    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
688        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
689    }
690    else {
691        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
692        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
693    }
694    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
695        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
696    }
697    else {
698        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
699        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
700    }
701    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
702        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
703    }
704    else {
705        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
706        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
707    }
708
709    if (request_shot->shot.ctl.aa.videoStabilizationMode) {
710        m_vdisBubbleEn = true;
711        shot_ext->dis_bypass = 0;
712    } else {
713        m_vdisBubbleEn = false;
714        shot_ext->dis_bypass = 1;
715    }
716
717    shot_ext->shot.ctl.aa.afTrigger = 0;
718
719    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
720    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
721    if (targetStreamIndex & MASK_OUTPUT_SCP)
722        shot_ext->request_scp = 1;
723
724    if (targetStreamIndex & MASK_OUTPUT_SCC)
725        shot_ext->request_scc = 1;
726
727    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
728        shot_ext->fd_bypass = 0;
729
730    if (targetStreamIndex & STREAM_MASK_RECORD) {
731        shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
732        shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
733    }
734
735    ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
736    (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
737    (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
738    (int)(shot_ext->shot.ctl.aa.afMode));
739}
740
741bool    RequestManager::IsVdisEnable(void)
742{
743        return m_vdisBubbleEn;
744}
745
746int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
747{
748    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
749        if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
750            return i;
751    }
752    return -1;
753}
754
755void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
756{
757    int index = FindEntryIndexByFrameCnt(frameCnt);
758    if (index == -1) {
759        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
760        return;
761    }
762
763    request_manager_entry * currentEntry = &(entries[index]);
764    if (currentEntry->internal_shot.isReprocessing == 1) {
765        ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
766        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
767    } else {
768        currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
769        ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
770            index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
771    }
772}
773
774
775nsecs_t  RequestManager::GetTimestampByFrameCnt(int frameCnt)
776{
777    int index = FindEntryIndexByFrameCnt(frameCnt);
778    if (index == -1) {
779        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
780        return m_lastTimeStamp;
781    }
782    else
783        return GetTimestamp(index);
784}
785
786nsecs_t  RequestManager::GetTimestamp(int index)
787{
788    Mutex::Autolock lock(m_requestMutex);
789    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
790        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
791        return 0;
792    }
793
794    request_manager_entry * currentEntry = &(entries[index]);
795    nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
796    if (frameTime == 0) {
797        ALOGV("DEBUG(%s): timestamp null,  returning saved value", __FUNCTION__);
798        frameTime = m_lastTimeStamp;
799    }
800    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
801    return frameTime;
802}
803
804uint8_t  RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
805{
806    int index = FindEntryIndexByFrameCnt(frameCnt);
807    if (index == -1) {
808        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
809        return 0;
810    }
811    else
812        return GetOutputStream(index);
813}
814
815uint8_t  RequestManager::GetOutputStream(int index)
816{
817    Mutex::Autolock lock(m_requestMutex);
818    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
819        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
820        return 0;
821    }
822
823    request_manager_entry * currentEntry = &(entries[index]);
824    return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
825}
826
827int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
828{
829    Mutex::Autolock lock(m_requestMutex);
830    int i;
831
832    if (m_numOfEntries == 0) {
833        CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
834        return -1;
835    }
836
837    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
838        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
839            continue;
840
841        if (entries[i].status == REQUESTED) {
842            entries[i].status = CAPTURED;
843            return entries[i].internal_shot.shot.ctl.request.frameCount;
844        }
845        CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
846
847    }
848    CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
849
850    return -1;
851}
852
853void     RequestManager::SetInitialSkip(int count)
854{
855    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
856    if (count > m_sensorPipelineSkipCnt)
857        m_sensorPipelineSkipCnt = count;
858}
859
860int     RequestManager::GetSkipCnt()
861{
862    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
863    if (m_sensorPipelineSkipCnt == 0)
864        return m_sensorPipelineSkipCnt;
865    else
866        return --m_sensorPipelineSkipCnt;
867}
868
869void RequestManager::Dump(void)
870{
871    int i = 0;
872    request_manager_entry * currentEntry;
873    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
874    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
875
876    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
877        currentEntry =  &(entries[i]);
878        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
879        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
880            currentEntry->output_stream_count,
881            currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
882    }
883}
884
885int     RequestManager::GetNextIndex(int index)
886{
887    index++;
888    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
889        index = 0;
890
891    return index;
892}
893
894int     RequestManager::GetPrevIndex(int index)
895{
896    index--;
897    if (index < 0)
898        index = NUM_MAX_REQUEST_MGR_ENTRY-1;
899
900    return index;
901}
902
903ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
904            m_requestQueueOps(NULL),
905            m_frameQueueOps(NULL),
906            m_callbackCookie(NULL),
907            m_numOfRemainingReqInSvc(0),
908            m_isRequestQueuePending(false),
909            m_isRequestQueueNull(true),
910            m_isIspStarted(false),
911            m_ionCameraClient(0),
912            m_zoomRatio(1),
913            m_scp_closing(false),
914            m_scp_closed(false),
915            m_afState(HAL_AFSTATE_INACTIVE),
916            m_afMode(NO_CHANGE),
917            m_afMode2(NO_CHANGE),
918            m_vdisBubbleCnt(0),
919            m_vdisDupFrame(0),
920            m_IsAfModeUpdateRequired(false),
921            m_IsAfTriggerRequired(false),
922            m_IsAfLockRequired(false),
923            m_sccLocalBufferValid(false),
924            m_wideAspect(false),
925            m_scpOutputSignalCnt(0),
926            m_scpOutputImageCnt(0),
927            m_afTriggerId(0),
928            m_afPendingTriggerId(0),
929            m_afModeWaitingCnt(0),
930            m_scpForceSuspended(false),
931            m_halDevice(dev),
932            m_nightCaptureCnt(0),
933            m_nightCaptureFrameCnt(0),
934            m_cameraId(cameraId),
935            m_thumbNailW(160),
936            m_thumbNailH(120)
937{
938    ALOGD("(%s): ENTER", __FUNCTION__);
939    int ret = 0;
940    int res = 0;
941
942    m_exynosPictureCSC = NULL;
943    m_exynosVideoCSC = NULL;
944
945    if (!m_grallocHal) {
946        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
947        if (ret)
948            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
949    }
950
951    m_camera2 = camera;
952    m_ionCameraClient = createIonClient(m_ionCameraClient);
953    if(m_ionCameraClient == 0)
954        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
955
956
957    m_BayerManager = new BayerBufManager();
958    m_mainThread    = new MainThread(this);
959    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
960    *openInvalid = InitializeISPChain();
961    if (*openInvalid < 0) {
962        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
963        // clean process
964        // 1. close video nodes
965        // SCP
966        res = exynos_v4l2_close(m_camera_info.scp.fd);
967        if (res != NO_ERROR ) {
968            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
969        }
970        // SCC
971        res = exynos_v4l2_close(m_camera_info.capture.fd);
972        if (res != NO_ERROR ) {
973            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
974        }
975        // Sensor
976        res = exynos_v4l2_close(m_camera_info.sensor.fd);
977        if (res != NO_ERROR ) {
978            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
979        }
980        // ISP
981        res = exynos_v4l2_close(m_camera_info.isp.fd);
982        if (res != NO_ERROR ) {
983            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
984        }
985    } else {
986        m_sensorThread  = new SensorThread(this);
987        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
988        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
989        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
990
991        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
992            m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
993        CSC_METHOD cscMethod = CSC_METHOD_HW;
994        m_exynosPictureCSC = csc_init(cscMethod);
995        if (m_exynosPictureCSC == NULL)
996            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
997        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
998
999        m_exynosVideoCSC = csc_init(cscMethod);
1000        if (m_exynosVideoCSC == NULL)
1001            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1002        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
1003
1004        m_setExifFixedAttribute();
1005
1006        // contol information clear
1007        // flash
1008        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
1009        m_ctlInfo.flash.m_afFlashDoneFlg= false;
1010        m_ctlInfo.flash.m_flashEnableFlg = false;
1011        m_ctlInfo.flash.m_flashFrameCount = 0;
1012        m_ctlInfo.flash.m_flashCnt = 0;
1013        m_ctlInfo.flash.m_flashTimeOut = 0;
1014        m_ctlInfo.flash.m_flashDecisionResult = false;
1015        m_ctlInfo.flash.m_flashTorchMode = false;
1016        m_ctlInfo.flash.m_precaptureState = 0;
1017        m_ctlInfo.flash.m_precaptureTriggerId = 0;
1018        // awb
1019        m_ctlInfo.awb.i_awbMode = AA_AWBMODE_OFF;
1020        // ae
1021        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
1022        // af
1023        m_ctlInfo.af.m_afTriggerTimeOut = 0;
1024        // scene
1025        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
1026    }
1027    ALOGD("(%s): EXIT", __FUNCTION__);
1028}
1029
1030ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
1031{
1032    ALOGD("(%s): ENTER", __FUNCTION__);
1033    this->release();
1034    ALOGD("(%s): EXIT", __FUNCTION__);
1035}
1036
1037void ExynosCameraHWInterface2::release()
1038{
1039    int i, res;
1040    ALOGD("(HAL2::release): ENTER");
1041
1042    if (m_streamThreads[1] != NULL) {
1043        m_streamThreads[1]->release();
1044        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
1045    }
1046
1047    if (m_streamThreads[0] != NULL) {
1048        m_streamThreads[0]->release();
1049        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
1050    }
1051
1052    if (m_sensorThread != NULL) {
1053        m_sensorThread->release();
1054    }
1055
1056    if (m_mainThread != NULL) {
1057        m_mainThread->release();
1058    }
1059
1060    if (m_exynosPictureCSC)
1061        csc_deinit(m_exynosPictureCSC);
1062    m_exynosPictureCSC = NULL;
1063
1064    if (m_exynosVideoCSC)
1065        csc_deinit(m_exynosVideoCSC);
1066    m_exynosVideoCSC = NULL;
1067
1068    if (m_streamThreads[1] != NULL) {
1069        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
1070        while (!m_streamThreads[1]->IsTerminated())
1071            usleep(SIG_WAITING_TICK);
1072        ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 1 termination");
1073        m_streamThreads[1] = NULL;
1074    }
1075
1076    if (m_streamThreads[0] != NULL) {
1077        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
1078        while (!m_streamThreads[0]->IsTerminated())
1079            usleep(SIG_WAITING_TICK);
1080        ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 0 termination");
1081        m_streamThreads[0] = NULL;
1082    }
1083
1084    if (m_sensorThread != NULL) {
1085        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
1086        while (!m_sensorThread->IsTerminated())
1087            usleep(SIG_WAITING_TICK);
1088        ALOGD("(HAL2::release): END   Waiting for (indirect) sensor thread termination");
1089        m_sensorThread = NULL;
1090    }
1091
1092    if (m_mainThread != NULL) {
1093        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
1094        while (!m_mainThread->IsTerminated())
1095            usleep(SIG_WAITING_TICK);
1096        ALOGD("(HAL2::release): END   Waiting for (indirect) main thread termination");
1097        m_mainThread = NULL;
1098    }
1099
1100    if (m_requestManager != NULL) {
1101        delete m_requestManager;
1102        m_requestManager = NULL;
1103    }
1104
1105    if (m_BayerManager != NULL) {
1106        delete m_BayerManager;
1107        m_BayerManager = NULL;
1108    }
1109    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
1110        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1111
1112    if (m_sccLocalBufferValid) {
1113        for (i = 0; i < NUM_SCC_BUFFERS; i++)
1114#ifdef ENABLE_FRAME_SYNC
1115            freeCameraMemory(&m_sccLocalBuffer[i], 2);
1116#else
1117            freeCameraMemory(&m_sccLocalBuffer[i], 1);
1118#endif
1119    }
1120    else {
1121        for (i = 0; i < NUM_SCC_BUFFERS; i++)
1122            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1123    }
1124
1125    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
1126    res = exynos_v4l2_close(m_camera_info.sensor.fd);
1127    if (res != NO_ERROR ) {
1128        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1129    }
1130
1131    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
1132    res = exynos_v4l2_close(m_camera_info.isp.fd);
1133    if (res != NO_ERROR ) {
1134        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1135    }
1136
1137    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
1138    res = exynos_v4l2_close(m_camera_info.capture.fd);
1139    if (res != NO_ERROR ) {
1140        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1141    }
1142
1143    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
1144    res = exynos_v4l2_close(m_camera_info.scp.fd);
1145    if (res != NO_ERROR ) {
1146        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1147    }
1148    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
1149    deleteIonClient(m_ionCameraClient);
1150
1151    ALOGD("(HAL2::release): EXIT");
1152}
1153
1154int ExynosCameraHWInterface2::InitializeISPChain()
1155{
1156    char node_name[30];
1157    int fd = 0;
1158    int i;
1159    int ret = 0;
1160
1161    /* Open Sensor */
1162    memset(&node_name, 0x00, sizeof(char[30]));
1163    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1164    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1165
1166    if (fd < 0) {
1167        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1168    }
1169    else {
1170        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1171    }
1172    m_camera_info.sensor.fd = fd;
1173
1174    /* Open ISP */
1175    memset(&node_name, 0x00, sizeof(char[30]));
1176    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1177    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1178
1179    if (fd < 0) {
1180        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1181    }
1182    else {
1183        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1184    }
1185    m_camera_info.isp.fd = fd;
1186
1187    /* Open ScalerC */
1188    memset(&node_name, 0x00, sizeof(char[30]));
1189    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1190    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1191
1192    if (fd < 0) {
1193        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1194    }
1195    else {
1196        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1197    }
1198    m_camera_info.capture.fd = fd;
1199
1200    /* Open ScalerP */
1201    memset(&node_name, 0x00, sizeof(char[30]));
1202    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1203    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1204    if (fd < 0) {
1205        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1206    }
1207    else {
1208        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1209    }
1210    m_camera_info.scp.fd = fd;
1211
1212    if(m_cameraId == 0)
1213        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1214    else
1215        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1216
1217    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1218    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1219    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1220
1221    m_camera_info.dummy_shot.dis_bypass = 1;
1222    m_camera_info.dummy_shot.dnr_bypass = 1;
1223    m_camera_info.dummy_shot.fd_bypass = 1;
1224
1225    /*sensor setting*/
1226    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1227    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1228    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1229
1230    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1231    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1232
1233    /*request setting*/
1234    m_camera_info.dummy_shot.request_sensor = 1;
1235    m_camera_info.dummy_shot.request_scc = 0;
1236    m_camera_info.dummy_shot.request_scp = 0;
1237    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1238
1239    m_camera_info.sensor.width = m_camera2->getSensorRawW();
1240    m_camera_info.sensor.height = m_camera2->getSensorRawH();
1241
1242    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1243    m_camera_info.sensor.planes = 2;
1244    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1245    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1246    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1247
1248    for(i = 0; i < m_camera_info.sensor.buffers; i++){
1249        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1250        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1251        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
1252        allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
1253    }
1254
1255    m_camera_info.isp.width = m_camera_info.sensor.width;
1256    m_camera_info.isp.height = m_camera_info.sensor.height;
1257    m_camera_info.isp.format = m_camera_info.sensor.format;
1258    m_camera_info.isp.planes = m_camera_info.sensor.planes;
1259    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1260    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1261    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1262
1263    for(i = 0; i < m_camera_info.isp.buffers; i++){
1264        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1265        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1266        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1267        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1268        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1269        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1270        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
1271    };
1272
1273    /* init ISP */
1274    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1275    if (ret < 0) {
1276        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
1277        return false;
1278    }
1279    cam_int_s_fmt(&(m_camera_info.isp));
1280    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1281    cam_int_reqbufs(&(m_camera_info.isp));
1282    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1283    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1284
1285    /* init Sensor */
1286    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1287    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1288    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1289        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1290    }
1291    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1292    cam_int_reqbufs(&(m_camera_info.sensor));
1293    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1294    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1295        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1296        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1297        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1298        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1299                sizeof(struct camera2_shot_ext));
1300    }
1301
1302    for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1303        cam_int_qbuf(&(m_camera_info.sensor), i);
1304
1305    for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1306        m_requestManager->pushSensorQ(i);
1307
1308    ALOGV("== stream_on :: sensor");
1309    cam_int_streamon(&(m_camera_info.sensor));
1310    m_camera_info.sensor.status = true;
1311
1312    /* init Capture */
1313    m_camera_info.capture.width = m_camera2->getSensorW();
1314    m_camera_info.capture.height = m_camera2->getSensorH();
1315    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1316#ifdef ENABLE_FRAME_SYNC
1317    m_camera_info.capture.planes = 2;
1318#else
1319    m_camera_info.capture.planes = 1;
1320#endif
1321    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1322    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1323    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1324
1325    m_camera_info.capture.status = false;
1326
1327    return true;
1328}
1329
1330void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
1331{
1332    ALOGV("(%s)", __FUNCTION__);
1333    StreamThread *AllocatedStream;
1334    stream_parameters_t newParameters;
1335    uint32_t format_actual;
1336
1337
1338    if (!threadExists) {
1339        m_streamThreads[1]  = new StreamThread(this, 1);
1340    }
1341    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1342    if (!threadExists) {
1343        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1344        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1345        AllocatedStream->m_numRegisteredStream = 1;
1346    }
1347    AllocatedStream->m_index        = 1;
1348
1349    format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1350
1351    newParameters.width             = m_camera2->getSensorW();
1352    newParameters.height            = m_camera2->getSensorH();
1353    newParameters.format            = format_actual;
1354    newParameters.streamOps         = NULL;
1355    newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
1356#ifdef ENABLE_FRAME_SYNC
1357    newParameters.planes            = 2;
1358#else
1359    newParameters.planes            = 1;
1360#endif
1361
1362    newParameters.numSvcBufsInHal   = 0;
1363
1364    newParameters.node              = &m_camera_info.capture;
1365
1366    AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
1367    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1368
1369    if (!threadExists) {
1370        if (!m_sccLocalBufferValid) {
1371            for (int i = 0; i < m_camera_info.capture.buffers; i++){
1372                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
1373                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1374#ifdef ENABLE_FRAME_SYNC
1375                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
1376                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
1377#else
1378                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1379#endif
1380                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
1381            }
1382            m_sccLocalBufferValid = true;
1383        }
1384    } else {
1385        if (m_sccLocalBufferValid) {
1386             for (int i = 0; i < m_camera_info.capture.buffers; i++)
1387                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
1388        } else {
1389            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
1390        }
1391    }
1392    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
1393    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1394    cam_int_s_fmt(newParameters.node);
1395    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1396    cam_int_reqbufs(newParameters.node);
1397    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1398
1399    for (int i = 0; i < newParameters.node->buffers; i++) {
1400        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1401        cam_int_qbuf(newParameters.node, i);
1402        newParameters.svcBufStatus[i] = ON_DRIVER;
1403    }
1404
1405    ALOGV("== stream_on :: capture");
1406    if (cam_int_streamon(newParameters.node) < 0) {
1407        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1408    } else {
1409        m_camera_info.capture.status = true;
1410    }
1411
1412    AllocatedStream->setParameter(&newParameters);
1413    AllocatedStream->m_activated    = true;
1414    AllocatedStream->m_isBufferInit = true;
1415}
1416
1417void ExynosCameraHWInterface2::StartISP()
1418{
1419    ALOGV("== stream_on :: isp");
1420    cam_int_streamon(&(m_camera_info.isp));
1421    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1422}
1423
1424int ExynosCameraHWInterface2::getCameraId() const
1425{
1426    return m_cameraId;
1427}
1428
1429int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1430{
1431    ALOGV("DEBUG(%s):", __FUNCTION__);
1432    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1433            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1434        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1435        return 0;
1436    }
1437    else {
1438        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
1439        return 1;
1440    }
1441}
1442
1443int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1444{
1445    int i = 0;
1446
1447    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1448    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1449        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
1450        return 0;
1451    }
1452    m_isRequestQueueNull = false;
1453    if (m_requestManager->GetNumEntries() == 0)
1454        m_requestManager->SetInitialSkip(5);
1455
1456    if (m_isIspStarted == false) {
1457        /* isp */
1458        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1459        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1460        cam_int_s_fmt(&(m_camera_info.isp));
1461        cam_int_reqbufs(&(m_camera_info.isp));
1462
1463        /* sensor */
1464        if (m_camera_info.sensor.status == false) {
1465            cam_int_s_fmt(&(m_camera_info.sensor));
1466            cam_int_reqbufs(&(m_camera_info.sensor));
1467
1468            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1469                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1470                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1471                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1472                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1473                        sizeof(struct camera2_shot_ext));
1474            }
1475            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1476                cam_int_qbuf(&(m_camera_info.sensor), i);
1477
1478            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1479                m_requestManager->pushSensorQ(i);
1480            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
1481            cam_int_streamon(&(m_camera_info.sensor));
1482            m_camera_info.sensor.status = true;
1483        }
1484    }
1485    if (!(m_streamThreads[1].get())) {
1486        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
1487        StartSCCThread(false);
1488    } else {
1489        if (m_streamThreads[1]->m_activated ==  false) {
1490            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1491            StartSCCThread(true);
1492        } else {
1493            if (m_camera_info.capture.status == false) {
1494                m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1495                cam_int_s_fmt(&(m_camera_info.capture));
1496                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1497                cam_int_reqbufs(&(m_camera_info.capture));
1498                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1499
1500                if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
1501                    StreamThread *          targetStream = m_streamThreads[1].get();
1502                    stream_parameters_t     *targetStreamParms = &(targetStream->m_parameters);
1503                    node_info_t             *currentNode = targetStreamParms->node;
1504
1505                    struct v4l2_buffer v4l2_buf;
1506                    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1507
1508                    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
1509                        v4l2_buf.m.planes   = planes;
1510                        v4l2_buf.type       = currentNode->type;
1511                        v4l2_buf.memory     = currentNode->memory;
1512
1513                        v4l2_buf.length     = currentNode->planes;
1514                        v4l2_buf.index      = i;
1515                        ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
1516
1517                        if (i < currentNode->buffers) {
1518#ifdef ENABLE_FRAME_SYNC
1519                            v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
1520                            v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
1521                            v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
1522                            v4l2_buf.length += targetStreamParms->metaPlanes;
1523                            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
1524                            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
1525
1526                            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1527#endif
1528                            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1529                                ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
1530                            }
1531                            ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
1532                            targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1533                        }
1534                        else {
1535                            targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1536                        }
1537
1538                    }
1539
1540                } else {
1541                    for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1542                        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1543                        cam_int_qbuf(&(m_camera_info.capture), i);
1544                    }
1545                }
1546                ALOGV("== stream_on :: capture");
1547                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1548                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1549                } else {
1550                    m_camera_info.capture.status = true;
1551                }
1552            }
1553            if (m_scpForceSuspended) {
1554                m_scpForceSuspended = false;
1555            }
1556        }
1557    }
1558    if (m_isIspStarted == false) {
1559        StartISP();
1560        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1561        m_requestManager->SetInitialSkip(5);
1562        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1563        m_isIspStarted = true;
1564    }
1565    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1566    return 0;
1567}
1568
1569int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1570{
1571    ALOGV("DEBUG(%s):", __FUNCTION__);
1572    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1573            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1574        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1575        return 0;
1576    }
1577    else {
1578        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1579        return 1;
1580    }
1581}
1582
1583int ExynosCameraHWInterface2::getInProgressCount()
1584{
1585    int inProgressCount = m_requestManager->GetNumEntries();
1586    ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
1587    return inProgressCount;
1588}
1589
1590int ExynosCameraHWInterface2::flushCapturesInProgress()
1591{
1592    return 0;
1593}
1594
1595int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1596{
1597    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1598
1599    if (request == NULL) return BAD_VALUE;
1600    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1601        return BAD_VALUE;
1602    }
1603    status_t res;
1604    // Pass 1, calculate size and allocate
1605    res = m_camera2->constructDefaultRequest(request_template,
1606            request,
1607            true);
1608    if (res != OK) {
1609        return res;
1610    }
1611    // Pass 2, build request
1612    res = m_camera2->constructDefaultRequest(request_template,
1613            request,
1614            false);
1615    if (res != OK) {
1616        ALOGE("Unable to populate new request for template %d",
1617                request_template);
1618    }
1619
1620    return res;
1621}
1622
1623int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1624                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1625{
1626    ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1627    bool useDirectOutput = false;
1628    StreamThread *AllocatedStream;
1629    stream_parameters_t newParameters;
1630    substream_parameters_t *subParameters;
1631    StreamThread *parentStream;
1632    status_t res;
1633    int allocCase = 0;
1634
1635    if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
1636            m_camera2->isSupportedResolution(width, height)) {
1637        if (!(m_streamThreads[0].get())) {
1638            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1639            allocCase = 0;
1640        }
1641        else {
1642            if ((m_streamThreads[0].get())->m_activated == true) {
1643                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1644                allocCase = 1;
1645            }
1646            else {
1647                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1648                allocCase = 2;
1649            }
1650        }
1651
1652        // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio.
1653        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1654                    || (width == 720 && height == 480) || (width == 1440 && height == 960)
1655                    || (width == 1344 && height == 896)) {
1656            m_wideAspect = true;
1657        } else {
1658            m_wideAspect = false;
1659        }
1660        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1661
1662        if (allocCase == 0 || allocCase == 2) {
1663            *stream_id = STREAM_ID_PREVIEW;
1664
1665            m_streamThreads[0]  = new StreamThread(this, *stream_id);
1666
1667            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1668            AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1669            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1670
1671            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1672            *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1673            *max_buffers                        = 6;
1674
1675            newParameters.width                 = width;
1676            newParameters.height                = height;
1677            newParameters.format                = *format_actual;
1678            newParameters.streamOps             = stream_ops;
1679            newParameters.usage                 = *usage;
1680            newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
1681            newParameters.numOwnSvcBuffers      = *max_buffers;
1682            newParameters.planes                = NUM_PLANES(*format_actual);
1683            newParameters.metaPlanes            = 1;
1684            newParameters.numSvcBufsInHal       = 0;
1685            newParameters.minUndequedBuffer     = 3;
1686
1687            newParameters.node                  = &m_camera_info.scp;
1688            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1689            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1690
1691            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1692            AllocatedStream->m_index            = 0;
1693            AllocatedStream->setParameter(&newParameters);
1694            AllocatedStream->m_activated = true;
1695            AllocatedStream->m_numRegisteredStream = 1;
1696            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1697            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1698            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1699            if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1700                AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1701            if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1702                AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1703            return 0;
1704        } else if (allocCase == 1) {
1705            *stream_id = STREAM_ID_RECORD;
1706
1707            subParameters = &m_subStreams[STREAM_ID_RECORD];
1708            memset(subParameters, 0, sizeof(substream_parameters_t));
1709
1710            parentStream = (StreamThread*)(m_streamThreads[0].get());
1711            if (!parentStream) {
1712                return 1;
1713            }
1714
1715            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1716            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1717            *max_buffers = 6;
1718
1719            subParameters->type         = SUBSTREAM_TYPE_RECORD;
1720            subParameters->width        = width;
1721            subParameters->height       = height;
1722            subParameters->format       = *format_actual;
1723            subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1724            subParameters->streamOps     = stream_ops;
1725            subParameters->usage         = *usage;
1726            subParameters->numOwnSvcBuffers = *max_buffers;
1727            subParameters->numSvcBufsInHal  = 0;
1728            subParameters->needBufferInit    = false;
1729            subParameters->minUndequedBuffer = 2;
1730
1731            res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1732            if (res != NO_ERROR) {
1733                ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1734                return 1;
1735            }
1736            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1737            ALOGV("(%s): Enabling Record", __FUNCTION__);
1738            return 0;
1739        }
1740    }
1741    else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
1742            && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) {
1743
1744        if (!(m_streamThreads[1].get())) {
1745            ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1746            useDirectOutput = true;
1747        }
1748        else {
1749            ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1750            useDirectOutput = false;
1751        }
1752        if (useDirectOutput) {
1753            *stream_id = STREAM_ID_ZSL;
1754
1755            m_streamThreads[1]  = new StreamThread(this, *stream_id);
1756            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1757            AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1758            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1759
1760            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1761            *max_buffers                        = 6;
1762
1763            *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1764            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1765            *max_buffers = 6;
1766
1767            newParameters.width                 = width;
1768            newParameters.height                = height;
1769            newParameters.format                = *format_actual;
1770            newParameters.streamOps             = stream_ops;
1771            newParameters.usage                 = *usage;
1772            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1773            newParameters.numOwnSvcBuffers      = *max_buffers;
1774            newParameters.planes                = NUM_PLANES(*format_actual);
1775            newParameters.metaPlanes            = 1;
1776
1777            newParameters.numSvcBufsInHal       = 0;
1778            newParameters.minUndequedBuffer     = 2;
1779
1780            newParameters.node                  = &m_camera_info.capture;
1781            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1782            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1783
1784            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1785            AllocatedStream->m_index            = 1;
1786            AllocatedStream->setParameter(&newParameters);
1787            AllocatedStream->m_activated = true;
1788            AllocatedStream->m_numRegisteredStream = 1;
1789            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1790            return 0;
1791        } else {
1792            bool bJpegExists = false;
1793            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1794            subParameters = &m_subStreams[STREAM_ID_JPEG];
1795            if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
1796                ALOGD("(%s): jpeg stream exists", __FUNCTION__);
1797                bJpegExists = true;
1798                AllocatedStream->detachSubStream(STREAM_ID_JPEG);
1799            }
1800            AllocatedStream->m_releasing = true;
1801            ALOGD("START stream thread 1 release %d", __LINE__);
1802            do {
1803                AllocatedStream->release();
1804                usleep(SIG_WAITING_TICK);
1805            } while (AllocatedStream->m_releasing);
1806            ALOGD("END   stream thread 1 release %d", __LINE__);
1807
1808            *stream_id = STREAM_ID_ZSL;
1809
1810            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1811
1812            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1813            *max_buffers                        = 6;
1814
1815            *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1816            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1817            *max_buffers = 6;
1818
1819            newParameters.width                 = width;
1820            newParameters.height                = height;
1821            newParameters.format                = *format_actual;
1822            newParameters.streamOps             = stream_ops;
1823            newParameters.usage                 = *usage;
1824            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1825            newParameters.numOwnSvcBuffers      = *max_buffers;
1826            newParameters.planes                = NUM_PLANES(*format_actual);
1827            newParameters.metaPlanes            = 1;
1828
1829            newParameters.numSvcBufsInHal       = 0;
1830            newParameters.minUndequedBuffer     = 4;
1831
1832            newParameters.node                  = &m_camera_info.capture;
1833            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1834            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1835
1836            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1837            AllocatedStream->m_index            = 1;
1838            AllocatedStream->setParameter(&newParameters);
1839            AllocatedStream->m_activated = true;
1840            AllocatedStream->m_numRegisteredStream = 1;
1841            if (bJpegExists) {
1842                AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
1843            }
1844            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1845            return 0;
1846
1847        }
1848    }
1849    else if (format == HAL_PIXEL_FORMAT_BLOB
1850            && m_camera2->isSupportedJpegResolution(width, height)) {
1851        *stream_id = STREAM_ID_JPEG;
1852
1853        subParameters = &m_subStreams[*stream_id];
1854        memset(subParameters, 0, sizeof(substream_parameters_t));
1855
1856        if (!(m_streamThreads[1].get())) {
1857            ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1858            StartSCCThread(false);
1859        }
1860        else if (m_streamThreads[1]->m_activated ==  false) {
1861            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1862            StartSCCThread(true);
1863        }
1864        parentStream = (StreamThread*)(m_streamThreads[1].get());
1865
1866        *format_actual = HAL_PIXEL_FORMAT_BLOB;
1867        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1868        *max_buffers = 4;
1869
1870        subParameters->type          = SUBSTREAM_TYPE_JPEG;
1871        subParameters->width         = width;
1872        subParameters->height        = height;
1873        subParameters->format        = *format_actual;
1874        subParameters->svcPlanes     = 1;
1875        subParameters->streamOps     = stream_ops;
1876        subParameters->usage         = *usage;
1877        subParameters->numOwnSvcBuffers = *max_buffers;
1878        subParameters->numSvcBufsInHal  = 0;
1879        subParameters->needBufferInit    = false;
1880        subParameters->minUndequedBuffer = 2;
1881
1882        res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1883        if (res != NO_ERROR) {
1884            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1885            return 1;
1886        }
1887        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1888        ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1889        return 0;
1890    }
1891    else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1892        *stream_id = STREAM_ID_PRVCB;
1893
1894        subParameters = &m_subStreams[STREAM_ID_PRVCB];
1895        memset(subParameters, 0, sizeof(substream_parameters_t));
1896
1897        parentStream = (StreamThread*)(m_streamThreads[0].get());
1898        if (!parentStream) {
1899            return 1;
1900        }
1901
1902        *format_actual = format;
1903        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1904        *max_buffers = 6;
1905
1906        subParameters->type         = SUBSTREAM_TYPE_PRVCB;
1907        subParameters->width        = width;
1908        subParameters->height       = height;
1909        subParameters->format       = *format_actual;
1910        subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1911        subParameters->streamOps     = stream_ops;
1912        subParameters->usage         = *usage;
1913        subParameters->numOwnSvcBuffers = *max_buffers;
1914        subParameters->numSvcBufsInHal  = 0;
1915        subParameters->needBufferInit    = false;
1916        subParameters->minUndequedBuffer = 2;
1917
1918        if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1919            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1920            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1921        }
1922        else {
1923            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1924            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
1925        }
1926
1927        res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
1928        if (res != NO_ERROR) {
1929            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1930            return 1;
1931        }
1932        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1933        ALOGV("(%s): Enabling previewcb", __FUNCTION__);
1934        return 0;
1935    }
1936    ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
1937    return 1;
1938}
1939
1940int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1941        int num_buffers, buffer_handle_t *registeringBuffers)
1942{
1943    int                     i,j;
1944    void                    *virtAddr[3];
1945    int                     plane_index = 0;
1946    StreamThread *          targetStream;
1947    stream_parameters_t     *targetStreamParms;
1948    node_info_t             *currentNode;
1949
1950    struct v4l2_buffer v4l2_buf;
1951    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1952
1953    ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1954        stream_id, num_buffers, (uint32_t)registeringBuffers);
1955
1956    if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
1957        targetStream = m_streamThreads[0].get();
1958        targetStreamParms = &(m_streamThreads[0]->m_parameters);
1959
1960    }
1961    else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
1962        substream_parameters_t  *targetParms;
1963        targetParms = &m_subStreams[stream_id];
1964
1965        targetParms->numSvcBuffers = num_buffers;
1966
1967        for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
1968            ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
1969                i, stream_id, (uint32_t)(registeringBuffers[i]));
1970            if (m_grallocHal) {
1971                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1972                       targetParms->usage, 0, 0,
1973                       targetParms->width, targetParms->height, virtAddr) != 0) {
1974                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1975                }
1976                else {
1977                    ExynosBuffer currentBuf;
1978                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1979                    if (targetParms->svcPlanes == 1) {
1980                        currentBuf.fd.extFd[0] = priv_handle->fd;
1981                        currentBuf.size.extS[0] = priv_handle->size;
1982                        currentBuf.size.extS[1] = 0;
1983                        currentBuf.size.extS[2] = 0;
1984                    } else if (targetParms->svcPlanes == 2) {
1985                        currentBuf.fd.extFd[0] = priv_handle->fd;
1986                        currentBuf.fd.extFd[1] = priv_handle->fd1;
1987
1988                    } else if (targetParms->svcPlanes == 3) {
1989                        currentBuf.fd.extFd[0] = priv_handle->fd;
1990                        currentBuf.fd.extFd[1] = priv_handle->fd1;
1991                        currentBuf.fd.extFd[2] = priv_handle->fd2;
1992                    }
1993                    for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
1994                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1995                        CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
1996                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
1997                             (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
1998                    }
1999                    targetParms->svcBufStatus[i]  = ON_SERVICE;
2000                    targetParms->svcBuffers[i]    = currentBuf;
2001                    targetParms->svcBufHandle[i]  = registeringBuffers[i];
2002                }
2003            }
2004        }
2005        targetParms->needBufferInit = true;
2006        return 0;
2007    }
2008    else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
2009        targetStream = m_streamThreads[1].get();
2010        targetStreamParms = &(m_streamThreads[1]->m_parameters);
2011    }
2012    else {
2013        ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
2014        return 1;
2015    }
2016
2017    if (targetStream->streamType == STREAM_TYPE_DIRECT) {
2018        if (num_buffers < targetStreamParms->numHwBuffers) {
2019            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
2020                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
2021            return 1;
2022        }
2023    }
2024    CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
2025            __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
2026            targetStreamParms->height, targetStreamParms->planes);
2027    targetStreamParms->numSvcBuffers = num_buffers;
2028    currentNode = targetStreamParms->node;
2029    currentNode->width      = targetStreamParms->width;
2030    currentNode->height     = targetStreamParms->height;
2031    currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
2032    currentNode->planes     = targetStreamParms->planes;
2033    currentNode->buffers    = targetStreamParms->numHwBuffers;
2034    cam_int_s_input(currentNode, m_camera_info.sensor_id);
2035    cam_int_s_fmt(currentNode);
2036    cam_int_reqbufs(currentNode);
2037    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
2038        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
2039            i, (uint32_t)(registeringBuffers[i]));
2040                v4l2_buf.m.planes   = planes;
2041                v4l2_buf.type       = currentNode->type;
2042                v4l2_buf.memory     = currentNode->memory;
2043                v4l2_buf.index      = i;
2044                v4l2_buf.length     = currentNode->planes;
2045
2046                ExynosBuffer currentBuf;
2047                ExynosBuffer metaBuf;
2048                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2049
2050                m_getAlignedYUVSize(currentNode->format,
2051                    currentNode->width, currentNode->height, &currentBuf);
2052
2053                ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
2054                if (currentNode->planes == 1) {
2055                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2056                    currentBuf.fd.extFd[0] = priv_handle->fd;
2057                    currentBuf.size.extS[0] = priv_handle->size;
2058                    currentBuf.size.extS[1] = 0;
2059                    currentBuf.size.extS[2] = 0;
2060                } else if (currentNode->planes == 2) {
2061                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2062                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
2063                    currentBuf.fd.extFd[0] = priv_handle->fd;
2064                    currentBuf.fd.extFd[1] = priv_handle->fd1;
2065
2066                } else if (currentNode->planes == 3) {
2067                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2068                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
2069                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
2070                    currentBuf.fd.extFd[0] = priv_handle->fd;
2071                    currentBuf.fd.extFd[2] = priv_handle->fd1;
2072                    currentBuf.fd.extFd[1] = priv_handle->fd2;
2073                }
2074
2075                for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
2076                    currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
2077                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
2078                    CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
2079                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2080                         (unsigned int)currentBuf.virt.extP[plane_index],
2081                         v4l2_buf.m.planes[plane_index].length);
2082                }
2083
2084                if (i < currentNode->buffers) {
2085
2086
2087#ifdef ENABLE_FRAME_SYNC
2088                    /* add plane for metadata*/
2089                    metaBuf.size.extS[0] = 4*1024;
2090                    allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
2091
2092                    v4l2_buf.length += targetStreamParms->metaPlanes;
2093                    v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
2094                    v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
2095
2096                    ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
2097#endif
2098                    if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2099                        ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
2100                            __FUNCTION__, stream_id, currentNode->fd);
2101                    }
2102                    ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
2103                            __FUNCTION__, stream_id, currentNode->fd);
2104                    targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
2105                }
2106                else {
2107                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
2108                }
2109
2110                targetStreamParms->svcBuffers[i]       = currentBuf;
2111                targetStreamParms->metaBuffers[i] = metaBuf;
2112                targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
2113            }
2114
2115    ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
2116    cam_int_streamon(targetStreamParms->node);
2117    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
2118    currentNode->status = true;
2119    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
2120
2121    return 0;
2122}
2123
2124int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
2125{
2126    StreamThread *targetStream;
2127    status_t res = NO_ERROR;
2128    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2129    bool releasingScpMain = false;
2130
2131    if (stream_id == STREAM_ID_PREVIEW) {
2132        targetStream = (StreamThread*)(m_streamThreads[0].get());
2133        targetStream->m_numRegisteredStream--;
2134        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2135        releasingScpMain = true;
2136        for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2137            for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2138                ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2139                                targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2140                CAM_LOGD("DBG(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2141                              targetStream->m_parameters.svcBuffers[i].fd.extFd[j], targetStream->m_parameters.svcBuffers[i].virt.extP[j]);
2142            }
2143        }
2144    } else if (stream_id == STREAM_ID_JPEG) {
2145        targetStream = (StreamThread*)(m_streamThreads[1].get());
2146        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2147        if (m_resizeBuf.size.s != 0) {
2148            freeCameraMemory(&m_resizeBuf, 1);
2149        }
2150        if (targetStream)
2151            res = targetStream->detachSubStream(stream_id);
2152        if (res != NO_ERROR) {
2153            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2154            return 1;
2155        }
2156        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2157        return 0;
2158    } else if (stream_id == STREAM_ID_RECORD) {
2159        targetStream = (StreamThread*)(m_streamThreads[0].get());
2160        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2161        if (targetStream)
2162            res = targetStream->detachSubStream(stream_id);
2163        else
2164            return 0;
2165    } else if (stream_id == STREAM_ID_PRVCB) {
2166        targetStream = (StreamThread*)(m_streamThreads[0].get());
2167        if (m_resizeBuf.size.s != 0) {
2168            freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
2169        }
2170        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2171        if (targetStream)
2172            res = targetStream->detachSubStream(stream_id);
2173        else
2174            return 0;
2175    } else if (stream_id == STREAM_ID_ZSL) {
2176        targetStream = (StreamThread*)(m_streamThreads[1].get());
2177        targetStream->m_numRegisteredStream--;
2178        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2179    } else {
2180        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2181        return 1;
2182    }
2183
2184    if (m_sensorThread != NULL) {
2185        m_sensorThread->release();
2186        ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
2187        while (!m_sensorThread->IsTerminated())
2188            usleep(10000);
2189        ALOGD("(%s): END   Waiting for (indirect) sensor thread termination", __FUNCTION__);
2190    }
2191    else {
2192        ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
2193    }
2194
2195    if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2196        ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2197        targetStream = (StreamThread*)(m_streamThreads[1].get());
2198        targetStream->m_releasing = true;
2199        ALOGD("START stream thread release %d", __LINE__);
2200        do {
2201            targetStream->release();
2202            usleep(SIG_WAITING_TICK);
2203        } while (targetStream->m_releasing);
2204        m_camera_info.capture.status = false;
2205        ALOGD("END   stream thread release %d", __LINE__);
2206    }
2207
2208    if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2209        ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2210        targetStream = (StreamThread*)(m_streamThreads[0].get());
2211        targetStream->m_releasing = true;
2212        ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2213        do {
2214            targetStream->release();
2215            usleep(SIG_WAITING_TICK);
2216        } while (targetStream->m_releasing);
2217        ALOGD("(%s): END   Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2218        targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2219
2220        if (targetStream != NULL) {
2221            ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
2222            while (!targetStream->IsTerminated())
2223                usleep(SIG_WAITING_TICK);
2224            ALOGD("(%s): END   Waiting for (indirect) stream thread termination", __FUNCTION__);
2225            m_streamThreads[0] = NULL;
2226        }
2227        if (m_camera_info.capture.status == true) {
2228            m_scpForceSuspended = true;
2229        }
2230        m_isIspStarted = false;
2231    }
2232    ALOGV("(%s): END", __FUNCTION__);
2233    return 0;
2234}
2235
2236int ExynosCameraHWInterface2::allocateReprocessStream(
2237    uint32_t width, uint32_t height, uint32_t format,
2238    const camera2_stream_in_ops_t *reprocess_stream_ops,
2239    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
2240{
2241    ALOGV("DEBUG(%s):", __FUNCTION__);
2242    return 0;
2243}
2244
2245int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2246            uint32_t output_stream_id,
2247            const camera2_stream_in_ops_t *reprocess_stream_ops,
2248            // outputs
2249            uint32_t *stream_id)
2250{
2251    ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2252    *stream_id = STREAM_ID_JPEG_REPROCESS;
2253
2254    m_reprocessStreamId = *stream_id;
2255    m_reprocessOps = reprocess_stream_ops;
2256    m_reprocessOutputStreamId = output_stream_id;
2257    return 0;
2258}
2259
2260int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2261{
2262    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2263    if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2264        m_reprocessStreamId = 0;
2265        m_reprocessOps = NULL;
2266        m_reprocessOutputStreamId = 0;
2267        return 0;
2268    }
2269    return 1;
2270}
2271
2272int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2273{
2274    ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2275
2276    switch (trigger_id) {
2277    case CAMERA2_TRIGGER_AUTOFOCUS:
2278        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2279        OnAfTriggerStart(ext1);
2280        break;
2281
2282    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2283        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2284        OnAfCancel(ext1);
2285        break;
2286    case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2287        ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2288        OnPrecaptureMeteringTriggerStart(ext1);
2289        break;
2290    default:
2291        break;
2292    }
2293    return 0;
2294}
2295
2296int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2297{
2298    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2299    m_notifyCb = notify_cb;
2300    m_callbackCookie = user;
2301    return 0;
2302}
2303
2304int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2305{
2306    ALOGV("DEBUG(%s):", __FUNCTION__);
2307    return 0;
2308}
2309
2310int ExynosCameraHWInterface2::dump(int fd)
2311{
2312    ALOGV("DEBUG(%s):", __FUNCTION__);
2313    return 0;
2314}
2315
2316void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2317{
2318    switch (colorFormat) {
2319    // 1p
2320    case V4L2_PIX_FMT_RGB565 :
2321    case V4L2_PIX_FMT_YUYV :
2322    case V4L2_PIX_FMT_UYVY :
2323    case V4L2_PIX_FMT_VYUY :
2324    case V4L2_PIX_FMT_YVYU :
2325        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2326        buf->size.extS[1] = 0;
2327        buf->size.extS[2] = 0;
2328        break;
2329    // 2p
2330    case V4L2_PIX_FMT_NV12 :
2331    case V4L2_PIX_FMT_NV12T :
2332    case V4L2_PIX_FMT_NV21 :
2333        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2334        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2335        buf->size.extS[2] = 0;
2336        break;
2337    case V4L2_PIX_FMT_NV12M :
2338    case V4L2_PIX_FMT_NV12MT_16X16 :
2339    case V4L2_PIX_FMT_NV21M:
2340        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2341        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2342        buf->size.extS[2] = 0;
2343        break;
2344    case V4L2_PIX_FMT_NV16 :
2345    case V4L2_PIX_FMT_NV61 :
2346        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2347        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2348        buf->size.extS[2] = 0;
2349        break;
2350     // 3p
2351    case V4L2_PIX_FMT_YUV420 :
2352    case V4L2_PIX_FMT_YVU420 :
2353        buf->size.extS[0] = (w * h);
2354        buf->size.extS[1] = (w * h) >> 2;
2355        buf->size.extS[2] = (w * h) >> 2;
2356        break;
2357    case V4L2_PIX_FMT_YUV420M:
2358    case V4L2_PIX_FMT_YVU420M :
2359    case V4L2_PIX_FMT_YUV422P :
2360        buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
2361        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2362        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2363        break;
2364    default:
2365        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2366        return;
2367        break;
2368    }
2369}
2370
2371bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2372                                             int  dst_w,  int   dst_h,
2373                                             int *crop_x, int *crop_y,
2374                                             int *crop_w, int *crop_h,
2375                                             int zoom)
2376{
2377    *crop_w = src_w;
2378    *crop_h = src_h;
2379
2380    if (   src_w != dst_w
2381        || src_h != dst_h) {
2382        float src_ratio = 1.0f;
2383        float dst_ratio = 1.0f;
2384
2385        // ex : 1024 / 768
2386        src_ratio = (float)src_w / (float)src_h;
2387
2388        // ex : 352  / 288
2389        dst_ratio = (float)dst_w / (float)dst_h;
2390
2391        if (dst_w * dst_h < src_w * src_h) {
2392            if (dst_ratio <= src_ratio) {
2393                // shrink w
2394                *crop_w = src_h * dst_ratio;
2395                *crop_h = src_h;
2396            } else {
2397                // shrink h
2398                *crop_w = src_w;
2399                *crop_h = src_w / dst_ratio;
2400            }
2401        } else {
2402            if (dst_ratio <= src_ratio) {
2403                // shrink w
2404                *crop_w = src_h * dst_ratio;
2405                *crop_h = src_h;
2406            } else {
2407                // shrink h
2408                *crop_w = src_w;
2409                *crop_h = src_w / dst_ratio;
2410            }
2411        }
2412    }
2413
2414    if (zoom != 0) {
2415        float zoomLevel = ((float)zoom + 10.0) / 10.0;
2416        *crop_w = (int)((float)*crop_w / zoomLevel);
2417        *crop_h = (int)((float)*crop_h / zoomLevel);
2418    }
2419
2420    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2421    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2422    if (w_align != 0) {
2423        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2424            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
2425            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2426        }
2427        else
2428            *crop_w -= w_align;
2429    }
2430
2431    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2432    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2433    if (h_align != 0) {
2434        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2435            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
2436            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2437        }
2438        else
2439            *crop_h -= h_align;
2440    }
2441
2442    *crop_x = (src_w - *crop_w) >> 1;
2443    *crop_y = (src_h - *crop_h) >> 1;
2444
2445    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2446        *crop_x -= 1;
2447
2448    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2449        *crop_y -= 1;
2450
2451    return true;
2452}
2453
2454BayerBufManager::BayerBufManager()
2455{
2456    ALOGV("DEBUG(%s): ", __FUNCTION__);
2457    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2458        entries[i].status = BAYER_ON_HAL_EMPTY;
2459        entries[i].reqFrameCnt = 0;
2460    }
2461    sensorEnqueueHead = 0;
2462    sensorDequeueHead = 0;
2463    ispEnqueueHead = 0;
2464    ispDequeueHead = 0;
2465    numOnSensor = 0;
2466    numOnIsp = 0;
2467    numOnHalFilled = 0;
2468    numOnHalEmpty = NUM_BAYER_BUFFERS;
2469}
2470
2471BayerBufManager::~BayerBufManager()
2472{
2473    ALOGV("%s", __FUNCTION__);
2474}
2475
2476int     BayerBufManager::GetIndexForSensorEnqueue()
2477{
2478    int ret = 0;
2479    if (numOnHalEmpty == 0)
2480        ret = -1;
2481    else
2482        ret = sensorEnqueueHead;
2483    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2484    return ret;
2485}
2486
2487int    BayerBufManager::MarkSensorEnqueue(int index)
2488{
2489    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2490
2491    // sanity check
2492    if (index != sensorEnqueueHead) {
2493        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2494        return -1;
2495    }
2496    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2497        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2498            index, entries[index].status, BAYER_ON_HAL_EMPTY);
2499        return -1;
2500    }
2501
2502    entries[index].status = BAYER_ON_SENSOR;
2503    entries[index].reqFrameCnt = 0;
2504    numOnHalEmpty--;
2505    numOnSensor++;
2506    sensorEnqueueHead = GetNextIndex(index);
2507    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2508        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2509    return 0;
2510}
2511
2512int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
2513{
2514    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2515
2516    if (entries[index].status != BAYER_ON_SENSOR) {
2517        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2518            index, entries[index].status, BAYER_ON_SENSOR);
2519        return -1;
2520    }
2521
2522    entries[index].status = BAYER_ON_HAL_FILLED;
2523    numOnHalFilled++;
2524    numOnSensor--;
2525
2526    return 0;
2527}
2528
2529int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2530{
2531    int ret = 0;
2532    if (numOnHalFilled == 0)
2533        ret = -1;
2534    else {
2535        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2536        ret = ispEnqueueHead;
2537    }
2538    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2539    return ret;
2540}
2541
2542int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2543{
2544    int ret = 0;
2545    if (numOnIsp == 0)
2546        ret = -1;
2547    else {
2548        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2549        ret = ispDequeueHead;
2550    }
2551    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2552    return ret;
2553}
2554
2555int    BayerBufManager::MarkIspEnqueue(int index)
2556{
2557    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2558
2559    // sanity check
2560    if (index != ispEnqueueHead) {
2561        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2562        return -1;
2563    }
2564    if (entries[index].status != BAYER_ON_HAL_FILLED) {
2565        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2566            index, entries[index].status, BAYER_ON_HAL_FILLED);
2567        return -1;
2568    }
2569
2570    entries[index].status = BAYER_ON_ISP;
2571    numOnHalFilled--;
2572    numOnIsp++;
2573    ispEnqueueHead = GetNextIndex(index);
2574    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2575        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2576    return 0;
2577}
2578
2579int    BayerBufManager::MarkIspDequeue(int index)
2580{
2581    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2582
2583    // sanity check
2584    if (index != ispDequeueHead) {
2585        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2586        return -1;
2587    }
2588    if (entries[index].status != BAYER_ON_ISP) {
2589        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2590            index, entries[index].status, BAYER_ON_ISP);
2591        return -1;
2592    }
2593
2594    entries[index].status = BAYER_ON_HAL_EMPTY;
2595    entries[index].reqFrameCnt = 0;
2596    numOnHalEmpty++;
2597    numOnIsp--;
2598    ispDequeueHead = GetNextIndex(index);
2599    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2600        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2601    return 0;
2602}
2603
2604int BayerBufManager::GetNumOnSensor()
2605{
2606    return numOnSensor;
2607}
2608
2609int BayerBufManager::GetNumOnHalFilled()
2610{
2611    return numOnHalFilled;
2612}
2613
2614int BayerBufManager::GetNumOnIsp()
2615{
2616    return numOnIsp;
2617}
2618
2619int     BayerBufManager::GetNextIndex(int index)
2620{
2621    index++;
2622    if (index >= NUM_BAYER_BUFFERS)
2623        index = 0;
2624
2625    return index;
2626}
2627
2628void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2629{
2630    camera_metadata_t *currentRequest = NULL;
2631    camera_metadata_t *currentFrame = NULL;
2632    size_t numEntries = 0;
2633    size_t frameSize = 0;
2634    camera_metadata_t * preparedFrame = NULL;
2635    camera_metadata_t *deregisteredRequest = NULL;
2636    uint32_t currentSignal = self->GetProcessingSignal();
2637    MainThread *  selfThread      = ((MainThread*)self);
2638    int res = 0;
2639
2640    int ret;
2641
2642    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2643
2644    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2645        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2646
2647        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2648        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2649        return;
2650    }
2651
2652    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2653        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2654        if (m_requestManager->IsRequestQueueFull()==false) {
2655            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2656            if (NULL == currentRequest) {
2657                ALOGD("DEBUG(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
2658                m_isRequestQueueNull = true;
2659                if (m_requestManager->IsVdisEnable())
2660                    m_vdisBubbleCnt = 1;
2661            }
2662            else {
2663                m_requestManager->RegisterRequest(currentRequest);
2664
2665                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2666                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2667                if (m_requestManager->IsRequestQueueFull()==false)
2668                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2669
2670                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2671            }
2672        }
2673        else {
2674            m_isRequestQueuePending = true;
2675        }
2676    }
2677
2678    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2679        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2680        /*while (1)*/ {
2681            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2682            if (ret == false)
2683                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2684
2685            m_requestManager->DeregisterRequest(&deregisteredRequest);
2686
2687            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2688            if (ret < 0)
2689                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2690
2691            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2692            if (ret < 0)
2693                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2694
2695            if (currentFrame==NULL) {
2696                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2697            }
2698            else {
2699                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2700            }
2701            res = append_camera_metadata(currentFrame, preparedFrame);
2702            if (res==0) {
2703                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2704                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2705            }
2706            else {
2707                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2708            }
2709        }
2710        if (!m_isRequestQueueNull) {
2711            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2712        }
2713
2714        if (getInProgressCount()>0) {
2715            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2716            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2717        }
2718    }
2719    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2720    return;
2721}
2722
2723void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2724{
2725    ALOGD("####  common Section");
2726    ALOGD("####                 magic(%x) ",
2727        shot_ext->shot.magicNumber);
2728    ALOGD("####  ctl Section");
2729    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2730        shot_ext->shot.ctl.request.metadataMode,
2731        shot_ext->shot.ctl.lens.aperture,
2732        shot_ext->shot.ctl.sensor.exposureTime,
2733        shot_ext->shot.ctl.sensor.frameDuration,
2734        shot_ext->shot.ctl.sensor.sensitivity,
2735        shot_ext->shot.ctl.aa.awbMode);
2736
2737    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2738        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2739        shot_ext->shot.ctl.request.outputStreams[0]);
2740
2741    ALOGD("####  DM Section");
2742    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2743        shot_ext->shot.dm.request.metadataMode,
2744        shot_ext->shot.dm.lens.aperture,
2745        shot_ext->shot.dm.sensor.exposureTime,
2746        shot_ext->shot.dm.sensor.frameDuration,
2747        shot_ext->shot.dm.sensor.sensitivity,
2748        shot_ext->shot.dm.sensor.timeStamp,
2749        shot_ext->shot.dm.aa.awbMode,
2750        shot_ext->shot.dm.request.frameCount );
2751}
2752
2753void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2754{
2755    // Flash
2756    switch (m_ctlInfo.flash.m_flashCnt) {
2757    case IS_FLASH_STATE_ON:
2758        ALOGV("(%s): [Flash] Flash ON for Capture", __FUNCTION__);
2759        // check AF locked
2760        if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
2761            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2762                if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2763                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2764                    m_ctlInfo.flash.m_flashTimeOut = 5;
2765                } else
2766                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2767                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2768            } else {
2769                m_ctlInfo.flash.m_flashTimeOut--;
2770            }
2771        } else {
2772            if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2773                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2774                m_ctlInfo.flash.m_flashTimeOut = 5;
2775            } else
2776                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2777            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2778        }
2779        break;
2780    case IS_FLASH_STATE_ON_WAIT:
2781        break;
2782    case IS_FLASH_STATE_ON_DONE:
2783        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2784            // auto transition at pre-capture trigger
2785            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2786        break;
2787    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2788        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
2789        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2790        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2791        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2792        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2793        break;
2794    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2795    case IS_FLASH_STATE_AUTO_WAIT:
2796        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2797        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2798        break;
2799    case IS_FLASH_STATE_AUTO_DONE:
2800        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE", __FUNCTION__);
2801        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2802        break;
2803    case IS_FLASH_STATE_AUTO_OFF:
2804        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear", __FUNCTION__);
2805        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2806        m_ctlInfo.flash.m_afFlashDoneFlg = false;
2807        m_ctlInfo.flash.m_flashEnableFlg = false;
2808        break;
2809    case IS_FLASH_STATE_CAPTURE:
2810        ALOGV("(%s): [Flash] IS_FLASH_CAPTURE", __FUNCTION__);
2811        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2812        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2813        shot_ext->request_scc = 0;
2814        shot_ext->request_scp = 0;
2815        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2816        break;
2817    case IS_FLASH_STATE_CAPTURE_WAIT:
2818        shot_ext->request_scc = 0;
2819        shot_ext->request_scp = 0;
2820        break;
2821    case IS_FLASH_STATE_CAPTURE_JPEG:
2822        ALOGV("(%s): [Flash] Flash Capture  (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2823        shot_ext->request_scc = 1;
2824        shot_ext->request_scp = 1;
2825        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
2826        break;
2827    case IS_FLASH_STATE_CAPTURE_END:
2828        ALOGV("(%s): [Flash] Flash Capture END", __FUNCTION__);
2829        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2830        shot_ext->request_scc = 0;
2831        shot_ext->request_scp = 0;
2832        m_ctlInfo.flash.m_flashEnableFlg = false;
2833        m_ctlInfo.flash.m_flashCnt = 0;
2834        m_ctlInfo.flash.m_afFlashDoneFlg= false;
2835        break;
2836    default:
2837        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2838    }
2839}
2840
2841void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2842{
2843    // Flash
2844    switch (m_ctlInfo.flash.m_flashCnt) {
2845    case IS_FLASH_STATE_AUTO_WAIT:
2846        if (m_ctlInfo.flash.m_flashDecisionResult) {
2847            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2848                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2849                ALOGV("(%s): [Flash] Lis :  AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2850            } else {
2851                ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2852            }
2853        } else {
2854            //If flash isn't activated at flash auto mode, skip flash auto control
2855            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2856            ALOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
2857        }
2858        break;
2859    }
2860}
2861
2862void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
2863{
2864    // Flash
2865    switch (m_ctlInfo.flash.m_flashCnt) {
2866    case IS_FLASH_STATE_ON_WAIT:
2867        if (shot_ext->shot.dm.flash.decision > 0) {
2868            // store decision result to skip capture sequenece
2869            ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
2870            if (shot_ext->shot.dm.flash.decision == 2)
2871                m_ctlInfo.flash.m_flashDecisionResult = false;
2872            else
2873                m_ctlInfo.flash.m_flashDecisionResult = true;
2874            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2875            if (m_ctlInfo.flash.m_afFlashDoneFlg)
2876                m_IsAfTriggerRequired = true;
2877        } else {
2878            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2879                ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
2880                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2881                m_ctlInfo.flash.m_flashDecisionResult = false;
2882            } else {
2883                m_ctlInfo.flash.m_flashTimeOut--;
2884            }
2885        }
2886        break;
2887    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2888        if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
2889            ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
2890            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
2891        } else {
2892            ALOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
2893        }
2894        break;
2895    case IS_FLASH_STATE_CAPTURE_WAIT:
2896        if (m_ctlInfo.flash.m_flashDecisionResult) {
2897            if (shot_ext->shot.dm.flash.firingStable) {
2898                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2899            } else {
2900                if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2901                    ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
2902                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2903                } else {
2904                    ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
2905                    m_ctlInfo.flash.m_flashTimeOut--;
2906                }
2907            }
2908        } else {
2909            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2910        }
2911        break;
2912    }
2913}
2914
2915void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
2916{
2917    if (0 == shot_ext->shot.ctl.aa.afRegions[0] && 0 == shot_ext->shot.ctl.aa.afRegions[1]
2918            && 0 == shot_ext->shot.ctl.aa.afRegions[2] && 0 == shot_ext->shot.ctl.aa.afRegions[3]) {
2919        ALOGV("(%s): AF region resetting", __FUNCTION__);
2920        lastAfRegion[0] = 0;
2921        lastAfRegion[1] = 0;
2922        lastAfRegion[2] = 0;
2923        lastAfRegion[3] = 0;
2924    } else {
2925        // clear region infos in case of CAF mode
2926        if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2927            shot_ext->shot.ctl.aa.afRegions[0] = lastAfRegion[0] = 0;
2928            shot_ext->shot.ctl.aa.afRegions[1] = lastAfRegion[1] = 0;
2929            shot_ext->shot.ctl.aa.afRegions[2] = lastAfRegion[2] = 0;
2930            shot_ext->shot.ctl.aa.afRegions[3] = lastAfRegion[3] = 0;
2931        } else if (!(lastAfRegion[0] == shot_ext->shot.ctl.aa.afRegions[0] && lastAfRegion[1] == shot_ext->shot.ctl.aa.afRegions[1]
2932                && lastAfRegion[2] == shot_ext->shot.ctl.aa.afRegions[2] && lastAfRegion[3] == shot_ext->shot.ctl.aa.afRegions[3])) {
2933            ALOGD("(%s): AF region changed : triggering (%d)", __FUNCTION__, m_afMode);
2934            shot_ext->shot.ctl.aa.afTrigger = 1;
2935            shot_ext->shot.ctl.aa.afMode = m_afMode;
2936            m_afState = HAL_AFSTATE_STARTED;
2937            lastAfRegion[0] = shot_ext->shot.ctl.aa.afRegions[0];
2938            lastAfRegion[1] = shot_ext->shot.ctl.aa.afRegions[1];
2939            lastAfRegion[2] = shot_ext->shot.ctl.aa.afRegions[2];
2940            lastAfRegion[3] = shot_ext->shot.ctl.aa.afRegions[3];
2941            m_IsAfTriggerRequired = false;
2942        }
2943    }
2944}
2945
2946void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
2947{
2948    if (m_afState == HAL_AFSTATE_SCANNING) {
2949        ALOGD("(%s): restarting trigger ", __FUNCTION__);
2950    } else if (!mode) {
2951        if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2952            ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2953        else
2954        m_afState = HAL_AFSTATE_STARTED;
2955    }
2956    ALOGD("### AF Triggering with mode (%d)", m_afMode);
2957    shot_ext->shot.ctl.aa.afTrigger = 1;
2958    shot_ext->shot.ctl.aa.afMode = m_afMode;
2959    m_IsAfTriggerRequired = false;
2960}
2961
2962void ExynosCameraHWInterface2::m_sceneModeFaceSetter(struct camera2_shot_ext * shot_ext, int mode)
2963{
2964    switch (mode) {
2965    case 0:
2966        // af face setting based on scene mode
2967        if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_FACE_PRIORITY) {
2968            if(m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2969                ALOGV("(%s): AA_AFMODE_CONTINUOUS_PICTURE_FACE", __FUNCTION__);
2970                m_afState = HAL_AFSTATE_STARTED;
2971                shot_ext->shot.ctl.aa.afTrigger = 1;
2972                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE;
2973            } else if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO) {
2974                ALOGV("(%s): AA_AFMODE_CONTINUOUS_VIDEO_FACE", __FUNCTION__);
2975                m_afState = HAL_AFSTATE_STARTED;
2976                shot_ext->shot.ctl.aa.afTrigger = 1;
2977                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_VIDEO_FACE;
2978
2979            }
2980        } else {
2981            if(m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2982                ALOGV("(%s): AA_AFMODE_CONTINUOUS_PICTURE", __FUNCTION__);
2983                m_afState = HAL_AFSTATE_STARTED;
2984                shot_ext->shot.ctl.aa.afTrigger = 1;
2985                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
2986            } else if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO) {
2987                ALOGV("(%s): AA_AFMODE_CONTINUOUS_VIDEO", __FUNCTION__);
2988                m_afState = HAL_AFSTATE_STARTED;
2989                shot_ext->shot.ctl.aa.afTrigger = 1;
2990                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_VIDEO;
2991
2992            }
2993        }
2994        break;
2995    case 1:
2996        // face af re-setting after single AF
2997        if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_FACE_PRIORITY) {
2998            ALOGV("(%s): Face af setting", __FUNCTION__);
2999            if(m_afMode == AA_AFMODE_CONTINUOUS_PICTURE)
3000                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE;
3001            else if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO)
3002                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_VIDEO_FACE;
3003        }
3004        break;
3005    default:
3006        break;
3007    }
3008}
3009
3010void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
3011{
3012    uint32_t        currentSignal = self->GetProcessingSignal();
3013    SensorThread *  selfThread      = ((SensorThread*)self);
3014    int index;
3015    int index_isp;
3016    status_t res;
3017    nsecs_t frameTime;
3018    int bayersOnSensor = 0, bayersOnIsp = 0;
3019    int j = 0;
3020    bool isCapture = false;
3021    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
3022
3023    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3024        CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3025
3026        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
3027        cam_int_streamoff(&(m_camera_info.sensor));
3028        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
3029
3030        m_camera_info.sensor.buffers = 0;
3031        ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
3032        cam_int_reqbufs(&(m_camera_info.sensor));
3033        ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
3034        m_camera_info.sensor.status = false;
3035
3036        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
3037        isp_int_streamoff(&(m_camera_info.isp));
3038        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
3039
3040        m_camera_info.isp.buffers = 0;
3041        ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
3042        cam_int_reqbufs(&(m_camera_info.isp));
3043        ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
3044
3045        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
3046
3047        m_requestManager->releaseSensorQ();
3048        m_requestManager->ResetEntry();
3049        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3050        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
3051        return;
3052    }
3053
3054    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
3055    {
3056        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
3057        int targetStreamIndex = 0, i=0;
3058        int matchedFrameCnt = -1, processingReqIndex;
3059        struct camera2_shot_ext *shot_ext;
3060        struct camera2_shot_ext *shot_ext_capture;
3061        bool triggered = false;
3062        int afMode;
3063
3064        /* dqbuf from sensor */
3065        ALOGV("Sensor DQbuf start");
3066        index = cam_int_dqbuf(&(m_camera_info.sensor));
3067        m_requestManager->pushSensorQ(index);
3068        ALOGV("Sensor DQbuf done(%d)", index);
3069        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3070
3071        if (m_nightCaptureCnt != 0) {
3072            matchedFrameCnt = m_nightCaptureFrameCnt;
3073        } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
3074            matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
3075            ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
3076        } else {
3077            matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
3078        }
3079
3080        if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
3081            matchedFrameCnt = m_vdisDupFrame;
3082        }
3083
3084        if (matchedFrameCnt != -1) {
3085            if (m_vdisBubbleCnt == 0) {
3086                frameTime = systemTime();
3087                m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
3088                m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
3089            }
3090
3091            if (m_afModeWaitingCnt != 0) {
3092                ALOGV("### Af Trigger pulled, waiting for mode change cnt(%d) ", m_afModeWaitingCnt);
3093                m_afModeWaitingCnt --;
3094                if (m_afModeWaitingCnt == 1) {
3095                    m_afModeWaitingCnt = 0;
3096                    OnAfTrigger(m_afPendingTriggerId);
3097                }
3098            }
3099            m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
3100            float zoomLeft, zoomTop, zoomWidth, zoomHeight;
3101            int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
3102
3103            m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
3104                           m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
3105                           &crop_x, &crop_y,
3106                           &crop_w, &crop_h,
3107                           0);
3108
3109            if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
3110                zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
3111                zoomHeight = zoomWidth *
3112                        m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
3113            } else {
3114                zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
3115                zoomWidth = zoomHeight *
3116                        m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
3117            }
3118            zoomLeft = (crop_w - zoomWidth) / 2;
3119            zoomTop = (crop_h - zoomHeight) / 2;
3120
3121            int32_t new_cropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
3122
3123            if (new_cropRegion[0] * 2 + new_cropRegion[2] > (int32_t)m_camera2->getSensorW())
3124                new_cropRegion[2]--;
3125            else if (new_cropRegion[0] * 2 + new_cropRegion[2] < (int32_t)m_camera2->getSensorW())
3126                new_cropRegion[2]++;
3127
3128            shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
3129            shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
3130            shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
3131            if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
3132                ALOGD("### AF Mode change(Mode %d) ", m_afMode);
3133                shot_ext->shot.ctl.aa.afMode = m_afMode;
3134                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3135                    ALOGD("### With Automatic triger for continuous modes");
3136                    m_afState = HAL_AFSTATE_STARTED;
3137                    shot_ext->shot.ctl.aa.afTrigger = 1;
3138                    triggered = true;
3139                }
3140                m_IsAfModeUpdateRequired = false;
3141                // support inifinity focus mode
3142                if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
3143                    shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
3144                    shot_ext->shot.ctl.aa.afTrigger = 1;
3145                    triggered = true;
3146                }
3147                if (m_afMode2 != NO_CHANGE) {
3148                    enum aa_afmode tempAfMode = m_afMode2;
3149                    m_afMode2 = NO_CHANGE;
3150                    SetAfMode(tempAfMode);
3151                }
3152            }
3153            else {
3154                shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
3155            }
3156            if (m_IsAfTriggerRequired) {
3157                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
3158                    // flash case
3159                    if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
3160                        if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3161                            // Flash is enabled and start AF
3162                            m_afTrigger(shot_ext, 1);
3163                        } else {
3164                            if (m_ctlInfo.af.m_afTriggerTimeOut == 0)
3165                                m_afTrigger(shot_ext, 0);
3166                            else
3167                                m_ctlInfo.af.m_afTriggerTimeOut--;
3168                        }
3169                    }
3170                } else {
3171                    // non-flash case
3172                    if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3173                        m_afTrigger(shot_ext, 0);
3174                    } else {
3175                        if (m_ctlInfo.af.m_afTriggerTimeOut == 0)
3176                            m_afTrigger(shot_ext, 0);
3177                        else
3178                            m_ctlInfo.af.m_afTriggerTimeOut--;
3179                    }
3180                }
3181            } else {
3182                shot_ext->shot.ctl.aa.afTrigger = 0;
3183            }
3184
3185            if (m_wideAspect) {
3186                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3187                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3188                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3189            } else {
3190                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3191            }
3192            if (triggered)
3193                shot_ext->shot.ctl.aa.afTrigger = 1;
3194
3195            // TODO : check collision with AFMode Update
3196            if (m_IsAfLockRequired) {
3197                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
3198                m_IsAfLockRequired = false;
3199            }
3200            ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
3201                index,
3202                shot_ext->shot.ctl.request.frameCount,
3203                shot_ext->request_scp,
3204                shot_ext->request_scc,
3205                shot_ext->dis_bypass, sizeof(camera2_shot));
3206
3207            // update AF region
3208            m_updateAfRegion(shot_ext);
3209
3210            if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
3211                    && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
3212                shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
3213            if (m_nightCaptureCnt == 0) {
3214                if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
3215                        && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3216                    shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3217                    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3218                    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3219                    m_nightCaptureCnt = 4;
3220                    m_nightCaptureFrameCnt = matchedFrameCnt;
3221                    shot_ext->request_scc = 0;
3222                }
3223            }
3224            else if (m_nightCaptureCnt == 1) {
3225                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3226                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3227                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3228                m_nightCaptureCnt--;
3229                m_nightCaptureFrameCnt = 0;
3230                shot_ext->request_scc = 1;
3231            }
3232            else if (m_nightCaptureCnt == 2) {
3233                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3234                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3235                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3236                m_nightCaptureCnt--;
3237                shot_ext->request_scc = 0;
3238            }
3239            else if (m_nightCaptureCnt == 3) {
3240                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3241                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3242                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3243                m_nightCaptureCnt--;
3244                shot_ext->request_scc = 0;
3245            }
3246            else if (m_nightCaptureCnt == 4) {
3247                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3248                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3249                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3250                m_nightCaptureCnt--;
3251                shot_ext->request_scc = 0;
3252            }
3253
3254            // Flash mode
3255            // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence
3256            if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
3257                    && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3258                    && (m_cameraId == 0)) {
3259                if (!m_ctlInfo.flash.m_flashDecisionResult) {
3260                    m_ctlInfo.flash.m_flashEnableFlg = false;
3261                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3262                    m_ctlInfo.flash.m_flashCnt = 0;
3263                } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) || (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3264                    ALOGE("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3265                    shot_ext->request_scc = 0;
3266                    m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3267                    m_ctlInfo.flash.m_flashEnableFlg = true;
3268                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3269                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3270                } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
3271                    ALOGE("(%s): [Flash] Flash capture Error!!!!!!", __FUNCTION__);
3272                }
3273            } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
3274                m_ctlInfo.flash.m_flashDecisionResult = false;
3275            }
3276
3277            // TODO : set torch mode for video recording. need to find proper position.
3278            // m_wideAspect is will be changed to recording hint
3279            if ((shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_SINGLE) && m_wideAspect) {
3280                shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_TORCH;
3281                shot_ext->shot.ctl.flash.firingPower = 10;
3282                m_ctlInfo.flash.m_flashTorchMode = true;
3283            } else if (m_wideAspect){
3284                shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3285                shot_ext->shot.ctl.flash.firingPower = 0;
3286                m_ctlInfo.flash.m_flashTorchMode = false;
3287            } else {
3288                if (m_ctlInfo.flash.m_flashTorchMode) {
3289                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3290                    shot_ext->shot.ctl.flash.firingPower = 0;
3291                    m_ctlInfo.flash.m_flashTorchMode = false;
3292                } else {
3293                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3294                }
3295            }
3296
3297            if (m_ctlInfo.flash.m_flashEnableFlg) {
3298                m_preCaptureListenerSensor(shot_ext);
3299                m_preCaptureSetter(shot_ext);
3300            }
3301
3302            if (shot_ext->isReprocessing) {
3303                ALOGE("(%s): Reprocess request ", __FUNCTION__);
3304                m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3305                shot_ext->request_scp = 0;
3306                shot_ext->request_scc = 0;
3307                m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3308                memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
3309                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3310            }
3311
3312            // face af mode setting in case of face priority scene mode
3313            if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
3314                ALOGV("(%s): Scene mode changed", __FUNCTION__);
3315                m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3316                m_sceneModeFaceSetter(shot_ext, 0);
3317            } else if (triggered) {
3318                // re-setting after single AF
3319                m_sceneModeFaceSetter(shot_ext, 1);
3320            }
3321
3322            ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3323            (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3324            (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3325            (int)(shot_ext->shot.ctl.aa.afTrigger));
3326
3327            if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
3328                shot_ext->dis_bypass = 1;
3329                shot_ext->request_scp = 0;
3330                shot_ext->request_scc = 0;
3331                m_vdisBubbleCnt--;
3332                matchedFrameCnt = -1;
3333            } else {
3334                m_vdisDupFrame = matchedFrameCnt;
3335            }
3336            if (m_scpForceSuspended)
3337                shot_ext->request_scc = 0;
3338
3339            uint32_t current_scp = shot_ext->request_scp;
3340            uint32_t current_scc = shot_ext->request_scc;
3341
3342            if (shot_ext->shot.dm.request.frameCount == 0) {
3343                CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3344            }
3345
3346            cam_int_qbuf(&(m_camera_info.isp), index);
3347
3348            ALOGV("### isp DQBUF start");
3349            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3350
3351            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3352
3353            if (m_ctlInfo.flash.m_flashEnableFlg)
3354                m_preCaptureListenerISP(shot_ext);
3355
3356            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) shot_size(%d)",
3357                index,
3358                shot_ext->shot.ctl.request.frameCount,
3359                shot_ext->request_scp,
3360                shot_ext->request_scc,
3361                shot_ext->dis_bypass, sizeof(camera2_shot));
3362
3363            ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3364                (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3365                (int)(shot_ext->shot.dm.aa.awbMode),
3366                (int)(shot_ext->shot.dm.aa.afMode));
3367
3368#ifndef ENABLE_FRAME_SYNC
3369            m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3370#endif
3371
3372            if (current_scc != shot_ext->request_scc) {
3373                ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
3374                                __FUNCTION__, current_scc, shot_ext->request_scc);
3375                m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3376            }
3377            if (shot_ext->request_scc) {
3378                ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
3379                memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
3380                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3381            }
3382            if (current_scp != shot_ext->request_scp) {
3383                ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
3384                                __FUNCTION__, current_scp, shot_ext->request_scp);
3385                m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3386            }
3387            if (shot_ext->request_scp) {
3388                ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
3389                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3390            }
3391
3392            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3393               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3394            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3395                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3396                m_scp_closed = true;
3397            }
3398            else
3399                m_scp_closed = false;
3400
3401            if (!shot_ext->fd_bypass) {
3402                /* FD orientation axis transformation */
3403                for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3404                    if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3405                        shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
3406                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][0])
3407                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3408                    if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3409                        shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
3410                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][1])
3411                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3412                    if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3413                        shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
3414                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][2])
3415                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3416                    if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3417                        shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
3418                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][3])
3419                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3420                }
3421            }
3422            // At flash off mode, capture can be done as zsl capture
3423            if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON)
3424                shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
3425
3426            // At scene mode face priority
3427            if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_FACE_PRIORITY) {
3428                if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
3429                    shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE;
3430                else if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_VIDEO_FACE)
3431                    shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE;
3432            }
3433
3434            if (m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3435                m_requestManager->ApplyDynamicMetadata(shot_ext);
3436            }
3437            OnAfNotification(shot_ext->shot.dm.aa.afState);
3438            OnPrecaptureMeteringNotificationISP();
3439        }   else {
3440            shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
3441            shot_ext->request_sensor = 1;
3442            shot_ext->dis_bypass = 1;
3443            shot_ext->dnr_bypass = 1;
3444            shot_ext->fd_bypass = 1;
3445            shot_ext->drc_bypass = 1;
3446            shot_ext->request_scc = 0;
3447            shot_ext->request_scp = 0;
3448            if (m_wideAspect) {
3449                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3450                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3451                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3452            } else {
3453                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3454            }
3455            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3456            ALOGV("### isp QBUF start (bubble)");
3457            cam_int_qbuf(&(m_camera_info.isp), index);
3458            ALOGV("### isp DQBUF start (bubble)");
3459            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3460            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3461            if (m_ctlInfo.flash.m_flashEnableFlg)
3462                m_preCaptureListenerISP(shot_ext);
3463            OnAfNotification(shot_ext->shot.dm.aa.afState);
3464            OnPrecaptureMeteringNotificationISP();
3465        }
3466
3467        index = m_requestManager->popSensorQ();
3468        if(index < 0){
3469            ALOGE("sensorQ is empty");
3470            return;
3471        }
3472
3473        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]), &afMode);
3474        if (processingReqIndex != -1)
3475            SetAfMode((enum aa_afmode)afMode);
3476
3477
3478        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3479        if (m_scp_closing || m_scp_closed) {
3480            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3481            shot_ext->request_scc = 0;
3482            shot_ext->request_scp = 0;
3483            shot_ext->request_sensor = 0;
3484        }
3485        cam_int_qbuf(&(m_camera_info.sensor), index);
3486        ALOGV("Sensor Qbuf done(%d)", index);
3487
3488        if (!m_scp_closing
3489            && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3490            ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)",
3491                                    matchedFrameCnt, processingReqIndex);
3492            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3493        }
3494    }
3495    return;
3496}
3497
3498void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3499{
3500    uint32_t                currentSignal   = self->GetProcessingSignal();
3501    StreamThread *          selfThread      = ((StreamThread*)self);
3502    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3503    node_info_t             *currentNode    = selfStreamParms->node;
3504    substream_parameters_t  *subParms;
3505    buffer_handle_t * buf = NULL;
3506    status_t res;
3507    void *virtAddr[3];
3508    int i, j;
3509    int index;
3510    nsecs_t timestamp;
3511
3512    if (!(selfThread->m_isBufferInit))
3513    {
3514        for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3515            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3516            if (res != NO_ERROR || buf == NULL) {
3517                ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3518                return;
3519            }
3520            ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3521               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3522
3523            index = selfThread->findBufferIndex(buf);
3524            if (index == -1) {
3525                ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3526            }
3527            else {
3528                ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3529                    __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3530                if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3531                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3532                else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3533                    selfStreamParms->svcBufStatus[index] = ON_HAL;
3534                else {
3535                    ALOGV("DBG(%s): buffer status abnormal (%d) "
3536                        , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3537                }
3538                selfStreamParms->numSvcBufsInHal++;
3539            }
3540            selfStreamParms->bufIndex = 0;
3541        }
3542        selfThread->m_isBufferInit = true;
3543    }
3544    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3545        if (selfThread->m_attachedSubStreams[i].streamId == -1)
3546            continue;
3547
3548        subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3549        if (subParms->type && subParms->needBufferInit) {
3550            ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3551                __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3552            int checkingIndex = 0;
3553            bool found = false;
3554            for ( i = 0 ; i < subParms->numSvcBuffers; i++) {
3555                res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3556                if (res != NO_ERROR || buf == NULL) {
3557                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3558                    return;
3559                }
3560                subParms->numSvcBufsInHal++;
3561                ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3562                   subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3563
3564                if (m_grallocHal->lock(m_grallocHal, *buf,
3565                       subParms->usage, 0, 0,
3566                       subParms->width, subParms->height, virtAddr) != 0) {
3567                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3568                }
3569                else {
3570                      ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3571                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3572                }
3573                found = false;
3574                for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3575                    if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3576                        found = true;
3577                        break;
3578                    }
3579                }
3580                ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3581                if (!found) break;
3582
3583                index = checkingIndex;
3584
3585                if (index == -1) {
3586                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3587                }
3588                else {
3589                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3590                        __FUNCTION__, index, subParms->svcBufStatus[index]);
3591                    if (subParms->svcBufStatus[index]== ON_SERVICE)
3592                        subParms->svcBufStatus[index] = ON_HAL;
3593                    else {
3594                        ALOGV("DBG(%s): buffer status abnormal (%d) "
3595                            , __FUNCTION__, subParms->svcBufStatus[index]);
3596                    }
3597                    if (*buf != subParms->svcBufHandle[index])
3598                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3599                    else
3600                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3601                }
3602                subParms->svcBufIndex = 0;
3603            }
3604            if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3605                m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3606                m_resizeBuf.size.extS[1] = 0;
3607                m_resizeBuf.size.extS[2] = 0;
3608
3609                if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3610                    ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3611                }
3612            }
3613            if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3614                m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3615                subParms->height, &m_previewCbBuf);
3616
3617                if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3618                    ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3619                }
3620            }
3621            subParms->needBufferInit= false;
3622        }
3623    }
3624}
3625
3626void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3627{
3628    StreamThread *          selfThread      = ((StreamThread*)self);
3629    ALOGV("DEBUG(%s): ", __FUNCTION__ );
3630    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3631    selfThread->m_isBufferInit = false;
3632    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3633        selfThread->m_attachedSubStreams[i].streamId    = -1;
3634        selfThread->m_attachedSubStreams[i].priority    = 0;
3635    }
3636    return;
3637}
3638
3639int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3640    int stream_id, nsecs_t frameTimeStamp)
3641{
3642    substream_parameters_t  *subParms = &m_subStreams[stream_id];
3643
3644    switch (stream_id) {
3645
3646    case STREAM_ID_JPEG:
3647        return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3648
3649    case STREAM_ID_RECORD:
3650        return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3651
3652    case STREAM_ID_PRVCB:
3653        return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3654
3655    default:
3656        return 0;
3657    }
3658}
3659void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3660{
3661    uint32_t                currentSignal   = self->GetProcessingSignal();
3662    StreamThread *          selfThread      = ((StreamThread*)self);
3663    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3664    node_info_t             *currentNode    = selfStreamParms->node;
3665    int i = 0;
3666    nsecs_t frameTimeStamp;
3667
3668    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3669        CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3670
3671        if (selfThread->m_isBufferInit) {
3672            if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
3673                ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3674                    selfThread->m_index, currentNode->fd);
3675                if (cam_int_streamoff(currentNode) < 0 ) {
3676                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3677                }
3678                ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3679                        selfThread->m_index, currentNode->fd);
3680                currentNode->buffers = 0;
3681                cam_int_reqbufs(currentNode);
3682                ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3683                        selfThread->m_index, currentNode->fd);
3684            }
3685        }
3686#ifdef ENABLE_FRAME_SYNC
3687        // free metabuffers
3688        for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3689            if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3690                freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3691                selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3692                selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3693            }
3694#endif
3695        selfThread->m_isBufferInit = false;
3696        selfThread->m_releasing = false;
3697        selfThread->m_activated = false;
3698        ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3699        return;
3700    }
3701    if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3702        status_t    res;
3703        buffer_handle_t * buf = NULL;
3704        bool found = false;
3705        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3706            __FUNCTION__, selfThread->m_index);
3707        res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3708        if (res != NO_ERROR || buf == NULL) {
3709            ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3710            return;
3711        }
3712        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3713        int checkingIndex = 0;
3714        for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3715            if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3716                found = true;
3717                break;
3718            }
3719        }
3720        ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3721            __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3722
3723        if (!found) return;
3724
3725        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3726            if (selfThread->m_attachedSubStreams[i].streamId == -1)
3727                continue;
3728
3729#ifdef ENABLE_FRAME_SYNC
3730            frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
3731            m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3732#else
3733            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3734#endif
3735            if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3736                m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3737                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3738        }
3739
3740        res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3741        if (res != NO_ERROR) {
3742            ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3743            return;
3744        }
3745        ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
3746            __FUNCTION__,selfThread->m_index);
3747
3748        return;
3749    }
3750    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3751        buffer_handle_t * buf = NULL;
3752        status_t res = 0;
3753        int i, j;
3754        int index;
3755        nsecs_t timestamp;
3756#ifdef ENABLE_FRAME_SYNC
3757        camera2_stream *frame;
3758        uint8_t currentOutputStreams;
3759        bool directOutputEnabled = false;
3760#endif
3761        int numOfUndqbuf = 0;
3762
3763        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3764
3765        m_streamBufferInit(self);
3766
3767        do {
3768            ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3769                selfThread->m_index, selfThread->streamType);
3770
3771#ifdef ENABLE_FRAME_SYNC
3772            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3773            frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3774            frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3775            currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
3776            ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
3777            if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
3778                 ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
3779                directOutputEnabled = true;
3780            }
3781            if (!directOutputEnabled) {
3782                if (!m_nightCaptureFrameCnt)
3783                    m_requestManager->NotifyStreamOutput(frame->rcount);
3784            }
3785#else
3786            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3787            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex())
3788#endif
3789            ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
3790                selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3791
3792            if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
3793                ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3794                       __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3795            selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3796
3797            for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3798                if (selfThread->m_attachedSubStreams[i].streamId == -1)
3799                    continue;
3800#ifdef ENABLE_FRAME_SYNC
3801                if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3802                    m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3803                        selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3804                }
3805#else
3806                if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3807                    m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3808                        selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3809                }
3810#endif
3811            }
3812
3813            if (m_requestManager->GetSkipCnt() <= 0) {
3814#ifdef ENABLE_FRAME_SYNC
3815                if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3816                    ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3817                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3818                            frameTimeStamp,
3819                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3820                }
3821                else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3822                    ALOGV("** SCC output (frameCnt:%d), last(%d)", frame->rcount);
3823                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3824                                frameTimeStamp,
3825                                &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3826                }
3827                else {
3828                    res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3829                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3830                    ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3831                }
3832#else
3833                if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3834                    ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3835                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3836                            frameTimeStamp,
3837                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3838                }
3839                else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3840                    ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex());
3841                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3842                                frameTimeStamp,
3843                                &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3844                }
3845#endif
3846                ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3847            }
3848            else {
3849                res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3850                        &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3851                ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3852            }
3853#ifdef ENABLE_FRAME_SYNC
3854            if (directOutputEnabled) {
3855                if (!m_nightCaptureFrameCnt)
3856                     m_requestManager->NotifyStreamOutput(frame->rcount);
3857            }
3858#endif
3859            if (res == 0) {
3860                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3861                selfStreamParms->numSvcBufsInHal--;
3862            }
3863            else {
3864                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3865            }
3866
3867        }
3868        while(0);
3869
3870        while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
3871                    < selfStreamParms->minUndequedBuffer) {
3872            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3873            if (res != NO_ERROR || buf == NULL) {
3874                ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index,  res, selfStreamParms->numSvcBufsInHal);
3875                break;
3876            }
3877            selfStreamParms->numSvcBufsInHal++;
3878            ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3879                selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3880               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3881            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3882
3883            bool found = false;
3884            int checkingIndex = 0;
3885            for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3886                if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3887                    found = true;
3888                    break;
3889                }
3890            }
3891            if (!found) break;
3892            selfStreamParms->bufIndex = checkingIndex;
3893            if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
3894                uint32_t    plane_index = 0;
3895                ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
3896                struct v4l2_buffer v4l2_buf;
3897                struct v4l2_plane  planes[VIDEO_MAX_PLANES];
3898
3899                v4l2_buf.m.planes   = planes;
3900                v4l2_buf.type       = currentNode->type;
3901                v4l2_buf.memory     = currentNode->memory;
3902                v4l2_buf.index      = selfStreamParms->bufIndex;
3903                v4l2_buf.length     = currentNode->planes;
3904
3905                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
3906                v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
3907                v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
3908                for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
3909                    v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
3910                }
3911#ifdef ENABLE_FRAME_SYNC
3912                /* add plane for metadata*/
3913                v4l2_buf.length += selfStreamParms->metaPlanes;
3914                v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
3915                v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
3916#endif
3917                if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
3918                    ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
3919                        __FUNCTION__, selfThread->m_index);
3920                    return;
3921                }
3922                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
3923                ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
3924                    __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
3925            }
3926        }
3927
3928        ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3929    }
3930    return;
3931}
3932
3933void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
3934{
3935    uint32_t                currentSignal   = self->GetProcessingSignal();
3936    StreamThread *          selfThread      = ((StreamThread*)self);
3937    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3938    node_info_t             *currentNode    = selfStreamParms->node;
3939
3940
3941    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3942        CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3943
3944        if (selfThread->m_isBufferInit) {
3945            if (currentNode->fd == m_camera_info.capture.fd) {
3946                if (m_camera_info.capture.status == true) {
3947                    ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
3948                    selfThread->m_index, currentNode->fd);
3949                    if (cam_int_streamoff(currentNode) < 0 ){
3950                        ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3951                    } else {
3952                        m_camera_info.capture.status = false;
3953                    }
3954                }
3955            } else {
3956                ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
3957                selfThread->m_index, currentNode->fd);
3958                if (cam_int_streamoff(currentNode) < 0 ){
3959                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3960                }
3961            }
3962            ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
3963            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
3964                    selfThread->m_index, currentNode->fd);
3965            currentNode->buffers = 0;
3966            cam_int_reqbufs(currentNode);
3967            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
3968                    selfThread->m_index, currentNode->fd);
3969        }
3970
3971        selfThread->m_isBufferInit = false;
3972        selfThread->m_releasing = false;
3973        selfThread->m_activated = false;
3974        ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3975        return;
3976    }
3977
3978    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3979#ifdef ENABLE_FRAME_SYNC
3980        camera2_stream *frame;
3981        uint8_t currentOutputStreams;
3982#endif
3983        nsecs_t frameTimeStamp;
3984
3985        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
3986            __FUNCTION__,selfThread->m_index);
3987
3988        m_streamBufferInit(self);
3989
3990        ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
3991        selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3992        ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
3993            selfThread->m_index, selfStreamParms->bufIndex);
3994
3995#ifdef ENABLE_FRAME_SYNC
3996        frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
3997        frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3998        currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
3999        ALOGV("frame count(SCC) : %d outputStream(%x)",  frame->rcount, currentOutputStreams);
4000#else
4001        frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
4002#endif
4003
4004        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
4005            if (selfThread->m_attachedSubStreams[i].streamId == -1)
4006                continue;
4007#ifdef ENABLE_FRAME_SYNC
4008            if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4009                m_requestManager->NotifyStreamOutput(frame->rcount);
4010                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4011                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4012            }
4013#else
4014            if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4015                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4016                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4017            }
4018#endif
4019        }
4020        cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
4021        ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
4022
4023
4024
4025        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
4026            __FUNCTION__, selfThread->m_index);
4027    }
4028
4029
4030    return;
4031}
4032
4033void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
4034{
4035    uint32_t                currentSignal   = self->GetProcessingSignal();
4036    StreamThread *          selfThread      = ((StreamThread*)self);
4037    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4038    node_info_t             *currentNode    = selfStreamParms->node;
4039
4040    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
4041
4042    // Do something in Child thread handler
4043    // Should change function to class that inherited StreamThread class to support dynamic stream allocation
4044    if (selfThread->streamType == STREAM_TYPE_DIRECT) {
4045        m_streamFunc_direct(self);
4046    } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
4047        m_streamFunc_indirect(self);
4048    }
4049
4050    return;
4051}
4052int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4053{
4054    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4055    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
4056    status_t    res;
4057    ExynosRect jpegRect;
4058    bool found = false;
4059    int srcW, srcH, srcCropX, srcCropY;
4060    int pictureW, pictureH, pictureFramesize = 0;
4061    int pictureFormat;
4062    int cropX, cropY, cropW, cropH = 0;
4063    ExynosBuffer resizeBufInfo;
4064    ExynosRect   m_jpegPictureRect;
4065    buffer_handle_t * buf = NULL;
4066
4067    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4068    for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4069        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4070            found = true;
4071            break;
4072        }
4073        subParms->svcBufIndex++;
4074        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4075            subParms->svcBufIndex = 0;
4076    }
4077    if (!found) {
4078        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4079        subParms->svcBufIndex++;
4080        return 1;
4081    }
4082
4083    m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
4084                    m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
4085                    &srcCropX, &srcCropY,
4086                    &srcW, &srcH,
4087                    0);
4088
4089    m_jpegPictureRect.w = subParms->width;
4090    m_jpegPictureRect.h = subParms->height;
4091
4092     ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d",
4093              __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
4094                   m_jpegPictureRect.w, m_jpegPictureRect.h);
4095
4096    m_getRatioSize(srcW, srcH,
4097                   m_jpegPictureRect.w, m_jpegPictureRect.h,
4098                   &cropX, &cropY,
4099                   &pictureW, &pictureH,
4100                   0);
4101    pictureFormat = V4L2_PIX_FMT_YUYV;
4102    pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
4103
4104    if (m_exynosPictureCSC) {
4105        float zoom_w = 0, zoom_h = 0;
4106        if (m_zoomRatio == 0)
4107            m_zoomRatio = 1;
4108
4109        if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
4110            zoom_w =  pictureW / m_zoomRatio;
4111            zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
4112        } else {
4113            zoom_h = pictureH / m_zoomRatio;
4114            zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
4115        }
4116        cropX = (srcW - zoom_w) / 2;
4117        cropY = (srcH - zoom_h) / 2;
4118        cropW = zoom_w;
4119        cropH = zoom_h;
4120
4121        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4122              __FUNCTION__, cropX, cropY, cropW, cropH);
4123
4124        csc_set_src_format(m_exynosPictureCSC,
4125                           ALIGN(srcW, 16), ALIGN(srcH, 16),
4126                           cropX, cropY, cropW, cropH,
4127                           V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
4128                           0);
4129
4130        csc_set_dst_format(m_exynosPictureCSC,
4131                           m_jpegPictureRect.w, m_jpegPictureRect.h,
4132                           0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
4133                           V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
4134                           0);
4135        for (int i = 0 ; i < 3 ; i++)
4136            ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ",
4137                __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
4138        csc_set_src_buffer(m_exynosPictureCSC,
4139                           (void **)&srcImageBuf->fd.fd);
4140
4141        csc_set_dst_buffer(m_exynosPictureCSC,
4142                           (void **)&m_resizeBuf.fd.fd);
4143        for (int i = 0 ; i < 3 ; i++)
4144            ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
4145                __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
4146
4147        if (csc_convert(m_exynosPictureCSC) != 0)
4148            ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
4149
4150    }
4151    else {
4152        ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
4153    }
4154
4155    resizeBufInfo = m_resizeBuf;
4156
4157    m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
4158
4159    for (int i = 1; i < 3; i++) {
4160        if (m_resizeBuf.size.extS[i] != 0)
4161            m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
4162
4163        ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
4164    }
4165
4166    jpegRect.w = m_jpegPictureRect.w;
4167    jpegRect.h = m_jpegPictureRect.h;
4168    jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
4169
4170    for (int j = 0 ; j < 3 ; j++)
4171        ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
4172            __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
4173            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
4174            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
4175
4176    if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false)
4177        ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
4178
4179    m_resizeBuf = resizeBufInfo;
4180
4181    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4182
4183    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4184            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4185    if (res == 0) {
4186        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4187        subParms->numSvcBufsInHal--;
4188    }
4189    else {
4190        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4191    }
4192
4193    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4194    {
4195        bool found = false;
4196        int checkingIndex = 0;
4197
4198        ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4199
4200        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4201        if (res != NO_ERROR || buf == NULL) {
4202            ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4203            break;
4204        }
4205        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4206        subParms->numSvcBufsInHal ++;
4207        ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4208           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4209
4210
4211        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4212            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4213                found = true;
4214                break;
4215            }
4216        }
4217        ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found);
4218
4219        if (!found) {
4220             break;
4221        }
4222
4223        subParms->svcBufIndex = checkingIndex;
4224        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4225            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4226        }
4227        else {
4228            ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4229                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4230        }
4231    }
4232    return 0;
4233}
4234
4235int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4236{
4237    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4238    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
4239    status_t    res;
4240    ExynosRect jpegRect;
4241    bool found = false;
4242    int cropX, cropY, cropW, cropH = 0;
4243    buffer_handle_t * buf = NULL;
4244
4245    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4246    for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4247        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4248            found = true;
4249            break;
4250        }
4251        subParms->svcBufIndex++;
4252        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4253            subParms->svcBufIndex = 0;
4254    }
4255    if (!found) {
4256        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4257        subParms->svcBufIndex++;
4258        return 1;
4259    }
4260
4261    if (m_exynosVideoCSC) {
4262        int videoW = subParms->width, videoH = subParms->height;
4263        int cropX, cropY, cropW, cropH = 0;
4264        int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4265        m_getRatioSize(previewW, previewH,
4266                       videoW, videoH,
4267                       &cropX, &cropY,
4268                       &cropW, &cropH,
4269                       0);
4270
4271        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4272                 __FUNCTION__, cropX, cropY, cropW, cropH);
4273
4274        csc_set_src_format(m_exynosVideoCSC,
4275                           previewW, previewH,
4276                           cropX, cropY, cropW, cropH,
4277                           selfStreamParms->format,
4278                           0);
4279
4280        csc_set_dst_format(m_exynosVideoCSC,
4281                           videoW, videoH,
4282                           0, 0, videoW, videoH,
4283                           subParms->format,
4284                           1);
4285
4286        csc_set_src_buffer(m_exynosVideoCSC,
4287                        (void **)&srcImageBuf->fd.fd);
4288
4289        csc_set_dst_buffer(m_exynosVideoCSC,
4290            (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
4291
4292        if (csc_convert(m_exynosVideoCSC) != 0) {
4293            ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
4294        }
4295        else {
4296            ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
4297        }
4298    }
4299    else {
4300        ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4301    }
4302
4303    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4304
4305    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4306            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4307    if (res == 0) {
4308        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4309        subParms->numSvcBufsInHal--;
4310    }
4311    else {
4312        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4313    }
4314
4315    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4316    {
4317        bool found = false;
4318        int checkingIndex = 0;
4319
4320        ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4321
4322        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4323        if (res != NO_ERROR || buf == NULL) {
4324            ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4325            break;
4326        }
4327        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4328        subParms->numSvcBufsInHal ++;
4329        ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4330           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4331
4332        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4333            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4334                found = true;
4335                break;
4336            }
4337        }
4338        ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4339
4340        if (!found) {
4341             break;
4342        }
4343
4344        subParms->svcBufIndex = checkingIndex;
4345        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4346            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4347        }
4348        else {
4349            ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4350                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4351        }
4352    }
4353    return 0;
4354}
4355
4356int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4357{
4358    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4359    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
4360    status_t    res;
4361    bool found = false;
4362    int cropX, cropY, cropW, cropH = 0;
4363    buffer_handle_t * buf = NULL;
4364
4365    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4366    for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4367        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4368            found = true;
4369            break;
4370        }
4371        subParms->svcBufIndex++;
4372        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4373            subParms->svcBufIndex = 0;
4374    }
4375    if (!found) {
4376        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4377        subParms->svcBufIndex++;
4378        return 1;
4379    }
4380
4381    if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4382        if (m_exynosVideoCSC) {
4383            int previewCbW = subParms->width, previewCbH = subParms->height;
4384            int cropX, cropY, cropW, cropH = 0;
4385            int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4386            m_getRatioSize(previewW, previewH,
4387                           previewCbW, previewCbH,
4388                           &cropX, &cropY,
4389                           &cropW, &cropH,
4390                           0);
4391
4392            ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4393                     __FUNCTION__, cropX, cropY, cropW, cropH);
4394            csc_set_src_format(m_exynosVideoCSC,
4395                               previewW, previewH,
4396                               cropX, cropY, cropW, cropH,
4397                               selfStreamParms->format,
4398                               0);
4399
4400            csc_set_dst_format(m_exynosVideoCSC,
4401                               previewCbW, previewCbH,
4402                               0, 0, previewCbW, previewCbH,
4403                               subParms->internalFormat,
4404                               1);
4405
4406            csc_set_src_buffer(m_exynosVideoCSC,
4407                        (void **)&srcImageBuf->fd.fd);
4408
4409            csc_set_dst_buffer(m_exynosVideoCSC,
4410                (void **)(&(m_previewCbBuf.fd.fd)));
4411
4412            if (csc_convert(m_exynosVideoCSC) != 0) {
4413                ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4414            }
4415            else {
4416                ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4417            }
4418            if (previewCbW == ALIGN(previewCbW, 16)) {
4419                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4420                    m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4421                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4422                    m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4423            }
4424            else {
4425                // TODO : copy line by line ?
4426            }
4427        }
4428        else {
4429            ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4430        }
4431    }
4432    else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
4433        int previewCbW = subParms->width, previewCbH = subParms->height;
4434        int stride = ALIGN(previewCbW, 16);
4435        int c_stride = ALIGN(stride / 2, 16);
4436        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4437            srcImageBuf->virt.extP[0], stride * previewCbH);
4438        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4439            srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4440        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4441            srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4442    }
4443    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4444
4445    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4446            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4447    if (res == 0) {
4448        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4449        subParms->numSvcBufsInHal--;
4450    }
4451    else {
4452        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4453    }
4454
4455    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4456    {
4457        bool found = false;
4458        int checkingIndex = 0;
4459
4460        ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4461
4462        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4463        if (res != NO_ERROR || buf == NULL) {
4464            ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4465            break;
4466        }
4467        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4468        subParms->numSvcBufsInHal ++;
4469        ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4470           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4471
4472
4473        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4474            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4475                found = true;
4476                break;
4477            }
4478        }
4479        ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4480
4481        if (!found) {
4482             break;
4483        }
4484
4485        subParms->svcBufIndex = checkingIndex;
4486        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4487            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4488        }
4489        else {
4490            ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4491                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4492        }
4493    }
4494    return 0;
4495}
4496
4497bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4498{
4499    int sizeOfSupportList;
4500
4501    //REAR Camera
4502    if(this->getCameraId() == 0) {
4503        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
4504
4505        for(int i = 0; i < sizeOfSupportList; i++) {
4506            if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4507                return true;
4508        }
4509
4510    }
4511    else {
4512        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
4513
4514        for(int i = 0; i < sizeOfSupportList; i++) {
4515            if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4516                return true;
4517        }
4518    }
4519
4520    return false;
4521}
4522bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4523                            ExynosBuffer *jpegBuf,
4524                            ExynosRect *rect)
4525{
4526    unsigned char *addr;
4527
4528    ExynosJpegEncoderForCamera jpegEnc;
4529    bool ret = false;
4530    int res = 0;
4531
4532    unsigned int *yuvSize = yuvBuf->size.extS;
4533
4534    if (jpegEnc.create()) {
4535        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4536        goto jpeg_encode_done;
4537    }
4538
4539    if (jpegEnc.setQuality(100)) {
4540        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4541        goto jpeg_encode_done;
4542    }
4543
4544    if (jpegEnc.setSize(rect->w, rect->h)) {
4545        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4546        goto jpeg_encode_done;
4547    }
4548    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4549
4550    if (jpegEnc.setColorFormat(rect->colorFormat)) {
4551        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4552        goto jpeg_encode_done;
4553    }
4554
4555    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4556        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4557        goto jpeg_encode_done;
4558    }
4559
4560    if((m_jpegMetadata.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.ctl.jpeg.thumbnailSize[1] != 0)) {
4561        mExifInfo.enableThumb = true;
4562        if(!m_checkThumbnailSize(m_jpegMetadata.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.ctl.jpeg.thumbnailSize[1])) {
4563            // in the case of unsupported parameter, disable thumbnail
4564            mExifInfo.enableThumb = false;
4565        } else {
4566            m_thumbNailW = m_jpegMetadata.ctl.jpeg.thumbnailSize[0];
4567            m_thumbNailH = m_jpegMetadata.ctl.jpeg.thumbnailSize[1];
4568        }
4569
4570        ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4571
4572    } else {
4573        mExifInfo.enableThumb = false;
4574    }
4575
4576    if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4577        ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH);
4578        goto jpeg_encode_done;
4579    }
4580
4581    ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW);
4582    if (jpegEnc.setThumbnailQuality(50)) {
4583        ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4584        goto jpeg_encode_done;
4585    }
4586
4587    m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4588    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4589    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4590        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4591        goto jpeg_encode_done;
4592    }
4593    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4594        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4595        goto jpeg_encode_done;
4596    }
4597
4598    if (jpegEnc.updateConfig()) {
4599        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4600        goto jpeg_encode_done;
4601    }
4602
4603    if (res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) {
4604        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4605        goto jpeg_encode_done;
4606    }
4607
4608    ret = true;
4609
4610jpeg_encode_done:
4611
4612    if (jpegEnc.flagCreate() == true)
4613        jpegEnc.destroy();
4614
4615    return ret;
4616}
4617
4618void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4619{
4620    m_ctlInfo.flash.m_precaptureTriggerId = id;
4621    m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
4622    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4623        // flash is required
4624        switch (m_ctlInfo.flash.m_flashCnt) {
4625        case IS_FLASH_STATE_AUTO_DONE:
4626        case IS_FLASH_STATE_AUTO_OFF:
4627            // Flash capture sequence, AF flash was executed before
4628            break;
4629        default:
4630            // Full flash sequence
4631            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4632            m_ctlInfo.flash.m_flashEnableFlg = true;
4633            m_ctlInfo.flash.m_flashTimeOut = 0;
4634        }
4635    } else {
4636        // Skip pre-capture in case of non-flash.
4637        ALOGV("[PreCap] Flash OFF mode ");
4638        m_ctlInfo.flash.m_flashEnableFlg = false;
4639        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4640    }
4641    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4642    OnPrecaptureMeteringNotificationSensor();
4643}
4644void ExynosCameraHWInterface2::OnAfTriggerStart(int id)
4645{
4646    m_afPendingTriggerId = id;
4647    m_afModeWaitingCnt = 6;
4648}
4649
4650void ExynosCameraHWInterface2::OnAfTrigger(int id)
4651{
4652    m_afTriggerId = id;
4653
4654    switch (m_afMode) {
4655    case AA_AFMODE_AUTO:
4656    case AA_AFMODE_MACRO:
4657    case AA_AFMODE_OFF:
4658        ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
4659        // If flash is enable, Flash operation is executed before triggering AF
4660        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4661                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4662                && (m_cameraId == 0)) {
4663            ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
4664            m_ctlInfo.flash.m_flashEnableFlg = true;
4665            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4666            m_ctlInfo.flash.m_flashDecisionResult = false;
4667            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4668        }
4669        OnAfTriggerAutoMacro(id);
4670        break;
4671    case AA_AFMODE_CONTINUOUS_VIDEO:
4672        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4673        OnAfTriggerCAFVideo(id);
4674        break;
4675    case AA_AFMODE_CONTINUOUS_PICTURE:
4676        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4677        OnAfTriggerCAFPicture(id);
4678        break;
4679
4680    default:
4681        break;
4682    }
4683}
4684
4685void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
4686{
4687    int nextState = NO_TRANSITION;
4688
4689    switch (m_afState) {
4690    case HAL_AFSTATE_INACTIVE:
4691        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4692        m_IsAfTriggerRequired = true;
4693        m_ctlInfo.af.m_afTriggerTimeOut = 4;
4694        break;
4695    case HAL_AFSTATE_NEEDS_COMMAND:
4696        nextState = NO_TRANSITION;
4697        break;
4698    case HAL_AFSTATE_STARTED:
4699        nextState = NO_TRANSITION;
4700        break;
4701    case HAL_AFSTATE_SCANNING:
4702        nextState = NO_TRANSITION;
4703        break;
4704    case HAL_AFSTATE_LOCKED:
4705        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4706        m_IsAfTriggerRequired = true;
4707        break;
4708    case HAL_AFSTATE_FAILED:
4709        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4710        m_IsAfTriggerRequired = true;
4711        m_ctlInfo.af.m_afTriggerTimeOut = 4;
4712        break;
4713    default:
4714        break;
4715    }
4716    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4717    if (nextState != NO_TRANSITION)
4718        m_afState = nextState;
4719}
4720
4721void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4722{
4723    int nextState = NO_TRANSITION;
4724
4725    switch (m_afState) {
4726    case HAL_AFSTATE_INACTIVE:
4727        nextState = HAL_AFSTATE_FAILED;
4728        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4729        break;
4730    case HAL_AFSTATE_NEEDS_COMMAND:
4731        // not used
4732        break;
4733    case HAL_AFSTATE_STARTED:
4734        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4735        m_AfHwStateFailed = false;
4736        break;
4737    case HAL_AFSTATE_SCANNING:
4738        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4739        m_AfHwStateFailed = false;
4740        // If flash is enable, Flash operation is executed before triggering AF
4741        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4742                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4743                && (m_cameraId == 0)) {
4744            ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4745            m_ctlInfo.flash.m_flashEnableFlg = true;
4746            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4747            m_ctlInfo.flash.m_flashDecisionResult = false;
4748            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4749        }
4750        break;
4751    case HAL_AFSTATE_NEEDS_DETERMINATION:
4752        nextState = NO_TRANSITION;
4753        break;
4754    case HAL_AFSTATE_PASSIVE_FOCUSED:
4755        m_IsAfLockRequired = true;
4756        if (m_AfHwStateFailed) {
4757            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4758            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4759            nextState = HAL_AFSTATE_FAILED;
4760        }
4761        else {
4762            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4763            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4764            nextState = HAL_AFSTATE_LOCKED;
4765        }
4766        m_AfHwStateFailed = false;
4767        break;
4768    case HAL_AFSTATE_LOCKED:
4769        nextState = NO_TRANSITION;
4770        break;
4771    case HAL_AFSTATE_FAILED:
4772        nextState = NO_TRANSITION;
4773        break;
4774    default:
4775        break;
4776    }
4777    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4778    if (nextState != NO_TRANSITION)
4779        m_afState = nextState;
4780}
4781
4782
4783void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
4784{
4785    int nextState = NO_TRANSITION;
4786
4787    switch (m_afState) {
4788    case HAL_AFSTATE_INACTIVE:
4789        nextState = HAL_AFSTATE_FAILED;
4790        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4791        break;
4792    case HAL_AFSTATE_NEEDS_COMMAND:
4793        // not used
4794        break;
4795    case HAL_AFSTATE_STARTED:
4796        m_IsAfLockRequired = true;
4797        nextState = HAL_AFSTATE_FAILED;
4798        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4799        break;
4800    case HAL_AFSTATE_SCANNING:
4801        m_IsAfLockRequired = true;
4802        nextState = HAL_AFSTATE_FAILED;
4803        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4804        break;
4805    case HAL_AFSTATE_NEEDS_DETERMINATION:
4806        // not used
4807        break;
4808    case HAL_AFSTATE_PASSIVE_FOCUSED:
4809        m_IsAfLockRequired = true;
4810        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4811        nextState = HAL_AFSTATE_LOCKED;
4812        break;
4813    case HAL_AFSTATE_LOCKED:
4814        nextState = NO_TRANSITION;
4815        break;
4816    case HAL_AFSTATE_FAILED:
4817        nextState = NO_TRANSITION;
4818        break;
4819    default:
4820        break;
4821    }
4822    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4823    if (nextState != NO_TRANSITION)
4824        m_afState = nextState;
4825}
4826
4827void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
4828{
4829    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4830        // Just noti of pre-capture start
4831        if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
4832            m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4833                        ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4834                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4835            ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4836            m_notifyCb(CAMERA2_MSG_AUTOWB,
4837                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
4838                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4839            m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4840        }
4841    }
4842}
4843
4844void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
4845{
4846    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4847        if (m_ctlInfo.flash.m_flashEnableFlg) {
4848            // flash case
4849            switch (m_ctlInfo.flash.m_flashCnt) {
4850            case IS_FLASH_STATE_AUTO_DONE:
4851            case IS_FLASH_STATE_AUTO_OFF:
4852                if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
4853                    // End notification
4854                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4855                                    ANDROID_CONTROL_AE_STATE_CONVERGED,
4856                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4857                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4858                    m_notifyCb(CAMERA2_MSG_AUTOWB,
4859                                    ANDROID_CONTROL_AWB_STATE_CONVERGED,
4860                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4861                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
4862                } else {
4863                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4864                                    ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4865                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4866                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_LOCKED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4867                    m_notifyCb(CAMERA2_MSG_AUTOWB,
4868                                    ANDROID_CONTROL_AWB_STATE_CONVERGED,
4869                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4870                    m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4871                }
4872                break;
4873            case IS_FLASH_STATE_CAPTURE:
4874            case IS_FLASH_STATE_CAPTURE_WAIT:
4875            case IS_FLASH_STATE_CAPTURE_JPEG:
4876            case IS_FLASH_STATE_CAPTURE_END:
4877                ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
4878                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
4879                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4880                        ANDROID_CONTROL_AE_STATE_CONVERGED,
4881                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4882                m_notifyCb(CAMERA2_MSG_AUTOWB,
4883                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
4884                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4885                m_ctlInfo.flash.m_precaptureTriggerId = 0;
4886                break;
4887            }
4888        } else {
4889            // non-flash case
4890            if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
4891                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4892                                ANDROID_CONTROL_AE_STATE_CONVERGED,
4893                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4894                ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4895                m_notifyCb(CAMERA2_MSG_AUTOWB,
4896                                ANDROID_CONTROL_AWB_STATE_CONVERGED,
4897                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4898                m_ctlInfo.flash.m_precaptureTriggerId = 0;
4899            }
4900        }
4901    }
4902}
4903
4904void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
4905{
4906    switch (m_afMode) {
4907    case AA_AFMODE_AUTO:
4908    case AA_AFMODE_MACRO:
4909        OnAfNotificationAutoMacro(noti);
4910        break;
4911    case AA_AFMODE_CONTINUOUS_VIDEO:
4912        OnAfNotificationCAFVideo(noti);
4913        break;
4914    case AA_AFMODE_CONTINUOUS_PICTURE:
4915        OnAfNotificationCAFPicture(noti);
4916        break;
4917    case AA_AFMODE_OFF:
4918    default:
4919        break;
4920    }
4921}
4922
4923void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
4924{
4925    int nextState = NO_TRANSITION;
4926    bool bWrongTransition = false;
4927
4928    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
4929        switch (noti) {
4930        case AA_AFSTATE_INACTIVE:
4931        case AA_AFSTATE_ACTIVE_SCAN:
4932        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4933        case AA_AFSTATE_AF_FAILED_FOCUS:
4934        default:
4935            nextState = NO_TRANSITION;
4936            break;
4937        }
4938    }
4939    else if (m_afState == HAL_AFSTATE_STARTED) {
4940        switch (noti) {
4941        case AA_AFSTATE_INACTIVE:
4942            nextState = NO_TRANSITION;
4943            break;
4944        case AA_AFSTATE_ACTIVE_SCAN:
4945            nextState = HAL_AFSTATE_SCANNING;
4946            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
4947            break;
4948        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4949            nextState = NO_TRANSITION;
4950            break;
4951        case AA_AFSTATE_AF_FAILED_FOCUS:
4952            nextState = NO_TRANSITION;
4953            break;
4954        default:
4955            bWrongTransition = true;
4956            break;
4957        }
4958    }
4959    else if (m_afState == HAL_AFSTATE_SCANNING) {
4960        switch (noti) {
4961        case AA_AFSTATE_INACTIVE:
4962            bWrongTransition = true;
4963            break;
4964        case AA_AFSTATE_ACTIVE_SCAN:
4965            nextState = NO_TRANSITION;
4966            break;
4967        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4968            // If Flash mode is enable, after AF execute pre-capture metering
4969            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4970                switch (m_ctlInfo.flash.m_flashCnt) {
4971                case IS_FLASH_STATE_ON_DONE:
4972                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4973                    nextState = NO_TRANSITION;
4974                    break;
4975                case IS_FLASH_STATE_AUTO_DONE:
4976                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4977                    nextState = HAL_AFSTATE_LOCKED;
4978                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4979                    break;
4980                default:
4981                    nextState = NO_TRANSITION;
4982                }
4983            } else {
4984                nextState = HAL_AFSTATE_LOCKED;
4985                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4986            }
4987            break;
4988        case AA_AFSTATE_AF_FAILED_FOCUS:
4989            // If Flash mode is enable, after AF execute pre-capture metering
4990            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4991                switch (m_ctlInfo.flash.m_flashCnt) {
4992                case IS_FLASH_STATE_ON_DONE:
4993                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4994                    nextState = NO_TRANSITION;
4995                    break;
4996                case IS_FLASH_STATE_AUTO_DONE:
4997                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4998                    nextState = HAL_AFSTATE_FAILED;
4999                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5000                    break;
5001                default:
5002                    nextState = NO_TRANSITION;
5003                }
5004            } else {
5005                nextState = HAL_AFSTATE_FAILED;
5006                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5007            }
5008            break;
5009        default:
5010            bWrongTransition = true;
5011            break;
5012        }
5013    }
5014    else if (m_afState == HAL_AFSTATE_LOCKED) {
5015        switch (noti) {
5016            case AA_AFSTATE_INACTIVE:
5017            case AA_AFSTATE_ACTIVE_SCAN:
5018                bWrongTransition = true;
5019                break;
5020            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5021                nextState = NO_TRANSITION;
5022                break;
5023            case AA_AFSTATE_AF_FAILED_FOCUS:
5024            default:
5025                bWrongTransition = true;
5026                break;
5027        }
5028    }
5029    else if (m_afState == HAL_AFSTATE_FAILED) {
5030        switch (noti) {
5031            case AA_AFSTATE_INACTIVE:
5032            case AA_AFSTATE_ACTIVE_SCAN:
5033            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5034                bWrongTransition = true;
5035                break;
5036            case AA_AFSTATE_AF_FAILED_FOCUS:
5037                nextState = NO_TRANSITION;
5038                break;
5039            default:
5040                bWrongTransition = true;
5041                break;
5042        }
5043    }
5044    if (bWrongTransition) {
5045        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5046        return;
5047    }
5048    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5049    if (nextState != NO_TRANSITION)
5050        m_afState = nextState;
5051}
5052
5053void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
5054{
5055    int nextState = NO_TRANSITION;
5056    bool bWrongTransition = false;
5057
5058    if (m_afState == HAL_AFSTATE_INACTIVE) {
5059        switch (noti) {
5060        case AA_AFSTATE_INACTIVE:
5061        case AA_AFSTATE_ACTIVE_SCAN:
5062        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5063        case AA_AFSTATE_AF_FAILED_FOCUS:
5064        default:
5065            nextState = NO_TRANSITION;
5066            break;
5067        }
5068    }
5069    else if (m_afState == HAL_AFSTATE_STARTED) {
5070        switch (noti) {
5071        case AA_AFSTATE_INACTIVE:
5072            nextState = NO_TRANSITION;
5073            break;
5074        case AA_AFSTATE_ACTIVE_SCAN:
5075            nextState = HAL_AFSTATE_SCANNING;
5076            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5077            break;
5078        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5079            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5080            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5081            break;
5082        case AA_AFSTATE_AF_FAILED_FOCUS:
5083            //nextState = HAL_AFSTATE_FAILED;
5084            //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5085            nextState = NO_TRANSITION;
5086            break;
5087        default:
5088            bWrongTransition = true;
5089            break;
5090        }
5091    }
5092    else if (m_afState == HAL_AFSTATE_SCANNING) {
5093        switch (noti) {
5094        case AA_AFSTATE_INACTIVE:
5095            nextState = NO_TRANSITION;
5096            break;
5097        case AA_AFSTATE_ACTIVE_SCAN:
5098            nextState = NO_TRANSITION;
5099            m_AfHwStateFailed = false;
5100            break;
5101        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5102            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5103            m_AfHwStateFailed = false;
5104            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5105            break;
5106        case AA_AFSTATE_AF_FAILED_FOCUS:
5107            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5108            m_AfHwStateFailed = true;
5109            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5110            break;
5111        default:
5112            bWrongTransition = true;
5113            break;
5114        }
5115    }
5116    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5117        switch (noti) {
5118        case AA_AFSTATE_INACTIVE:
5119            nextState = NO_TRANSITION;
5120            break;
5121        case AA_AFSTATE_ACTIVE_SCAN:
5122            nextState = HAL_AFSTATE_SCANNING;
5123            m_AfHwStateFailed = false;
5124            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5125            break;
5126        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5127            nextState = NO_TRANSITION;
5128            m_AfHwStateFailed = false;
5129            break;
5130        case AA_AFSTATE_AF_FAILED_FOCUS:
5131            nextState = NO_TRANSITION;
5132            m_AfHwStateFailed = true;
5133            break;
5134        default:
5135            bWrongTransition = true;
5136            break;
5137        }
5138    }
5139    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5140        //Skip notification in case of flash, wait the end of flash on
5141        if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5142            if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
5143                return;
5144        }
5145        switch (noti) {
5146        case AA_AFSTATE_INACTIVE:
5147            nextState = NO_TRANSITION;
5148            break;
5149        case AA_AFSTATE_ACTIVE_SCAN:
5150            nextState = NO_TRANSITION;
5151            break;
5152        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5153            // If Flash mode is enable, after AF execute pre-capture metering
5154            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5155                switch (m_ctlInfo.flash.m_flashCnt) {
5156                case IS_FLASH_STATE_ON_DONE:
5157                    ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5158                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5159                    nextState = NO_TRANSITION;
5160                    break;
5161                case IS_FLASH_STATE_AUTO_DONE:
5162                    ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5163                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5164                    m_IsAfLockRequired = true;
5165                    nextState = HAL_AFSTATE_LOCKED;
5166                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5167                    break;
5168                default:
5169                    nextState = NO_TRANSITION;
5170                }
5171            } else {
5172                m_IsAfLockRequired = true;
5173                nextState = HAL_AFSTATE_LOCKED;
5174                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5175            }
5176            break;
5177        case AA_AFSTATE_AF_FAILED_FOCUS:
5178            // If Flash mode is enable, after AF execute pre-capture metering
5179            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5180                switch (m_ctlInfo.flash.m_flashCnt) {
5181                case IS_FLASH_STATE_ON_DONE:
5182                    ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5183                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5184                    nextState = NO_TRANSITION;
5185                    break;
5186                case IS_FLASH_STATE_AUTO_DONE:
5187                    ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5188                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5189                    m_IsAfLockRequired = true;
5190                    nextState = HAL_AFSTATE_FAILED;
5191                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5192                    break;
5193                default:
5194                    nextState = NO_TRANSITION;
5195                }
5196            } else {
5197                m_IsAfLockRequired = true;
5198                nextState = HAL_AFSTATE_FAILED;
5199                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5200            }
5201            break;
5202        default:
5203            bWrongTransition = true;
5204            break;
5205        }
5206    }
5207    else if (m_afState == HAL_AFSTATE_LOCKED) {
5208        switch (noti) {
5209            case AA_AFSTATE_INACTIVE:
5210                nextState = NO_TRANSITION;
5211                break;
5212            case AA_AFSTATE_ACTIVE_SCAN:
5213                bWrongTransition = true;
5214                break;
5215            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5216                nextState = NO_TRANSITION;
5217                break;
5218            case AA_AFSTATE_AF_FAILED_FOCUS:
5219            default:
5220                bWrongTransition = true;
5221                break;
5222        }
5223    }
5224    else if (m_afState == HAL_AFSTATE_FAILED) {
5225        switch (noti) {
5226            case AA_AFSTATE_INACTIVE:
5227                bWrongTransition = true;
5228                break;
5229            case AA_AFSTATE_ACTIVE_SCAN:
5230                nextState = HAL_AFSTATE_SCANNING;
5231                break;
5232            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5233                bWrongTransition = true;
5234                break;
5235            case AA_AFSTATE_AF_FAILED_FOCUS:
5236                nextState = NO_TRANSITION;
5237                break;
5238            default:
5239                bWrongTransition = true;
5240                break;
5241        }
5242    }
5243    if (bWrongTransition) {
5244        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5245        return;
5246    }
5247    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5248    if (nextState != NO_TRANSITION)
5249        m_afState = nextState;
5250}
5251
5252void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
5253{
5254    int nextState = NO_TRANSITION;
5255    bool bWrongTransition = false;
5256
5257    if (m_afState == HAL_AFSTATE_INACTIVE) {
5258        switch (noti) {
5259        case AA_AFSTATE_INACTIVE:
5260        case AA_AFSTATE_ACTIVE_SCAN:
5261        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5262        case AA_AFSTATE_AF_FAILED_FOCUS:
5263        default:
5264            nextState = NO_TRANSITION;
5265            break;
5266        }
5267    }
5268    else if (m_afState == HAL_AFSTATE_STARTED) {
5269        switch (noti) {
5270        case AA_AFSTATE_INACTIVE:
5271            nextState = NO_TRANSITION;
5272            break;
5273        case AA_AFSTATE_ACTIVE_SCAN:
5274            nextState = HAL_AFSTATE_SCANNING;
5275            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5276            break;
5277        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5278            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5279            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5280            break;
5281        case AA_AFSTATE_AF_FAILED_FOCUS:
5282            nextState = HAL_AFSTATE_FAILED;
5283            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5284            break;
5285        default:
5286            bWrongTransition = true;
5287            break;
5288        }
5289    }
5290    else if (m_afState == HAL_AFSTATE_SCANNING) {
5291        switch (noti) {
5292        case AA_AFSTATE_INACTIVE:
5293            bWrongTransition = true;
5294            break;
5295        case AA_AFSTATE_ACTIVE_SCAN:
5296            nextState = NO_TRANSITION;
5297            break;
5298        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5299            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5300            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5301            break;
5302        case AA_AFSTATE_AF_FAILED_FOCUS:
5303            nextState = NO_TRANSITION;
5304            break;
5305        default:
5306            bWrongTransition = true;
5307            break;
5308        }
5309    }
5310    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5311        switch (noti) {
5312        case AA_AFSTATE_INACTIVE:
5313            bWrongTransition = true;
5314            break;
5315        case AA_AFSTATE_ACTIVE_SCAN:
5316            nextState = HAL_AFSTATE_SCANNING;
5317            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5318            break;
5319        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5320            nextState = NO_TRANSITION;
5321            break;
5322        case AA_AFSTATE_AF_FAILED_FOCUS:
5323            nextState = HAL_AFSTATE_FAILED;
5324            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5325            // TODO : needs NO_TRANSITION ?
5326            break;
5327        default:
5328            bWrongTransition = true;
5329            break;
5330        }
5331    }
5332    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5333        switch (noti) {
5334        case AA_AFSTATE_INACTIVE:
5335            bWrongTransition = true;
5336            break;
5337        case AA_AFSTATE_ACTIVE_SCAN:
5338            nextState = NO_TRANSITION;
5339            break;
5340        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5341            m_IsAfLockRequired = true;
5342            nextState = HAL_AFSTATE_LOCKED;
5343            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5344            break;
5345        case AA_AFSTATE_AF_FAILED_FOCUS:
5346            nextState = HAL_AFSTATE_FAILED;
5347            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5348            break;
5349        default:
5350            bWrongTransition = true;
5351            break;
5352        }
5353    }
5354    else if (m_afState == HAL_AFSTATE_LOCKED) {
5355        switch (noti) {
5356            case AA_AFSTATE_INACTIVE:
5357                nextState = NO_TRANSITION;
5358                break;
5359            case AA_AFSTATE_ACTIVE_SCAN:
5360                bWrongTransition = true;
5361                break;
5362            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5363                nextState = NO_TRANSITION;
5364                break;
5365            case AA_AFSTATE_AF_FAILED_FOCUS:
5366            default:
5367                bWrongTransition = true;
5368                break;
5369        }
5370    }
5371    else if (m_afState == HAL_AFSTATE_FAILED) {
5372        switch (noti) {
5373            case AA_AFSTATE_INACTIVE:
5374            case AA_AFSTATE_ACTIVE_SCAN:
5375            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5376                bWrongTransition = true;
5377                break;
5378            case AA_AFSTATE_AF_FAILED_FOCUS:
5379                nextState = NO_TRANSITION;
5380                break;
5381            default:
5382                bWrongTransition = true;
5383                break;
5384        }
5385    }
5386    if (bWrongTransition) {
5387        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5388        return;
5389    }
5390    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5391    if (nextState != NO_TRANSITION)
5392        m_afState = nextState;
5393}
5394
5395void ExynosCameraHWInterface2::OnAfCancel(int id)
5396{
5397    m_afTriggerId = id;
5398
5399    switch (m_afMode) {
5400    case AA_AFMODE_AUTO:
5401    case AA_AFMODE_MACRO:
5402    case AA_AFMODE_OFF:
5403        OnAfCancelAutoMacro(id);
5404        break;
5405    case AA_AFMODE_CONTINUOUS_VIDEO:
5406        OnAfCancelCAFVideo(id);
5407        break;
5408    case AA_AFMODE_CONTINUOUS_PICTURE:
5409        OnAfCancelCAFPicture(id);
5410        break;
5411    default:
5412        break;
5413    }
5414}
5415
5416void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
5417{
5418    int nextState = NO_TRANSITION;
5419    m_afTriggerId = id;
5420
5421    if (m_ctlInfo.flash.m_flashEnableFlg  && m_ctlInfo.flash.m_afFlashDoneFlg) {
5422        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5423    }
5424    switch (m_afState) {
5425    case HAL_AFSTATE_INACTIVE:
5426        nextState = NO_TRANSITION;
5427        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5428        break;
5429    case HAL_AFSTATE_NEEDS_COMMAND:
5430    case HAL_AFSTATE_STARTED:
5431    case HAL_AFSTATE_SCANNING:
5432    case HAL_AFSTATE_LOCKED:
5433    case HAL_AFSTATE_FAILED:
5434        SetAfMode(AA_AFMODE_OFF);
5435        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5436        nextState = HAL_AFSTATE_INACTIVE;
5437        break;
5438    default:
5439        break;
5440    }
5441    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5442    if (nextState != NO_TRANSITION)
5443        m_afState = nextState;
5444}
5445
5446void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
5447{
5448    int nextState = NO_TRANSITION;
5449    m_afTriggerId = id;
5450
5451    switch (m_afState) {
5452    case HAL_AFSTATE_INACTIVE:
5453        nextState = NO_TRANSITION;
5454        break;
5455    case HAL_AFSTATE_NEEDS_COMMAND:
5456    case HAL_AFSTATE_STARTED:
5457    case HAL_AFSTATE_SCANNING:
5458    case HAL_AFSTATE_LOCKED:
5459    case HAL_AFSTATE_FAILED:
5460    case HAL_AFSTATE_NEEDS_DETERMINATION:
5461    case HAL_AFSTATE_PASSIVE_FOCUSED:
5462        SetAfMode(AA_AFMODE_OFF);
5463        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5464        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5465        nextState = HAL_AFSTATE_INACTIVE;
5466        break;
5467    default:
5468        break;
5469    }
5470    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5471    if (nextState != NO_TRANSITION)
5472        m_afState = nextState;
5473}
5474
5475void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
5476{
5477    int nextState = NO_TRANSITION;
5478    m_afTriggerId = id;
5479
5480    switch (m_afState) {
5481    case HAL_AFSTATE_INACTIVE:
5482        nextState = NO_TRANSITION;
5483        break;
5484    case HAL_AFSTATE_NEEDS_COMMAND:
5485    case HAL_AFSTATE_STARTED:
5486    case HAL_AFSTATE_SCANNING:
5487    case HAL_AFSTATE_LOCKED:
5488    case HAL_AFSTATE_FAILED:
5489    case HAL_AFSTATE_NEEDS_DETERMINATION:
5490    case HAL_AFSTATE_PASSIVE_FOCUSED:
5491        SetAfMode(AA_AFMODE_OFF);
5492        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5493        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5494        nextState = HAL_AFSTATE_INACTIVE;
5495        break;
5496    default:
5497        break;
5498    }
5499    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5500    if (nextState != NO_TRANSITION)
5501        m_afState = nextState;
5502}
5503
5504void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5505{
5506    if (m_serviceAfState != newState || newState == 0)
5507        m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5508    m_serviceAfState = newState;
5509}
5510
5511int ExynosCameraHWInterface2::GetAfStateForService()
5512{
5513   return m_serviceAfState;
5514}
5515
5516void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5517{
5518    if (m_afMode != afMode) {
5519        if (m_IsAfModeUpdateRequired) {
5520            m_afMode2 = afMode;
5521            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5522        }
5523        else {
5524            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5525            m_IsAfModeUpdateRequired = true;
5526            m_afMode = afMode;
5527            if (m_afModeWaitingCnt != 0) {
5528                m_afModeWaitingCnt = 0;
5529                m_afState = HAL_AFSTATE_INACTIVE;
5530                OnAfTrigger(m_afPendingTriggerId);
5531            }
5532        }
5533    }
5534}
5535
5536void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5537{
5538    char property[PROPERTY_VALUE_MAX];
5539
5540    //2 0th IFD TIFF Tags
5541#if 0 // STOPSHIP TODO(aray): remove before launch, but for now don't leak product data
5542    //3 Maker
5543    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5544    strncpy((char *)mExifInfo.maker, property,
5545                sizeof(mExifInfo.maker) - 1);
5546    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5547    //3 Model
5548    property_get("ro.product.model", property, EXIF_DEF_MODEL);
5549    strncpy((char *)mExifInfo.model, property,
5550                sizeof(mExifInfo.model) - 1);
5551    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5552    //3 Software
5553    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5554    strncpy((char *)mExifInfo.software, property,
5555                sizeof(mExifInfo.software) - 1);
5556    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5557#endif
5558
5559    //3 YCbCr Positioning
5560    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5561
5562    //2 0th IFD Exif Private Tags
5563    //3 F Number
5564    mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5565    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5566    //3 Exposure Program
5567    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5568    //3 Exif Version
5569    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5570    //3 Aperture
5571    double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5572    mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5573    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5574    //3 Maximum lens aperture
5575    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5576    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5577    //3 Lens Focal Length
5578    mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5579
5580    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5581    //3 User Comments
5582    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5583    //3 Color Space information
5584    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5585    //3 Exposure Mode
5586    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5587
5588    //2 0th IFD GPS Info Tags
5589    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5590    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5591
5592    //2 1th IFD TIFF Tags
5593    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5594    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5595    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5596    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5597    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5598    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5599}
5600
5601void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5602	camera2_shot *currentEntry)
5603{
5604    camera2_dm *dm = &(currentEntry->dm);
5605    camera2_ctl *ctl = &(currentEntry->ctl);
5606
5607    ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue );
5608    if (!ctl->request.frameCount)
5609       return;
5610    //2 0th IFD TIFF Tags
5611    //3 Width
5612    exifInfo->width = rect->w;
5613    //3 Height
5614    exifInfo->height = rect->h;
5615    //3 Orientation
5616    switch (ctl->jpeg.orientation) {
5617    case 90:
5618        exifInfo->orientation = EXIF_ORIENTATION_90;
5619        break;
5620    case 180:
5621        exifInfo->orientation = EXIF_ORIENTATION_180;
5622        break;
5623    case 270:
5624        exifInfo->orientation = EXIF_ORIENTATION_270;
5625        break;
5626    case 0:
5627    default:
5628        exifInfo->orientation = EXIF_ORIENTATION_UP;
5629        break;
5630    }
5631
5632    //3 Date time
5633    time_t rawtime;
5634    struct tm *timeinfo;
5635    time(&rawtime);
5636    timeinfo = localtime(&rawtime);
5637    strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5638
5639    //2 0th IFD Exif Private Tags
5640    //3 Exposure Time
5641    int shutterSpeed = (dm->sensor.exposureTime/1000);
5642
5643    // To display exposure time just above 500ms as 1/2sec, not 1 sec.
5644    if (shutterSpeed > 500000)
5645        shutterSpeed -=  100000;
5646
5647    if (shutterSpeed < 0) {
5648        shutterSpeed = 100;
5649    }
5650
5651    exifInfo->exposure_time.num = 1;
5652    // x us -> 1/x s */
5653    //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5654    exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5655
5656    //3 ISO Speed Rating
5657    exifInfo->iso_speed_rating = dm->aa.isoValue;
5658
5659    uint32_t av, tv, bv, sv, ev;
5660    av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5661    tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5662    sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5663    bv = av + tv - sv;
5664    ev = av + tv;
5665    //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5666    ALOGD("AV=%d, TV=%d, SV=%d", av, tv, sv);
5667
5668    //3 Shutter Speed
5669    exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5670    exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5671    //3 Brightness
5672    exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5673    exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5674    //3 Exposure Bias
5675    if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH||
5676        ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) {
5677        exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5678        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5679    } else {
5680        exifInfo->exposure_bias.num = 0;
5681        exifInfo->exposure_bias.den = 0;
5682    }
5683    //3 Metering Mode
5684    /*switch (m_curCameraInfo->metering) {
5685    case METERING_MODE_CENTER:
5686        exifInfo->metering_mode = EXIF_METERING_CENTER;
5687        break;
5688    case METERING_MODE_MATRIX:
5689        exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5690        break;
5691    case METERING_MODE_SPOT:
5692        exifInfo->metering_mode = EXIF_METERING_SPOT;
5693        break;
5694    case METERING_MODE_AVERAGE:
5695    default:
5696        exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5697        break;
5698    }*/
5699    exifInfo->metering_mode = EXIF_METERING_CENTER;
5700
5701    //3 Flash
5702    if (m_ctlInfo.flash.m_flashDecisionResult)
5703        exifInfo->flash = 1;
5704    else
5705        exifInfo->flash = EXIF_DEF_FLASH;
5706
5707    //3 White Balance
5708    if (m_ctlInfo.awb.i_awbMode == AA_AWBMODE_WB_AUTO)
5709        exifInfo->white_balance = EXIF_WB_AUTO;
5710    else
5711        exifInfo->white_balance = EXIF_WB_MANUAL;
5712
5713    //3 Scene Capture Type
5714    switch (ctl->aa.sceneMode) {
5715    case AA_SCENE_MODE_PORTRAIT:
5716        exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5717        break;
5718    case AA_SCENE_MODE_LANDSCAPE:
5719        exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5720        break;
5721    case AA_SCENE_MODE_NIGHT_PORTRAIT:
5722        exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5723        break;
5724    default:
5725        exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5726        break;
5727    }
5728
5729    //2 0th IFD GPS Info Tags
5730    if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5731
5732        if (ctl->jpeg.gpsCoordinates[0] > 0)
5733            strcpy((char *)exifInfo->gps_latitude_ref, "N");
5734        else
5735            strcpy((char *)exifInfo->gps_latitude_ref, "S");
5736
5737        if (ctl->jpeg.gpsCoordinates[1] > 0)
5738            strcpy((char *)exifInfo->gps_longitude_ref, "E");
5739        else
5740            strcpy((char *)exifInfo->gps_longitude_ref, "W");
5741
5742        if (ctl->jpeg.gpsCoordinates[2] > 0)
5743            exifInfo->gps_altitude_ref = 0;
5744        else
5745            exifInfo->gps_altitude_ref = 1;
5746
5747        double latitude = fabs(ctl->jpeg.gpsCoordinates[0] / 10000.0);
5748        double longitude = fabs(ctl->jpeg.gpsCoordinates[1] / 10000.0);
5749        double altitude = fabs(ctl->jpeg.gpsCoordinates[2] / 100.0);
5750
5751        exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5752        exifInfo->gps_latitude[0].den = 1;
5753        exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5754        exifInfo->gps_latitude[1].den = 1;
5755        exifInfo->gps_latitude[2].num = (uint32_t)((((latitude - exifInfo->gps_latitude[0].num) * 60)
5756                                        - exifInfo->gps_latitude[1].num) * 60);
5757        exifInfo->gps_latitude[2].den = 1;
5758
5759        exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5760        exifInfo->gps_longitude[0].den = 1;
5761        exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5762        exifInfo->gps_longitude[1].den = 1;
5763        exifInfo->gps_longitude[2].num = (uint32_t)((((longitude - exifInfo->gps_longitude[0].num) * 60)
5764                                        - exifInfo->gps_longitude[1].num) * 60);
5765        exifInfo->gps_longitude[2].den = 1;
5766
5767        exifInfo->gps_altitude.num = (uint32_t)altitude;
5768        exifInfo->gps_altitude.den = 1;
5769
5770        struct tm tm_data;
5771        long timestamp;
5772        timestamp = (long)ctl->jpeg.gpsTimestamp;
5773        gmtime_r(&timestamp, &tm_data);
5774        exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5775        exifInfo->gps_timestamp[0].den = 1;
5776        exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5777        exifInfo->gps_timestamp[1].den = 1;
5778        exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5779        exifInfo->gps_timestamp[2].den = 1;
5780        snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5781                "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5782
5783        exifInfo->enableGps = true;
5784    } else {
5785        exifInfo->enableGps = false;
5786    }
5787
5788    //2 1th IFD TIFF Tags
5789    exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5790    exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5791}
5792
5793ExynosCameraHWInterface2::MainThread::~MainThread()
5794{
5795    ALOGV("(%s):", __FUNCTION__);
5796}
5797
5798void ExynosCameraHWInterface2::MainThread::release()
5799{
5800    ALOGV("(%s):", __func__);
5801    SetSignal(SIGNAL_THREAD_RELEASE);
5802}
5803
5804ExynosCameraHWInterface2::SensorThread::~SensorThread()
5805{
5806    ALOGV("(%s):", __FUNCTION__);
5807}
5808
5809void ExynosCameraHWInterface2::SensorThread::release()
5810{
5811    ALOGV("(%s):", __func__);
5812    SetSignal(SIGNAL_THREAD_RELEASE);
5813}
5814
5815ExynosCameraHWInterface2::StreamThread::~StreamThread()
5816{
5817    ALOGV("(%s):", __FUNCTION__);
5818}
5819
5820void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
5821{
5822    ALOGV("DEBUG(%s):", __FUNCTION__);
5823    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
5824}
5825
5826void ExynosCameraHWInterface2::StreamThread::release()
5827{
5828    ALOGV("(%s):", __func__);
5829    SetSignal(SIGNAL_THREAD_RELEASE);
5830}
5831
5832int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5833{
5834    int index;
5835    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5836        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
5837            return index;
5838    }
5839    return -1;
5840}
5841
5842int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
5843{
5844    int index;
5845    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5846        if (m_parameters.svcBufHandle[index] == *bufHandle)
5847            return index;
5848    }
5849    return -1;
5850}
5851
5852status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
5853{
5854    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5855    int index, vacantIndex;
5856    bool vacancy = false;
5857
5858    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5859        if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
5860            vacancy = true;
5861            vacantIndex = index;
5862        } else if (m_attachedSubStreams[index].streamId == stream_id) {
5863            return BAD_VALUE;
5864        }
5865    }
5866    if (!vacancy)
5867        return NO_MEMORY;
5868    m_attachedSubStreams[vacantIndex].streamId = stream_id;
5869    m_attachedSubStreams[vacantIndex].priority = priority;
5870    m_numRegisteredStream++;
5871    return NO_ERROR;
5872}
5873
5874status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
5875{
5876    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5877    int index;
5878    bool found = false;
5879
5880    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5881        if (m_attachedSubStreams[index].streamId == stream_id) {
5882            found = true;
5883            break;
5884        }
5885    }
5886    if (!found)
5887        return BAD_VALUE;
5888    m_attachedSubStreams[index].streamId = -1;
5889    m_attachedSubStreams[index].priority = 0;
5890    m_numRegisteredStream--;
5891    return NO_ERROR;
5892}
5893
5894int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
5895{
5896    if (ionClient == 0) {
5897        ionClient = ion_client_create();
5898        if (ionClient < 0) {
5899            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
5900            return 0;
5901        }
5902    }
5903    return ionClient;
5904}
5905
5906int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
5907{
5908    if (ionClient != 0) {
5909        if (ionClient > 0) {
5910            ion_client_destroy(ionClient);
5911        }
5912        ionClient = 0;
5913    }
5914    return ionClient;
5915}
5916
5917int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
5918{
5919    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
5920}
5921
5922int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
5923{
5924    int ret = 0;
5925    int i = 0;
5926    int flag = 0;
5927
5928    if (ionClient == 0) {
5929        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
5930        return -1;
5931    }
5932
5933    for (i = 0 ; i < iMemoryNum ; i++) {
5934        if (buf->size.extS[i] == 0) {
5935            break;
5936        }
5937        if (1 << i & cacheFlag)
5938            flag = ION_FLAG_CACHED;
5939        else
5940            flag = 0;
5941        buf->fd.extFd[i] = ion_alloc(ionClient, \
5942                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK, flag);
5943        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
5944            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
5945            buf->fd.extFd[i] = -1;
5946            freeCameraMemory(buf, iMemoryNum);
5947            return -1;
5948        }
5949
5950        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
5951                                        buf->size.extS[i], 0);
5952        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
5953            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
5954            buf->virt.extP[i] = (char *)MAP_FAILED;
5955            freeCameraMemory(buf, iMemoryNum);
5956            return -1;
5957        }
5958        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
5959    }
5960
5961    return ret;
5962}
5963
5964void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
5965{
5966
5967    int i = 0 ;
5968    int ret = 0;
5969
5970    for (i=0;i<iMemoryNum;i++) {
5971        if (buf->fd.extFd[i] != -1) {
5972            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
5973                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
5974                if (ret < 0)
5975                    ALOGE("ERR(%s)", __FUNCTION__);
5976            }
5977            ion_free(buf->fd.extFd[i]);
5978        ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
5979        }
5980        buf->fd.extFd[i] = -1;
5981        buf->virt.extP[i] = (char *)MAP_FAILED;
5982        buf->size.extS[i] = 0;
5983    }
5984}
5985
5986void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
5987{
5988    int i =0 ;
5989    for (i=0;i<iMemoryNum;i++) {
5990        buf->virt.extP[i] = (char *)MAP_FAILED;
5991        buf->fd.extFd[i] = -1;
5992        buf->size.extS[i] = 0;
5993    }
5994}
5995
5996
5997
5998
5999static camera2_device_t *g_cam2_device = NULL;
6000static bool g_camera_vaild = false;
6001ExynosCamera2 * g_camera2[2] = { NULL, NULL };
6002
6003static int HAL2_camera_device_close(struct hw_device_t* device)
6004{
6005    ALOGD("(%s): ENTER", __FUNCTION__);
6006    if (device) {
6007
6008        camera2_device_t *cam_device = (camera2_device_t *)device;
6009        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
6010        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
6011        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
6012        free(cam_device);
6013        g_camera_vaild = false;
6014        g_cam2_device = NULL;
6015    }
6016
6017    ALOGD("(%s): EXIT", __FUNCTION__);
6018    return 0;
6019}
6020
6021static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
6022{
6023    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
6024}
6025
6026static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
6027            const camera2_request_queue_src_ops_t *request_src_ops)
6028{
6029    ALOGV("DEBUG(%s):", __FUNCTION__);
6030    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
6031}
6032
6033static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
6034{
6035    ALOGV("DEBUG(%s):", __FUNCTION__);
6036    return obj(dev)->notifyRequestQueueNotEmpty();
6037}
6038
6039static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
6040            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
6041{
6042    ALOGV("DEBUG(%s):", __FUNCTION__);
6043    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
6044}
6045
6046static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
6047{
6048    ALOGV("DEBUG(%s):", __FUNCTION__);
6049    return obj(dev)->getInProgressCount();
6050}
6051
6052static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
6053{
6054    ALOGV("DEBUG(%s):", __FUNCTION__);
6055    return obj(dev)->flushCapturesInProgress();
6056}
6057
6058static int HAL2_device_construct_default_request(const struct camera2_device *dev,
6059            int request_template, camera_metadata_t **request)
6060{
6061    ALOGV("DEBUG(%s):", __FUNCTION__);
6062    return obj(dev)->constructDefaultRequest(request_template, request);
6063}
6064
6065static int HAL2_device_allocate_stream(
6066            const struct camera2_device *dev,
6067            // inputs
6068            uint32_t width,
6069            uint32_t height,
6070            int      format,
6071            const camera2_stream_ops_t *stream_ops,
6072            // outputs
6073            uint32_t *stream_id,
6074            uint32_t *format_actual,
6075            uint32_t *usage,
6076            uint32_t *max_buffers)
6077{
6078    ALOGV("(%s): ", __FUNCTION__);
6079    return obj(dev)->allocateStream(width, height, format, stream_ops,
6080                                    stream_id, format_actual, usage, max_buffers);
6081}
6082
6083static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
6084            uint32_t stream_id,
6085            int num_buffers,
6086            buffer_handle_t *buffers)
6087{
6088    ALOGV("DEBUG(%s):", __FUNCTION__);
6089    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
6090}
6091
6092static int HAL2_device_release_stream(
6093        const struct camera2_device *dev,
6094            uint32_t stream_id)
6095{
6096    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
6097    if (!g_camera_vaild)
6098        return 0;
6099    return obj(dev)->releaseStream(stream_id);
6100}
6101
6102static int HAL2_device_allocate_reprocess_stream(
6103           const struct camera2_device *dev,
6104            uint32_t width,
6105            uint32_t height,
6106            uint32_t format,
6107            const camera2_stream_in_ops_t *reprocess_stream_ops,
6108            // outputs
6109            uint32_t *stream_id,
6110            uint32_t *consumer_usage,
6111            uint32_t *max_buffers)
6112{
6113    ALOGV("DEBUG(%s):", __FUNCTION__);
6114    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
6115                                    stream_id, consumer_usage, max_buffers);
6116}
6117
6118static int HAL2_device_allocate_reprocess_stream_from_stream(
6119           const struct camera2_device *dev,
6120            uint32_t output_stream_id,
6121            const camera2_stream_in_ops_t *reprocess_stream_ops,
6122            // outputs
6123            uint32_t *stream_id)
6124{
6125    ALOGV("DEBUG(%s):", __FUNCTION__);
6126    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
6127                                    reprocess_stream_ops, stream_id);
6128}
6129
6130static int HAL2_device_release_reprocess_stream(
6131        const struct camera2_device *dev,
6132            uint32_t stream_id)
6133{
6134    ALOGV("DEBUG(%s):", __FUNCTION__);
6135    return obj(dev)->releaseReprocessStream(stream_id);
6136}
6137
6138static int HAL2_device_trigger_action(const struct camera2_device *dev,
6139           uint32_t trigger_id,
6140            int ext1,
6141            int ext2)
6142{
6143    ALOGV("DEBUG(%s):", __FUNCTION__);
6144    if (!g_camera_vaild)
6145        return 0;
6146    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
6147}
6148
6149static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
6150            camera2_notify_callback notify_cb,
6151            void *user)
6152{
6153    ALOGV("DEBUG(%s):", __FUNCTION__);
6154    return obj(dev)->setNotifyCallback(notify_cb, user);
6155}
6156
6157static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
6158            vendor_tag_query_ops_t **ops)
6159{
6160    ALOGV("DEBUG(%s):", __FUNCTION__);
6161    return obj(dev)->getMetadataVendorTagOps(ops);
6162}
6163
6164static int HAL2_device_dump(const struct camera2_device *dev, int fd)
6165{
6166    ALOGV("DEBUG(%s):", __FUNCTION__);
6167    return obj(dev)->dump(fd);
6168}
6169
6170
6171
6172
6173
6174static int HAL2_getNumberOfCameras()
6175{
6176    ALOGV("(%s): returning 2", __FUNCTION__);
6177    return 2;
6178}
6179
6180
6181static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
6182{
6183    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
6184    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
6185
6186    status_t res;
6187
6188    if (cameraId == 0) {
6189        info->facing = CAMERA_FACING_BACK;
6190        if (!g_camera2[0])
6191            g_camera2[0] = new ExynosCamera2(0);
6192    }
6193    else if (cameraId == 1) {
6194        info->facing = CAMERA_FACING_FRONT;
6195        if (!g_camera2[1])
6196            g_camera2[1] = new ExynosCamera2(1);
6197    }
6198    else
6199        return BAD_VALUE;
6200
6201    info->orientation = 0;
6202    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
6203    if (mCameraInfo[cameraId] == NULL) {
6204        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
6205        if (res != OK) {
6206            ALOGE("%s: Unable to allocate static info: %s (%d)",
6207                    __FUNCTION__, strerror(-res), res);
6208            return res;
6209        }
6210        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
6211        if (res != OK) {
6212            ALOGE("%s: Unable to fill in static info: %s (%d)",
6213                    __FUNCTION__, strerror(-res), res);
6214            return res;
6215        }
6216    }
6217    info->static_camera_characteristics = mCameraInfo[cameraId];
6218    return NO_ERROR;
6219}
6220
6221#define SET_METHOD(m) m : HAL2_device_##m
6222
6223static camera2_device_ops_t camera2_device_ops = {
6224        SET_METHOD(set_request_queue_src_ops),
6225        SET_METHOD(notify_request_queue_not_empty),
6226        SET_METHOD(set_frame_queue_dst_ops),
6227        SET_METHOD(get_in_progress_count),
6228        SET_METHOD(flush_captures_in_progress),
6229        SET_METHOD(construct_default_request),
6230        SET_METHOD(allocate_stream),
6231        SET_METHOD(register_stream_buffers),
6232        SET_METHOD(release_stream),
6233        SET_METHOD(allocate_reprocess_stream),
6234        SET_METHOD(allocate_reprocess_stream_from_stream),
6235        SET_METHOD(release_reprocess_stream),
6236        SET_METHOD(trigger_action),
6237        SET_METHOD(set_notify_callback),
6238        SET_METHOD(get_metadata_vendor_tag_ops),
6239        SET_METHOD(dump),
6240};
6241
6242#undef SET_METHOD
6243
6244
6245static int HAL2_camera_device_open(const struct hw_module_t* module,
6246                                  const char *id,
6247                                  struct hw_device_t** device)
6248{
6249    int cameraId = atoi(id);
6250    int openInvalid = 0;
6251
6252    g_camera_vaild = false;
6253    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
6254    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
6255        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
6256        return -EINVAL;
6257    }
6258
6259    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
6260    if (g_cam2_device) {
6261        if (obj(g_cam2_device)->getCameraId() == cameraId) {
6262            ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
6263            goto done;
6264        } else {
6265            ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
6266            while (g_cam2_device)
6267                usleep(SIG_WAITING_TICK);
6268            ALOGD("(%s): END   waiting for cam device free", __FUNCTION__);
6269        }
6270    }
6271
6272    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
6273    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
6274
6275    if (!g_cam2_device)
6276        return -ENOMEM;
6277
6278    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
6279    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
6280    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
6281    g_cam2_device->common.close   = HAL2_camera_device_close;
6282
6283    g_cam2_device->ops = &camera2_device_ops;
6284
6285    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
6286
6287    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
6288    if (!openInvalid) {
6289        ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
6290        return -ENODEV;
6291    }
6292done:
6293    *device = (hw_device_t *)g_cam2_device;
6294    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
6295    g_camera_vaild = true;
6296
6297    return 0;
6298}
6299
6300
6301static hw_module_methods_t camera_module_methods = {
6302            open : HAL2_camera_device_open
6303};
6304
6305extern "C" {
6306    struct camera_module HAL_MODULE_INFO_SYM = {
6307      common : {
6308          tag                : HARDWARE_MODULE_TAG,
6309          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
6310          hal_api_version    : HARDWARE_HAL_API_VERSION,
6311          id                 : CAMERA_HARDWARE_MODULE_ID,
6312          name               : "Exynos Camera HAL2",
6313          author             : "Samsung Corporation",
6314          methods            : &camera_module_methods,
6315          dso:                NULL,
6316          reserved:           {0},
6317      },
6318      get_number_of_cameras : HAL2_getNumberOfCameras,
6319      get_camera_info       : HAL2_getCameraInfo
6320    };
6321}
6322
6323}; // namespace android
6324