1/*
2**
3** Copyright 2008, The Android Open Source Project
4** Copyright 2012, Samsung Electronics Co. LTD
5**
6** Licensed under the Apache License, Version 2.0 (the "License");
7** you may not use this file except in compliance with the License.
8** You may obtain a copy of the License at
9**
10**     http://www.apache.org/licenses/LICENSE-2.0
11**
12** Unless required by applicable law or agreed to in writing, software
13** distributed under the License is distributed on an "AS IS" BASIS,
14** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15** See the License for the specific language governing permissions and
16** limitations under the License.
17*/
18
19/*!
20 * \file      ExynosCameraHWInterface2.cpp
21 * \brief     source file for Android Camera API 2.0 HAL
22 * \author    Sungjoong Kang(sj3.kang@samsung.com)
23 * \date      2012/07/10
24 *
25 * <b>Revision History: </b>
26 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27 *   Initial Release
28 *
29 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30 *   2nd Release
31 *
32 */
33
34//#define LOG_NDEBUG 0
35#define LOG_TAG "ExynosCameraHAL2"
36#include <utils/Log.h>
37#include <math.h>
38
39#include "ExynosCameraHWInterface2.h"
40#include "exynos_format.h"
41
42namespace android {
43
44void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
45{
46    int nw;
47    int cnt = 0;
48    uint32_t written = 0;
49
50    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
51    int fd = open(fname, O_RDWR | O_CREAT, 0644);
52    if (fd < 0) {
53        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
54        return;
55    }
56
57    ALOGV("writing %d bytes to file [%s]", size, fname);
58    while (written < size) {
59        nw = ::write(fd, buf + written, size - written);
60        if (nw < 0) {
61            ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
62            break;
63        }
64        written += nw;
65        cnt++;
66    }
67    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
68    ::close(fd);
69}
70
71int get_pixel_depth(uint32_t fmt)
72{
73    int depth = 0;
74
75    switch (fmt) {
76    case V4L2_PIX_FMT_JPEG:
77        depth = 8;
78        break;
79
80    case V4L2_PIX_FMT_NV12:
81    case V4L2_PIX_FMT_NV21:
82    case V4L2_PIX_FMT_YUV420:
83    case V4L2_PIX_FMT_YVU420M:
84    case V4L2_PIX_FMT_NV12M:
85    case V4L2_PIX_FMT_NV12MT:
86        depth = 12;
87        break;
88
89    case V4L2_PIX_FMT_RGB565:
90    case V4L2_PIX_FMT_YUYV:
91    case V4L2_PIX_FMT_YVYU:
92    case V4L2_PIX_FMT_UYVY:
93    case V4L2_PIX_FMT_VYUY:
94    case V4L2_PIX_FMT_NV16:
95    case V4L2_PIX_FMT_NV61:
96    case V4L2_PIX_FMT_YUV422P:
97    case V4L2_PIX_FMT_SBGGR10:
98    case V4L2_PIX_FMT_SBGGR12:
99    case V4L2_PIX_FMT_SBGGR16:
100        depth = 16;
101        break;
102
103    case V4L2_PIX_FMT_RGB32:
104        depth = 32;
105        break;
106    default:
107        ALOGE("Get depth failed(format : %d)", fmt);
108        break;
109    }
110
111    return depth;
112}
113
114int cam_int_s_fmt(node_info_t *node)
115{
116    struct v4l2_format v4l2_fmt;
117    unsigned int framesize;
118    int ret;
119
120    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
121
122    v4l2_fmt.type = node->type;
123    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
124
125    if (node->planes >= 1) {
126        v4l2_fmt.fmt.pix_mp.width       = node->width;
127        v4l2_fmt.fmt.pix_mp.height      = node->height;
128        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
129        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
130    } else {
131        ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
132    }
133
134    /* Set up for capture */
135    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
136
137    if (ret < 0)
138        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
139
140
141    return ret;
142}
143
144int cam_int_reqbufs(node_info_t *node)
145{
146    struct v4l2_requestbuffers req;
147    int ret;
148
149    req.count = node->buffers;
150    req.type = node->type;
151    req.memory = node->memory;
152
153    ret = exynos_v4l2_reqbufs(node->fd, &req);
154
155    if (ret < 0)
156        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
157
158    return req.count;
159}
160
161int cam_int_qbuf(node_info_t *node, int index)
162{
163    struct v4l2_buffer v4l2_buf;
164    struct v4l2_plane planes[VIDEO_MAX_PLANES];
165    int i;
166    int ret = 0;
167
168    v4l2_buf.m.planes   = planes;
169    v4l2_buf.type       = node->type;
170    v4l2_buf.memory     = node->memory;
171    v4l2_buf.index      = index;
172    v4l2_buf.length     = node->planes;
173
174    for(i = 0; i < node->planes; i++){
175        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
176        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
177    }
178
179    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
180
181    if (ret < 0)
182        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
183
184    return ret;
185}
186
187int cam_int_streamon(node_info_t *node)
188{
189    enum v4l2_buf_type type = node->type;
190    int ret;
191
192
193    ret = exynos_v4l2_streamon(node->fd, type);
194
195    if (ret < 0)
196        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
197
198    ALOGV("On streaming I/O... ... fd(%d)", node->fd);
199
200    return ret;
201}
202
203int cam_int_streamoff(node_info_t *node)
204{
205    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
206    int ret;
207
208
209    ALOGV("Off streaming I/O... fd(%d)", node->fd);
210    ret = exynos_v4l2_streamoff(node->fd, type);
211
212    if (ret < 0)
213        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
214
215    return ret;
216}
217
218int isp_int_streamoff(node_info_t *node)
219{
220    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
221    int ret;
222
223    ALOGV("Off streaming I/O... fd(%d)", node->fd);
224    ret = exynos_v4l2_streamoff(node->fd, type);
225
226    if (ret < 0)
227        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
228
229    return ret;
230}
231
232int cam_int_dqbuf(node_info_t *node)
233{
234    struct v4l2_buffer v4l2_buf;
235    struct v4l2_plane planes[VIDEO_MAX_PLANES];
236    int ret;
237
238    v4l2_buf.type       = node->type;
239    v4l2_buf.memory     = node->memory;
240    v4l2_buf.m.planes   = planes;
241    v4l2_buf.length     = node->planes;
242
243    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
244    if (ret < 0)
245        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
246
247    return v4l2_buf.index;
248}
249
250int cam_int_dqbuf(node_info_t *node, int num_plane)
251{
252    struct v4l2_buffer v4l2_buf;
253    struct v4l2_plane planes[VIDEO_MAX_PLANES];
254    int ret;
255
256    v4l2_buf.type       = node->type;
257    v4l2_buf.memory     = node->memory;
258    v4l2_buf.m.planes   = planes;
259    v4l2_buf.length     = num_plane;
260
261    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
262    if (ret < 0)
263        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
264
265    return v4l2_buf.index;
266}
267
268int cam_int_s_input(node_info_t *node, int index)
269{
270    int ret;
271
272    ret = exynos_v4l2_s_input(node->fd, index);
273    if (ret < 0)
274        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
275
276    return ret;
277}
278
279
280gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
281
282RequestManager::RequestManager(SignalDrivenThread* main_thread):
283    m_vdisEnable(false),
284    m_lastAeMode(0),
285    m_lastAaMode(0),
286    m_lastAwbMode(0),
287    m_vdisBubbleEn(false),
288    m_lastAeComp(0),
289    m_lastCompletedFrameCnt(-1)
290{
291    m_metadataConverter = new MetadataConverter;
292    m_mainThread = main_thread;
293    ResetEntry();
294    m_sensorPipelineSkipCnt = 0;
295    return;
296}
297
298RequestManager::~RequestManager()
299{
300    ALOGV("%s", __FUNCTION__);
301    if (m_metadataConverter != NULL) {
302        delete m_metadataConverter;
303        m_metadataConverter = NULL;
304    }
305
306    releaseSensorQ();
307    return;
308}
309
310void RequestManager::ResetEntry()
311{
312    Mutex::Autolock lock(m_requestMutex);
313    Mutex::Autolock lock2(m_numOfEntriesLock);
314    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
315        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
316        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
317    }
318    m_numOfEntries = 0;
319    m_entryInsertionIndex = -1;
320    m_entryProcessingIndex = -1;
321    m_entryFrameOutputIndex = -1;
322}
323
324int RequestManager::GetNumEntries()
325{
326    Mutex::Autolock lock(m_numOfEntriesLock);
327    return m_numOfEntries;
328}
329
330void RequestManager::SetDefaultParameters(int cropX)
331{
332    m_cropX = cropX;
333}
334
335bool RequestManager::IsRequestQueueFull()
336{
337    Mutex::Autolock lock(m_requestMutex);
338    Mutex::Autolock lock2(m_numOfEntriesLock);
339    if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
340        return true;
341    else
342        return false;
343}
344
345void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
346{
347    ALOGV("DEBUG(%s):", __FUNCTION__);
348
349    Mutex::Autolock lock(m_requestMutex);
350    Mutex::Autolock lock2(m_numOfEntriesLock);
351
352    request_manager_entry * newEntry = NULL;
353    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
354    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );
355
356
357    newEntry = &(entries[newInsertionIndex]);
358
359    if (newEntry->status!=EMPTY) {
360        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
361        return;
362    }
363    newEntry->status = REGISTERED;
364    newEntry->original_request = new_request;
365    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
366    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
367    newEntry->output_stream_count = 0;
368    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
369        newEntry->output_stream_count++;
370
371    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
372        newEntry->output_stream_count++;
373
374    m_numOfEntries++;
375    m_entryInsertionIndex = newInsertionIndex;
376
377
378    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
379    afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
380    afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
381    afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
382    afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
383    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
384    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
385}
386
387void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
388{
389    ALOGV("DEBUG(%s):", __FUNCTION__);
390    int frame_index;
391    request_manager_entry * currentEntry;
392
393    Mutex::Autolock lock(m_requestMutex);
394    Mutex::Autolock lock2(m_numOfEntriesLock);
395
396    frame_index = GetCompletedIndex();
397    currentEntry =  &(entries[frame_index]);
398    if (currentEntry->status != COMPLETED) {
399        CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
400                       m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
401        return;
402    }
403    if (deregistered_request)  *deregistered_request = currentEntry->original_request;
404
405    m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
406
407    currentEntry->status = EMPTY;
408    currentEntry->original_request = NULL;
409    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
410    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
411    currentEntry->output_stream_count = 0;
412    m_numOfEntries--;
413    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
414     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
415
416    CheckCompleted(GetNextIndex(frame_index));
417    return;
418}
419
420bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
421                camera_metadata_t ** prepared_frame, int afState)
422{
423    ALOGV("DEBUG(%s):", __FUNCTION__);
424    Mutex::Autolock lock(m_requestMutex);
425    status_t res = NO_ERROR;
426    int tempFrameOutputIndex = GetCompletedIndex();
427    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
428    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
429        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
430
431    if (currentEntry->status != COMPLETED) {
432        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
433
434        return false;
435    }
436    m_entryFrameOutputIndex = tempFrameOutputIndex;
437    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
438    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
439    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
440                m_tempFrameMetadata);
441    if (res!=NO_ERROR) {
442        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
443        return false;
444    }
445    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
446    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
447    *prepared_frame = m_tempFrameMetadata;
448    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
449        currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
450    // Dump();
451    return true;
452}
453
454int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
455{
456    struct camera2_shot_ext * shot_ext;
457    struct camera2_shot_ext * request_shot;
458    int targetStreamIndex = 0;
459    request_manager_entry * newEntry = NULL;
460    static int count = 0;
461
462    Mutex::Autolock lock(m_requestMutex);
463    Mutex::Autolock lock2(m_numOfEntriesLock);
464    if (m_numOfEntries == 0)  {
465        CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
466        return -1;
467    }
468
469    if ((m_entryProcessingIndex == m_entryInsertionIndex)
470        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
471        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
472         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
473        return -1;
474    }
475
476    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
477    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
478
479    newEntry = &(entries[newProcessingIndex]);
480    request_shot = &(newEntry->internal_shot);
481    if (newEntry->status != REGISTERED) {
482        CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
483        for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
484                CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
485        }
486        return -1;
487    }
488
489    newEntry->status = REQUESTED;
490
491    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
492
493    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
494    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
495    shot_ext->request_sensor = 1;
496    shot_ext->dis_bypass = 1;
497    shot_ext->dnr_bypass = 1;
498    shot_ext->fd_bypass = 1;
499    shot_ext->setfile = 0;
500
501    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
502    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
503    if (targetStreamIndex & MASK_OUTPUT_SCP)
504        shot_ext->request_scp = 1;
505
506    if (targetStreamIndex & MASK_OUTPUT_SCC)
507        shot_ext->request_scc = 1;
508
509    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
510        shot_ext->fd_bypass = 0;
511
512    if (count == 0){
513        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
514    } else
515        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
516
517    count++;
518    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
519    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
520    shot_ext->shot.magicNumber = 0x23456789;
521    shot_ext->shot.ctl.sensor.exposureTime = 0;
522    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
523    shot_ext->shot.ctl.sensor.sensitivity = 0;
524
525
526    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
527    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
528    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
529
530    m_entryProcessingIndex = newProcessingIndex;
531    return newProcessingIndex;
532}
533
534void RequestManager::NotifyStreamOutput(int frameCnt)
535{
536    int index;
537
538    Mutex::Autolock lock(m_requestMutex);
539    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
540
541    index = FindEntryIndexByFrameCnt(frameCnt);
542    if (index == -1) {
543        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
544        return;
545    }
546    ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt,   entries[index].output_stream_count);
547
548    entries[index].output_stream_count--;  //TODO : match stream id also
549    CheckCompleted(index);
550}
551
552void RequestManager::CheckCompleted(int index)
553{
554    if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
555        && (entries[index].output_stream_count <= 0)){
556        ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
557                index, entries[index].internal_shot.shot.ctl.request.frameCount );
558        entries[index].status = COMPLETED;
559        if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount)
560            m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
561    }
562}
563
564int RequestManager::GetCompletedIndex()
565{
566    return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
567}
568
569void  RequestManager::pushSensorQ(int index)
570{
571    Mutex::Autolock lock(m_requestMutex);
572    m_sensorQ.push_back(index);
573}
574
575int RequestManager::popSensorQ()
576{
577   List<int>::iterator sensor_token;
578   int index;
579
580    Mutex::Autolock lock(m_requestMutex);
581
582    if(m_sensorQ.size() == 0)
583        return -1;
584
585    sensor_token = m_sensorQ.begin()++;
586    index = *sensor_token;
587    m_sensorQ.erase(sensor_token);
588
589    return (index);
590}
591
592void RequestManager::releaseSensorQ()
593{
594    List<int>::iterator r;
595
596    Mutex::Autolock lock(m_requestMutex);
597    ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
598
599    while(m_sensorQ.size() > 0){
600        r  = m_sensorQ.begin()++;
601        m_sensorQ.erase(r);
602    }
603    return;
604}
605
606void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
607{
608    int index;
609    struct camera2_shot_ext * request_shot;
610    nsecs_t timeStamp;
611    int i;
612
613    Mutex::Autolock lock(m_requestMutex);
614    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
615
616    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
617        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
618            && (entries[i].status == CAPTURED)){
619            entries[i].status = METADONE;
620            break;
621        }
622    }
623
624    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
625        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
626        return;
627    }
628
629    request_manager_entry * newEntry = &(entries[i]);
630    request_shot = &(newEntry->internal_shot);
631
632    timeStamp = request_shot->shot.dm.sensor.timeStamp;
633    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
634    request_shot->shot.dm.sensor.timeStamp = timeStamp;
635    m_lastTimeStamp = timeStamp;
636    CheckCompleted(i);
637}
638
639void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
640{
641    int index, targetStreamIndex;
642    struct camera2_shot_ext * request_shot;
643
644    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
645    if (frameCnt < 0)
646        return;
647
648    index = FindEntryIndexByFrameCnt(frameCnt);
649    if (index == -1) {
650        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
651        return;
652    }
653
654    request_manager_entry * newEntry = &(entries[index]);
655    request_shot = &(newEntry->internal_shot);
656    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
657    shot_ext->shot.ctl.request.frameCount = frameCnt;
658    shot_ext->request_sensor = 1;
659    shot_ext->dis_bypass = 1;
660    shot_ext->dnr_bypass = 1;
661    shot_ext->fd_bypass = 1;
662    shot_ext->drc_bypass = 1;
663    shot_ext->setfile = 0;
664
665    shot_ext->request_scc = 0;
666    shot_ext->request_scp = 0;
667
668    shot_ext->isReprocessing = request_shot->isReprocessing;
669    shot_ext->reprocessInput = request_shot->reprocessInput;
670    shot_ext->shot.ctl.request.outputStreams[0] = 0;
671
672    shot_ext->awb_mode_dm = request_shot->awb_mode_dm;
673
674    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
675    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
676    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
677
678    // mapping flash UI mode from aeMode
679    if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
680        if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
681            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
682        else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
683            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
684        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
685    }
686
687    // Apply ae/awb lock or unlock
688    if (request_shot->ae_lock == AEMODE_LOCK_ON)
689            request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
690    if (request_shot->awb_lock == AWBMODE_LOCK_ON)
691            request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
692
693    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
694        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
695    }
696    else {
697        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
698        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
699    }
700    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
701        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
702    }
703    else {
704        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
705        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
706    }
707    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
708        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
709    }
710    else {
711        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
712        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
713    }
714    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
715        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
716    }
717    else {
718        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
719        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
720    }
721
722    if (request_shot->shot.ctl.aa.videoStabilizationMode && m_vdisEnable) {
723        m_vdisBubbleEn = true;
724        shot_ext->dis_bypass = 0;
725        shot_ext->dnr_bypass = 0;
726    } else {
727        m_vdisBubbleEn = false;
728        shot_ext->dis_bypass = 1;
729        shot_ext->dnr_bypass = 1;
730    }
731
732    shot_ext->shot.ctl.aa.afTrigger = 0;
733
734    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
735    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
736    if (targetStreamIndex & MASK_OUTPUT_SCP)
737        shot_ext->request_scp = 1;
738
739    if (targetStreamIndex & MASK_OUTPUT_SCC)
740        shot_ext->request_scc = 1;
741
742    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
743        shot_ext->fd_bypass = 0;
744
745    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
746    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];
747
748    ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
749    (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
750    (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
751    (int)(shot_ext->shot.ctl.aa.afMode));
752}
753
754bool    RequestManager::IsVdisEnable(void)
755{
756        return m_vdisBubbleEn;
757}
758
759int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
760{
761    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
762        if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
763            return i;
764    }
765    return -1;
766}
767
768void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
769{
770    int index = FindEntryIndexByFrameCnt(frameCnt);
771    if (index == -1) {
772        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
773        return;
774    }
775
776    request_manager_entry * currentEntry = &(entries[index]);
777    if (currentEntry->internal_shot.isReprocessing == 1) {
778        ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
779        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
780    } else {
781        currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
782        ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
783            index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
784    }
785}
786
787
788nsecs_t  RequestManager::GetTimestampByFrameCnt(int frameCnt)
789{
790    int index = FindEntryIndexByFrameCnt(frameCnt);
791    if (index == -1) {
792        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
793        return m_lastTimeStamp;
794    }
795    else
796        return GetTimestamp(index);
797}
798
799nsecs_t  RequestManager::GetTimestamp(int index)
800{
801    Mutex::Autolock lock(m_requestMutex);
802    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
803        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
804        return 0;
805    }
806
807    request_manager_entry * currentEntry = &(entries[index]);
808    nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
809    if (frameTime == 0) {
810        ALOGV("DEBUG(%s): timestamp null,  returning saved value", __FUNCTION__);
811        frameTime = m_lastTimeStamp;
812    }
813    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
814    return frameTime;
815}
816
817uint8_t  RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
818{
819    int index = FindEntryIndexByFrameCnt(frameCnt);
820    if (index == -1) {
821        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
822        return 0;
823    }
824    else
825        return GetOutputStream(index);
826}
827
828uint8_t  RequestManager::GetOutputStream(int index)
829{
830    Mutex::Autolock lock(m_requestMutex);
831    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
832        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
833        return 0;
834    }
835
836    request_manager_entry * currentEntry = &(entries[index]);
837    return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
838}
839
840camera2_shot_ext *  RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
841{
842    int index = FindEntryIndexByFrameCnt(frameCnt);
843    if (index == -1) {
844        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
845        return 0;
846    }
847    else
848        return GetInternalShotExt(index);
849}
850
851camera2_shot_ext *  RequestManager::GetInternalShotExt(int index)
852{
853    Mutex::Autolock lock(m_requestMutex);
854    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
855        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
856        return 0;
857    }
858
859    request_manager_entry * currentEntry = &(entries[index]);
860    return &currentEntry->internal_shot;
861}
862
863int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
864{
865    Mutex::Autolock lock(m_requestMutex);
866    int i;
867
868    if (m_numOfEntries == 0) {
869        CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
870        return -1;
871    }
872
873    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
874        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
875            continue;
876
877        if (entries[i].status == REQUESTED) {
878            entries[i].status = CAPTURED;
879            return entries[i].internal_shot.shot.ctl.request.frameCount;
880        }
881        CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
882
883    }
884    CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
885
886    return -1;
887}
888
889void     RequestManager::SetInitialSkip(int count)
890{
891    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
892    if (count > m_sensorPipelineSkipCnt)
893        m_sensorPipelineSkipCnt = count;
894}
895
896int     RequestManager::GetSkipCnt()
897{
898    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
899    if (m_sensorPipelineSkipCnt == 0)
900        return m_sensorPipelineSkipCnt;
901    else
902        return --m_sensorPipelineSkipCnt;
903}
904
905void RequestManager::Dump(void)
906{
907    int i = 0;
908    request_manager_entry * currentEntry;
909    Mutex::Autolock lock(m_numOfEntriesLock);
910    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
911    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
912
913    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
914        currentEntry =  &(entries[i]);
915        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
916        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
917            currentEntry->output_stream_count,
918            currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
919    }
920}
921
922int     RequestManager::GetNextIndex(int index)
923{
924    index++;
925    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
926        index = 0;
927
928    return index;
929}
930
931int     RequestManager::GetPrevIndex(int index)
932{
933    index--;
934    if (index < 0)
935        index = NUM_MAX_REQUEST_MGR_ENTRY-1;
936
937    return index;
938}
939
940ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
941            m_requestQueueOps(NULL),
942            m_frameQueueOps(NULL),
943            m_callbackCookie(NULL),
944            m_numOfRemainingReqInSvc(0),
945            m_isRequestQueuePending(false),
946            m_isRequestQueueNull(true),
947            m_isIspStarted(false),
948            m_ionCameraClient(0),
949            m_zoomRatio(1),
950            m_scp_closing(false),
951            m_scp_closed(false),
952            m_afState(HAL_AFSTATE_INACTIVE),
953            m_afMode(NO_CHANGE),
954            m_afMode2(NO_CHANGE),
955            m_vdisBubbleCnt(0),
956            m_vdisDupFrame(0),
957            m_IsAfModeUpdateRequired(false),
958            m_IsAfTriggerRequired(false),
959            m_IsAfLockRequired(false),
960            m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
961            m_sccLocalBufferValid(false),
962            m_wideAspect(false),
963            m_scpOutputSignalCnt(0),
964            m_scpOutputImageCnt(0),
965            m_afTriggerId(0),
966            m_afPendingTriggerId(0),
967            m_afModeWaitingCnt(0),
968            m_jpegEncodingCount(0),
969            m_scpForceSuspended(false),
970            m_halDevice(dev),
971            m_nightCaptureCnt(0),
972            m_nightCaptureFrameCnt(0),
973            m_lastSceneMode(0),
974            m_cameraId(cameraId),
975            m_thumbNailW(160),
976            m_thumbNailH(120)
977{
978    ALOGD("(%s): ENTER", __FUNCTION__);
979    int ret = 0;
980    int res = 0;
981
982    m_exynosPictureCSC = NULL;
983    m_exynosVideoCSC = NULL;
984
985    if (!m_grallocHal) {
986        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
987        if (ret)
988            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
989    }
990
991    m_camera2 = camera;
992    m_ionCameraClient = createIonClient(m_ionCameraClient);
993    if(m_ionCameraClient == 0)
994        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
995
996
997    m_BayerManager = new BayerBufManager();
998    m_mainThread    = new MainThread(this);
999    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
1000    *openInvalid = InitializeISPChain();
1001    if (*openInvalid < 0) {
1002        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
1003        // clean process
1004        // 1. close video nodes
1005        // SCP
1006        res = exynos_v4l2_close(m_camera_info.scp.fd);
1007        if (res != NO_ERROR ) {
1008            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1009        }
1010        // SCC
1011        res = exynos_v4l2_close(m_camera_info.capture.fd);
1012        if (res != NO_ERROR ) {
1013            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1014        }
1015        // Sensor
1016        res = exynos_v4l2_close(m_camera_info.sensor.fd);
1017        if (res != NO_ERROR ) {
1018            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1019        }
1020        // ISP
1021        res = exynos_v4l2_close(m_camera_info.isp.fd);
1022        if (res != NO_ERROR ) {
1023            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1024        }
1025    } else {
1026        m_sensorThread  = new SensorThread(this);
1027        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
1028        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1029        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
1030
1031        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
1032            m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
1033        CSC_METHOD cscMethod = CSC_METHOD_HW;
1034        m_exynosPictureCSC = csc_init(cscMethod);
1035        if (m_exynosPictureCSC == NULL)
1036            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1037        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
1038        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
1039
1040        m_exynosVideoCSC = csc_init(cscMethod);
1041        if (m_exynosVideoCSC == NULL)
1042            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1043        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
1044        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
1045
1046        m_setExifFixedAttribute();
1047
1048        // contol information clear
1049        // flash
1050        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
1051        m_ctlInfo.flash.m_afFlashDoneFlg= false;
1052        m_ctlInfo.flash.m_flashEnableFlg = false;
1053        m_ctlInfo.flash.m_flashFrameCount = 0;
1054        m_ctlInfo.flash.m_flashCnt = 0;
1055        m_ctlInfo.flash.m_flashTimeOut = 0;
1056        m_ctlInfo.flash.m_flashDecisionResult = false;
1057        m_ctlInfo.flash.m_flashTorchMode = false;
1058        m_ctlInfo.flash.m_precaptureState = 0;
1059        m_ctlInfo.flash.m_precaptureTriggerId = 0;
1060        // ae
1061        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
1062        // af
1063        m_ctlInfo.af.m_afTriggerTimeOut = 0;
1064        // scene
1065        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
1066    }
1067    ALOGD("(%s): EXIT", __FUNCTION__);
1068}
1069
1070ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
1071{
1072    ALOGD("(%s): ENTER", __FUNCTION__);
1073    this->release();
1074    ALOGD("(%s): EXIT", __FUNCTION__);
1075}
1076
1077void ExynosCameraHWInterface2::release()
1078{
1079    int i, res;
1080    ALOGD("(HAL2::release): ENTER");
1081
1082    if (m_streamThreads[1] != NULL) {
1083        m_streamThreads[1]->release();
1084        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
1085    }
1086
1087    if (m_streamThreads[0] != NULL) {
1088        m_streamThreads[0]->release();
1089        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
1090    }
1091
1092    if (m_sensorThread != NULL) {
1093        m_sensorThread->release();
1094    }
1095
1096    if (m_mainThread != NULL) {
1097        m_mainThread->release();
1098    }
1099
1100    if (m_exynosPictureCSC)
1101        csc_deinit(m_exynosPictureCSC);
1102    m_exynosPictureCSC = NULL;
1103
1104    if (m_exynosVideoCSC)
1105        csc_deinit(m_exynosVideoCSC);
1106    m_exynosVideoCSC = NULL;
1107
1108    if (m_streamThreads[1] != NULL) {
1109        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
1110        while (!m_streamThreads[1]->IsTerminated())
1111            usleep(SIG_WAITING_TICK);
1112        ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 1 termination");
1113        m_streamThreads[1] = NULL;
1114    }
1115
1116    if (m_streamThreads[0] != NULL) {
1117        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
1118        while (!m_streamThreads[0]->IsTerminated())
1119            usleep(SIG_WAITING_TICK);
1120        ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 0 termination");
1121        m_streamThreads[0] = NULL;
1122    }
1123
1124    if (m_sensorThread != NULL) {
1125        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
1126        while (!m_sensorThread->IsTerminated())
1127            usleep(SIG_WAITING_TICK);
1128        ALOGD("(HAL2::release): END   Waiting for (indirect) sensor thread termination");
1129        m_sensorThread = NULL;
1130    }
1131
1132    if (m_mainThread != NULL) {
1133        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
1134        while (!m_mainThread->IsTerminated())
1135            usleep(SIG_WAITING_TICK);
1136        ALOGD("(HAL2::release): END   Waiting for (indirect) main thread termination");
1137        m_mainThread = NULL;
1138    }
1139
1140    if (m_requestManager != NULL) {
1141        delete m_requestManager;
1142        m_requestManager = NULL;
1143    }
1144
1145    if (m_BayerManager != NULL) {
1146        delete m_BayerManager;
1147        m_BayerManager = NULL;
1148    }
1149    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
1150        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1151
1152    if (m_sccLocalBufferValid) {
1153        for (i = 0; i < NUM_SCC_BUFFERS; i++)
1154#ifdef ENABLE_FRAME_SYNC
1155            freeCameraMemory(&m_sccLocalBuffer[i], 2);
1156#else
1157            freeCameraMemory(&m_sccLocalBuffer[i], 1);
1158#endif
1159    }
1160    else {
1161        for (i = 0; i < NUM_SCC_BUFFERS; i++)
1162            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1163    }
1164
1165    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
1166    res = exynos_v4l2_close(m_camera_info.sensor.fd);
1167    if (res != NO_ERROR ) {
1168        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1169    }
1170
1171    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
1172    res = exynos_v4l2_close(m_camera_info.isp.fd);
1173    if (res != NO_ERROR ) {
1174        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1175    }
1176
1177    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
1178    res = exynos_v4l2_close(m_camera_info.capture.fd);
1179    if (res != NO_ERROR ) {
1180        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1181    }
1182
1183    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
1184    res = exynos_v4l2_close(m_camera_info.scp.fd);
1185    if (res != NO_ERROR ) {
1186        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1187    }
1188    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
1189    deleteIonClient(m_ionCameraClient);
1190
1191    ALOGD("(HAL2::release): EXIT");
1192}
1193
1194int ExynosCameraHWInterface2::InitializeISPChain()
1195{
1196    char node_name[30];
1197    int fd = 0;
1198    int i;
1199    int ret = 0;
1200
1201    /* Open Sensor */
1202    memset(&node_name, 0x00, sizeof(char[30]));
1203    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1204    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1205
1206    if (fd < 0) {
1207        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1208    }
1209    else {
1210        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1211    }
1212    m_camera_info.sensor.fd = fd;
1213
1214    /* Open ISP */
1215    memset(&node_name, 0x00, sizeof(char[30]));
1216    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1217    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1218
1219    if (fd < 0) {
1220        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1221    }
1222    else {
1223        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1224    }
1225    m_camera_info.isp.fd = fd;
1226
1227    /* Open ScalerC */
1228    memset(&node_name, 0x00, sizeof(char[30]));
1229    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1230    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1231
1232    if (fd < 0) {
1233        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1234    }
1235    else {
1236        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1237    }
1238    m_camera_info.capture.fd = fd;
1239
1240    /* Open ScalerP */
1241    memset(&node_name, 0x00, sizeof(char[30]));
1242    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1243    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1244    if (fd < 0) {
1245        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1246    }
1247    else {
1248        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1249    }
1250    m_camera_info.scp.fd = fd;
1251
1252    if(m_cameraId == 0)
1253        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1254    else
1255        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1256
1257    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1258    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1259    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1260
1261    m_camera_info.dummy_shot.dis_bypass = 1;
1262    m_camera_info.dummy_shot.dnr_bypass = 1;
1263    m_camera_info.dummy_shot.fd_bypass = 1;
1264
1265    /*sensor setting*/
1266    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1267    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1268    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1269
1270    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1271    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1272
1273    /*request setting*/
1274    m_camera_info.dummy_shot.request_sensor = 1;
1275    m_camera_info.dummy_shot.request_scc = 0;
1276    m_camera_info.dummy_shot.request_scp = 0;
1277    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1278
1279    m_camera_info.sensor.width = m_camera2->getSensorRawW();
1280    m_camera_info.sensor.height = m_camera2->getSensorRawH();
1281
1282    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1283    m_camera_info.sensor.planes = 2;
1284    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1285    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1286    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1287
1288    for(i = 0; i < m_camera_info.sensor.buffers; i++){
1289        int res;
1290        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1291        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1292        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
1293        res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
1294        if (res) {
1295            ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i);
1296            // Free allocated sensor buffers
1297            for (int j = 0; j < i; j++) {
1298                freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes);
1299            }
1300            return false;
1301        }
1302    }
1303
1304    m_camera_info.isp.width = m_camera_info.sensor.width;
1305    m_camera_info.isp.height = m_camera_info.sensor.height;
1306    m_camera_info.isp.format = m_camera_info.sensor.format;
1307    m_camera_info.isp.planes = m_camera_info.sensor.planes;
1308    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1309    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1310    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1311
1312    for(i = 0; i < m_camera_info.isp.buffers; i++){
1313        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1314        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1315        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1316        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1317        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1318        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1319        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
1320    };
1321
1322    /* init ISP */
1323    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1324    if (ret < 0) {
1325        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
1326        return false;
1327    }
1328    cam_int_s_fmt(&(m_camera_info.isp));
1329    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1330    cam_int_reqbufs(&(m_camera_info.isp));
1331    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1332    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1333
1334    /* init Sensor */
1335    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1336    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1337    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1338        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1339    }
1340    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1341    cam_int_reqbufs(&(m_camera_info.sensor));
1342    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1343    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1344        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1345        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1346        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1347        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1348                sizeof(struct camera2_shot_ext));
1349    }
1350
1351    for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1352        cam_int_qbuf(&(m_camera_info.sensor), i);
1353
1354    for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1355        m_requestManager->pushSensorQ(i);
1356
1357    ALOGV("== stream_on :: sensor");
1358    cam_int_streamon(&(m_camera_info.sensor));
1359    m_camera_info.sensor.status = true;
1360
1361    /* init Capture */
1362    m_camera_info.capture.width = m_camera2->getSensorW();
1363    m_camera_info.capture.height = m_camera2->getSensorH();
1364    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1365#ifdef ENABLE_FRAME_SYNC
1366    m_camera_info.capture.planes = 2;
1367#else
1368    m_camera_info.capture.planes = 1;
1369#endif
1370    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1371    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1372    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1373
1374    m_camera_info.capture.status = false;
1375
1376    return true;
1377}
1378
1379void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
1380{
1381    ALOGV("(%s)", __FUNCTION__);
1382    StreamThread *AllocatedStream;
1383    stream_parameters_t newParameters;
1384    uint32_t format_actual;
1385
1386
1387    if (!threadExists) {
1388        m_streamThreads[1]  = new StreamThread(this, 1);
1389    }
1390    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1391    if (!threadExists) {
1392        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1393        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1394        AllocatedStream->m_numRegisteredStream = 1;
1395    }
1396    AllocatedStream->m_index        = 1;
1397
1398    format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1399
1400    newParameters.width             = m_camera2->getSensorW();
1401    newParameters.height            = m_camera2->getSensorH();
1402    newParameters.format            = format_actual;
1403    newParameters.streamOps         = NULL;
1404    newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
1405#ifdef ENABLE_FRAME_SYNC
1406    newParameters.planes            = 2;
1407#else
1408    newParameters.planes            = 1;
1409#endif
1410
1411    newParameters.numSvcBufsInHal   = 0;
1412
1413    newParameters.node              = &m_camera_info.capture;
1414
1415    AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
1416    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1417
1418    if (!threadExists) {
1419        if (!m_sccLocalBufferValid) {
1420            for (int i = 0; i < m_camera_info.capture.buffers; i++){
1421                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
1422                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1423#ifdef ENABLE_FRAME_SYNC
1424                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
1425                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
1426#else
1427                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1428#endif
1429                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
1430            }
1431            m_sccLocalBufferValid = true;
1432        }
1433    } else {
1434        if (m_sccLocalBufferValid) {
1435             for (int i = 0; i < m_camera_info.capture.buffers; i++)
1436                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
1437        } else {
1438            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
1439        }
1440    }
1441    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
1442    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1443    cam_int_s_fmt(newParameters.node);
1444    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1445    cam_int_reqbufs(newParameters.node);
1446    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1447
1448    for (int i = 0; i < newParameters.node->buffers; i++) {
1449        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1450        cam_int_qbuf(newParameters.node, i);
1451        newParameters.svcBufStatus[i] = ON_DRIVER;
1452    }
1453
1454    ALOGV("== stream_on :: capture");
1455    if (cam_int_streamon(newParameters.node) < 0) {
1456        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1457    } else {
1458        m_camera_info.capture.status = true;
1459    }
1460
1461    AllocatedStream->setParameter(&newParameters);
1462    AllocatedStream->m_activated    = true;
1463    AllocatedStream->m_isBufferInit = true;
1464}
1465
1466void ExynosCameraHWInterface2::StartISP()
1467{
1468    ALOGV("== stream_on :: isp");
1469    cam_int_streamon(&(m_camera_info.isp));
1470    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1471}
1472
1473int ExynosCameraHWInterface2::getCameraId() const
1474{
1475    return m_cameraId;
1476}
1477
1478int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1479{
1480    ALOGV("DEBUG(%s):", __FUNCTION__);
1481    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1482            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1483        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1484        return 0;
1485    }
1486    else {
1487        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
1488        return 1;
1489    }
1490}
1491
1492int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1493{
1494    int i = 0;
1495
1496    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1497    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1498        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
1499        return 0;
1500    }
1501    m_isRequestQueueNull = false;
1502    if (m_requestManager->GetNumEntries() == 0)
1503        m_requestManager->SetInitialSkip(0);
1504
1505    if (m_isIspStarted == false) {
1506        /* isp */
1507        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1508        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1509        cam_int_s_fmt(&(m_camera_info.isp));
1510        cam_int_reqbufs(&(m_camera_info.isp));
1511
1512        /* sensor */
1513        if (m_camera_info.sensor.status == false) {
1514            cam_int_s_fmt(&(m_camera_info.sensor));
1515            cam_int_reqbufs(&(m_camera_info.sensor));
1516
1517            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1518                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1519                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1520                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1521                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1522                        sizeof(struct camera2_shot_ext));
1523            }
1524            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1525                cam_int_qbuf(&(m_camera_info.sensor), i);
1526
1527            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1528                m_requestManager->pushSensorQ(i);
1529            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
1530            cam_int_streamon(&(m_camera_info.sensor));
1531            m_camera_info.sensor.status = true;
1532        }
1533    }
1534    if (!(m_streamThreads[1].get())) {
1535        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
1536        StartSCCThread(false);
1537    } else {
1538        if (m_streamThreads[1]->m_activated ==  false) {
1539            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1540            StartSCCThread(true);
1541        } else {
1542            if (m_camera_info.capture.status == false) {
1543                m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1544                cam_int_s_fmt(&(m_camera_info.capture));
1545                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1546                cam_int_reqbufs(&(m_camera_info.capture));
1547                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1548
1549                if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
1550                    StreamThread *          targetStream = m_streamThreads[1].get();
1551                    stream_parameters_t     *targetStreamParms = &(targetStream->m_parameters);
1552                    node_info_t             *currentNode = targetStreamParms->node;
1553
1554                    struct v4l2_buffer v4l2_buf;
1555                    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1556
1557                    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
1558                        v4l2_buf.m.planes   = planes;
1559                        v4l2_buf.type       = currentNode->type;
1560                        v4l2_buf.memory     = currentNode->memory;
1561
1562                        v4l2_buf.length     = currentNode->planes;
1563                        v4l2_buf.index      = i;
1564                        ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
1565
1566                        if (i < currentNode->buffers) {
1567#ifdef ENABLE_FRAME_SYNC
1568                            v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
1569                            v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
1570                            v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
1571                            v4l2_buf.length += targetStreamParms->metaPlanes;
1572                            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
1573                            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
1574
1575                            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1576#endif
1577                            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1578                                ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
1579                            }
1580                            ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
1581                            targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1582                        }
1583                        else {
1584                            targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1585                        }
1586
1587                    }
1588
1589                } else {
1590                    for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1591                        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1592                        cam_int_qbuf(&(m_camera_info.capture), i);
1593                    }
1594                }
1595                ALOGV("== stream_on :: capture");
1596                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1597                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1598                } else {
1599                    m_camera_info.capture.status = true;
1600                }
1601            }
1602            if (m_scpForceSuspended) {
1603                m_scpForceSuspended = false;
1604            }
1605        }
1606    }
1607    if (m_isIspStarted == false) {
1608        StartISP();
1609        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1610        m_requestManager->SetInitialSkip(6);
1611        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1612        m_isIspStarted = true;
1613    }
1614    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1615    return 0;
1616}
1617
1618int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1619{
1620    ALOGV("DEBUG(%s):", __FUNCTION__);
1621    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1622            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1623        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1624        return 0;
1625    }
1626    else {
1627        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1628        return 1;
1629    }
1630}
1631
1632int ExynosCameraHWInterface2::getInProgressCount()
1633{
1634    int inProgressJpeg;
1635    int inProgressCount;
1636
1637    {
1638        Mutex::Autolock lock(m_jpegEncoderLock);
1639        inProgressJpeg = m_jpegEncodingCount;
1640        inProgressCount = m_requestManager->GetNumEntries();
1641    }
1642    ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
1643        inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg));
1644    return (inProgressCount + inProgressJpeg);
1645}
1646
1647int ExynosCameraHWInterface2::flushCapturesInProgress()
1648{
1649    return 0;
1650}
1651
1652int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1653{
1654    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1655
1656    if (request == NULL) return BAD_VALUE;
1657    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1658        return BAD_VALUE;
1659    }
1660    status_t res;
1661    // Pass 1, calculate size and allocate
1662    res = m_camera2->constructDefaultRequest(request_template,
1663            request,
1664            true);
1665    if (res != OK) {
1666        return res;
1667    }
1668    // Pass 2, build request
1669    res = m_camera2->constructDefaultRequest(request_template,
1670            request,
1671            false);
1672    if (res != OK) {
1673        ALOGE("Unable to populate new request for template %d",
1674                request_template);
1675    }
1676
1677    return res;
1678}
1679
1680int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1681                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1682{
1683    ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1684    bool useDirectOutput = false;
1685    StreamThread *AllocatedStream;
1686    stream_parameters_t newParameters;
1687    substream_parameters_t *subParameters;
1688    StreamThread *parentStream;
1689    status_t res;
1690    int allocCase = 0;
1691
1692    if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
1693            m_camera2->isSupportedResolution(width, height)) {
1694        if (!(m_streamThreads[0].get())) {
1695            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1696            allocCase = 0;
1697        }
1698        else {
1699            if ((m_streamThreads[0].get())->m_activated == true) {
1700                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1701                allocCase = 1;
1702            }
1703            else {
1704                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1705                allocCase = 2;
1706            }
1707        }
1708
1709        // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio.
1710        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1711                    || (width == 720 && height == 480) || (width == 1440 && height == 960)
1712                    || (width == 1344 && height == 896)) {
1713            m_wideAspect = true;
1714        } else {
1715            m_wideAspect = false;
1716        }
1717        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1718
1719        if (allocCase == 0 || allocCase == 2) {
1720            *stream_id = STREAM_ID_PREVIEW;
1721
1722            m_streamThreads[0]  = new StreamThread(this, *stream_id);
1723
1724            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1725            AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1726            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1727
1728            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1729            *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1730            if (m_wideAspect)
1731                *usage                         |= GRALLOC_USAGE_PRIVATE_CHROMA;
1732            *max_buffers                        = 7;
1733
1734            newParameters.width                 = width;
1735            newParameters.height                = height;
1736            newParameters.format                = *format_actual;
1737            newParameters.streamOps             = stream_ops;
1738            newParameters.usage                 = *usage;
1739            newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
1740            newParameters.numOwnSvcBuffers      = *max_buffers;
1741            newParameters.planes                = NUM_PLANES(*format_actual);
1742            newParameters.metaPlanes            = 1;
1743            newParameters.numSvcBufsInHal       = 0;
1744            newParameters.minUndequedBuffer     = 3;
1745            newParameters.needsIonMap           = true;
1746
1747            newParameters.node                  = &m_camera_info.scp;
1748            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1749            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1750
1751            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1752            AllocatedStream->m_index            = 0;
1753            AllocatedStream->setParameter(&newParameters);
1754            AllocatedStream->m_activated = true;
1755            AllocatedStream->m_numRegisteredStream = 1;
1756            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1757            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1758            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1759            if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1760                AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1761            if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1762                AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1763
1764            // set video stabilization killswitch
1765            m_requestManager->m_vdisEnable = width > 352 && height > 288;
1766
1767            return 0;
1768        } else if (allocCase == 1) {
1769            *stream_id = STREAM_ID_RECORD;
1770
1771            subParameters = &m_subStreams[STREAM_ID_RECORD];
1772            memset(subParameters, 0, sizeof(substream_parameters_t));
1773
1774            parentStream = (StreamThread*)(m_streamThreads[0].get());
1775            if (!parentStream) {
1776                return 1;
1777            }
1778
1779            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1780            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1781            if (m_wideAspect)
1782                *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1783            *max_buffers = 7;
1784
1785            subParameters->type         = SUBSTREAM_TYPE_RECORD;
1786            subParameters->width        = width;
1787            subParameters->height       = height;
1788            subParameters->format       = *format_actual;
1789            subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1790            subParameters->streamOps     = stream_ops;
1791            subParameters->usage         = *usage;
1792            subParameters->numOwnSvcBuffers = *max_buffers;
1793            subParameters->numSvcBufsInHal  = 0;
1794            subParameters->needBufferInit    = false;
1795            subParameters->minUndequedBuffer = 2;
1796
1797            res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1798            if (res != NO_ERROR) {
1799                ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1800                return 1;
1801            }
1802            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1803            ALOGV("(%s): Enabling Record", __FUNCTION__);
1804            return 0;
1805        }
1806    }
1807    else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
1808            && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) {
1809
1810        if (!(m_streamThreads[1].get())) {
1811            ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1812            useDirectOutput = true;
1813        }
1814        else {
1815            ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1816            useDirectOutput = false;
1817        }
1818        if (useDirectOutput) {
1819            *stream_id = STREAM_ID_ZSL;
1820
1821            m_streamThreads[1]  = new StreamThread(this, *stream_id);
1822            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1823            AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1824            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1825
1826            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1827
1828            *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1829            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1830            if (m_wideAspect)
1831                *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1832            *max_buffers = 7;
1833
1834            newParameters.width                 = width;
1835            newParameters.height                = height;
1836            newParameters.format                = *format_actual;
1837            newParameters.streamOps             = stream_ops;
1838            newParameters.usage                 = *usage;
1839            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1840            newParameters.numOwnSvcBuffers      = *max_buffers;
1841            newParameters.planes                = NUM_PLANES(*format_actual);
1842            newParameters.metaPlanes            = 1;
1843
1844            newParameters.numSvcBufsInHal       = 0;
1845            newParameters.minUndequedBuffer     = 2;
1846            newParameters.needsIonMap           = false;
1847
1848            newParameters.node                  = &m_camera_info.capture;
1849            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1850            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1851
1852            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1853            AllocatedStream->m_index            = 1;
1854            AllocatedStream->setParameter(&newParameters);
1855            AllocatedStream->m_activated = true;
1856            AllocatedStream->m_numRegisteredStream = 1;
1857            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1858            return 0;
1859        } else {
1860            bool bJpegExists = false;
1861            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1862            subParameters = &m_subStreams[STREAM_ID_JPEG];
1863            if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
1864                ALOGD("(%s): jpeg stream exists", __FUNCTION__);
1865                bJpegExists = true;
1866                AllocatedStream->detachSubStream(STREAM_ID_JPEG);
1867            }
1868            AllocatedStream->m_releasing = true;
1869            ALOGD("START stream thread 1 release %d", __LINE__);
1870            do {
1871                AllocatedStream->release();
1872                usleep(SIG_WAITING_TICK);
1873            } while (AllocatedStream->m_releasing);
1874            ALOGD("END   stream thread 1 release %d", __LINE__);
1875
1876            *stream_id = STREAM_ID_ZSL;
1877
1878            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1879
1880            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1881
1882            *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1883            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1884            if (m_wideAspect)
1885                *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1886            *max_buffers = 7;
1887
1888            newParameters.width                 = width;
1889            newParameters.height                = height;
1890            newParameters.format                = *format_actual;
1891            newParameters.streamOps             = stream_ops;
1892            newParameters.usage                 = *usage;
1893            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1894            newParameters.numOwnSvcBuffers      = *max_buffers;
1895            newParameters.planes                = NUM_PLANES(*format_actual);
1896            newParameters.metaPlanes            = 1;
1897
1898            newParameters.numSvcBufsInHal       = 0;
1899            newParameters.minUndequedBuffer     = 2;
1900            newParameters.needsIonMap           = false;
1901
1902            newParameters.node                  = &m_camera_info.capture;
1903            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1904            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1905
1906            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1907            AllocatedStream->m_index            = 1;
1908            AllocatedStream->setParameter(&newParameters);
1909            AllocatedStream->m_activated = true;
1910            AllocatedStream->m_numRegisteredStream = 1;
1911            if (bJpegExists) {
1912                AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
1913            }
1914            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1915            return 0;
1916
1917        }
1918    }
1919    else if (format == HAL_PIXEL_FORMAT_BLOB
1920            && m_camera2->isSupportedJpegResolution(width, height)) {
1921        *stream_id = STREAM_ID_JPEG;
1922
1923        subParameters = &m_subStreams[*stream_id];
1924        memset(subParameters, 0, sizeof(substream_parameters_t));
1925
1926        if (!(m_streamThreads[1].get())) {
1927            ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1928            StartSCCThread(false);
1929        }
1930        else if (m_streamThreads[1]->m_activated ==  false) {
1931            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1932            StartSCCThread(true);
1933        }
1934        parentStream = (StreamThread*)(m_streamThreads[1].get());
1935
1936        *format_actual = HAL_PIXEL_FORMAT_BLOB;
1937        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1938        if (m_wideAspect)
1939            *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1940        *max_buffers = 5;
1941
1942        subParameters->type          = SUBSTREAM_TYPE_JPEG;
1943        subParameters->width         = width;
1944        subParameters->height        = height;
1945        subParameters->format        = *format_actual;
1946        subParameters->svcPlanes     = 1;
1947        subParameters->streamOps     = stream_ops;
1948        subParameters->usage         = *usage;
1949        subParameters->numOwnSvcBuffers = *max_buffers;
1950        subParameters->numSvcBufsInHal  = 0;
1951        subParameters->needBufferInit    = false;
1952        subParameters->minUndequedBuffer = 2;
1953
1954        res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1955        if (res != NO_ERROR) {
1956            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1957            return 1;
1958        }
1959        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1960        ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1961        return 0;
1962    }
1963    else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1964        *stream_id = STREAM_ID_PRVCB;
1965
1966        subParameters = &m_subStreams[STREAM_ID_PRVCB];
1967        memset(subParameters, 0, sizeof(substream_parameters_t));
1968
1969        parentStream = (StreamThread*)(m_streamThreads[0].get());
1970        if (!parentStream) {
1971            return 1;
1972        }
1973
1974        *format_actual = format;
1975        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1976        if (m_wideAspect)
1977            *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1978        *max_buffers = 7;
1979
1980        subParameters->type         = SUBSTREAM_TYPE_PRVCB;
1981        subParameters->width        = width;
1982        subParameters->height       = height;
1983        subParameters->format       = *format_actual;
1984        subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1985        subParameters->streamOps     = stream_ops;
1986        subParameters->usage         = *usage;
1987        subParameters->numOwnSvcBuffers = *max_buffers;
1988        subParameters->numSvcBufsInHal  = 0;
1989        subParameters->needBufferInit    = false;
1990        subParameters->minUndequedBuffer = 2;
1991
1992        if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1993            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1994            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1995        }
1996        else {
1997            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1998            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
1999        }
2000
2001        res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
2002        if (res != NO_ERROR) {
2003            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
2004            return 1;
2005        }
2006        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
2007        ALOGV("(%s): Enabling previewcb", __FUNCTION__);
2008        return 0;
2009    }
2010    ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
2011    return 1;
2012}
2013
2014int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
2015        int num_buffers, buffer_handle_t *registeringBuffers)
2016{
2017    int                     i,j;
2018    void                    *virtAddr[3];
2019    int                     plane_index = 0;
2020    StreamThread *          targetStream;
2021    stream_parameters_t     *targetStreamParms;
2022    node_info_t             *currentNode;
2023
2024    struct v4l2_buffer v4l2_buf;
2025    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
2026
2027    ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
2028        stream_id, num_buffers, (uint32_t)registeringBuffers);
2029
2030    if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
2031        targetStream = m_streamThreads[0].get();
2032        targetStreamParms = &(m_streamThreads[0]->m_parameters);
2033
2034    }
2035    else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
2036        substream_parameters_t  *targetParms;
2037        targetParms = &m_subStreams[stream_id];
2038
2039        targetParms->numSvcBuffers = num_buffers;
2040
2041        for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
2042            ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
2043                i, stream_id, (uint32_t)(registeringBuffers[i]));
2044            if (m_grallocHal) {
2045                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
2046                       targetParms->usage, 0, 0,
2047                       targetParms->width, targetParms->height, virtAddr) != 0) {
2048                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2049                }
2050                else {
2051                    ExynosBuffer currentBuf;
2052                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2053                    if (targetParms->svcPlanes == 1) {
2054                        currentBuf.fd.extFd[0] = priv_handle->fd;
2055                        currentBuf.size.extS[0] = priv_handle->size;
2056                        currentBuf.size.extS[1] = 0;
2057                        currentBuf.size.extS[2] = 0;
2058                    } else if (targetParms->svcPlanes == 2) {
2059                        currentBuf.fd.extFd[0] = priv_handle->fd;
2060                        currentBuf.fd.extFd[1] = priv_handle->fd1;
2061
2062                    } else if (targetParms->svcPlanes == 3) {
2063                        currentBuf.fd.extFd[0] = priv_handle->fd;
2064                        currentBuf.fd.extFd[1] = priv_handle->fd1;
2065                        currentBuf.fd.extFd[2] = priv_handle->fd2;
2066                    }
2067                    for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
2068                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
2069                        CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
2070                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
2071                             (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
2072                    }
2073                    targetParms->svcBufStatus[i]  = ON_SERVICE;
2074                    targetParms->svcBuffers[i]    = currentBuf;
2075                    targetParms->svcBufHandle[i]  = registeringBuffers[i];
2076                }
2077            }
2078        }
2079        targetParms->needBufferInit = true;
2080        return 0;
2081    }
2082    else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
2083        targetStream = m_streamThreads[1].get();
2084        targetStreamParms = &(m_streamThreads[1]->m_parameters);
2085    }
2086    else {
2087        ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
2088        return 1;
2089    }
2090
2091    if (targetStream->streamType == STREAM_TYPE_DIRECT) {
2092        if (num_buffers < targetStreamParms->numHwBuffers) {
2093            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
2094                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
2095            return 1;
2096        }
2097    }
2098    CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
2099            __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
2100            targetStreamParms->height, targetStreamParms->planes);
2101    targetStreamParms->numSvcBuffers = num_buffers;
2102    currentNode = targetStreamParms->node;
2103    currentNode->width      = targetStreamParms->width;
2104    currentNode->height     = targetStreamParms->height;
2105    currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
2106    currentNode->planes     = targetStreamParms->planes;
2107    currentNode->buffers    = targetStreamParms->numHwBuffers;
2108    cam_int_s_input(currentNode, m_camera_info.sensor_id);
2109    cam_int_s_fmt(currentNode);
2110    cam_int_reqbufs(currentNode);
2111    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
2112        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
2113            i, (uint32_t)(registeringBuffers[i]));
2114                v4l2_buf.m.planes   = planes;
2115                v4l2_buf.type       = currentNode->type;
2116                v4l2_buf.memory     = currentNode->memory;
2117                v4l2_buf.index      = i;
2118                v4l2_buf.length     = currentNode->planes;
2119
2120                ExynosBuffer currentBuf;
2121                ExynosBuffer metaBuf;
2122                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2123
2124                m_getAlignedYUVSize(currentNode->format,
2125                    currentNode->width, currentNode->height, &currentBuf);
2126
2127                ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
2128                if (currentNode->planes == 1) {
2129                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2130                    currentBuf.fd.extFd[0] = priv_handle->fd;
2131                    currentBuf.size.extS[0] = priv_handle->size;
2132                    currentBuf.size.extS[1] = 0;
2133                    currentBuf.size.extS[2] = 0;
2134                } else if (currentNode->planes == 2) {
2135                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2136                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
2137                    currentBuf.fd.extFd[0] = priv_handle->fd;
2138                    currentBuf.fd.extFd[1] = priv_handle->fd1;
2139
2140                } else if (currentNode->planes == 3) {
2141                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2142                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
2143                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
2144                    currentBuf.fd.extFd[0] = priv_handle->fd;
2145                    currentBuf.fd.extFd[2] = priv_handle->fd1;
2146                    currentBuf.fd.extFd[1] = priv_handle->fd2;
2147                }
2148
2149                for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
2150                    if (targetStreamParms->needsIonMap)
2151                        currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
2152                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
2153                    ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
2154                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2155                         (unsigned int)currentBuf.virt.extP[plane_index],
2156                         v4l2_buf.m.planes[plane_index].length);
2157                }
2158
2159                if (i < currentNode->buffers) {
2160
2161
2162#ifdef ENABLE_FRAME_SYNC
2163                    /* add plane for metadata*/
2164                    metaBuf.size.extS[0] = 4*1024;
2165                    allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
2166
2167                    v4l2_buf.length += targetStreamParms->metaPlanes;
2168                    v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
2169                    v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
2170
2171                    ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
2172#endif
2173                    if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2174                        ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
2175                            __FUNCTION__, stream_id, currentNode->fd);
2176                    }
2177                    ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
2178                            __FUNCTION__, stream_id, currentNode->fd);
2179                    targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
2180                }
2181                else {
2182                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
2183                }
2184
2185                targetStreamParms->svcBuffers[i]       = currentBuf;
2186                targetStreamParms->metaBuffers[i] = metaBuf;
2187                targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
2188            }
2189
2190    ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
2191    cam_int_streamon(targetStreamParms->node);
2192    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
2193    currentNode->status = true;
2194    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
2195
2196    return 0;
2197}
2198
2199int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
2200{
2201    StreamThread *targetStream;
2202    status_t res = NO_ERROR;
2203    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2204    bool releasingScpMain = false;
2205
2206    if (stream_id == STREAM_ID_PREVIEW) {
2207        targetStream = (StreamThread*)(m_streamThreads[0].get());
2208        if (!targetStream) {
2209            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2210            return NO_ERROR;
2211        }
2212        targetStream->m_numRegisteredStream--;
2213        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2214        releasingScpMain = true;
2215        if (targetStream->m_parameters.needsIonMap) {
2216            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2217                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2218                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2219                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2220                    ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2221                                  targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2222                }
2223            }
2224        }
2225    } else if (stream_id == STREAM_ID_JPEG) {
2226        if (m_resizeBuf.size.s != 0) {
2227            freeCameraMemory(&m_resizeBuf, 1);
2228        }
2229        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2230
2231        targetStream = (StreamThread*)(m_streamThreads[1].get());
2232        if (!targetStream) {
2233            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2234            return NO_ERROR;
2235        }
2236
2237        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2238            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2239            return 1;
2240        }
2241        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2242        return 0;
2243    } else if (stream_id == STREAM_ID_RECORD) {
2244        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2245
2246        targetStream = (StreamThread*)(m_streamThreads[0].get());
2247        if (!targetStream) {
2248            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2249            return NO_ERROR;
2250        }
2251
2252        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2253            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2254            return 1;
2255        }
2256
2257        if (targetStream->m_numRegisteredStream != 0)
2258            return 0;
2259    } else if (stream_id == STREAM_ID_PRVCB) {
2260        if (m_previewCbBuf.size.s != 0) {
2261            freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
2262        }
2263        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2264
2265        targetStream = (StreamThread*)(m_streamThreads[0].get());
2266        if (!targetStream) {
2267            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2268            return NO_ERROR;
2269        }
2270
2271        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2272            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2273            return 1;
2274        }
2275
2276        if (targetStream->m_numRegisteredStream != 0)
2277            return 0;
2278    } else if (stream_id == STREAM_ID_ZSL) {
2279        targetStream = (StreamThread*)(m_streamThreads[1].get());
2280        if (!targetStream) {
2281            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2282            return NO_ERROR;
2283        }
2284
2285        targetStream->m_numRegisteredStream--;
2286        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2287        if (targetStream->m_parameters.needsIonMap) {
2288            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2289                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2290                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2291                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2292                    ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2293                                  targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2294                }
2295            }
2296        }
2297    } else {
2298        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2299        return 1;
2300    }
2301
2302    if (m_sensorThread != NULL && releasingScpMain) {
2303        m_sensorThread->release();
2304        ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
2305        while (!m_sensorThread->IsTerminated())
2306            usleep(SIG_WAITING_TICK);
2307        ALOGD("(%s): END   Waiting for (indirect) sensor thread termination", __FUNCTION__);
2308    }
2309
2310    if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2311        ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2312        targetStream = (StreamThread*)(m_streamThreads[1].get());
2313        targetStream->m_releasing = true;
2314        ALOGD("START stream thread release %d", __LINE__);
2315        do {
2316            targetStream->release();
2317            usleep(SIG_WAITING_TICK);
2318        } while (targetStream->m_releasing);
2319        m_camera_info.capture.status = false;
2320        ALOGD("END   stream thread release %d", __LINE__);
2321    }
2322
2323    if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2324        ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2325        targetStream = (StreamThread*)(m_streamThreads[0].get());
2326        targetStream->m_releasing = true;
2327        ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2328        do {
2329            targetStream->release();
2330            usleep(SIG_WAITING_TICK);
2331        } while (targetStream->m_releasing);
2332        ALOGD("(%s): END   Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2333        targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2334
2335        if (targetStream != NULL) {
2336            ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
2337            while (!targetStream->IsTerminated())
2338                usleep(SIG_WAITING_TICK);
2339            ALOGD("(%s): END   Waiting for (indirect) stream thread termination", __FUNCTION__);
2340            m_streamThreads[0] = NULL;
2341        }
2342        if (m_camera_info.capture.status == true) {
2343            m_scpForceSuspended = true;
2344        }
2345        m_isIspStarted = false;
2346    }
2347    ALOGV("(%s): END", __FUNCTION__);
2348    return 0;
2349}
2350
2351int ExynosCameraHWInterface2::allocateReprocessStream(
2352    uint32_t width, uint32_t height, uint32_t format,
2353    const camera2_stream_in_ops_t *reprocess_stream_ops,
2354    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
2355{
2356    ALOGV("DEBUG(%s):", __FUNCTION__);
2357    return 0;
2358}
2359
2360int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2361            uint32_t output_stream_id,
2362            const camera2_stream_in_ops_t *reprocess_stream_ops,
2363            // outputs
2364            uint32_t *stream_id)
2365{
2366    ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2367    *stream_id = STREAM_ID_JPEG_REPROCESS;
2368
2369    m_reprocessStreamId = *stream_id;
2370    m_reprocessOps = reprocess_stream_ops;
2371    m_reprocessOutputStreamId = output_stream_id;
2372    return 0;
2373}
2374
2375int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2376{
2377    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2378    if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2379        m_reprocessStreamId = 0;
2380        m_reprocessOps = NULL;
2381        m_reprocessOutputStreamId = 0;
2382        return 0;
2383    }
2384    return 1;
2385}
2386
2387int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2388{
2389    Mutex::Autolock lock(m_afModeTriggerLock);
2390    ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2391
2392    switch (trigger_id) {
2393    case CAMERA2_TRIGGER_AUTOFOCUS:
2394        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2395        OnAfTrigger(ext1);
2396        break;
2397
2398    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2399        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2400        OnAfCancel(ext1);
2401        break;
2402    case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2403        ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2404        OnPrecaptureMeteringTriggerStart(ext1);
2405        break;
2406    default:
2407        break;
2408    }
2409    return 0;
2410}
2411
2412int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2413{
2414    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2415    m_notifyCb = notify_cb;
2416    m_callbackCookie = user;
2417    return 0;
2418}
2419
2420int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2421{
2422    ALOGV("DEBUG(%s):", __FUNCTION__);
2423    *ops = NULL;
2424    return 0;
2425}
2426
2427int ExynosCameraHWInterface2::dump(int fd)
2428{
2429    ALOGV("DEBUG(%s):", __FUNCTION__);
2430    return 0;
2431}
2432
2433void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2434{
2435    switch (colorFormat) {
2436    // 1p
2437    case V4L2_PIX_FMT_RGB565 :
2438    case V4L2_PIX_FMT_YUYV :
2439    case V4L2_PIX_FMT_UYVY :
2440    case V4L2_PIX_FMT_VYUY :
2441    case V4L2_PIX_FMT_YVYU :
2442        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2443        buf->size.extS[1] = 0;
2444        buf->size.extS[2] = 0;
2445        break;
2446    // 2p
2447    case V4L2_PIX_FMT_NV12 :
2448    case V4L2_PIX_FMT_NV12T :
2449    case V4L2_PIX_FMT_NV21 :
2450        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2451        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2452        buf->size.extS[2] = 0;
2453        break;
2454    case V4L2_PIX_FMT_NV12M :
2455    case V4L2_PIX_FMT_NV12MT_16X16 :
2456    case V4L2_PIX_FMT_NV21M:
2457        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2458        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2459        buf->size.extS[2] = 0;
2460        break;
2461    case V4L2_PIX_FMT_NV16 :
2462    case V4L2_PIX_FMT_NV61 :
2463        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2464        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2465        buf->size.extS[2] = 0;
2466        break;
2467     // 3p
2468    case V4L2_PIX_FMT_YUV420 :
2469    case V4L2_PIX_FMT_YVU420 :
2470        buf->size.extS[0] = (w * h);
2471        buf->size.extS[1] = (w * h) >> 2;
2472        buf->size.extS[2] = (w * h) >> 2;
2473        break;
2474    case V4L2_PIX_FMT_YUV420M:
2475    case V4L2_PIX_FMT_YVU420M :
2476        buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
2477        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2478        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2479        break;
2480    case V4L2_PIX_FMT_YUV422P :
2481        buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
2482        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2483        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2484        break;
2485    default:
2486        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2487        return;
2488        break;
2489    }
2490}
2491
2492bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2493                                             int  dst_w,  int   dst_h,
2494                                             int *crop_x, int *crop_y,
2495                                             int *crop_w, int *crop_h,
2496                                             int zoom)
2497{
2498    *crop_w = src_w;
2499    *crop_h = src_h;
2500
2501    if (   src_w != dst_w
2502        || src_h != dst_h) {
2503        float src_ratio = 1.0f;
2504        float dst_ratio = 1.0f;
2505
2506        // ex : 1024 / 768
2507        src_ratio = (float)src_w / (float)src_h;
2508
2509        // ex : 352  / 288
2510        dst_ratio = (float)dst_w / (float)dst_h;
2511
2512        if (dst_w * dst_h < src_w * src_h) {
2513            if (dst_ratio <= src_ratio) {
2514                // shrink w
2515                *crop_w = src_h * dst_ratio;
2516                *crop_h = src_h;
2517            } else {
2518                // shrink h
2519                *crop_w = src_w;
2520                *crop_h = src_w / dst_ratio;
2521            }
2522        } else {
2523            if (dst_ratio <= src_ratio) {
2524                // shrink w
2525                *crop_w = src_h * dst_ratio;
2526                *crop_h = src_h;
2527            } else {
2528                // shrink h
2529                *crop_w = src_w;
2530                *crop_h = src_w / dst_ratio;
2531            }
2532        }
2533    }
2534
2535    if (zoom != 0) {
2536        float zoomLevel = ((float)zoom + 10.0) / 10.0;
2537        *crop_w = (int)((float)*crop_w / zoomLevel);
2538        *crop_h = (int)((float)*crop_h / zoomLevel);
2539    }
2540
2541    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2542    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2543    if (w_align != 0) {
2544        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2545            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
2546            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2547        }
2548        else
2549            *crop_w -= w_align;
2550    }
2551
2552    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2553    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2554    if (h_align != 0) {
2555        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2556            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
2557            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2558        }
2559        else
2560            *crop_h -= h_align;
2561    }
2562
2563    *crop_x = (src_w - *crop_w) >> 1;
2564    *crop_y = (src_h - *crop_h) >> 1;
2565
2566    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2567        *crop_x -= 1;
2568
2569    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2570        *crop_y -= 1;
2571
2572    return true;
2573}
2574
2575BayerBufManager::BayerBufManager()
2576{
2577    ALOGV("DEBUG(%s): ", __FUNCTION__);
2578    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2579        entries[i].status = BAYER_ON_HAL_EMPTY;
2580        entries[i].reqFrameCnt = 0;
2581    }
2582    sensorEnqueueHead = 0;
2583    sensorDequeueHead = 0;
2584    ispEnqueueHead = 0;
2585    ispDequeueHead = 0;
2586    numOnSensor = 0;
2587    numOnIsp = 0;
2588    numOnHalFilled = 0;
2589    numOnHalEmpty = NUM_BAYER_BUFFERS;
2590}
2591
2592BayerBufManager::~BayerBufManager()
2593{
2594    ALOGV("%s", __FUNCTION__);
2595}
2596
2597int     BayerBufManager::GetIndexForSensorEnqueue()
2598{
2599    int ret = 0;
2600    if (numOnHalEmpty == 0)
2601        ret = -1;
2602    else
2603        ret = sensorEnqueueHead;
2604    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2605    return ret;
2606}
2607
2608int    BayerBufManager::MarkSensorEnqueue(int index)
2609{
2610    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2611
2612    // sanity check
2613    if (index != sensorEnqueueHead) {
2614        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2615        return -1;
2616    }
2617    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2618        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2619            index, entries[index].status, BAYER_ON_HAL_EMPTY);
2620        return -1;
2621    }
2622
2623    entries[index].status = BAYER_ON_SENSOR;
2624    entries[index].reqFrameCnt = 0;
2625    numOnHalEmpty--;
2626    numOnSensor++;
2627    sensorEnqueueHead = GetNextIndex(index);
2628    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2629        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2630    return 0;
2631}
2632
2633int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
2634{
2635    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2636
2637    if (entries[index].status != BAYER_ON_SENSOR) {
2638        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2639            index, entries[index].status, BAYER_ON_SENSOR);
2640        return -1;
2641    }
2642
2643    entries[index].status = BAYER_ON_HAL_FILLED;
2644    numOnHalFilled++;
2645    numOnSensor--;
2646
2647    return 0;
2648}
2649
2650int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2651{
2652    int ret = 0;
2653    if (numOnHalFilled == 0)
2654        ret = -1;
2655    else {
2656        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2657        ret = ispEnqueueHead;
2658    }
2659    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2660    return ret;
2661}
2662
2663int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2664{
2665    int ret = 0;
2666    if (numOnIsp == 0)
2667        ret = -1;
2668    else {
2669        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2670        ret = ispDequeueHead;
2671    }
2672    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2673    return ret;
2674}
2675
2676int    BayerBufManager::MarkIspEnqueue(int index)
2677{
2678    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2679
2680    // sanity check
2681    if (index != ispEnqueueHead) {
2682        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2683        return -1;
2684    }
2685    if (entries[index].status != BAYER_ON_HAL_FILLED) {
2686        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2687            index, entries[index].status, BAYER_ON_HAL_FILLED);
2688        return -1;
2689    }
2690
2691    entries[index].status = BAYER_ON_ISP;
2692    numOnHalFilled--;
2693    numOnIsp++;
2694    ispEnqueueHead = GetNextIndex(index);
2695    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2696        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2697    return 0;
2698}
2699
2700int    BayerBufManager::MarkIspDequeue(int index)
2701{
2702    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2703
2704    // sanity check
2705    if (index != ispDequeueHead) {
2706        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2707        return -1;
2708    }
2709    if (entries[index].status != BAYER_ON_ISP) {
2710        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2711            index, entries[index].status, BAYER_ON_ISP);
2712        return -1;
2713    }
2714
2715    entries[index].status = BAYER_ON_HAL_EMPTY;
2716    entries[index].reqFrameCnt = 0;
2717    numOnHalEmpty++;
2718    numOnIsp--;
2719    ispDequeueHead = GetNextIndex(index);
2720    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2721        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2722    return 0;
2723}
2724
2725int BayerBufManager::GetNumOnSensor()
2726{
2727    return numOnSensor;
2728}
2729
2730int BayerBufManager::GetNumOnHalFilled()
2731{
2732    return numOnHalFilled;
2733}
2734
2735int BayerBufManager::GetNumOnIsp()
2736{
2737    return numOnIsp;
2738}
2739
2740int     BayerBufManager::GetNextIndex(int index)
2741{
2742    index++;
2743    if (index >= NUM_BAYER_BUFFERS)
2744        index = 0;
2745
2746    return index;
2747}
2748
2749void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2750{
2751    camera_metadata_t *currentRequest = NULL;
2752    camera_metadata_t *currentFrame = NULL;
2753    size_t numEntries = 0;
2754    size_t frameSize = 0;
2755    camera_metadata_t * preparedFrame = NULL;
2756    camera_metadata_t *deregisteredRequest = NULL;
2757    uint32_t currentSignal = self->GetProcessingSignal();
2758    MainThread *  selfThread      = ((MainThread*)self);
2759    int res = 0;
2760
2761    int ret;
2762    int afMode;
2763    uint32_t afRegion[4];
2764
2765    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2766
2767    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2768        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2769
2770        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2771        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2772        return;
2773    }
2774
2775    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2776        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2777        if (m_requestManager->IsRequestQueueFull()==false) {
2778            Mutex::Autolock lock(m_afModeTriggerLock);
2779            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2780            if (NULL == currentRequest) {
2781                ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
2782                m_isRequestQueueNull = true;
2783                if (m_requestManager->IsVdisEnable())
2784                    m_vdisBubbleCnt = 1;
2785            }
2786            else {
2787                m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);
2788
2789                SetAfMode((enum aa_afmode)afMode);
2790                SetAfRegion(afRegion);
2791
2792                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2793                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2794                if (m_requestManager->IsRequestQueueFull()==false)
2795                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2796
2797                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2798            }
2799        }
2800        else {
2801            m_isRequestQueuePending = true;
2802        }
2803    }
2804
2805    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2806        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2807        /*while (1)*/ {
2808            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2809            if (ret == false)
2810                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2811
2812            m_requestManager->DeregisterRequest(&deregisteredRequest);
2813
2814            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2815            if (ret < 0)
2816                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2817
2818            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2819            if (ret < 0)
2820                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2821
2822            if (currentFrame==NULL) {
2823                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2824            }
2825            else {
2826                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2827            }
2828            res = append_camera_metadata(currentFrame, preparedFrame);
2829            if (res==0) {
2830                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2831                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2832            }
2833            else {
2834                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2835            }
2836        }
2837        if (!m_isRequestQueueNull) {
2838            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2839        }
2840
2841        if (getInProgressCount()>0) {
2842            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2843            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2844        }
2845    }
2846    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2847    return;
2848}
2849
2850void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2851{
2852    ALOGD("####  common Section");
2853    ALOGD("####                 magic(%x) ",
2854        shot_ext->shot.magicNumber);
2855    ALOGD("####  ctl Section");
2856    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2857        shot_ext->shot.ctl.request.metadataMode,
2858        shot_ext->shot.ctl.lens.aperture,
2859        shot_ext->shot.ctl.sensor.exposureTime,
2860        shot_ext->shot.ctl.sensor.frameDuration,
2861        shot_ext->shot.ctl.sensor.sensitivity,
2862        shot_ext->shot.ctl.aa.awbMode);
2863
2864    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2865        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2866        shot_ext->shot.ctl.request.outputStreams[0]);
2867
2868    ALOGD("####  DM Section");
2869    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2870        shot_ext->shot.dm.request.metadataMode,
2871        shot_ext->shot.dm.lens.aperture,
2872        shot_ext->shot.dm.sensor.exposureTime,
2873        shot_ext->shot.dm.sensor.frameDuration,
2874        shot_ext->shot.dm.sensor.sensitivity,
2875        shot_ext->shot.dm.sensor.timeStamp,
2876        shot_ext->shot.dm.aa.awbMode,
2877        shot_ext->shot.dm.request.frameCount );
2878}
2879
2880void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2881{
2882    // Flash
2883    switch (m_ctlInfo.flash.m_flashCnt) {
2884    case IS_FLASH_STATE_ON:
2885        ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2886        // check AF locked
2887        if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
2888            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2889                if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2890                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2891                    m_ctlInfo.flash.m_flashTimeOut = 5;
2892                } else
2893                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2894                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2895            } else {
2896                m_ctlInfo.flash.m_flashTimeOut--;
2897            }
2898        } else {
2899            if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2900                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2901                m_ctlInfo.flash.m_flashTimeOut = 5;
2902            } else
2903                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2904            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2905        }
2906        break;
2907    case IS_FLASH_STATE_ON_WAIT:
2908        break;
2909    case IS_FLASH_STATE_ON_DONE:
2910        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2911            // auto transition at pre-capture trigger
2912            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2913        break;
2914    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2915        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2916        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2917        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2918        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2919        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2920        break;
2921    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2922    case IS_FLASH_STATE_AUTO_WAIT:
2923        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2924        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2925        break;
2926    case IS_FLASH_STATE_AUTO_DONE:
2927        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2928        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2929        break;
2930    case IS_FLASH_STATE_AUTO_OFF:
2931        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2932        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2933        m_ctlInfo.flash.m_flashEnableFlg = false;
2934        break;
2935    case IS_FLASH_STATE_CAPTURE:
2936        ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2937        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2938        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2939        shot_ext->request_scc = 0;
2940        shot_ext->request_scp = 0;
2941        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2942        break;
2943    case IS_FLASH_STATE_CAPTURE_WAIT:
2944        shot_ext->request_scc = 0;
2945        shot_ext->request_scp = 0;
2946        break;
2947    case IS_FLASH_STATE_CAPTURE_JPEG:
2948        ALOGV("(%s): [Flash] Flash Capture  (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2949        shot_ext->request_scc = 1;
2950        shot_ext->request_scp = 1;
2951        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
2952        break;
2953    case IS_FLASH_STATE_CAPTURE_END:
2954        ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2955        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2956        shot_ext->request_scc = 0;
2957        shot_ext->request_scp = 0;
2958        m_ctlInfo.flash.m_flashEnableFlg = false;
2959        m_ctlInfo.flash.m_flashCnt = 0;
2960        m_ctlInfo.flash.m_afFlashDoneFlg= false;
2961        break;
2962    case IS_FLASH_STATE_NONE:
2963        break;
2964    default:
2965        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2966    }
2967}
2968
2969void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2970{
2971    // Flash
2972    switch (m_ctlInfo.flash.m_flashCnt) {
2973    case IS_FLASH_STATE_AUTO_WAIT:
2974        if (m_ctlInfo.flash.m_flashDecisionResult) {
2975            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2976                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2977                ALOGV("(%s): [Flash] Lis :  AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2978            } else {
2979                ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2980            }
2981        } else {
2982            //If flash isn't activated at flash auto mode, skip flash auto control
2983            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2984            ALOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
2985        }
2986        break;
2987    }
2988}
2989
2990void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
2991{
2992    // Flash
2993    switch (m_ctlInfo.flash.m_flashCnt) {
2994    case IS_FLASH_STATE_ON_WAIT:
2995        if (shot_ext->shot.dm.flash.decision > 0) {
2996            // store decision result to skip capture sequenece
2997            ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
2998            if (shot_ext->shot.dm.flash.decision == 2)
2999                m_ctlInfo.flash.m_flashDecisionResult = false;
3000            else
3001                m_ctlInfo.flash.m_flashDecisionResult = true;
3002            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
3003        } else {
3004            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
3005                ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
3006                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
3007                m_ctlInfo.flash.m_flashDecisionResult = false;
3008            } else {
3009                m_ctlInfo.flash.m_flashTimeOut--;
3010            }
3011        }
3012        break;
3013    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
3014        if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
3015            ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
3016            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
3017        } else {
3018            ALOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
3019        }
3020        break;
3021    case IS_FLASH_STATE_CAPTURE_WAIT:
3022        if (m_ctlInfo.flash.m_flashDecisionResult) {
3023            if (shot_ext->shot.dm.flash.firingStable) {
3024                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3025            } else {
3026                if (m_ctlInfo.flash.m_flashTimeOut == 0) {
3027                    ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
3028                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3029                } else {
3030                    ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
3031                    m_ctlInfo.flash.m_flashTimeOut--;
3032                }
3033            }
3034        } else {
3035            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3036        }
3037        break;
3038    }
3039}
3040
3041void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext)
3042{
3043    switch (m_ctlInfo.flash.i_flashMode) {
3044    case AA_AEMODE_ON:
3045        // At flash off mode, capture can be done as zsl capture
3046        shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
3047        break;
3048    case AA_AEMODE_ON_AUTO_FLASH:
3049        // At flash auto mode, main flash have to be done if pre-flash was done.
3050        if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg)
3051            shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED;
3052        break;
3053    }
3054}
3055
3056void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
3057{
3058    shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0];
3059    shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1];
3060    shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2];
3061    shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3];
3062}
3063
3064void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion)
3065{
3066    currentAfRegion[0] = afRegion[0];
3067    currentAfRegion[1] = afRegion[1];
3068    currentAfRegion[2] = afRegion[2];
3069    currentAfRegion[3] = afRegion[3];
3070}
3071
3072void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
3073{
3074    if (m_afState == HAL_AFSTATE_SCANNING) {
3075        ALOGD("(%s): restarting trigger ", __FUNCTION__);
3076    } else if (!mode) {
3077        if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
3078            ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
3079        else
3080            m_afState = HAL_AFSTATE_STARTED;
3081    }
3082    ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState);
3083    shot_ext->shot.ctl.aa.afTrigger = 1;
3084    shot_ext->shot.ctl.aa.afMode = m_afMode;
3085    m_IsAfTriggerRequired = false;
3086}
3087
3088void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
3089{
3090    uint32_t        currentSignal = self->GetProcessingSignal();
3091    SensorThread *  selfThread      = ((SensorThread*)self);
3092    int index;
3093    int index_isp;
3094    status_t res;
3095    nsecs_t frameTime;
3096    int bayersOnSensor = 0, bayersOnIsp = 0;
3097    int j = 0;
3098    bool isCapture = false;
3099    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
3100
3101    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3102        CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3103
3104        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
3105        cam_int_streamoff(&(m_camera_info.sensor));
3106        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
3107
3108        m_camera_info.sensor.buffers = 0;
3109        ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
3110        cam_int_reqbufs(&(m_camera_info.sensor));
3111        ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
3112        m_camera_info.sensor.status = false;
3113
3114        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
3115        isp_int_streamoff(&(m_camera_info.isp));
3116        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
3117
3118        m_camera_info.isp.buffers = 0;
3119        ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
3120        cam_int_reqbufs(&(m_camera_info.isp));
3121        ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
3122
3123        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
3124
3125        m_requestManager->releaseSensorQ();
3126        m_requestManager->ResetEntry();
3127        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3128        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
3129        return;
3130    }
3131
3132    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
3133    {
3134        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
3135        int targetStreamIndex = 0, i=0;
3136        int matchedFrameCnt = -1, processingReqIndex;
3137        struct camera2_shot_ext *shot_ext;
3138        struct camera2_shot_ext *shot_ext_capture;
3139        bool triggered = false;
3140
3141        /* dqbuf from sensor */
3142        ALOGV("Sensor DQbuf start");
3143        index = cam_int_dqbuf(&(m_camera_info.sensor));
3144        m_requestManager->pushSensorQ(index);
3145        ALOGV("Sensor DQbuf done(%d)", index);
3146        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3147
3148        if (m_nightCaptureCnt != 0) {
3149            matchedFrameCnt = m_nightCaptureFrameCnt;
3150        } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
3151            matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
3152            ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
3153        } else {
3154            matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
3155        }
3156
3157        if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
3158            matchedFrameCnt = m_vdisDupFrame;
3159        }
3160
3161        if (matchedFrameCnt != -1) {
3162            if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) {
3163                frameTime = systemTime();
3164                m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
3165                m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
3166            } else {
3167                ALOGV("bubble for vids: m_vdisBubbleCnt %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt);
3168            }
3169
3170            // face af mode setting in case of face priority scene mode
3171            if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
3172                ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode);
3173                m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3174            }
3175
3176            m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
3177            float zoomLeft, zoomTop, zoomWidth, zoomHeight;
3178            int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
3179
3180            m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
3181                           m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
3182                           &crop_x, &crop_y,
3183                           &crop_w, &crop_h,
3184                           0);
3185
3186            if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
3187                zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
3188                zoomHeight = zoomWidth *
3189                        m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
3190            } else {
3191                zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
3192                zoomWidth = zoomHeight *
3193                        m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
3194            }
3195            zoomLeft = (crop_w - zoomWidth) / 2;
3196            zoomTop = (crop_h - zoomHeight) / 2;
3197
3198            int32_t new_cropRegion[3] = { zoomLeft, zoomTop, zoomWidth };
3199
3200            int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4);
3201            if (cropCompensation)
3202                new_cropRegion[2] -= cropCompensation;
3203
3204            shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
3205            shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
3206            shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
3207            if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
3208                ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode);
3209                shot_ext->shot.ctl.aa.afMode = m_afMode;
3210                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3211                    ALOGD("### With Automatic triger for continuous modes");
3212                    m_afState = HAL_AFSTATE_STARTED;
3213                    shot_ext->shot.ctl.aa.afTrigger = 1;
3214                    triggered = true;
3215                    if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) ||
3216                            (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) {
3217                        switch (m_afMode) {
3218                        case AA_AFMODE_CONTINUOUS_PICTURE:
3219                            shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE;
3220                            ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode);
3221                            break;
3222                        }
3223                    }
3224                    // reset flash result
3225                    if (m_ctlInfo.flash.m_afFlashDoneFlg) {
3226                        m_ctlInfo.flash.m_flashEnableFlg = false;
3227                        m_ctlInfo.flash.m_afFlashDoneFlg = false;
3228                        m_ctlInfo.flash.m_flashDecisionResult = false;
3229                        m_ctlInfo.flash.m_flashCnt = 0;
3230                    }
3231                    m_ctlInfo.af.m_afTriggerTimeOut = 1;
3232                }
3233
3234                m_IsAfModeUpdateRequired = false;
3235                // support inifinity focus mode
3236                if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
3237                    shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
3238                    shot_ext->shot.ctl.aa.afTrigger = 1;
3239                    triggered = true;
3240                }
3241                if (m_afMode2 != NO_CHANGE) {
3242                    enum aa_afmode tempAfMode = m_afMode2;
3243                    m_afMode2 = NO_CHANGE;
3244                    SetAfMode(tempAfMode);
3245                }
3246            }
3247            else {
3248                shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
3249            }
3250            if (m_IsAfTriggerRequired) {
3251                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
3252                    // flash case
3253                    if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
3254                        if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3255                            // Flash is enabled and start AF
3256                            m_afTrigger(shot_ext, 1);
3257                        } else {
3258                            m_afTrigger(shot_ext, 0);
3259                        }
3260                    }
3261                } else {
3262                    // non-flash case
3263                    m_afTrigger(shot_ext, 0);
3264                }
3265            } else {
3266                shot_ext->shot.ctl.aa.afTrigger = 0;
3267            }
3268
3269            if (m_wideAspect) {
3270                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3271            } else {
3272                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3273            }
3274            if (triggered)
3275                shot_ext->shot.ctl.aa.afTrigger = 1;
3276
3277            // TODO : check collision with AFMode Update
3278            if (m_IsAfLockRequired) {
3279                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
3280                m_IsAfLockRequired = false;
3281            }
3282            ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
3283                index,
3284                shot_ext->shot.ctl.request.frameCount,
3285                shot_ext->request_scp,
3286                shot_ext->request_scc,
3287                shot_ext->dis_bypass, sizeof(camera2_shot));
3288
3289            // update AF region
3290            m_updateAfRegion(shot_ext);
3291
3292            m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3293            if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
3294                    && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
3295                shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
3296            if (m_nightCaptureCnt == 0) {
3297                if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
3298                        && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3299                    shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3300                    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3301                    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3302                    m_nightCaptureCnt = 4;
3303                    m_nightCaptureFrameCnt = matchedFrameCnt;
3304                    shot_ext->request_scc = 0;
3305                }
3306            }
3307            else if (m_nightCaptureCnt == 1) {
3308                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3309                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3310                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3311                m_nightCaptureCnt--;
3312                m_nightCaptureFrameCnt = 0;
3313                shot_ext->request_scc = 1;
3314            }
3315            else if (m_nightCaptureCnt == 2) {
3316                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3317                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3318                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3319                m_nightCaptureCnt--;
3320                shot_ext->request_scc = 0;
3321            }
3322            else if (m_nightCaptureCnt == 3) {
3323                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3324                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3325                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3326                m_nightCaptureCnt--;
3327                shot_ext->request_scc = 0;
3328            }
3329            else if (m_nightCaptureCnt == 4) {
3330                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3331                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3332                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3333                m_nightCaptureCnt--;
3334                shot_ext->request_scc = 0;
3335            }
3336
3337            switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) {
3338            case 15:
3339                shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000);
3340                break;
3341
3342            case 24:
3343                shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000);
3344                break;
3345
3346            case 25:
3347                shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000);
3348                break;
3349
3350            case 30:
3351            default:
3352                shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000);
3353                break;
3354            }
3355            shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3356
3357            // Flash mode
3358            // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence
3359            if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
3360                    && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3361                    && (m_cameraId == 0)) {
3362                if (!m_ctlInfo.flash.m_flashDecisionResult) {
3363                    m_ctlInfo.flash.m_flashEnableFlg = false;
3364                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3365                    m_ctlInfo.flash.m_flashCnt = 0;
3366                } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) ||
3367                                          (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3368                    ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3369                    shot_ext->request_scc = 0;
3370                    m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3371                    m_ctlInfo.flash.m_flashEnableFlg = true;
3372                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3373                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3374                } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
3375                    ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
3376                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3377                    m_ctlInfo.flash.m_flashEnableFlg = false;
3378                    m_ctlInfo.flash.m_afFlashDoneFlg= false;
3379                    m_ctlInfo.flash.m_flashCnt = 0;
3380                }
3381            } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
3382                m_ctlInfo.flash.m_flashDecisionResult = false;
3383            }
3384
3385            if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
3386                if (m_ctlInfo.flash.m_flashTorchMode == false) {
3387                    m_ctlInfo.flash.m_flashTorchMode = true;
3388                }
3389            } else {
3390                if (m_ctlInfo.flash.m_flashTorchMode == true) {
3391                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3392                    shot_ext->shot.ctl.flash.firingPower = 0;
3393                    m_ctlInfo.flash.m_flashTorchMode = false;
3394                } else {
3395                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3396                }
3397            }
3398
3399            if (shot_ext->isReprocessing) {
3400                ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__);
3401                m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3402                shot_ext->request_scp = 0;
3403                shot_ext->request_scc = 0;
3404                m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3405                m_ctlInfo.flash.m_flashDecisionResult = false;
3406                memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)),
3407                    sizeof(struct camera2_shot_ext));
3408                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3409                m_ctlInfo.flash.m_flashEnableFlg = false;
3410            }
3411
3412            if (m_ctlInfo.flash.m_flashEnableFlg) {
3413                m_preCaptureListenerSensor(shot_ext);
3414                m_preCaptureSetter(shot_ext);
3415            }
3416
3417            ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3418            (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3419            (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3420            (int)(shot_ext->shot.ctl.aa.afTrigger));
3421
3422            if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
3423                shot_ext->dis_bypass = 1;
3424                shot_ext->dnr_bypass = 1;
3425                shot_ext->request_scp = 0;
3426                shot_ext->request_scc = 0;
3427                m_vdisBubbleCnt--;
3428                matchedFrameCnt = -1;
3429            } else {
3430                m_vdisDupFrame = matchedFrameCnt;
3431            }
3432            if (m_scpForceSuspended)
3433                shot_ext->request_scc = 0;
3434
3435            uint32_t current_scp = shot_ext->request_scp;
3436            uint32_t current_scc = shot_ext->request_scc;
3437
3438            if (shot_ext->shot.dm.request.frameCount == 0) {
3439                CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3440            }
3441
3442            cam_int_qbuf(&(m_camera_info.isp), index);
3443
3444            ALOGV("### isp DQBUF start");
3445            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3446
3447            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3448
3449            if (m_ctlInfo.flash.m_flashEnableFlg)
3450                m_preCaptureListenerISP(shot_ext);
3451
3452            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
3453                index,
3454                shot_ext->shot.ctl.request.frameCount,
3455                shot_ext->request_scp,
3456                shot_ext->request_scc,
3457                shot_ext->dis_bypass,
3458                shot_ext->dnr_bypass, sizeof(camera2_shot));
3459
3460            ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3461                (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3462                (int)(shot_ext->shot.dm.aa.awbMode),
3463                (int)(shot_ext->shot.dm.aa.afMode));
3464
3465#ifndef ENABLE_FRAME_SYNC
3466            m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3467#endif
3468
3469            if (!shot_ext->fd_bypass) {
3470                /* FD orientation axis transformation */
3471                for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3472                    if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3473                        shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
3474                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][0])
3475                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3476                    if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3477                        shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
3478                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][1])
3479                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3480                    if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3481                        shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
3482                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][2])
3483                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3484                    if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3485                        shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
3486                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][3])
3487                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3488                }
3489            }
3490            // aeState control
3491            if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT)
3492                m_preCaptureAeState(shot_ext);
3493
3494            // At scene mode face priority
3495            if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
3496                shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3497
3498            if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3499                m_requestManager->ApplyDynamicMetadata(shot_ext);
3500            }
3501
3502            if (current_scc != shot_ext->request_scc) {
3503                ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
3504                                __FUNCTION__, current_scc, shot_ext->request_scc);
3505                m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3506            }
3507            if (shot_ext->request_scc) {
3508                ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
3509                if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) {
3510                    if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)
3511                        memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)),
3512                            sizeof(struct camera2_shot_ext));
3513                    else
3514                        memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext));
3515                }
3516                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3517            }
3518            if (current_scp != shot_ext->request_scp) {
3519                ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
3520                                __FUNCTION__, current_scp, shot_ext->request_scp);
3521                m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3522            }
3523            if (shot_ext->request_scp) {
3524                ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
3525                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3526            }
3527
3528            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3529               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3530            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3531                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3532                m_scp_closed = true;
3533            }
3534            else
3535                m_scp_closed = false;
3536
3537            OnAfNotification(shot_ext->shot.dm.aa.afState);
3538            OnPrecaptureMeteringNotificationISP();
3539        }   else {
3540            memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl));
3541            shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
3542            shot_ext->request_sensor = 1;
3543            shot_ext->dis_bypass = 1;
3544            shot_ext->dnr_bypass = 1;
3545            shot_ext->fd_bypass = 1;
3546            shot_ext->drc_bypass = 1;
3547            shot_ext->request_scc = 0;
3548            shot_ext->request_scp = 0;
3549            if (m_wideAspect) {
3550                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3551            } else {
3552                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3553            }
3554            shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode;
3555            if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3556                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3557                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3558            }
3559            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3560            shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3561            ALOGV("### isp QBUF start (bubble)");
3562            ALOGV("bubble: queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
3563                (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3564                (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3565                (int)(shot_ext->shot.ctl.aa.afTrigger));
3566
3567            cam_int_qbuf(&(m_camera_info.isp), index);
3568            ALOGV("### isp DQBUF start (bubble)");
3569            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3570            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3571            ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
3572                (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3573                (int)(shot_ext->shot.dm.aa.awbMode),
3574                (int)(shot_ext->shot.dm.aa.afMode));
3575
3576            OnAfNotification(shot_ext->shot.dm.aa.afState);
3577        }
3578
3579        index = m_requestManager->popSensorQ();
3580        if(index < 0){
3581            ALOGE("sensorQ is empty");
3582            return;
3583        }
3584
3585        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
3586        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3587        if (m_scp_closing || m_scp_closed) {
3588            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3589            shot_ext->request_scc = 0;
3590            shot_ext->request_scp = 0;
3591            shot_ext->request_sensor = 0;
3592        }
3593        cam_int_qbuf(&(m_camera_info.sensor), index);
3594        ALOGV("Sensor Qbuf done(%d)", index);
3595
3596        if (!m_scp_closing
3597            && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3598            ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)",
3599                                    matchedFrameCnt, processingReqIndex);
3600            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3601        }
3602    }
3603    return;
3604}
3605
3606void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3607{
3608    uint32_t                currentSignal   = self->GetProcessingSignal();
3609    StreamThread *          selfThread      = ((StreamThread*)self);
3610    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3611    node_info_t             *currentNode    = selfStreamParms->node;
3612    substream_parameters_t  *subParms;
3613    buffer_handle_t * buf = NULL;
3614    status_t res;
3615    void *virtAddr[3];
3616    int i, j;
3617    int index;
3618    nsecs_t timestamp;
3619
3620    if (!(selfThread->m_isBufferInit))
3621    {
3622        for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3623            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3624            if (res != NO_ERROR || buf == NULL) {
3625                ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3626                return;
3627            }
3628            ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3629               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3630
3631            index = selfThread->findBufferIndex(buf);
3632            if (index == -1) {
3633                ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3634            }
3635            else {
3636                ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3637                    __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3638                if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3639                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3640                else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3641                    selfStreamParms->svcBufStatus[index] = ON_HAL;
3642                else {
3643                    ALOGV("DBG(%s): buffer status abnormal (%d) "
3644                        , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3645                }
3646                selfStreamParms->numSvcBufsInHal++;
3647            }
3648            selfStreamParms->bufIndex = 0;
3649        }
3650        selfThread->m_isBufferInit = true;
3651    }
3652    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3653        if (selfThread->m_attachedSubStreams[i].streamId == -1)
3654            continue;
3655
3656        subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3657        if (subParms->type && subParms->needBufferInit) {
3658            ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3659                __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3660            int checkingIndex = 0;
3661            bool found = false;
3662            for ( i = 0 ; i < subParms->numSvcBuffers; i++) {
3663                res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3664                if (res != NO_ERROR || buf == NULL) {
3665                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3666                    return;
3667                }
3668                subParms->numSvcBufsInHal++;
3669                ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3670                   subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3671
3672                if (m_grallocHal->lock(m_grallocHal, *buf,
3673                       subParms->usage, 0, 0,
3674                       subParms->width, subParms->height, virtAddr) != 0) {
3675                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3676                }
3677                else {
3678                      ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3679                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3680                }
3681                found = false;
3682                for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3683                    if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3684                        found = true;
3685                        break;
3686                    }
3687                }
3688                ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3689                if (!found) break;
3690
3691                index = checkingIndex;
3692
3693                if (index == -1) {
3694                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3695                }
3696                else {
3697                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3698                        __FUNCTION__, index, subParms->svcBufStatus[index]);
3699                    if (subParms->svcBufStatus[index]== ON_SERVICE)
3700                        subParms->svcBufStatus[index] = ON_HAL;
3701                    else {
3702                        ALOGV("DBG(%s): buffer status abnormal (%d) "
3703                            , __FUNCTION__, subParms->svcBufStatus[index]);
3704                    }
3705                    if (*buf != subParms->svcBufHandle[index])
3706                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3707                    else
3708                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3709                }
3710                subParms->svcBufIndex = 0;
3711            }
3712            if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3713                m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3714                m_resizeBuf.size.extS[1] = 0;
3715                m_resizeBuf.size.extS[2] = 0;
3716
3717                if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3718                    ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3719                }
3720            }
3721            if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3722                m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3723                subParms->height, &m_previewCbBuf);
3724
3725                if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3726                    ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3727                }
3728            }
3729            subParms->needBufferInit= false;
3730        }
3731    }
3732}
3733
3734void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3735{
3736    StreamThread *          selfThread      = ((StreamThread*)self);
3737    ALOGV("DEBUG(%s): ", __FUNCTION__ );
3738    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3739    selfThread->m_isBufferInit = false;
3740    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3741        selfThread->m_attachedSubStreams[i].streamId    = -1;
3742        selfThread->m_attachedSubStreams[i].priority    = 0;
3743    }
3744    return;
3745}
3746
3747int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3748    int stream_id, nsecs_t frameTimeStamp)
3749{
3750    substream_parameters_t  *subParms = &m_subStreams[stream_id];
3751
3752    switch (stream_id) {
3753
3754    case STREAM_ID_JPEG:
3755        return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3756
3757    case STREAM_ID_RECORD:
3758        return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3759
3760    case STREAM_ID_PRVCB:
3761        return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3762
3763    default:
3764        return 0;
3765    }
3766}
3767void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3768{
3769    uint32_t                currentSignal   = self->GetProcessingSignal();
3770    StreamThread *          selfThread      = ((StreamThread*)self);
3771    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3772    node_info_t             *currentNode    = selfStreamParms->node;
3773    int i = 0;
3774    nsecs_t frameTimeStamp;
3775
3776    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3777        CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3778
3779        if (selfThread->m_isBufferInit) {
3780            if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
3781                ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3782                    selfThread->m_index, currentNode->fd);
3783                if (cam_int_streamoff(currentNode) < 0 ) {
3784                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3785                }
3786                ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3787                        selfThread->m_index, currentNode->fd);
3788                currentNode->buffers = 0;
3789                cam_int_reqbufs(currentNode);
3790                ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3791                        selfThread->m_index, currentNode->fd);
3792            }
3793        }
3794#ifdef ENABLE_FRAME_SYNC
3795        // free metabuffers
3796        for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3797            if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3798                freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3799                selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3800                selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3801            }
3802#endif
3803        selfThread->m_isBufferInit = false;
3804        selfThread->m_releasing = false;
3805        selfThread->m_activated = false;
3806        ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3807        return;
3808    }
3809    if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3810        status_t    res;
3811        buffer_handle_t * buf = NULL;
3812        bool found = false;
3813        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3814            __FUNCTION__, selfThread->m_index);
3815        res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3816        if (res != NO_ERROR || buf == NULL) {
3817            ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3818            return;
3819        }
3820        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3821        int checkingIndex = 0;
3822        for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3823            if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3824                found = true;
3825                break;
3826            }
3827        }
3828        ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3829            __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3830
3831        if (!found) return;
3832
3833        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3834            if (selfThread->m_attachedSubStreams[i].streamId == -1)
3835                continue;
3836
3837#ifdef ENABLE_FRAME_SYNC
3838            frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
3839            m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3840#else
3841            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3842#endif
3843            if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3844                m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3845                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3846        }
3847
3848        res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3849        if (res != NO_ERROR) {
3850            ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3851            return;
3852        }
3853        ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
3854            __FUNCTION__,selfThread->m_index);
3855
3856        return;
3857    }
3858    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3859        buffer_handle_t * buf = NULL;
3860        status_t res = 0;
3861        int i, j;
3862        int index;
3863        nsecs_t timestamp;
3864#ifdef ENABLE_FRAME_SYNC
3865        camera2_stream *frame;
3866        uint8_t currentOutputStreams;
3867        bool directOutputEnabled = false;
3868#endif
3869        int numOfUndqbuf = 0;
3870
3871        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3872
3873        m_streamBufferInit(self);
3874
3875        do {
3876            ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3877                selfThread->m_index, selfThread->streamType);
3878
3879#ifdef ENABLE_FRAME_SYNC
3880            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3881            frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3882            frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3883            currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
3884            ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
3885            if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
3886                 ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
3887                directOutputEnabled = true;
3888            }
3889            if (!directOutputEnabled) {
3890                if (!m_nightCaptureFrameCnt)
3891                    m_requestManager->NotifyStreamOutput(frame->rcount);
3892            }
3893#else
3894            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3895            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex())
3896#endif
3897            ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
3898                selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3899
3900            if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
3901                ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3902                       __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3903            selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3904
3905            for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3906                if (selfThread->m_attachedSubStreams[i].streamId == -1)
3907                    continue;
3908#ifdef ENABLE_FRAME_SYNC
3909                if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3910                    m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3911                        selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3912                }
3913#else
3914                if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3915                    m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3916                        selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3917                }
3918#endif
3919            }
3920
3921            if (m_requestManager->GetSkipCnt() <= 0) {
3922#ifdef ENABLE_FRAME_SYNC
3923                if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3924                    ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3925                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3926                            frameTimeStamp,
3927                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3928                }
3929                else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3930                    ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
3931                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3932                                frameTimeStamp,
3933                                &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3934                }
3935                else {
3936                    res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3937                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3938                    ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3939                }
3940#else
3941                if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3942                    ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3943                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3944                            frameTimeStamp,
3945                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3946                }
3947                else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3948                    ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex());
3949                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3950                                frameTimeStamp,
3951                                &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3952                }
3953#endif
3954                ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3955            }
3956            else {
3957                res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3958                        &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3959                ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3960            }
3961#ifdef ENABLE_FRAME_SYNC
3962            if (directOutputEnabled) {
3963                if (!m_nightCaptureFrameCnt)
3964                     m_requestManager->NotifyStreamOutput(frame->rcount);
3965            }
3966#endif
3967            if (res == 0) {
3968                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3969                selfStreamParms->numSvcBufsInHal--;
3970            }
3971            else {
3972                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3973            }
3974
3975        }
3976        while(0);
3977
3978        while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
3979                    < selfStreamParms->minUndequedBuffer) {
3980            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3981            if (res != NO_ERROR || buf == NULL) {
3982                ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index,  res, selfStreamParms->numSvcBufsInHal);
3983                break;
3984            }
3985            selfStreamParms->numSvcBufsInHal++;
3986            ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3987                selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3988               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3989            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3990
3991            bool found = false;
3992            int checkingIndex = 0;
3993            for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3994                if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3995                    found = true;
3996                    break;
3997                }
3998            }
3999            if (!found) break;
4000            selfStreamParms->bufIndex = checkingIndex;
4001            if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
4002                uint32_t    plane_index = 0;
4003                ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
4004                struct v4l2_buffer v4l2_buf;
4005                struct v4l2_plane  planes[VIDEO_MAX_PLANES];
4006
4007                v4l2_buf.m.planes   = planes;
4008                v4l2_buf.type       = currentNode->type;
4009                v4l2_buf.memory     = currentNode->memory;
4010                v4l2_buf.index      = selfStreamParms->bufIndex;
4011                v4l2_buf.length     = currentNode->planes;
4012
4013                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
4014                v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
4015                v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
4016                for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
4017                    v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
4018                }
4019#ifdef ENABLE_FRAME_SYNC
4020                /* add plane for metadata*/
4021                v4l2_buf.length += selfStreamParms->metaPlanes;
4022                v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
4023                v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
4024#endif
4025                if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
4026                    ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
4027                        __FUNCTION__, selfThread->m_index);
4028                    return;
4029                }
4030                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
4031                ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
4032                    __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
4033            }
4034        }
4035
4036        ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
4037    }
4038    return;
4039}
4040
4041void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
4042{
4043    uint32_t                currentSignal   = self->GetProcessingSignal();
4044    StreamThread *          selfThread      = ((StreamThread*)self);
4045    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4046    node_info_t             *currentNode    = selfStreamParms->node;
4047
4048
4049    if (currentSignal & SIGNAL_THREAD_RELEASE) {
4050        CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4051
4052        if (selfThread->m_isBufferInit) {
4053            if (currentNode->fd == m_camera_info.capture.fd) {
4054                if (m_camera_info.capture.status == true) {
4055                    ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4056                    selfThread->m_index, currentNode->fd);
4057                    if (cam_int_streamoff(currentNode) < 0 ){
4058                        ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4059                    } else {
4060                        m_camera_info.capture.status = false;
4061                    }
4062                }
4063            } else {
4064                ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4065                selfThread->m_index, currentNode->fd);
4066                if (cam_int_streamoff(currentNode) < 0 ){
4067                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4068                }
4069            }
4070            ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
4071            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
4072                    selfThread->m_index, currentNode->fd);
4073            currentNode->buffers = 0;
4074            cam_int_reqbufs(currentNode);
4075            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
4076                    selfThread->m_index, currentNode->fd);
4077        }
4078
4079        selfThread->m_isBufferInit = false;
4080        selfThread->m_releasing = false;
4081        selfThread->m_activated = false;
4082        ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4083        return;
4084    }
4085
4086    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
4087#ifdef ENABLE_FRAME_SYNC
4088        camera2_stream *frame;
4089        uint8_t currentOutputStreams;
4090#endif
4091        nsecs_t frameTimeStamp;
4092
4093        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
4094            __FUNCTION__,selfThread->m_index);
4095
4096        m_streamBufferInit(self);
4097
4098        ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
4099        selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
4100        ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
4101            selfThread->m_index, selfStreamParms->bufIndex);
4102
4103#ifdef ENABLE_FRAME_SYNC
4104        frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
4105        frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
4106        currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
4107        ALOGV("frame count(SCC) : %d outputStream(%x)",  frame->rcount, currentOutputStreams);
4108#else
4109        frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
4110#endif
4111
4112        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
4113            if (selfThread->m_attachedSubStreams[i].streamId == -1)
4114                continue;
4115#ifdef ENABLE_FRAME_SYNC
4116            if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4117                m_requestManager->NotifyStreamOutput(frame->rcount);
4118                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4119                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4120            }
4121#else
4122            if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4123                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4124                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4125            }
4126#endif
4127        }
4128        cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
4129        ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
4130
4131
4132
4133        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
4134            __FUNCTION__, selfThread->m_index);
4135    }
4136
4137
4138    return;
4139}
4140
4141void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
4142{
4143    uint32_t                currentSignal   = self->GetProcessingSignal();
4144    StreamThread *          selfThread      = ((StreamThread*)self);
4145    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4146    node_info_t             *currentNode    = selfStreamParms->node;
4147
4148    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
4149
4150    // Do something in Child thread handler
4151    // Should change function to class that inherited StreamThread class to support dynamic stream allocation
4152    if (selfThread->streamType == STREAM_TYPE_DIRECT) {
4153        m_streamFunc_direct(self);
4154    } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
4155        m_streamFunc_indirect(self);
4156    }
4157
4158    return;
4159}
4160int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4161{
4162    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4163    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
4164    status_t    res;
4165    ExynosRect jpegRect;
4166    bool found = false;
4167    int srcW, srcH, srcCropX, srcCropY;
4168    int pictureW, pictureH, pictureFramesize = 0;
4169    int pictureFormat;
4170    int cropX, cropY, cropW, cropH = 0;
4171    ExynosBuffer resizeBufInfo;
4172    ExynosRect   m_jpegPictureRect;
4173    buffer_handle_t * buf = NULL;
4174    camera2_jpeg_blob * jpegBlob = NULL;
4175    int jpegBufSize = 0;
4176
4177    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4178    for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4179        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4180            found = true;
4181            break;
4182        }
4183        subParms->svcBufIndex++;
4184        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4185            subParms->svcBufIndex = 0;
4186    }
4187    if (!found) {
4188        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4189        subParms->svcBufIndex++;
4190        return 1;
4191    }
4192
4193    {
4194        Mutex::Autolock lock(m_jpegEncoderLock);
4195        m_jpegEncodingCount++;
4196    }
4197
4198    m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
4199                    m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
4200                    &srcCropX, &srcCropY,
4201                    &srcW, &srcH,
4202                    0);
4203
4204    m_jpegPictureRect.w = subParms->width;
4205    m_jpegPictureRect.h = subParms->height;
4206
4207     ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d",
4208              __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
4209                   m_jpegPictureRect.w, m_jpegPictureRect.h);
4210
4211    m_getRatioSize(srcW, srcH,
4212                   m_jpegPictureRect.w, m_jpegPictureRect.h,
4213                   &cropX, &cropY,
4214                   &pictureW, &pictureH,
4215                   0);
4216    pictureFormat = V4L2_PIX_FMT_YUYV;
4217    pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
4218
4219    if (m_exynosPictureCSC) {
4220        float zoom_w = 0, zoom_h = 0;
4221        if (m_zoomRatio == 0)
4222            m_zoomRatio = 1;
4223
4224        if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
4225            zoom_w =  pictureW / m_zoomRatio;
4226            zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
4227        } else {
4228            zoom_h = pictureH / m_zoomRatio;
4229            zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
4230        }
4231        cropX = (srcW - zoom_w) / 2;
4232        cropY = (srcH - zoom_h) / 2;
4233        cropW = zoom_w;
4234        cropH = zoom_h;
4235
4236        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4237              __FUNCTION__, cropX, cropY, cropW, cropH);
4238
4239        csc_set_src_format(m_exynosPictureCSC,
4240                           ALIGN(srcW, 16), ALIGN(srcH, 16),
4241                           cropX, cropY, cropW, cropH,
4242                           V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
4243                           0);
4244
4245        csc_set_dst_format(m_exynosPictureCSC,
4246                           m_jpegPictureRect.w, m_jpegPictureRect.h,
4247                           0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
4248                           V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
4249                           0);
4250        for (int i = 0 ; i < 3 ; i++)
4251            ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ",
4252                __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
4253        csc_set_src_buffer(m_exynosPictureCSC,
4254                           (void **)&srcImageBuf->fd.fd);
4255
4256        csc_set_dst_buffer(m_exynosPictureCSC,
4257                           (void **)&m_resizeBuf.fd.fd);
4258        for (int i = 0 ; i < 3 ; i++)
4259            ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
4260                __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
4261
4262        if (csc_convert(m_exynosPictureCSC) != 0)
4263            ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
4264
4265    }
4266    else {
4267        ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
4268    }
4269
4270    resizeBufInfo = m_resizeBuf;
4271
4272    m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
4273
4274    for (int i = 1; i < 3; i++) {
4275        if (m_resizeBuf.size.extS[i] != 0)
4276            m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
4277
4278        ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
4279    }
4280
4281    jpegRect.w = m_jpegPictureRect.w;
4282    jpegRect.h = m_jpegPictureRect.h;
4283    jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
4284
4285    for (int j = 0 ; j < 3 ; j++)
4286        ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
4287            __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
4288            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
4289            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
4290
4291    jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0];
4292    if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) {
4293        ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
4294    } else {
4295        m_resizeBuf = resizeBufInfo;
4296
4297        int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s;
4298        ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__,
4299            m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize);
4300        char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4301        jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]);
4302
4303        if (jpegBuffer[jpegSize-1] == 0)
4304            jpegSize--;
4305        jpegBlob->jpeg_size = jpegSize;
4306        jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID;
4307    }
4308    subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize;
4309    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4310
4311    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4312            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4313    if (res == 0) {
4314        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4315        subParms->numSvcBufsInHal--;
4316    }
4317    else {
4318        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4319    }
4320
4321    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4322    {
4323        bool found = false;
4324        int checkingIndex = 0;
4325
4326        ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4327
4328        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4329        if (res != NO_ERROR || buf == NULL) {
4330            ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4331            break;
4332        }
4333        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4334        subParms->numSvcBufsInHal ++;
4335        ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4336           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4337
4338
4339        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4340            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4341                found = true;
4342                break;
4343            }
4344        }
4345        ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found);
4346
4347        if (!found) {
4348             break;
4349        }
4350
4351        subParms->svcBufIndex = checkingIndex;
4352        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4353            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4354        }
4355        else {
4356            ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4357                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4358        }
4359    }
4360    {
4361        Mutex::Autolock lock(m_jpegEncoderLock);
4362        m_jpegEncodingCount--;
4363    }
4364    return 0;
4365}
4366
4367int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4368{
4369    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4370    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
4371    status_t    res;
4372    ExynosRect jpegRect;
4373    bool found = false;
4374    int cropX, cropY, cropW, cropH = 0;
4375    buffer_handle_t * buf = NULL;
4376
4377    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4378    for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4379        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4380            found = true;
4381            break;
4382        }
4383        subParms->svcBufIndex++;
4384        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4385            subParms->svcBufIndex = 0;
4386    }
4387    if (!found) {
4388        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4389        subParms->svcBufIndex++;
4390        return 1;
4391    }
4392
4393    if (m_exynosVideoCSC) {
4394        int videoW = subParms->width, videoH = subParms->height;
4395        int cropX, cropY, cropW, cropH = 0;
4396        int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4397        m_getRatioSize(previewW, previewH,
4398                       videoW, videoH,
4399                       &cropX, &cropY,
4400                       &cropW, &cropH,
4401                       0);
4402
4403        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4404                 __FUNCTION__, cropX, cropY, cropW, cropH);
4405
4406        csc_set_src_format(m_exynosVideoCSC,
4407                           ALIGN(previewW, 32), previewH,
4408                           cropX, cropY, cropW, cropH,
4409                           selfStreamParms->format,
4410                           0);
4411
4412        csc_set_dst_format(m_exynosVideoCSC,
4413                           videoW, videoH,
4414                           0, 0, videoW, videoH,
4415                           subParms->format,
4416                           1);
4417
4418        csc_set_src_buffer(m_exynosVideoCSC,
4419                        (void **)&srcImageBuf->fd.fd);
4420
4421        csc_set_dst_buffer(m_exynosVideoCSC,
4422            (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
4423
4424        if (csc_convert(m_exynosVideoCSC) != 0) {
4425            ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
4426        }
4427        else {
4428            ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
4429        }
4430    }
4431    else {
4432        ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4433    }
4434
4435    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4436
4437    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4438            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4439    if (res == 0) {
4440        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4441        subParms->numSvcBufsInHal--;
4442    }
4443    else {
4444        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4445    }
4446
4447    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4448    {
4449        bool found = false;
4450        int checkingIndex = 0;
4451
4452        ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4453
4454        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4455        if (res != NO_ERROR || buf == NULL) {
4456            ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4457            break;
4458        }
4459        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4460        subParms->numSvcBufsInHal ++;
4461        ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4462           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4463
4464        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4465            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4466                found = true;
4467                break;
4468            }
4469        }
4470        ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4471
4472        if (!found) {
4473             break;
4474        }
4475
4476        subParms->svcBufIndex = checkingIndex;
4477        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4478            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4479        }
4480        else {
4481            ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4482                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4483        }
4484    }
4485    return 0;
4486}
4487
4488int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4489{
4490    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4491    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
4492    status_t    res;
4493    bool found = false;
4494    int cropX, cropY, cropW, cropH = 0;
4495    buffer_handle_t * buf = NULL;
4496
4497    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4498    for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4499        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4500            found = true;
4501            break;
4502        }
4503        subParms->svcBufIndex++;
4504        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4505            subParms->svcBufIndex = 0;
4506    }
4507    if (!found) {
4508        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4509        subParms->svcBufIndex++;
4510        return 1;
4511    }
4512
4513    if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4514        if (m_exynosVideoCSC) {
4515            int previewCbW = subParms->width, previewCbH = subParms->height;
4516            int cropX, cropY, cropW, cropH = 0;
4517            int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4518            m_getRatioSize(previewW, previewH,
4519                           previewCbW, previewCbH,
4520                           &cropX, &cropY,
4521                           &cropW, &cropH,
4522                           0);
4523
4524            ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4525                     __FUNCTION__, cropX, cropY, cropW, cropH);
4526            csc_set_src_format(m_exynosVideoCSC,
4527                               ALIGN(previewW, 32), previewH,
4528                               cropX, cropY, cropW, cropH,
4529                               selfStreamParms->format,
4530                               0);
4531
4532            csc_set_dst_format(m_exynosVideoCSC,
4533                               previewCbW, previewCbH,
4534                               0, 0, previewCbW, previewCbH,
4535                               subParms->internalFormat,
4536                               1);
4537
4538            csc_set_src_buffer(m_exynosVideoCSC,
4539                        (void **)&srcImageBuf->fd.fd);
4540
4541            csc_set_dst_buffer(m_exynosVideoCSC,
4542                (void **)(&(m_previewCbBuf.fd.fd)));
4543
4544            if (csc_convert(m_exynosVideoCSC) != 0) {
4545                ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4546            }
4547            else {
4548                ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4549            }
4550            if (previewCbW == ALIGN(previewCbW, 16)) {
4551                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4552                    m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4553                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4554                    m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4555            }
4556            else {
4557                // TODO : copy line by line ?
4558            }
4559        }
4560        else {
4561            ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4562        }
4563    }
4564    else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
4565        int previewCbW = subParms->width, previewCbH = subParms->height;
4566        int stride = ALIGN(previewCbW, 16);
4567        int uv_stride = ALIGN(previewCbW/2, 16);
4568        int c_stride = ALIGN(stride / 2, 16);
4569
4570        if (previewCbW == ALIGN(previewCbW, 32)) {
4571            memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4572                srcImageBuf->virt.extP[0], stride * previewCbH);
4573            memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4574                srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4575            memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4576                srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4577        } else {
4578            char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4579            char * srcAddr = (char *)(srcImageBuf->virt.extP[0]);
4580            for (int i = 0 ; i < previewCbH ; i++) {
4581                memcpy(dstAddr, srcAddr, previewCbW);
4582                dstAddr += stride;
4583                srcAddr += ALIGN(stride, 32);
4584            }
4585            dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH);
4586            srcAddr = (char *)(srcImageBuf->virt.extP[1]);
4587            for (int i = 0 ; i < previewCbH/2 ; i++) {
4588                memcpy(dstAddr, srcAddr, previewCbW/2);
4589                dstAddr += c_stride;
4590                srcAddr += uv_stride;
4591            }
4592            srcAddr = (char *)(srcImageBuf->virt.extP[2]);
4593            for (int i = 0 ; i < previewCbH/2 ; i++) {
4594                memcpy(dstAddr, srcAddr, previewCbW/2);
4595                dstAddr += c_stride;
4596                srcAddr += uv_stride;
4597            }
4598        }
4599    }
4600    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4601
4602    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4603            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4604    if (res == 0) {
4605        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4606        subParms->numSvcBufsInHal--;
4607    }
4608    else {
4609        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4610    }
4611
4612    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4613    {
4614        bool found = false;
4615        int checkingIndex = 0;
4616
4617        ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4618
4619        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4620        if (res != NO_ERROR || buf == NULL) {
4621            ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4622            break;
4623        }
4624        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4625        subParms->numSvcBufsInHal ++;
4626        ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4627           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4628
4629
4630        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4631            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4632                found = true;
4633                break;
4634            }
4635        }
4636        ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4637
4638        if (!found) {
4639             break;
4640        }
4641
4642        subParms->svcBufIndex = checkingIndex;
4643        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4644            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4645        }
4646        else {
4647            ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4648                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4649        }
4650    }
4651    return 0;
4652}
4653
4654bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4655{
4656    int sizeOfSupportList;
4657
4658    //REAR Camera
4659    if(this->getCameraId() == 0) {
4660        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
4661
4662        for(int i = 0; i < sizeOfSupportList; i++) {
4663            if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4664                return true;
4665        }
4666
4667    }
4668    else {
4669        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
4670
4671        for(int i = 0; i < sizeOfSupportList; i++) {
4672            if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4673                return true;
4674        }
4675    }
4676
4677    return false;
4678}
4679bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4680                            ExynosBuffer *jpegBuf,
4681                            ExynosRect *rect)
4682{
4683    unsigned char *addr;
4684
4685    ExynosJpegEncoderForCamera jpegEnc;
4686    bool ret = false;
4687    int res = 0;
4688
4689    unsigned int *yuvSize = yuvBuf->size.extS;
4690
4691    if (jpegEnc.create()) {
4692        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4693        goto jpeg_encode_done;
4694    }
4695
4696    if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) {
4697        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4698        goto jpeg_encode_done;
4699    }
4700
4701    if (jpegEnc.setSize(rect->w, rect->h)) {
4702        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4703        goto jpeg_encode_done;
4704    }
4705    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4706
4707    if (jpegEnc.setColorFormat(rect->colorFormat)) {
4708        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4709        goto jpeg_encode_done;
4710    }
4711
4712    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4713        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4714        goto jpeg_encode_done;
4715    }
4716
4717    if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) {
4718        mExifInfo.enableThumb = true;
4719        if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) {
4720            // in the case of unsupported parameter, disable thumbnail
4721            mExifInfo.enableThumb = false;
4722        } else {
4723            m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0];
4724            m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1];
4725        }
4726
4727        ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4728
4729    } else {
4730        mExifInfo.enableThumb = false;
4731    }
4732
4733    if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4734        ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH);
4735        goto jpeg_encode_done;
4736    }
4737
4738    ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW);
4739    if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) {
4740        ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4741        goto jpeg_encode_done;
4742    }
4743
4744    m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4745    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4746    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4747        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4748        goto jpeg_encode_done;
4749    }
4750    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4751        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4752        goto jpeg_encode_done;
4753    }
4754
4755    if (jpegEnc.updateConfig()) {
4756        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4757        goto jpeg_encode_done;
4758    }
4759
4760    if (res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) {
4761        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4762        goto jpeg_encode_done;
4763    }
4764
4765    ret = true;
4766
4767jpeg_encode_done:
4768
4769    if (jpegEnc.flagCreate() == true)
4770        jpegEnc.destroy();
4771
4772    return ret;
4773}
4774
4775void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4776{
4777    m_ctlInfo.flash.m_precaptureTriggerId = id;
4778    m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
4779    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4780        // flash is required
4781        switch (m_ctlInfo.flash.m_flashCnt) {
4782        case IS_FLASH_STATE_AUTO_DONE:
4783        case IS_FLASH_STATE_AUTO_OFF:
4784            // Flash capture sequence, AF flash was executed before
4785            break;
4786        default:
4787            // Full flash sequence
4788            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4789            m_ctlInfo.flash.m_flashEnableFlg = true;
4790            m_ctlInfo.flash.m_flashTimeOut = 0;
4791        }
4792    } else {
4793        // Skip pre-capture in case of non-flash.
4794        ALOGV("[PreCap] Flash OFF mode ");
4795        m_ctlInfo.flash.m_flashEnableFlg = false;
4796        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4797    }
4798    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4799    OnPrecaptureMeteringNotificationSensor();
4800}
4801
4802void ExynosCameraHWInterface2::OnAfTrigger(int id)
4803{
4804    m_afTriggerId = id;
4805
4806    switch (m_afMode) {
4807    case AA_AFMODE_AUTO:
4808    case AA_AFMODE_MACRO:
4809    case AA_AFMODE_MANUAL:
4810        ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
4811        // If flash is enable, Flash operation is executed before triggering AF
4812        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4813                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4814                && (m_cameraId == 0)) {
4815            ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
4816            m_ctlInfo.flash.m_flashEnableFlg = true;
4817            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4818            m_ctlInfo.flash.m_flashDecisionResult = false;
4819            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4820        }
4821        OnAfTriggerAutoMacro(id);
4822        break;
4823    case AA_AFMODE_CONTINUOUS_VIDEO:
4824        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4825        OnAfTriggerCAFVideo(id);
4826        break;
4827    case AA_AFMODE_CONTINUOUS_PICTURE:
4828        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4829        OnAfTriggerCAFPicture(id);
4830        break;
4831
4832    case AA_AFMODE_OFF:
4833    default:
4834        break;
4835    }
4836}
4837
4838void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
4839{
4840    int nextState = NO_TRANSITION;
4841
4842    switch (m_afState) {
4843    case HAL_AFSTATE_INACTIVE:
4844    case HAL_AFSTATE_PASSIVE_FOCUSED:
4845    case HAL_AFSTATE_SCANNING:
4846        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4847        m_IsAfTriggerRequired = true;
4848        break;
4849    case HAL_AFSTATE_NEEDS_COMMAND:
4850        nextState = NO_TRANSITION;
4851        break;
4852    case HAL_AFSTATE_STARTED:
4853        nextState = NO_TRANSITION;
4854        break;
4855    case HAL_AFSTATE_LOCKED:
4856        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4857        m_IsAfTriggerRequired = true;
4858        break;
4859    case HAL_AFSTATE_FAILED:
4860        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4861        m_IsAfTriggerRequired = true;
4862        break;
4863    default:
4864        break;
4865    }
4866    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4867    if (nextState != NO_TRANSITION)
4868        m_afState = nextState;
4869}
4870
4871void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4872{
4873    int nextState = NO_TRANSITION;
4874
4875    switch (m_afState) {
4876    case HAL_AFSTATE_INACTIVE:
4877        nextState = HAL_AFSTATE_FAILED;
4878        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4879        break;
4880    case HAL_AFSTATE_NEEDS_COMMAND:
4881        // not used
4882        break;
4883    case HAL_AFSTATE_STARTED:
4884        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4885        m_AfHwStateFailed = false;
4886        break;
4887    case HAL_AFSTATE_SCANNING:
4888        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4889        m_AfHwStateFailed = false;
4890        // If flash is enable, Flash operation is executed before triggering AF
4891        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4892                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4893                && (m_cameraId == 0)) {
4894            ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4895            m_ctlInfo.flash.m_flashEnableFlg = true;
4896            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4897            m_ctlInfo.flash.m_flashDecisionResult = false;
4898            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4899        }
4900        break;
4901    case HAL_AFSTATE_NEEDS_DETERMINATION:
4902        nextState = NO_TRANSITION;
4903        break;
4904    case HAL_AFSTATE_PASSIVE_FOCUSED:
4905        m_IsAfLockRequired = true;
4906        if (m_AfHwStateFailed) {
4907            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4908            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4909            nextState = HAL_AFSTATE_FAILED;
4910        }
4911        else {
4912            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4913            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4914            nextState = HAL_AFSTATE_LOCKED;
4915        }
4916        m_AfHwStateFailed = false;
4917        break;
4918    case HAL_AFSTATE_LOCKED:
4919        nextState = NO_TRANSITION;
4920        break;
4921    case HAL_AFSTATE_FAILED:
4922        nextState = NO_TRANSITION;
4923        break;
4924    default:
4925        break;
4926    }
4927    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4928    if (nextState != NO_TRANSITION)
4929        m_afState = nextState;
4930}
4931
4932
4933void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
4934{
4935    int nextState = NO_TRANSITION;
4936
4937    switch (m_afState) {
4938    case HAL_AFSTATE_INACTIVE:
4939        nextState = HAL_AFSTATE_FAILED;
4940        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4941        break;
4942    case HAL_AFSTATE_NEEDS_COMMAND:
4943        // not used
4944        break;
4945    case HAL_AFSTATE_STARTED:
4946        m_IsAfLockRequired = true;
4947        nextState = HAL_AFSTATE_FAILED;
4948        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4949        break;
4950    case HAL_AFSTATE_SCANNING:
4951        m_IsAfLockRequired = true;
4952        nextState = HAL_AFSTATE_FAILED;
4953        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4954        break;
4955    case HAL_AFSTATE_NEEDS_DETERMINATION:
4956        // not used
4957        break;
4958    case HAL_AFSTATE_PASSIVE_FOCUSED:
4959        m_IsAfLockRequired = true;
4960        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4961        nextState = HAL_AFSTATE_LOCKED;
4962        break;
4963    case HAL_AFSTATE_LOCKED:
4964        nextState = NO_TRANSITION;
4965        break;
4966    case HAL_AFSTATE_FAILED:
4967        nextState = NO_TRANSITION;
4968        break;
4969    default:
4970        break;
4971    }
4972    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4973    if (nextState != NO_TRANSITION)
4974        m_afState = nextState;
4975}
4976
4977void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
4978{
4979    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4980        // Just noti of pre-capture start
4981        if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
4982            m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4983                        ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4984                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4985            ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4986            m_notifyCb(CAMERA2_MSG_AUTOWB,
4987                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
4988                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4989            m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4990        }
4991    }
4992}
4993
4994void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
4995{
4996    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4997        if (m_ctlInfo.flash.m_flashEnableFlg) {
4998            // flash case
4999            switch (m_ctlInfo.flash.m_flashCnt) {
5000            case IS_FLASH_STATE_AUTO_DONE:
5001            case IS_FLASH_STATE_AUTO_OFF:
5002                if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5003                    // End notification
5004                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5005                                    ANDROID_CONTROL_AE_STATE_CONVERGED,
5006                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5007                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5008                    m_notifyCb(CAMERA2_MSG_AUTOWB,
5009                                    ANDROID_CONTROL_AWB_STATE_CONVERGED,
5010                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5011                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
5012                } else {
5013                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5014                                    ANDROID_CONTROL_AE_STATE_PRECAPTURE,
5015                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5016                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5017                    m_notifyCb(CAMERA2_MSG_AUTOWB,
5018                                    ANDROID_CONTROL_AWB_STATE_CONVERGED,
5019                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5020                    m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
5021                }
5022                break;
5023            case IS_FLASH_STATE_CAPTURE:
5024            case IS_FLASH_STATE_CAPTURE_WAIT:
5025            case IS_FLASH_STATE_CAPTURE_JPEG:
5026            case IS_FLASH_STATE_CAPTURE_END:
5027                ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
5028                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
5029                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5030                        ANDROID_CONTROL_AE_STATE_CONVERGED,
5031                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5032                m_notifyCb(CAMERA2_MSG_AUTOWB,
5033                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
5034                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5035                m_ctlInfo.flash.m_precaptureTriggerId = 0;
5036                break;
5037            }
5038        } else {
5039            // non-flash case
5040            if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5041                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5042                                ANDROID_CONTROL_AE_STATE_CONVERGED,
5043                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5044                ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5045                m_notifyCb(CAMERA2_MSG_AUTOWB,
5046                                ANDROID_CONTROL_AWB_STATE_CONVERGED,
5047                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5048                m_ctlInfo.flash.m_precaptureTriggerId = 0;
5049            }
5050        }
5051    }
5052}
5053
5054void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
5055{
5056    switch (m_afMode) {
5057    case AA_AFMODE_AUTO:
5058    case AA_AFMODE_MACRO:
5059        OnAfNotificationAutoMacro(noti);
5060        break;
5061    case AA_AFMODE_CONTINUOUS_VIDEO:
5062        OnAfNotificationCAFVideo(noti);
5063        break;
5064    case AA_AFMODE_CONTINUOUS_PICTURE:
5065        OnAfNotificationCAFPicture(noti);
5066        break;
5067    case AA_AFMODE_OFF:
5068    default:
5069        break;
5070    }
5071}
5072
5073void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
5074{
5075    int nextState = NO_TRANSITION;
5076    bool bWrongTransition = false;
5077
5078    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
5079        switch (noti) {
5080        case AA_AFSTATE_INACTIVE:
5081        case AA_AFSTATE_ACTIVE_SCAN:
5082        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5083        case AA_AFSTATE_AF_FAILED_FOCUS:
5084        default:
5085            nextState = NO_TRANSITION;
5086            break;
5087        }
5088    }
5089    else if (m_afState == HAL_AFSTATE_STARTED) {
5090        switch (noti) {
5091        case AA_AFSTATE_INACTIVE:
5092            nextState = NO_TRANSITION;
5093            break;
5094        case AA_AFSTATE_ACTIVE_SCAN:
5095            nextState = HAL_AFSTATE_SCANNING;
5096            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
5097            break;
5098        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5099            nextState = NO_TRANSITION;
5100            break;
5101        case AA_AFSTATE_AF_FAILED_FOCUS:
5102            nextState = NO_TRANSITION;
5103            break;
5104        default:
5105            bWrongTransition = true;
5106            break;
5107        }
5108    }
5109    else if (m_afState == HAL_AFSTATE_SCANNING) {
5110        switch (noti) {
5111        case AA_AFSTATE_INACTIVE:
5112            bWrongTransition = true;
5113            break;
5114        case AA_AFSTATE_ACTIVE_SCAN:
5115            nextState = NO_TRANSITION;
5116            break;
5117        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5118            // If Flash mode is enable, after AF execute pre-capture metering
5119            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5120                switch (m_ctlInfo.flash.m_flashCnt) {
5121                case IS_FLASH_STATE_ON_DONE:
5122                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5123                    nextState = NO_TRANSITION;
5124                    break;
5125                case IS_FLASH_STATE_AUTO_DONE:
5126                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5127                    nextState = HAL_AFSTATE_LOCKED;
5128                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5129                    break;
5130                default:
5131                    nextState = NO_TRANSITION;
5132                }
5133            } else {
5134                nextState = HAL_AFSTATE_LOCKED;
5135                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5136            }
5137            break;
5138        case AA_AFSTATE_AF_FAILED_FOCUS:
5139            // If Flash mode is enable, after AF execute pre-capture metering
5140            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5141                switch (m_ctlInfo.flash.m_flashCnt) {
5142                case IS_FLASH_STATE_ON_DONE:
5143                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5144                    nextState = NO_TRANSITION;
5145                    break;
5146                case IS_FLASH_STATE_AUTO_DONE:
5147                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5148                    nextState = HAL_AFSTATE_FAILED;
5149                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5150                    break;
5151                default:
5152                    nextState = NO_TRANSITION;
5153                }
5154            } else {
5155                nextState = HAL_AFSTATE_FAILED;
5156                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5157            }
5158            break;
5159        default:
5160            bWrongTransition = true;
5161            break;
5162        }
5163    }
5164    else if (m_afState == HAL_AFSTATE_LOCKED) {
5165        switch (noti) {
5166            case AA_AFSTATE_INACTIVE:
5167            case AA_AFSTATE_ACTIVE_SCAN:
5168                bWrongTransition = true;
5169                break;
5170            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5171                nextState = NO_TRANSITION;
5172                break;
5173            case AA_AFSTATE_AF_FAILED_FOCUS:
5174            default:
5175                bWrongTransition = true;
5176                break;
5177        }
5178    }
5179    else if (m_afState == HAL_AFSTATE_FAILED) {
5180        switch (noti) {
5181            case AA_AFSTATE_INACTIVE:
5182            case AA_AFSTATE_ACTIVE_SCAN:
5183            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5184                bWrongTransition = true;
5185                break;
5186            case AA_AFSTATE_AF_FAILED_FOCUS:
5187                nextState = NO_TRANSITION;
5188                break;
5189            default:
5190                bWrongTransition = true;
5191                break;
5192        }
5193    }
5194    if (bWrongTransition) {
5195        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5196        return;
5197    }
5198    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5199    if (nextState != NO_TRANSITION)
5200        m_afState = nextState;
5201}
5202
5203void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
5204{
5205    int nextState = NO_TRANSITION;
5206    bool bWrongTransition = false;
5207
5208    if (m_afState == HAL_AFSTATE_INACTIVE) {
5209        switch (noti) {
5210        case AA_AFSTATE_INACTIVE:
5211        case AA_AFSTATE_ACTIVE_SCAN:
5212        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5213        case AA_AFSTATE_AF_FAILED_FOCUS:
5214        default:
5215            nextState = NO_TRANSITION;
5216            break;
5217        }
5218        // Check AF notification after triggering
5219        if (m_ctlInfo.af.m_afTriggerTimeOut > 0) {
5220            if (m_ctlInfo.af.m_afTriggerTimeOut > 5) {
5221                ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__, m_afMode);
5222                SetAfMode(AA_AFMODE_OFF);
5223                SetAfMode(m_afMode);
5224                m_ctlInfo.af.m_afTriggerTimeOut = 0;
5225            } else {
5226                m_ctlInfo.af.m_afTriggerTimeOut++;
5227            }
5228        }
5229    }
5230    else if (m_afState == HAL_AFSTATE_STARTED) {
5231        switch (noti) {
5232        case AA_AFSTATE_INACTIVE:
5233            nextState = NO_TRANSITION;
5234            break;
5235        case AA_AFSTATE_ACTIVE_SCAN:
5236            nextState = HAL_AFSTATE_SCANNING;
5237            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5238            m_ctlInfo.af.m_afTriggerTimeOut = 0;
5239            break;
5240        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5241            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5242            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5243            m_ctlInfo.af.m_afTriggerTimeOut = 0;
5244            break;
5245        case AA_AFSTATE_AF_FAILED_FOCUS:
5246            //nextState = HAL_AFSTATE_FAILED;
5247            //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5248            nextState = NO_TRANSITION;
5249            break;
5250        default:
5251            bWrongTransition = true;
5252            break;
5253        }
5254    }
5255    else if (m_afState == HAL_AFSTATE_SCANNING) {
5256        switch (noti) {
5257        case AA_AFSTATE_INACTIVE:
5258            nextState = NO_TRANSITION;
5259            break;
5260        case AA_AFSTATE_ACTIVE_SCAN:
5261            nextState = NO_TRANSITION;
5262            m_AfHwStateFailed = false;
5263            break;
5264        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5265            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5266            m_AfHwStateFailed = false;
5267            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5268            break;
5269        case AA_AFSTATE_AF_FAILED_FOCUS:
5270            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5271            m_AfHwStateFailed = true;
5272            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5273            break;
5274        default:
5275            bWrongTransition = true;
5276            break;
5277        }
5278    }
5279    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5280        switch (noti) {
5281        case AA_AFSTATE_INACTIVE:
5282            nextState = NO_TRANSITION;
5283            break;
5284        case AA_AFSTATE_ACTIVE_SCAN:
5285            nextState = HAL_AFSTATE_SCANNING;
5286            m_AfHwStateFailed = false;
5287            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5288            break;
5289        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5290            nextState = NO_TRANSITION;
5291            m_AfHwStateFailed = false;
5292            break;
5293        case AA_AFSTATE_AF_FAILED_FOCUS:
5294            nextState = NO_TRANSITION;
5295            m_AfHwStateFailed = true;
5296            break;
5297        default:
5298            bWrongTransition = true;
5299            break;
5300        }
5301    }
5302    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5303        //Skip notification in case of flash, wait the end of flash on
5304        if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5305            if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
5306                return;
5307        }
5308        switch (noti) {
5309        case AA_AFSTATE_INACTIVE:
5310            nextState = NO_TRANSITION;
5311            break;
5312        case AA_AFSTATE_ACTIVE_SCAN:
5313            nextState = NO_TRANSITION;
5314            break;
5315        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5316            // If Flash mode is enable, after AF execute pre-capture metering
5317            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5318                switch (m_ctlInfo.flash.m_flashCnt) {
5319                case IS_FLASH_STATE_ON_DONE:
5320                    ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5321                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5322                    nextState = NO_TRANSITION;
5323                    break;
5324                case IS_FLASH_STATE_AUTO_DONE:
5325                    ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5326                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5327                    m_IsAfLockRequired = true;
5328                    nextState = HAL_AFSTATE_LOCKED;
5329                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5330                    break;
5331                default:
5332                    nextState = NO_TRANSITION;
5333                }
5334            } else {
5335                m_IsAfLockRequired = true;
5336                nextState = HAL_AFSTATE_LOCKED;
5337                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5338            }
5339            break;
5340        case AA_AFSTATE_AF_FAILED_FOCUS:
5341            // If Flash mode is enable, after AF execute pre-capture metering
5342            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5343                switch (m_ctlInfo.flash.m_flashCnt) {
5344                case IS_FLASH_STATE_ON_DONE:
5345                    ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5346                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5347                    nextState = NO_TRANSITION;
5348                    break;
5349                case IS_FLASH_STATE_AUTO_DONE:
5350                    ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5351                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5352                    m_IsAfLockRequired = true;
5353                    nextState = HAL_AFSTATE_FAILED;
5354                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5355                    break;
5356                default:
5357                    nextState = NO_TRANSITION;
5358                }
5359            } else {
5360                m_IsAfLockRequired = true;
5361                nextState = HAL_AFSTATE_FAILED;
5362                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5363            }
5364            break;
5365        default:
5366            bWrongTransition = true;
5367            break;
5368        }
5369    }
5370    else if (m_afState == HAL_AFSTATE_LOCKED) {
5371        switch (noti) {
5372            case AA_AFSTATE_INACTIVE:
5373                nextState = NO_TRANSITION;
5374                break;
5375            case AA_AFSTATE_ACTIVE_SCAN:
5376                bWrongTransition = true;
5377                break;
5378            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5379                nextState = NO_TRANSITION;
5380                break;
5381            case AA_AFSTATE_AF_FAILED_FOCUS:
5382            default:
5383                bWrongTransition = true;
5384                break;
5385        }
5386    }
5387    else if (m_afState == HAL_AFSTATE_FAILED) {
5388        switch (noti) {
5389            case AA_AFSTATE_INACTIVE:
5390                bWrongTransition = true;
5391                break;
5392            case AA_AFSTATE_ACTIVE_SCAN:
5393                nextState = HAL_AFSTATE_SCANNING;
5394                break;
5395            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5396                bWrongTransition = true;
5397                break;
5398            case AA_AFSTATE_AF_FAILED_FOCUS:
5399                nextState = NO_TRANSITION;
5400                break;
5401            default:
5402                bWrongTransition = true;
5403                break;
5404        }
5405    }
5406    if (bWrongTransition) {
5407        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5408        return;
5409    }
5410    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5411    if (nextState != NO_TRANSITION)
5412        m_afState = nextState;
5413}
5414
5415void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
5416{
5417    int nextState = NO_TRANSITION;
5418    bool bWrongTransition = false;
5419
5420    if (m_afState == HAL_AFSTATE_INACTIVE) {
5421        switch (noti) {
5422        case AA_AFSTATE_INACTIVE:
5423        case AA_AFSTATE_ACTIVE_SCAN:
5424        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5425        case AA_AFSTATE_AF_FAILED_FOCUS:
5426        default:
5427            nextState = NO_TRANSITION;
5428            break;
5429        }
5430    }
5431    else if (m_afState == HAL_AFSTATE_STARTED) {
5432        switch (noti) {
5433        case AA_AFSTATE_INACTIVE:
5434            nextState = NO_TRANSITION;
5435            break;
5436        case AA_AFSTATE_ACTIVE_SCAN:
5437            nextState = HAL_AFSTATE_SCANNING;
5438            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5439            break;
5440        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5441            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5442            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5443            break;
5444        case AA_AFSTATE_AF_FAILED_FOCUS:
5445            nextState = HAL_AFSTATE_FAILED;
5446            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5447            break;
5448        default:
5449            bWrongTransition = true;
5450            break;
5451        }
5452    }
5453    else if (m_afState == HAL_AFSTATE_SCANNING) {
5454        switch (noti) {
5455        case AA_AFSTATE_INACTIVE:
5456            bWrongTransition = true;
5457            break;
5458        case AA_AFSTATE_ACTIVE_SCAN:
5459            nextState = NO_TRANSITION;
5460            break;
5461        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5462            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5463            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5464            break;
5465        case AA_AFSTATE_AF_FAILED_FOCUS:
5466            nextState = NO_TRANSITION;
5467            break;
5468        default:
5469            bWrongTransition = true;
5470            break;
5471        }
5472    }
5473    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5474        switch (noti) {
5475        case AA_AFSTATE_INACTIVE:
5476            bWrongTransition = true;
5477            break;
5478        case AA_AFSTATE_ACTIVE_SCAN:
5479            nextState = HAL_AFSTATE_SCANNING;
5480            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5481            break;
5482        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5483            nextState = NO_TRANSITION;
5484            break;
5485        case AA_AFSTATE_AF_FAILED_FOCUS:
5486            nextState = HAL_AFSTATE_FAILED;
5487            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5488            // TODO : needs NO_TRANSITION ?
5489            break;
5490        default:
5491            bWrongTransition = true;
5492            break;
5493        }
5494    }
5495    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5496        switch (noti) {
5497        case AA_AFSTATE_INACTIVE:
5498            bWrongTransition = true;
5499            break;
5500        case AA_AFSTATE_ACTIVE_SCAN:
5501            nextState = NO_TRANSITION;
5502            break;
5503        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5504            m_IsAfLockRequired = true;
5505            nextState = HAL_AFSTATE_LOCKED;
5506            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5507            break;
5508        case AA_AFSTATE_AF_FAILED_FOCUS:
5509            nextState = HAL_AFSTATE_FAILED;
5510            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5511            break;
5512        default:
5513            bWrongTransition = true;
5514            break;
5515        }
5516    }
5517    else if (m_afState == HAL_AFSTATE_LOCKED) {
5518        switch (noti) {
5519            case AA_AFSTATE_INACTIVE:
5520                nextState = NO_TRANSITION;
5521                break;
5522            case AA_AFSTATE_ACTIVE_SCAN:
5523                bWrongTransition = true;
5524                break;
5525            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5526                nextState = NO_TRANSITION;
5527                break;
5528            case AA_AFSTATE_AF_FAILED_FOCUS:
5529            default:
5530                bWrongTransition = true;
5531                break;
5532        }
5533    }
5534    else if (m_afState == HAL_AFSTATE_FAILED) {
5535        switch (noti) {
5536            case AA_AFSTATE_INACTIVE:
5537            case AA_AFSTATE_ACTIVE_SCAN:
5538            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5539                bWrongTransition = true;
5540                break;
5541            case AA_AFSTATE_AF_FAILED_FOCUS:
5542                nextState = NO_TRANSITION;
5543                break;
5544            default:
5545                bWrongTransition = true;
5546                break;
5547        }
5548    }
5549    if (bWrongTransition) {
5550        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5551        return;
5552    }
5553    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5554    if (nextState != NO_TRANSITION)
5555        m_afState = nextState;
5556}
5557
5558void ExynosCameraHWInterface2::OnAfCancel(int id)
5559{
5560    m_afTriggerId = id;
5561
5562    switch (m_afMode) {
5563    case AA_AFMODE_AUTO:
5564    case AA_AFMODE_MACRO:
5565    case AA_AFMODE_OFF:
5566    case AA_AFMODE_MANUAL:
5567        OnAfCancelAutoMacro(id);
5568        break;
5569    case AA_AFMODE_CONTINUOUS_VIDEO:
5570        OnAfCancelCAFVideo(id);
5571        break;
5572    case AA_AFMODE_CONTINUOUS_PICTURE:
5573        OnAfCancelCAFPicture(id);
5574        break;
5575    default:
5576        break;
5577    }
5578}
5579
5580void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
5581{
5582    int nextState = NO_TRANSITION;
5583
5584    if (m_ctlInfo.flash.m_flashEnableFlg  && m_ctlInfo.flash.m_afFlashDoneFlg) {
5585        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5586    }
5587    switch (m_afState) {
5588    case HAL_AFSTATE_INACTIVE:
5589        nextState = NO_TRANSITION;
5590        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5591        break;
5592    case HAL_AFSTATE_NEEDS_COMMAND:
5593    case HAL_AFSTATE_STARTED:
5594    case HAL_AFSTATE_SCANNING:
5595    case HAL_AFSTATE_LOCKED:
5596    case HAL_AFSTATE_FAILED:
5597        SetAfMode(AA_AFMODE_OFF);
5598        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5599        nextState = HAL_AFSTATE_INACTIVE;
5600        break;
5601    default:
5602        break;
5603    }
5604    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5605    if (nextState != NO_TRANSITION)
5606        m_afState = nextState;
5607}
5608
5609void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
5610{
5611    int nextState = NO_TRANSITION;
5612
5613    switch (m_afState) {
5614    case HAL_AFSTATE_INACTIVE:
5615        nextState = NO_TRANSITION;
5616        break;
5617    case HAL_AFSTATE_NEEDS_COMMAND:
5618    case HAL_AFSTATE_STARTED:
5619    case HAL_AFSTATE_SCANNING:
5620    case HAL_AFSTATE_LOCKED:
5621    case HAL_AFSTATE_FAILED:
5622    case HAL_AFSTATE_NEEDS_DETERMINATION:
5623    case HAL_AFSTATE_PASSIVE_FOCUSED:
5624        SetAfMode(AA_AFMODE_OFF);
5625        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5626        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5627        nextState = HAL_AFSTATE_INACTIVE;
5628        break;
5629    default:
5630        break;
5631    }
5632    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5633    if (nextState != NO_TRANSITION)
5634        m_afState = nextState;
5635}
5636
5637void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
5638{
5639    int nextState = NO_TRANSITION;
5640
5641    switch (m_afState) {
5642    case HAL_AFSTATE_INACTIVE:
5643        nextState = NO_TRANSITION;
5644        break;
5645    case HAL_AFSTATE_NEEDS_COMMAND:
5646    case HAL_AFSTATE_STARTED:
5647    case HAL_AFSTATE_SCANNING:
5648    case HAL_AFSTATE_LOCKED:
5649    case HAL_AFSTATE_FAILED:
5650    case HAL_AFSTATE_NEEDS_DETERMINATION:
5651    case HAL_AFSTATE_PASSIVE_FOCUSED:
5652        SetAfMode(AA_AFMODE_OFF);
5653        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5654        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5655        nextState = HAL_AFSTATE_INACTIVE;
5656        break;
5657    default:
5658        break;
5659    }
5660    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5661    if (nextState != NO_TRANSITION)
5662        m_afState = nextState;
5663}
5664
5665void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5666{
5667    if (m_serviceAfState != newState || newState == 0)
5668        m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5669    m_serviceAfState = newState;
5670}
5671
5672int ExynosCameraHWInterface2::GetAfStateForService()
5673{
5674   return m_serviceAfState;
5675}
5676
5677void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5678{
5679    if (m_afMode != afMode) {
5680        if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) {
5681            m_afMode2 = afMode;
5682            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5683        }
5684        else {
5685            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5686            m_IsAfModeUpdateRequired = true;
5687            m_afMode = afMode;
5688            SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5689            m_afState = HAL_AFSTATE_INACTIVE;
5690        }
5691    }
5692}
5693
5694void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5695{
5696    char property[PROPERTY_VALUE_MAX];
5697
5698    //2 0th IFD TIFF Tags
5699    //3 Maker
5700    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5701    strncpy((char *)mExifInfo.maker, property,
5702                sizeof(mExifInfo.maker) - 1);
5703    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5704    //3 Model
5705    property_get("ro.product.model", property, EXIF_DEF_MODEL);
5706    strncpy((char *)mExifInfo.model, property,
5707                sizeof(mExifInfo.model) - 1);
5708    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5709    //3 Software
5710    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5711    strncpy((char *)mExifInfo.software, property,
5712                sizeof(mExifInfo.software) - 1);
5713    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5714
5715    //3 YCbCr Positioning
5716    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5717
5718    //2 0th IFD Exif Private Tags
5719    //3 F Number
5720    mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5721    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5722    //3 Exposure Program
5723    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5724    //3 Exif Version
5725    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5726    //3 Aperture
5727    double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5728    mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5729    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5730    //3 Maximum lens aperture
5731    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5732    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5733    //3 Lens Focal Length
5734    mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5735
5736    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5737    //3 User Comments
5738    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5739    //3 Color Space information
5740    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5741    //3 Exposure Mode
5742    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5743
5744    //2 0th IFD GPS Info Tags
5745    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5746    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5747
5748    //2 1th IFD TIFF Tags
5749    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5750    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5751    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5752    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5753    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5754    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5755}
5756
5757void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5758	camera2_shot_ext *currentEntry)
5759{
5760    camera2_dm *dm = &(currentEntry->shot.dm);
5761    camera2_ctl *ctl = &(currentEntry->shot.ctl);
5762
5763    ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue );
5764    if (!ctl->request.frameCount)
5765       return;
5766    //2 0th IFD TIFF Tags
5767    //3 Width
5768    exifInfo->width = rect->w;
5769    //3 Height
5770    exifInfo->height = rect->h;
5771    //3 Orientation
5772    switch (ctl->jpeg.orientation) {
5773    case 90:
5774        exifInfo->orientation = EXIF_ORIENTATION_90;
5775        break;
5776    case 180:
5777        exifInfo->orientation = EXIF_ORIENTATION_180;
5778        break;
5779    case 270:
5780        exifInfo->orientation = EXIF_ORIENTATION_270;
5781        break;
5782    case 0:
5783    default:
5784        exifInfo->orientation = EXIF_ORIENTATION_UP;
5785        break;
5786    }
5787
5788    //3 Date time
5789    time_t rawtime;
5790    struct tm *timeinfo;
5791    time(&rawtime);
5792    timeinfo = localtime(&rawtime);
5793    strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5794
5795    //2 0th IFD Exif Private Tags
5796    //3 Exposure Time
5797    int shutterSpeed = (dm->sensor.exposureTime/1000);
5798
5799    // To display exposure time just above 500ms as 1/2sec, not 1 sec.
5800    if (shutterSpeed > 500000)
5801        shutterSpeed -=  100000;
5802
5803    if (shutterSpeed < 0) {
5804        shutterSpeed = 100;
5805    }
5806
5807    exifInfo->exposure_time.num = 1;
5808    // x us -> 1/x s */
5809    //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5810    exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5811
5812    //3 ISO Speed Rating
5813    exifInfo->iso_speed_rating = dm->aa.isoValue;
5814
5815    uint32_t av, tv, bv, sv, ev;
5816    av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5817    tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5818    sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5819    bv = av + tv - sv;
5820    ev = av + tv;
5821    //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5822    ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv);
5823
5824    //3 Shutter Speed
5825    exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5826    exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5827    //3 Brightness
5828    exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5829    exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5830    //3 Exposure Bias
5831    if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH||
5832        ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) {
5833        exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5834        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5835    } else {
5836        exifInfo->exposure_bias.num = 0;
5837        exifInfo->exposure_bias.den = 0;
5838    }
5839    //3 Metering Mode
5840    /*switch (m_curCameraInfo->metering) {
5841    case METERING_MODE_CENTER:
5842        exifInfo->metering_mode = EXIF_METERING_CENTER;
5843        break;
5844    case METERING_MODE_MATRIX:
5845        exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5846        break;
5847    case METERING_MODE_SPOT:
5848        exifInfo->metering_mode = EXIF_METERING_SPOT;
5849        break;
5850    case METERING_MODE_AVERAGE:
5851    default:
5852        exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5853        break;
5854    }*/
5855    exifInfo->metering_mode = EXIF_METERING_CENTER;
5856
5857    //3 Flash
5858    if (m_ctlInfo.flash.m_flashDecisionResult)
5859        exifInfo->flash = 1;
5860    else
5861        exifInfo->flash = EXIF_DEF_FLASH;
5862
5863    //3 White Balance
5864    if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO)
5865        exifInfo->white_balance = EXIF_WB_AUTO;
5866    else
5867        exifInfo->white_balance = EXIF_WB_MANUAL;
5868
5869    //3 Scene Capture Type
5870    switch (ctl->aa.sceneMode) {
5871    case AA_SCENE_MODE_PORTRAIT:
5872        exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5873        break;
5874    case AA_SCENE_MODE_LANDSCAPE:
5875        exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5876        break;
5877    case AA_SCENE_MODE_NIGHT_PORTRAIT:
5878        exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5879        break;
5880    default:
5881        exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5882        break;
5883    }
5884
5885    //2 0th IFD GPS Info Tags
5886    if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5887
5888        if (ctl->jpeg.gpsCoordinates[0] > 0)
5889            strcpy((char *)exifInfo->gps_latitude_ref, "N");
5890        else
5891            strcpy((char *)exifInfo->gps_latitude_ref, "S");
5892
5893        if (ctl->jpeg.gpsCoordinates[1] > 0)
5894            strcpy((char *)exifInfo->gps_longitude_ref, "E");
5895        else
5896            strcpy((char *)exifInfo->gps_longitude_ref, "W");
5897
5898        if (ctl->jpeg.gpsCoordinates[2] > 0)
5899            exifInfo->gps_altitude_ref = 0;
5900        else
5901            exifInfo->gps_altitude_ref = 1;
5902
5903        double latitude = fabs(ctl->jpeg.gpsCoordinates[0]);
5904        double longitude = fabs(ctl->jpeg.gpsCoordinates[1]);
5905        double altitude = fabs(ctl->jpeg.gpsCoordinates[2]);
5906
5907        exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5908        exifInfo->gps_latitude[0].den = 1;
5909        exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5910        exifInfo->gps_latitude[1].den = 1;
5911        exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60)
5912                                        - exifInfo->gps_latitude[1].num) * 60);
5913        exifInfo->gps_latitude[2].den = 1;
5914
5915        exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5916        exifInfo->gps_longitude[0].den = 1;
5917        exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5918        exifInfo->gps_longitude[1].den = 1;
5919        exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60)
5920                                        - exifInfo->gps_longitude[1].num) * 60);
5921        exifInfo->gps_longitude[2].den = 1;
5922
5923        exifInfo->gps_altitude.num = (uint32_t)round(altitude);
5924        exifInfo->gps_altitude.den = 1;
5925
5926        struct tm tm_data;
5927        long timestamp;
5928        timestamp = (long)ctl->jpeg.gpsTimestamp;
5929        gmtime_r(&timestamp, &tm_data);
5930        exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5931        exifInfo->gps_timestamp[0].den = 1;
5932        exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5933        exifInfo->gps_timestamp[1].den = 1;
5934        exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5935        exifInfo->gps_timestamp[2].den = 1;
5936        snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5937                "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5938
5939        memset(exifInfo->gps_processing_method, 0, 100);
5940        memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32);
5941        exifInfo->enableGps = true;
5942    } else {
5943        exifInfo->enableGps = false;
5944    }
5945
5946    //2 1th IFD TIFF Tags
5947    exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5948    exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5949}
5950
5951ExynosCameraHWInterface2::MainThread::~MainThread()
5952{
5953    ALOGV("(%s):", __FUNCTION__);
5954}
5955
5956void ExynosCameraHWInterface2::MainThread::release()
5957{
5958    ALOGV("(%s):", __func__);
5959    SetSignal(SIGNAL_THREAD_RELEASE);
5960}
5961
5962ExynosCameraHWInterface2::SensorThread::~SensorThread()
5963{
5964    ALOGV("(%s):", __FUNCTION__);
5965}
5966
5967void ExynosCameraHWInterface2::SensorThread::release()
5968{
5969    ALOGV("(%s):", __func__);
5970    SetSignal(SIGNAL_THREAD_RELEASE);
5971}
5972
5973ExynosCameraHWInterface2::StreamThread::~StreamThread()
5974{
5975    ALOGV("(%s):", __FUNCTION__);
5976}
5977
5978void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
5979{
5980    ALOGV("DEBUG(%s):", __FUNCTION__);
5981    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
5982}
5983
5984void ExynosCameraHWInterface2::StreamThread::release()
5985{
5986    ALOGV("(%s):", __func__);
5987    SetSignal(SIGNAL_THREAD_RELEASE);
5988}
5989
5990int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5991{
5992    int index;
5993    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5994        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
5995            return index;
5996    }
5997    return -1;
5998}
5999
6000int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
6001{
6002    int index;
6003    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
6004        if (m_parameters.svcBufHandle[index] == *bufHandle)
6005            return index;
6006    }
6007    return -1;
6008}
6009
6010status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
6011{
6012    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6013    int index, vacantIndex;
6014    bool vacancy = false;
6015
6016    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6017        if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
6018            vacancy = true;
6019            vacantIndex = index;
6020        } else if (m_attachedSubStreams[index].streamId == stream_id) {
6021            return BAD_VALUE;
6022        }
6023    }
6024    if (!vacancy)
6025        return NO_MEMORY;
6026    m_attachedSubStreams[vacantIndex].streamId = stream_id;
6027    m_attachedSubStreams[vacantIndex].priority = priority;
6028    m_numRegisteredStream++;
6029    return NO_ERROR;
6030}
6031
6032status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
6033{
6034    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6035    int index;
6036    bool found = false;
6037
6038    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6039        if (m_attachedSubStreams[index].streamId == stream_id) {
6040            found = true;
6041            break;
6042        }
6043    }
6044    if (!found)
6045        return BAD_VALUE;
6046    m_attachedSubStreams[index].streamId = -1;
6047    m_attachedSubStreams[index].priority = 0;
6048    m_numRegisteredStream--;
6049    return NO_ERROR;
6050}
6051
6052int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
6053{
6054    if (ionClient == 0) {
6055        ionClient = ion_client_create();
6056        if (ionClient < 0) {
6057            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
6058            return 0;
6059        }
6060    }
6061    return ionClient;
6062}
6063
6064int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
6065{
6066    if (ionClient != 0) {
6067        if (ionClient > 0) {
6068            ion_client_destroy(ionClient);
6069        }
6070        ionClient = 0;
6071    }
6072    return ionClient;
6073}
6074
6075int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
6076{
6077    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
6078}
6079
6080int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
6081{
6082    int ret = 0;
6083    int i = 0;
6084    int flag = 0;
6085
6086    if (ionClient == 0) {
6087        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
6088        return -1;
6089    }
6090
6091    for (i = 0 ; i < iMemoryNum ; i++) {
6092        if (buf->size.extS[i] == 0) {
6093            break;
6094        }
6095        if (1 << i & cacheFlag)
6096            flag = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC;
6097        else
6098            flag = 0;
6099        buf->fd.extFd[i] = ion_alloc(ionClient, \
6100                                      buf->size.extS[i], 0, ION_HEAP_SYSTEM_MASK, flag);
6101        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
6102            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
6103            buf->fd.extFd[i] = -1;
6104            freeCameraMemory(buf, iMemoryNum);
6105            return -1;
6106        }
6107
6108        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
6109                                        buf->size.extS[i], 0);
6110        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
6111            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
6112            buf->virt.extP[i] = (char *)MAP_FAILED;
6113            freeCameraMemory(buf, iMemoryNum);
6114            return -1;
6115        }
6116        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
6117    }
6118
6119    return ret;
6120}
6121
6122void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6123{
6124
6125    int i = 0 ;
6126    int ret = 0;
6127
6128    for (i=0;i<iMemoryNum;i++) {
6129        if (buf->fd.extFd[i] != -1) {
6130            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
6131                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
6132                if (ret < 0)
6133                    ALOGE("ERR(%s)", __FUNCTION__);
6134            }
6135            ion_free(buf->fd.extFd[i]);
6136        ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
6137        }
6138        buf->fd.extFd[i] = -1;
6139        buf->virt.extP[i] = (char *)MAP_FAILED;
6140        buf->size.extS[i] = 0;
6141    }
6142}
6143
6144void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6145{
6146    int i =0 ;
6147    for (i=0;i<iMemoryNum;i++) {
6148        buf->virt.extP[i] = (char *)MAP_FAILED;
6149        buf->fd.extFd[i] = -1;
6150        buf->size.extS[i] = 0;
6151    }
6152}
6153
6154
6155
6156
6157static camera2_device_t *g_cam2_device = NULL;
6158static bool g_camera_vaild = false;
6159static Mutex g_camera_mutex;
6160ExynosCamera2 * g_camera2[2] = { NULL, NULL };
6161
6162static int HAL2_camera_device_close(struct hw_device_t* device)
6163{
6164    Mutex::Autolock lock(g_camera_mutex);
6165    ALOGD("(%s): ENTER", __FUNCTION__);
6166    if (device) {
6167
6168        camera2_device_t *cam_device = (camera2_device_t *)device;
6169        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
6170        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
6171        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
6172        free(cam_device);
6173        g_camera_vaild = false;
6174        g_cam2_device = NULL;
6175    }
6176
6177    ALOGD("(%s): EXIT", __FUNCTION__);
6178    return 0;
6179}
6180
6181static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
6182{
6183    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
6184}
6185
6186static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
6187            const camera2_request_queue_src_ops_t *request_src_ops)
6188{
6189    ALOGV("DEBUG(%s):", __FUNCTION__);
6190    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
6191}
6192
6193static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
6194{
6195    ALOGV("DEBUG(%s):", __FUNCTION__);
6196    return obj(dev)->notifyRequestQueueNotEmpty();
6197}
6198
6199static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
6200            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
6201{
6202    ALOGV("DEBUG(%s):", __FUNCTION__);
6203    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
6204}
6205
6206static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
6207{
6208    ALOGV("DEBUG(%s):", __FUNCTION__);
6209    return obj(dev)->getInProgressCount();
6210}
6211
6212static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
6213{
6214    ALOGV("DEBUG(%s):", __FUNCTION__);
6215    return obj(dev)->flushCapturesInProgress();
6216}
6217
6218static int HAL2_device_construct_default_request(const struct camera2_device *dev,
6219            int request_template, camera_metadata_t **request)
6220{
6221    ALOGV("DEBUG(%s):", __FUNCTION__);
6222    return obj(dev)->constructDefaultRequest(request_template, request);
6223}
6224
6225static int HAL2_device_allocate_stream(
6226            const struct camera2_device *dev,
6227            // inputs
6228            uint32_t width,
6229            uint32_t height,
6230            int      format,
6231            const camera2_stream_ops_t *stream_ops,
6232            // outputs
6233            uint32_t *stream_id,
6234            uint32_t *format_actual,
6235            uint32_t *usage,
6236            uint32_t *max_buffers)
6237{
6238    ALOGV("(%s): ", __FUNCTION__);
6239    return obj(dev)->allocateStream(width, height, format, stream_ops,
6240                                    stream_id, format_actual, usage, max_buffers);
6241}
6242
6243static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
6244            uint32_t stream_id,
6245            int num_buffers,
6246            buffer_handle_t *buffers)
6247{
6248    ALOGV("DEBUG(%s):", __FUNCTION__);
6249    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
6250}
6251
6252static int HAL2_device_release_stream(
6253        const struct camera2_device *dev,
6254            uint32_t stream_id)
6255{
6256    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
6257    if (!g_camera_vaild)
6258        return 0;
6259    return obj(dev)->releaseStream(stream_id);
6260}
6261
6262static int HAL2_device_allocate_reprocess_stream(
6263           const struct camera2_device *dev,
6264            uint32_t width,
6265            uint32_t height,
6266            uint32_t format,
6267            const camera2_stream_in_ops_t *reprocess_stream_ops,
6268            // outputs
6269            uint32_t *stream_id,
6270            uint32_t *consumer_usage,
6271            uint32_t *max_buffers)
6272{
6273    ALOGV("DEBUG(%s):", __FUNCTION__);
6274    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
6275                                    stream_id, consumer_usage, max_buffers);
6276}
6277
6278static int HAL2_device_allocate_reprocess_stream_from_stream(
6279           const struct camera2_device *dev,
6280            uint32_t output_stream_id,
6281            const camera2_stream_in_ops_t *reprocess_stream_ops,
6282            // outputs
6283            uint32_t *stream_id)
6284{
6285    ALOGV("DEBUG(%s):", __FUNCTION__);
6286    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
6287                                    reprocess_stream_ops, stream_id);
6288}
6289
6290static int HAL2_device_release_reprocess_stream(
6291        const struct camera2_device *dev,
6292            uint32_t stream_id)
6293{
6294    ALOGV("DEBUG(%s):", __FUNCTION__);
6295    return obj(dev)->releaseReprocessStream(stream_id);
6296}
6297
6298static int HAL2_device_trigger_action(const struct camera2_device *dev,
6299           uint32_t trigger_id,
6300            int ext1,
6301            int ext2)
6302{
6303    ALOGV("DEBUG(%s):", __FUNCTION__);
6304    if (!g_camera_vaild)
6305        return 0;
6306    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
6307}
6308
6309static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
6310            camera2_notify_callback notify_cb,
6311            void *user)
6312{
6313    ALOGV("DEBUG(%s):", __FUNCTION__);
6314    return obj(dev)->setNotifyCallback(notify_cb, user);
6315}
6316
6317static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
6318            vendor_tag_query_ops_t **ops)
6319{
6320    ALOGV("DEBUG(%s):", __FUNCTION__);
6321    return obj(dev)->getMetadataVendorTagOps(ops);
6322}
6323
6324static int HAL2_device_dump(const struct camera2_device *dev, int fd)
6325{
6326    ALOGV("DEBUG(%s):", __FUNCTION__);
6327    return obj(dev)->dump(fd);
6328}
6329
6330
6331
6332
6333
6334static int HAL2_getNumberOfCameras()
6335{
6336    ALOGV("(%s): returning 2", __FUNCTION__);
6337    return 2;
6338}
6339
6340
6341static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
6342{
6343    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
6344    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
6345
6346    status_t res;
6347
6348    if (cameraId == 0) {
6349        info->facing = CAMERA_FACING_BACK;
6350        if (!g_camera2[0])
6351            g_camera2[0] = new ExynosCamera2(0);
6352    }
6353    else if (cameraId == 1) {
6354        info->facing = CAMERA_FACING_FRONT;
6355        if (!g_camera2[1])
6356            g_camera2[1] = new ExynosCamera2(1);
6357    }
6358    else
6359        return BAD_VALUE;
6360
6361    info->orientation = 0;
6362    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
6363    if (mCameraInfo[cameraId] == NULL) {
6364        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
6365        if (res != OK) {
6366            ALOGE("%s: Unable to allocate static info: %s (%d)",
6367                    __FUNCTION__, strerror(-res), res);
6368            return res;
6369        }
6370        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
6371        if (res != OK) {
6372            ALOGE("%s: Unable to fill in static info: %s (%d)",
6373                    __FUNCTION__, strerror(-res), res);
6374            return res;
6375        }
6376    }
6377    info->static_camera_characteristics = mCameraInfo[cameraId];
6378    return NO_ERROR;
6379}
6380
6381#define SET_METHOD(m) m : HAL2_device_##m
6382
6383static camera2_device_ops_t camera2_device_ops = {
6384        SET_METHOD(set_request_queue_src_ops),
6385        SET_METHOD(notify_request_queue_not_empty),
6386        SET_METHOD(set_frame_queue_dst_ops),
6387        SET_METHOD(get_in_progress_count),
6388        SET_METHOD(flush_captures_in_progress),
6389        SET_METHOD(construct_default_request),
6390        SET_METHOD(allocate_stream),
6391        SET_METHOD(register_stream_buffers),
6392        SET_METHOD(release_stream),
6393        SET_METHOD(allocate_reprocess_stream),
6394        SET_METHOD(allocate_reprocess_stream_from_stream),
6395        SET_METHOD(release_reprocess_stream),
6396        SET_METHOD(trigger_action),
6397        SET_METHOD(set_notify_callback),
6398        SET_METHOD(get_metadata_vendor_tag_ops),
6399        SET_METHOD(dump),
6400};
6401
6402#undef SET_METHOD
6403
6404
6405static int HAL2_camera_device_open(const struct hw_module_t* module,
6406                                  const char *id,
6407                                  struct hw_device_t** device)
6408{
6409    int cameraId = atoi(id);
6410    int openInvalid = 0;
6411
6412    Mutex::Autolock lock(g_camera_mutex);
6413    if (g_camera_vaild) {
6414        ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
6415        return -EUSERS;
6416    }
6417    g_camera_vaild = false;
6418    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
6419    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
6420        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
6421        return -EINVAL;
6422    }
6423
6424    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
6425    if (g_cam2_device) {
6426        if (obj(g_cam2_device)->getCameraId() == cameraId) {
6427            ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
6428            goto done;
6429        } else {
6430            ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
6431            while (g_cam2_device)
6432                usleep(SIG_WAITING_TICK);
6433            ALOGD("(%s): END   waiting for cam device free", __FUNCTION__);
6434        }
6435    }
6436
6437    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
6438    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
6439
6440    if (!g_cam2_device)
6441        return -ENOMEM;
6442
6443    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
6444    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
6445    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
6446    g_cam2_device->common.close   = HAL2_camera_device_close;
6447
6448    g_cam2_device->ops = &camera2_device_ops;
6449
6450    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
6451
6452    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
6453    if (!openInvalid) {
6454        ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
6455        return -ENODEV;
6456    }
6457done:
6458    *device = (hw_device_t *)g_cam2_device;
6459    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
6460    g_camera_vaild = true;
6461
6462    return 0;
6463}
6464
6465
6466static hw_module_methods_t camera_module_methods = {
6467            open : HAL2_camera_device_open
6468};
6469
6470extern "C" {
6471    struct camera_module HAL_MODULE_INFO_SYM = {
6472      common : {
6473          tag                : HARDWARE_MODULE_TAG,
6474          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
6475          hal_api_version    : HARDWARE_HAL_API_VERSION,
6476          id                 : CAMERA_HARDWARE_MODULE_ID,
6477          name               : "Exynos Camera HAL2",
6478          author             : "Samsung Corporation",
6479          methods            : &camera_module_methods,
6480          dso:                NULL,
6481          reserved:           {0},
6482      },
6483      get_number_of_cameras : HAL2_getNumberOfCameras,
6484      get_camera_info       : HAL2_getCameraInfo
6485    };
6486}
6487
6488}; // namespace android
6489