/*
**
** Copyright 2008, The Android Open Source Project
** Copyright 2012, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*!
 * \file      ExynosCameraHWInterface2.cpp
 * \brief     source file for Android Camera API 2.0 HAL
 * \author    Sungjoong Kang(sj3.kang@samsung.com)
 * \date      2012/07/10
 *
 * <b>Revision History: </b>
 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   Initial Release
 *
 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   2nd Release
 *
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ExynosCameraHAL2"
#include <utils/Log.h>

#include "ExynosCameraHWInterface2.h"
#include "exynos_format.h"
namespace android {

void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
{
    int nw;
    int cnt = 0;
    uint32_t written = 0;

    ALOGV("opening file [%s], address[%p], size(%d)", fname, buf, size);
    int fd = open(fname, O_RDWR | O_CREAT, 0644);
    if (fd < 0) {
        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
        return;
    }

    ALOGV("writing %d bytes to file [%s]", size, fname);
    while (written < size) {
        nw = ::write(fd, buf + written, size - written);
        if (nw < 0) {
            ALOGE("failed to write to file [%s] after %d bytes: %s", fname, written, strerror(errno));
            break;
        }
        written += nw;
        cnt++;
    }
    ALOGV("done writing %d bytes to file [%s] in %d passes", size, fname, cnt);
    ::close(fd);
}
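
/* Usage sketch (debug only): dump a postview image to the filesystem.
 * The path and variable names below are hypothetical, not part of this HAL.
 *   m_savePostView("/data/camera_postview.yuv", postviewBuf, postviewSize);
 */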

int get_pixel_depth(uint32_t fmt)
{
    int depth = 0;

    switch (fmt) {
    case V4L2_PIX_FMT_JPEG:
        depth = 8;
        break;

    case V4L2_PIX_FMT_NV12:
    case V4L2_PIX_FMT_NV21:
    case V4L2_PIX_FMT_YUV420:
    case V4L2_PIX_FMT_YVU420M:
    case V4L2_PIX_FMT_NV12M:
    case V4L2_PIX_FMT_NV12MT:
        depth = 12;
        break;

    case V4L2_PIX_FMT_RGB565:
    case V4L2_PIX_FMT_YUYV:
    case V4L2_PIX_FMT_YVYU:
    case V4L2_PIX_FMT_UYVY:
    case V4L2_PIX_FMT_VYUY:
    case V4L2_PIX_FMT_NV16:
    case V4L2_PIX_FMT_NV61:
    case V4L2_PIX_FMT_YUV422P:
    case V4L2_PIX_FMT_SBGGR10:
    case V4L2_PIX_FMT_SBGGR12:
    case V4L2_PIX_FMT_SBGGR16:
        depth = 16;
        break;

    case V4L2_PIX_FMT_RGB32:
        depth = 32;
        break;
    default:
        ALOGE("Get depth failed(format : %d)", fmt);
        break;
    }

    return depth;
}
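
/* Sketch of how the depth value is used: the per-frame payload in bytes is
 * width * height * depth / 8, which is the same arithmetic cam_int_s_fmt()
 * performs below. This helper and the 1920x1080 NV12 figures are illustrative
 * only; the function is not called anywhere in this HAL. */
static unsigned int example_frame_bytes(uint32_t fmt, int width, int height)
{
    // e.g. 1920 * 1080 * 12 / 8 = 3110400 bytes for V4L2_PIX_FMT_NV12
    return (width * height * get_pixel_depth(fmt)) / 8;
}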

int cam_int_s_fmt(node_info_t *node)
{
    struct v4l2_format v4l2_fmt;
    unsigned int framesize;
    int ret;

    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));

    v4l2_fmt.type = node->type;
    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;

    if (node->planes >= 1) {
        v4l2_fmt.fmt.pix_mp.width       = node->width;
        v4l2_fmt.fmt.pix_mp.height      = node->height;
        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
    } else {
        ALOGE("%s: S_FMT, invalid plane count (%d)", __FUNCTION__, node->planes);
        return -1;
    }

    /* Set up for capture */
    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);

    if (ret < 0)
        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)", __FUNCTION__, ret);

    return ret;
}

int cam_int_reqbufs(node_info_t *node)
{
    struct v4l2_requestbuffers req;
    int ret;

    req.count = node->buffers;
    req.type = node->type;
    req.memory = node->memory;

    ret = exynos_v4l2_reqbufs(node->fd, &req);

    if (ret < 0)
        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)", __FUNCTION__, node->fd, ret);

    return req.count;
}
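
// Note on cam_int_reqbufs() above: VIDIOC_REQBUFS may grant fewer buffers than
// requested, so the driver-adjusted req.count is returned instead of ret.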

int cam_int_qbuf(node_info_t *node, int index)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int i;
    int ret = 0;

    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));
    v4l2_buf.m.planes   = planes;
    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.index      = index;
    v4l2_buf.length     = node->planes;

    for (i = 0; i < node->planes; i++) {
        v4l2_buf.m.planes[i].m.fd    = (int)(node->buffer[index].fd.extFd[i]);
        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
    }

    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);

    if (ret < 0)
        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)", __FUNCTION__, index, ret);

    return ret;
}
int cam_int_streamon(node_info_t *node)
{
    enum v4l2_buf_type type = node->type;
    int ret;

    ret = exynos_v4l2_streamon(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)", __FUNCTION__, node->fd, ret);

    ALOGV("On streaming I/O... ... fd(%d)", node->fd);

    return ret;
}

int cam_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    int ret;

    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)", __FUNCTION__, ret);

    return ret;
}

int isp_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    int ret;

    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)", __FUNCTION__, ret);

    return ret;
}
int cam_int_dqbuf(node_info_t *node)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int ret;

    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));
    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.m.planes   = planes;
    v4l2_buf.length     = node->planes;

    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
    if (ret < 0)
        ALOGE("%s: VIDIOC_DQBUF failed (%d)", __FUNCTION__, ret);

    return v4l2_buf.index;
}

int cam_int_dqbuf(node_info_t *node, int num_plane)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int ret;

    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));
    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.m.planes   = planes;
    v4l2_buf.length     = num_plane;

    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
    if (ret < 0)
        ALOGE("%s: VIDIOC_DQBUF failed (%d)", __FUNCTION__, ret);

    return v4l2_buf.index;
}
int cam_int_s_input(node_info_t *node, int index)
{
    int ret;

    ret = exynos_v4l2_s_input(node->fd, index);
    if (ret < 0)
        ALOGE("%s: VIDIOC_S_INPUT failed (%d)", __FUNCTION__, ret);

    return ret;
}
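
/* Sketch: the typical bring-up order for one of these V4L2 nodes, mirroring what
 * InitializeISPChain() and StartSCCThread() do further down. It assumes the
 * node_info_t has already been populated (fd, width/height/format, planes,
 * buffers, type, memory and per-buffer fds); this function is illustrative only
 * and is not called anywhere in this HAL. */
static int example_node_bringup(node_info_t *node, int sensor_id)
{
    if (cam_int_s_input(node, sensor_id) < 0)   // select the sensor input
        return -1;
    if (cam_int_s_fmt(node) < 0)                // apply resolution / pixel format
        return -1;
    cam_int_reqbufs(node);                      // ask the driver for DMABUF buffers
    for (int i = 0; i < node->buffers; i++)     // queue every buffer once
        cam_int_qbuf(node, i);
    return cam_int_streamon(node);              // start streaming
}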

gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;

RequestManager::RequestManager(SignalDrivenThread* main_thread):
    m_lastAeMode(0),
    m_lastAaMode(0),
    m_lastAwbMode(0),
    m_vdisBubbleEn(false),
    m_lastAeComp(0),
    m_lastCompletedFrameCnt(-1)
{
    m_metadataConverter = new MetadataConverter;
    m_mainThread = main_thread;
    ResetEntry();
    m_sensorPipelineSkipCnt = 0;
    return;
}

RequestManager::~RequestManager()
{
    ALOGV("%s", __FUNCTION__);
    if (m_metadataConverter != NULL) {
        delete m_metadataConverter;
        m_metadataConverter = NULL;
    }

    releaseSensorQ();
    return;
}

void RequestManager::ResetEntry()
{
    Mutex::Autolock lock(m_requestMutex);
    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
    }
    m_numOfEntries = 0;
    m_entryInsertionIndex = -1;
    m_entryProcessingIndex = -1;
    m_entryFrameOutputIndex = -1;
}
int RequestManager::GetNumEntries()
{
    return m_numOfEntries;
}

void RequestManager::SetDefaultParameters(int cropX)
{
    m_cropX = cropX;
}

bool RequestManager::IsRequestQueueFull()
{
    Mutex::Autolock lock(m_requestMutex);
    if (m_numOfEntries >= NUM_MAX_REQUEST_MGR_ENTRY)
        return true;
    else
        return false;
}

void RequestManager::RegisterRequest(camera_metadata_t * new_request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    Mutex::Autolock lock(m_requestMutex);

    request_manager_entry * newEntry = NULL;
    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__, newInsertionIndex, m_numOfEntries);

    newEntry = &(entries[newInsertionIndex]);

    if (newEntry->status != EMPTY) {
        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
        return;
    }
    newEntry->status = REGISTERED;
    newEntry->original_request = new_request;
    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
    newEntry->output_stream_count = 0;
    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
        newEntry->output_stream_count++;

    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
        newEntry->output_stream_count++;

    m_numOfEntries++;
    m_entryInsertionIndex = newInsertionIndex;

    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
        m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
}

void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    int frame_index;
    request_manager_entry * currentEntry;

    Mutex::Autolock lock(m_requestMutex);

    frame_index = GetCompletedIndex();
    currentEntry = &(entries[frame_index]);
    if (currentEntry->status != COMPLETED) {
        CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
                       m_entryProcessingIndex, frame_index, (int)(currentEntry->status));
        return;
    }
    if (deregistered_request)  *deregistered_request = currentEntry->original_request;

    m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;

    currentEntry->status = EMPTY;
    currentEntry->original_request = NULL;
    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
    currentEntry->output_stream_count = 0;
    m_numOfEntries--;
    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
        m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex);

    CheckCompleted(GetNextIndex(frame_index));
    return;
}

bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
                camera_metadata_t ** prepared_frame, int afState)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    Mutex::Autolock lock(m_requestMutex);
    status_t res = NO_ERROR;
    int tempFrameOutputIndex = GetCompletedIndex();
    request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]);
    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);

    if (currentEntry->status != COMPLETED) {
        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
        return false;
    }
    m_entryFrameOutputIndex = tempFrameOutputIndex;
    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
                m_tempFrameMetadata);
    if (res != NO_ERROR) {
        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
        return false;
    }
    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
    *prepared_frame = m_tempFrameMetadata;
    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
        currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    // Dump();
    return true;
}
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf, int *afMode)
{
    struct camera2_shot_ext * shot_ext;
    struct camera2_shot_ext * request_shot;
    int targetStreamIndex = 0;
    request_manager_entry * newEntry = NULL;
    static int count = 0;

    Mutex::Autolock lock(m_requestMutex);
    if (m_numOfEntries == 0)  {
        CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
        return -1;
    }

    if ((m_entryProcessingIndex == m_entryInsertionIndex)
        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
            m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex);
        return -1;
    }

    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);

    newEntry = &(entries[newProcessingIndex]);
    request_shot = &(newEntry->internal_shot);
    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
    if (newEntry->status != REGISTERED) {
        CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
        for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
            CAM_LOGD("DBG: entries[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
        }
        return -1;
    }

    newEntry->status = REQUESTED;

    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];

    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
    shot_ext->request_sensor = 1;
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->setfile = 0;

    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    if (targetStreamIndex & MASK_OUTPUT_SCP)
        shot_ext->request_scp = 1;

    if (targetStreamIndex & MASK_OUTPUT_SCC)
        shot_ext->request_scc = 1;

    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
        shot_ext->fd_bypass = 0;

    if (count == 0) {
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
    } else
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;

    count++;
    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
    shot_ext->shot.magicNumber = 0x23456789;
    shot_ext->shot.ctl.sensor.exposureTime = 0;
    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
    shot_ext->shot.ctl.sensor.sensitivity = 0;

    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];

    m_entryProcessingIndex = newProcessingIndex;
    return newProcessingIndex;
}
void RequestManager::NotifyStreamOutput(int frameCnt)
{
    int index;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }
    ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count);

    entries[index].output_stream_count--;  //TODO : match stream id also
    CheckCompleted(index);
}

void RequestManager::CheckCompleted(int index)
{
    if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
        && (entries[index].output_stream_count <= 0)) {
        ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
                index, entries[index].internal_shot.shot.ctl.request.frameCount);
        entries[index].status = COMPLETED;
        if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount)
            m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
    }
}
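
/*
 * Request entry life cycle, as implemented in this file:
 *   EMPTY      -> REGISTERED  (RegisterRequest)
 *   REGISTERED -> REQUESTED   (MarkProcessingRequest: shot handed to the sensor)
 *   REQUESTED  -> CAPTURED    (FindFrameCnt: shot came back from the ISP)
 *   CAPTURED   -> METADONE    (ApplyDynamicMetadata)
 *   METADONE   -> COMPLETED   (CheckCompleted, once output_stream_count drops to 0)
 *   COMPLETED  -> EMPTY       (DeregisterRequest)
 */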

int RequestManager::GetCompletedIndex()
{
    return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
}

void RequestManager::pushSensorQ(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    m_sensorQ.push_back(index);
}

int RequestManager::popSensorQ()
{
    List<int>::iterator sensor_token;
    int index;

    Mutex::Autolock lock(m_requestMutex);

    if (m_sensorQ.size() == 0)
        return -1;

    sensor_token = m_sensorQ.begin();
    index = *sensor_token;
    m_sensorQ.erase(sensor_token);

    return (index);
}

void RequestManager::releaseSensorQ()
{
    List<int>::iterator r;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());

    while (m_sensorQ.size() > 0) {
        r = m_sensorQ.begin();
        m_sensorQ.erase(r);
    }
    return;
}

void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
{
    int index;
    struct camera2_shot_ext * request_shot;
    nsecs_t timeStamp;
    int i;

    Mutex::Autolock lock(m_requestMutex);
    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if ((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
            && (entries[i].status == CAPTURED)) {
            entries[i].status = METADONE;
            break;
        }
    }

    if (i == NUM_MAX_REQUEST_MGR_ENTRY) {
        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        return;
    }

    request_manager_entry * newEntry = &(entries[i]);
    request_shot = &(newEntry->internal_shot);

    timeStamp = request_shot->shot.dm.sensor.timeStamp;
    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
    request_shot->shot.dm.sensor.timeStamp = timeStamp;
    m_lastTimeStamp = timeStamp;
    CheckCompleted(i);
}
void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
{
    int index, targetStreamIndex;
    struct camera2_shot_ext * request_shot;

    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
    if (frameCnt < 0)
        return;

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * newEntry = &(entries[index]);
    request_shot = &(newEntry->internal_shot);
    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
    shot_ext->shot.ctl.request.frameCount = frameCnt;
    shot_ext->request_sensor = 1;
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->drc_bypass = 1;
    shot_ext->setfile = 0;

    shot_ext->request_scc = 0;
    shot_ext->request_scp = 0;

    shot_ext->isReprocessing = request_shot->isReprocessing;
    shot_ext->reprocessInput = request_shot->reprocessInput;
    shot_ext->shot.ctl.request.outputStreams[0] = 0;

    shot_ext->awb_mode_dm = request_shot->awb_mode_dm;

    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];

    // mapping flash UI mode from aeMode
    if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
        if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
            ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
    }

    // Apply ae/awb lock or unlock
    if (request_shot->ae_lock == AEMODE_LOCK_ON)
        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
    if (request_shot->awb_lock == AWBMODE_LOCK_ON)
        request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;

    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
    }
    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
    }
    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
    }
    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
    }
    else {
        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
    }

    if (request_shot->shot.ctl.aa.videoStabilizationMode) {
        m_vdisBubbleEn = true;
        shot_ext->dis_bypass = 0;
        shot_ext->dnr_bypass = 0;
    } else {
        m_vdisBubbleEn = false;
        shot_ext->dis_bypass = 1;
        shot_ext->dnr_bypass = 1;
    }

    shot_ext->shot.ctl.aa.afTrigger = 0;

    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
    if (targetStreamIndex & MASK_OUTPUT_SCP)
        shot_ext->request_scp = 1;

    if (targetStreamIndex & MASK_OUTPUT_SCC)
        shot_ext->request_scc = 1;

    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
        shot_ext->fd_bypass = 0;

    if (targetStreamIndex & STREAM_MASK_RECORD) {
        shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
        shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
    } else {
        shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
        shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];
    }

    ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
        (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
        (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
        (int)(shot_ext->shot.ctl.aa.afMode));
}

bool RequestManager::IsVdisEnable(void)
{
    return m_vdisBubbleEn;
}

int RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
{
    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
            return i;
    }
    return -1;
}

void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    if (currentEntry->internal_shot.isReprocessing == 1) {
        ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
            index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    } else {
        currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
        ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
            index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    }
}

nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
        return m_lastTimeStamp;
    }
    else
        return GetTimestamp(index);
}

nsecs_t RequestManager::GetTimestamp(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
        return 0;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
    if (frameTime == 0) {
        ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__);
        frameTime = m_lastTimeStamp;
    }
    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
    return frameTime;
}

uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return 0;
    }
    else
        return GetOutputStream(index);
}

uint8_t RequestManager::GetOutputStream(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
        return 0;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
}

camera2_shot_ext * RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return 0;
    }
    else
        return GetInternalShotExt(index);
}

camera2_shot_ext * RequestManager::GetInternalShotExt(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
        return 0;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    return &currentEntry->internal_shot;
}

int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
{
    Mutex::Autolock lock(m_requestMutex);
    int i;

    if (m_numOfEntries == 0) {
        CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
        return -1;
    }

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if (entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
            continue;

        if (entries[i].status == REQUESTED) {
            entries[i].status = CAPTURED;
            return entries[i].internal_shot.shot.ctl.request.frameCount;
        }
        CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
    }
    CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    return -1;
}

void RequestManager::SetInitialSkip(int count)
{
    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
    if (count > m_sensorPipelineSkipCnt)
        m_sensorPipelineSkipCnt = count;
}

int RequestManager::GetSkipCnt()
{
    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
    if (m_sensorPipelineSkipCnt == 0)
        return m_sensorPipelineSkipCnt;
    else
        return --m_sensorPipelineSkipCnt;
}

void RequestManager::Dump(void)
{
    int i = 0;
    request_manager_entry * currentEntry;
    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
        m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex);

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        currentEntry = &(entries[i]);
        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
            currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
            currentEntry->output_stream_count,
            currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
    }
}

int RequestManager::GetNextIndex(int index)
{
    index++;
    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
        index = 0;

    return index;
}

int RequestManager::GetPrevIndex(int index)
{
    index--;
    if (index < 0)
        index = NUM_MAX_REQUEST_MGR_ENTRY-1;

    return index;
}
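
// Illustration: the entry table is used as a ring buffer. With a hypothetical
// NUM_MAX_REQUEST_MGR_ENTRY of 4 (the real value is defined in the header),
// GetNextIndex(3) == 0 and GetPrevIndex(0) == 3.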

ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
            m_requestQueueOps(NULL),
            m_frameQueueOps(NULL),
            m_callbackCookie(NULL),
            m_numOfRemainingReqInSvc(0),
            m_isRequestQueuePending(false),
            m_isRequestQueueNull(true),
            m_isIspStarted(false),
            m_ionCameraClient(0),
            m_zoomRatio(1),
            m_scp_closing(false),
            m_scp_closed(false),
            m_afState(HAL_AFSTATE_INACTIVE),
            m_afMode(NO_CHANGE),
            m_afMode2(NO_CHANGE),
            m_vdisBubbleCnt(0),
            m_vdisDupFrame(0),
            m_IsAfModeUpdateRequired(false),
            m_IsAfTriggerRequired(false),
            m_IsAfLockRequired(false),
            m_sccLocalBufferValid(false),
            m_wideAspect(false),
            m_scpOutputSignalCnt(0),
            m_scpOutputImageCnt(0),
            m_afTriggerId(0),
            m_afPendingTriggerId(0),
            m_afModeWaitingCnt(0),
            m_jpegEncodingCount(0),
            m_scpForceSuspended(false),
            m_halDevice(dev),
            m_nightCaptureCnt(0),
            m_nightCaptureFrameCnt(0),
            m_lastSceneMode(0),
            m_cameraId(cameraId),
            m_thumbNailW(160),
            m_thumbNailH(120)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    int ret = 0;
    int res = 0;

    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_camera2 = camera;
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if (m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);

    m_BayerManager = new BayerBufManager();
    m_mainThread    = new MainThread(this);
    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    *openInvalid = InitializeISPChain();
    if (*openInvalid < 0) {
        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
        // clean process
        // 1. close video nodes
        // SCP
        res = exynos_v4l2_close(m_camera_info.scp.fd);
        if (res != NO_ERROR) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
        }
        // SCC
        res = exynos_v4l2_close(m_camera_info.capture.fd);
        if (res != NO_ERROR) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
        }
        // Sensor
        res = exynos_v4l2_close(m_camera_info.sensor.fd);
        if (res != NO_ERROR) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
        }
        // ISP
        res = exynos_v4l2_close(m_camera_info.isp.fd);
        if (res != NO_ERROR) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
        }
    } else {
        m_sensorThread  = new SensorThread(this);
        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);

        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
            m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
        CSC_METHOD cscMethod = CSC_METHOD_HW;
        m_exynosPictureCSC = csc_init(cscMethod);
        if (m_exynosPictureCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);

        m_exynosVideoCSC = csc_init(cscMethod);
        if (m_exynosVideoCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);

        m_setExifFixedAttribute();

        // control information clear
        // flash
        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
        m_ctlInfo.flash.m_afFlashDoneFlg = false;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashFrameCount = 0;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_flashTimeOut = 0;
        m_ctlInfo.flash.m_flashDecisionResult = false;
        m_ctlInfo.flash.m_flashTorchMode = false;
        m_ctlInfo.flash.m_precaptureState = 0;
        m_ctlInfo.flash.m_precaptureTriggerId = 0;
        // ae
        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
        // af
        m_ctlInfo.af.m_afTriggerTimeOut = 0;
        // scene
        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
    }
    ALOGD("(%s): EXIT", __FUNCTION__);
}

ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    this->release();
    ALOGD("(%s): EXIT", __FUNCTION__);
}

void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("(HAL2::release): ENTER");

    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }

    if (m_mainThread != NULL) {
        m_mainThread->release();
    }

    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;

    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;

    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END   Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }

    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END   Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END   Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }

    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }

    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);

    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);

    ALOGD("(HAL2::release): EXIT");
}

int ExynosCameraHWInterface2::InitializeISPChain()
{
    char node_name[30];
    int fd = 0;
    int i;
    int ret = 0;

    /* Open Sensor */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__, node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__, node_name, fd);
    }
    m_camera_info.sensor.fd = fd;

    /* Open ISP */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__, node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__, node_name, fd);
    }
    m_camera_info.isp.fd = fd;

    /* Open ScalerC */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__, node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__, node_name, fd);
    }
    m_camera_info.capture.fd = fd;

    /* Open ScalerP */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
    if (fd < 0) {
        ALOGE("ERR(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__, node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__, node_name, fd);
    }
    m_camera_info.scp.fd = fd;

    if (m_cameraId == 0)
        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
    else
        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;

    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;

    m_camera_info.dummy_shot.dis_bypass = 1;
    m_camera_info.dummy_shot.dnr_bypass = 1;
    m_camera_info.dummy_shot.fd_bypass = 1;

    /* sensor setting */
    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;

    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;

    /* request setting */
    m_camera_info.dummy_shot.request_sensor = 1;
    m_camera_info.dummy_shot.request_scc = 0;
    m_camera_info.dummy_shot.request_scp = 0;
    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;

    m_camera_info.sensor.width = m_camera2->getSensorRawW();
    m_camera_info.sensor.height = m_camera2->getSensorRawH();

    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
    m_camera_info.sensor.planes = 2;
    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;

    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
        allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
    }

    m_camera_info.isp.width = m_camera_info.sensor.width;
    m_camera_info.isp.height = m_camera_info.sensor.height;
    m_camera_info.isp.format = m_camera_info.sensor.format;
    m_camera_info.isp.planes = m_camera_info.sensor.planes;
    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;

    for (i = 0; i < m_camera_info.isp.buffers; i++) {
        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
    }

    /* init ISP */
    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
    if (ret < 0) {
        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
        return -1; // caller checks for a negative value
    }
    cam_int_s_fmt(&(m_camera_info.isp));
    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(&(m_camera_info.isp));
    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);

    /* init Sensor */
    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
    if (cam_int_s_fmt(&(m_camera_info.sensor)) < 0) {
        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
    }
    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
    cam_int_reqbufs(&(m_camera_info.sensor));
    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                sizeof(struct camera2_shot_ext));
    }

    for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
        cam_int_qbuf(&(m_camera_info.sensor), i);

    for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
        m_requestManager->pushSensorQ(i);

    ALOGV("== stream_on :: sensor");
    cam_int_streamon(&(m_camera_info.sensor));
    m_camera_info.sensor.status = true;

    /* init Capture */
    m_camera_info.capture.width = m_camera2->getSensorW();
    m_camera_info.capture.height = m_camera2->getSensorH();
    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
#ifdef ENABLE_FRAME_SYNC
    m_camera_info.capture.planes = 2;
#else
    m_camera_info.capture.planes = 1;
#endif
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;

    m_camera_info.capture.status = false;

    return true;
}

void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
    ALOGV("(%s)", __FUNCTION__);
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;
    uint32_t format_actual;

    if (!threadExists) {
        m_streamThreads[1]  = new StreamThread(this, 1);
    }
    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
    if (!threadExists) {
        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
        AllocatedStream->m_numRegisteredStream = 1;
    }
    AllocatedStream->m_index        = 1;

    format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV

    newParameters.width             = m_camera2->getSensorW();
    newParameters.height            = m_camera2->getSensorH();
    newParameters.format            = format_actual;
    newParameters.streamOps         = NULL;
    newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
    newParameters.planes            = 2;
#else
    newParameters.planes            = 1;
#endif

    newParameters.numSvcBufsInHal   = 0;

    newParameters.node              = &m_camera_info.capture;

    AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);

    if (!threadExists) {
        if (!m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++) {
                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
            }
            m_sccLocalBufferValid = true;
        }
    } else {
        if (m_sccLocalBufferValid) {
             for (int i = 0; i < m_camera_info.capture.buffers; i++)
                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
        } else {
            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
        }
    }
    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    cam_int_s_fmt(newParameters.node);
    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(newParameters.node);
    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

    for (int i = 0; i < newParameters.node->buffers; i++) {
        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
        cam_int_qbuf(newParameters.node, i);
        newParameters.svcBufStatus[i] = ON_DRIVER;
    }

    ALOGV("== stream_on :: capture");
    if (cam_int_streamon(newParameters.node) < 0) {
        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
    } else {
        m_camera_info.capture.status = true;
    }

    AllocatedStream->setParameter(&newParameters);
    AllocatedStream->m_activated    = true;
    AllocatedStream->m_isBufferInit = true;
}

void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    cam_int_streamon(&(m_camera_info.isp));
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}

int ExynosCameraHWInterface2::getCameraId() const
{
    return m_cameraId;
}

int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
        return 0;
    }
    else {
        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
        return 1;
    }
}

int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
{
    int i = 0;

    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
    if ((NULL == m_frameQueueOps) || (NULL == m_requestQueueOps)) {
        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
        return 0;
    }
    m_isRequestQueueNull = false;
    if (m_requestManager->GetNumEntries() == 0)
        m_requestManager->SetInitialSkip(0);

    if (m_isIspStarted == false) {
        /* isp */
        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
        cam_int_s_fmt(&(m_camera_info.isp));
        cam_int_reqbufs(&(m_camera_info.isp));

        /* sensor */
        if (m_camera_info.sensor.status == false) {
            cam_int_s_fmt(&(m_camera_info.sensor));
            cam_int_reqbufs(&(m_camera_info.sensor));

            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                        sizeof(struct camera2_shot_ext));
            }
            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
                cam_int_qbuf(&(m_camera_info.sensor), i);

            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
                m_requestManager->pushSensorQ(i);
            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
            cam_int_streamon(&(m_camera_info.sensor));
            m_camera_info.sensor.status = true;
        }
    }
    if (!(m_streamThreads[1].get())) {
        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
        StartSCCThread(false);
    } else {
        if (m_streamThreads[1]->m_activated ==  false) {
            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
            StartSCCThread(true);
        } else {
            if (m_camera_info.capture.status == false) {
                m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
                cam_int_s_fmt(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
                cam_int_reqbufs(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

                if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
                    StreamThread *          targetStream = m_streamThreads[1].get();
                    stream_parameters_t     *targetStreamParms = &(targetStream->m_parameters);
                    node_info_t             *currentNode = targetStreamParms->node;

                    struct v4l2_buffer v4l2_buf;
                    struct v4l2_plane  planes[VIDEO_MAX_PLANES];

                    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
                        v4l2_buf.m.planes   = planes;
                        v4l2_buf.type       = currentNode->type;
                        v4l2_buf.memory     = currentNode->memory;

                        v4l2_buf.length     = currentNode->planes;
                        v4l2_buf.index      = i;
                        ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];

                        if (i < currentNode->buffers) {
#ifdef ENABLE_FRAME_SYNC
                            v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
                            v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
                            v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
                            v4l2_buf.length += targetStreamParms->metaPlanes;
                            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
                            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];

                            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
#endif
                            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
                                ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
                            }
                            ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
                            targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
                        }
                        else {
                            targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
                        }

                    }

                } else {
                    for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1569                        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1570                        cam_int_qbuf(&(m_camera_info.capture), i);
1571                    }
1572                }
1573                ALOGV("== stream_on :: capture");
1574                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1575                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1576                } else {
1577                    m_camera_info.capture.status = true;
1578                }
1579            }
1580            if (m_scpForceSuspended) {
1581                m_scpForceSuspended = false;
1582            }
1583        }
1584    }
1585    if (m_isIspStarted == false) {
1586        StartISP();
1587        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1588        m_requestManager->SetInitialSkip(6);
1589        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1590        m_isIspStarted = true;
1591    }
1592    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1593    return 0;
1594}
1595
1596int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1597{
1598    ALOGV("DEBUG(%s):", __FUNCTION__);
1599    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1600            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1601        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1602        return 0;
1603    }
1604    else {
1605        ALOGE("ERR(%s): setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1606        return 1;
1607    }
1608}
1609
1610int ExynosCameraHWInterface2::getInProgressCount()
1611{
1612    int inProgressCount = m_requestManager->GetNumEntries();
1613    ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
1614        inProgressCount, m_jpegEncodingCount, (inProgressCount + m_jpegEncodingCount));
1615    return (inProgressCount + m_jpegEncodingCount);
1616}
1617
1618int ExynosCameraHWInterface2::flushCapturesInProgress()
1619{
1620    return 0;
1621}
1622
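/*
 * constructDefaultRequest: delegates template creation to ExynosCamera2.
 * The metadata is built in two passes - the first call only computes the
 * required size and allocates the buffer, the second call fills in the
 * template settings.
 */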
1623int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1624{
1625    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1626
1627    if (request == NULL) return BAD_VALUE;
1628    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1629        return BAD_VALUE;
1630    }
1631    status_t res;
1632    // Pass 1, calculate size and allocate
1633    res = m_camera2->constructDefaultRequest(request_template,
1634            request,
1635            true);
1636    if (res != OK) {
1637        return res;
1638    }
1639    // Pass 2, build request
1640    res = m_camera2->constructDefaultRequest(request_template,
1641            request,
1642            false);
1643    if (res != OK) {
1644        ALOGE("Unable to populate new request for template %d",
1645                request_template);
1646    }
1647
1648    return res;
1649}
1650
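/*
 * allocateStream: maps a service stream request onto a HAL stream or substream.
 *   - IMPLEMENTATION_DEFINED / OPAQUE at a supported resolution
 *       -> STREAM_ID_PREVIEW on stream thread 0 (SCP node), or
 *          STREAM_ID_RECORD as a substream when preview is already active
 *   - CAMERA2_HAL_PIXEL_FORMAT_ZSL at full sensor size
 *       -> STREAM_ID_ZSL on stream thread 1 (capture/SCC node)
 *   - HAL_PIXEL_FORMAT_BLOB at a supported JPEG resolution
 *       -> STREAM_ID_JPEG substream attached to stream thread 1
 *   - YCrCb_420_SP / YV12
 *       -> STREAM_ID_PRVCB substream attached to the preview stream
 * It reports back the actual gralloc format, usage flags and the number of
 * buffers the service should allocate for the stream.
 */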
1651int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1652                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1653{
1654    ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1655    bool useDirectOutput = false;
1656    StreamThread *AllocatedStream;
1657    stream_parameters_t newParameters;
1658    substream_parameters_t *subParameters;
1659    StreamThread *parentStream;
1660    status_t res;
1661    int allocCase = 0;
1662
1663    if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
1664            m_camera2->isSupportedResolution(width, height)) {
1665        if (!(m_streamThreads[0].get())) {
1666            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1667            allocCase = 0;
1668        }
1669        else {
1670            if ((m_streamThreads[0].get())->m_activated == true) {
1671                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1672                allocCase = 1;
1673            }
1674            else {
1675                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1676                allocCase = 2;
1677            }
1678        }
1679
1680        // TODO : instead of hard-coding these resolutions, calculate the aspect ratio and select wide-aspect mode from it.
1681        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1682                    || (width == 720 && height == 480) || (width == 1440 && height == 960)
1683                    || (width == 1344 && height == 896)) {
1684            m_wideAspect = true;
1685        } else {
1686            m_wideAspect = false;
1687        }
1688        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1689
1690        if (allocCase == 0 || allocCase == 2) {
1691            *stream_id = STREAM_ID_PREVIEW;
1692
1693            m_streamThreads[0]  = new StreamThread(this, *stream_id);
1694
1695            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1696            AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1697            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1698
1699            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1700            *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1701            *max_buffers                        = 6;
1702
1703            newParameters.width                 = width;
1704            newParameters.height                = height;
1705            newParameters.format                = *format_actual;
1706            newParameters.streamOps             = stream_ops;
1707            newParameters.usage                 = *usage;
1708            newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
1709            newParameters.numOwnSvcBuffers      = *max_buffers;
1710            newParameters.planes                = NUM_PLANES(*format_actual);
1711            newParameters.metaPlanes            = 1;
1712            newParameters.numSvcBufsInHal       = 0;
1713            newParameters.minUndequedBuffer     = 3;
1714            newParameters.needsIonMap           = true;
1715
1716            newParameters.node                  = &m_camera_info.scp;
1717            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1718            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1719
1720            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1721            AllocatedStream->m_index            = 0;
1722            AllocatedStream->setParameter(&newParameters);
1723            AllocatedStream->m_activated = true;
1724            AllocatedStream->m_numRegisteredStream = 1;
1725            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1726            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1727            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1728            if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1729                AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1730            if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1731                AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1732            return 0;
1733        } else if (allocCase == 1) {
1734            *stream_id = STREAM_ID_RECORD;
1735
1736            subParameters = &m_subStreams[STREAM_ID_RECORD];
1737            memset(subParameters, 0, sizeof(substream_parameters_t));
1738
1739            parentStream = (StreamThread*)(m_streamThreads[0].get());
1740            if (!parentStream) {
1741                return 1;
1742            }
1743
1744            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1745            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1746            *max_buffers = 6;
1747
1748            subParameters->type         = SUBSTREAM_TYPE_RECORD;
1749            subParameters->width        = width;
1750            subParameters->height       = height;
1751            subParameters->format       = *format_actual;
1752            subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1753            subParameters->streamOps     = stream_ops;
1754            subParameters->usage         = *usage;
1755            subParameters->numOwnSvcBuffers = *max_buffers;
1756            subParameters->numSvcBufsInHal  = 0;
1757            subParameters->needBufferInit    = false;
1758            subParameters->minUndequedBuffer = 2;
1759
1760            res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1761            if (res != NO_ERROR) {
1762                ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1763                return 1;
1764            }
1765            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1766            ALOGV("(%s): Enabling Record", __FUNCTION__);
1767            return 0;
1768        }
1769    }
1770    else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
1771            && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) {
1772
1773        if (!(m_streamThreads[1].get())) {
1774            ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1775            useDirectOutput = true;
1776        }
1777        else {
1778            ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1779            useDirectOutput = false;
1780        }
1781        if (useDirectOutput) {
1782            *stream_id = STREAM_ID_ZSL;
1783
1784            m_streamThreads[1]  = new StreamThread(this, *stream_id);
1785            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1786            AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1787            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1788
1792            *format_actual                      = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1793            *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1794            *max_buffers                        = 6;
1795
1796            newParameters.width                 = width;
1797            newParameters.height                = height;
1798            newParameters.format                = *format_actual;
1799            newParameters.streamOps             = stream_ops;
1800            newParameters.usage                 = *usage;
1801            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1802            newParameters.numOwnSvcBuffers      = *max_buffers;
1803            newParameters.planes                = NUM_PLANES(*format_actual);
1804            newParameters.metaPlanes            = 1;
1805
1806            newParameters.numSvcBufsInHal       = 0;
1807            newParameters.minUndequedBuffer     = 2;
1808            newParameters.needsIonMap           = false;
1809
1810            newParameters.node                  = &m_camera_info.capture;
1811            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1812            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1813
1814            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1815            AllocatedStream->m_index            = 1;
1816            AllocatedStream->setParameter(&newParameters);
1817            AllocatedStream->m_activated = true;
1818            AllocatedStream->m_numRegisteredStream = 1;
1819            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1820            return 0;
1821        } else {
1822            bool bJpegExists = false;
1823            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1824            subParameters = &m_subStreams[STREAM_ID_JPEG];
1825            if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
1826                ALOGD("(%s): jpeg stream exists", __FUNCTION__);
1827                bJpegExists = true;
1828                AllocatedStream->detachSubStream(STREAM_ID_JPEG);
1829            }
1830            AllocatedStream->m_releasing = true;
1831            ALOGD("START stream thread 1 release %d", __LINE__);
1832            do {
1833                AllocatedStream->release();
1834                usleep(SIG_WAITING_TICK);
1835            } while (AllocatedStream->m_releasing);
1836            ALOGD("END   stream thread 1 release %d", __LINE__);
1837
1838            *stream_id = STREAM_ID_ZSL;
1839
1840            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1841
1845            *format_actual                      = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1846            *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1847            *max_buffers                        = 6;
1848
1849            newParameters.width                 = width;
1850            newParameters.height                = height;
1851            newParameters.format                = *format_actual;
1852            newParameters.streamOps             = stream_ops;
1853            newParameters.usage                 = *usage;
1854            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
1855            newParameters.numOwnSvcBuffers      = *max_buffers;
1856            newParameters.planes                = NUM_PLANES(*format_actual);
1857            newParameters.metaPlanes            = 1;
1858
1859            newParameters.numSvcBufsInHal       = 0;
1860            newParameters.minUndequedBuffer     = 2;
1861            newParameters.needsIonMap           = false;
1862
1863            newParameters.node                  = &m_camera_info.capture;
1864            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1865            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1866
1867            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1868            AllocatedStream->m_index            = 1;
1869            AllocatedStream->setParameter(&newParameters);
1870            AllocatedStream->m_activated = true;
1871            AllocatedStream->m_numRegisteredStream = 1;
1872            if (bJpegExists) {
1873                AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
1874            }
1875            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1876            return 0;
1877
1878        }
1879    }
1880    else if (format == HAL_PIXEL_FORMAT_BLOB
1881            && m_camera2->isSupportedJpegResolution(width, height)) {
1882        *stream_id = STREAM_ID_JPEG;
1883
1884        subParameters = &m_subStreams[*stream_id];
1885        memset(subParameters, 0, sizeof(substream_parameters_t));
1886
1887        if (!(m_streamThreads[1].get())) {
1888            ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1889            StartSCCThread(false);
1890        }
1891        else if (m_streamThreads[1]->m_activated ==  false) {
1892            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1893            StartSCCThread(true);
1894        }
1895        parentStream = (StreamThread*)(m_streamThreads[1].get());
1896
1897        *format_actual = HAL_PIXEL_FORMAT_BLOB;
1898        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1899        *max_buffers = 4;
1900
1901        subParameters->type          = SUBSTREAM_TYPE_JPEG;
1902        subParameters->width         = width;
1903        subParameters->height        = height;
1904        subParameters->format        = *format_actual;
1905        subParameters->svcPlanes     = 1;
1906        subParameters->streamOps     = stream_ops;
1907        subParameters->usage         = *usage;
1908        subParameters->numOwnSvcBuffers = *max_buffers;
1909        subParameters->numSvcBufsInHal  = 0;
1910        subParameters->needBufferInit    = false;
1911        subParameters->minUndequedBuffer = 2;
1912
1913        res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1914        if (res != NO_ERROR) {
1915            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1916            return 1;
1917        }
1918        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1919        ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1920        return 0;
1921    }
1922    else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1923        *stream_id = STREAM_ID_PRVCB;
1924
1925        subParameters = &m_subStreams[STREAM_ID_PRVCB];
1926        memset(subParameters, 0, sizeof(substream_parameters_t));
1927
1928        parentStream = (StreamThread*)(m_streamThreads[0].get());
1929        if (!parentStream) {
1930            return 1;
1931        }
1932
1933        *format_actual = format;
1934        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1935        *max_buffers = 6;
1936
1937        subParameters->type         = SUBSTREAM_TYPE_PRVCB;
1938        subParameters->width        = width;
1939        subParameters->height       = height;
1940        subParameters->format       = *format_actual;
1941        subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1942        subParameters->streamOps     = stream_ops;
1943        subParameters->usage         = *usage;
1944        subParameters->numOwnSvcBuffers = *max_buffers;
1945        subParameters->numSvcBufsInHal  = 0;
1946        subParameters->needBufferInit    = false;
1947        subParameters->minUndequedBuffer = 2;
1948
1949        if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1950            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1951            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1952        }
1953        else {
1954            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1955            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
1956        }
1957
1958        res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
1959        if (res != NO_ERROR) {
1960            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1961            return 1;
1962        }
1963        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1964        ALOGV("(%s): Enabling previewcb", __FUNCTION__);
1965        return 0;
1966    }
1967    ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
1968    return 1;
1969}
1970
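/*
 * registerStreamBuffers: imports the gralloc buffers allocated by the service.
 * For substreams (JPEG/RECORD/PRVCB) the buffers are only locked to obtain
 * their fds and virtual addresses. For direct streams (PREVIEW/ZSL) the
 * underlying dma-buf fds are wired into v4l2 multi-plane buffers, optionally
 * ion-mapped, and the first numHwBuffers of them are queued to the driver
 * (plus one extra plane carrying per-frame metadata when ENABLE_FRAME_SYNC is
 * defined) before streaming is turned on.
 */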
1971int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1972        int num_buffers, buffer_handle_t *registeringBuffers)
1973{
1974    int                     i;
1975    void                    *virtAddr[3];
1976    int                     plane_index = 0;
1977    StreamThread *          targetStream;
1978    stream_parameters_t     *targetStreamParms;
1979    node_info_t             *currentNode;
1980
1981    struct v4l2_buffer v4l2_buf;
1982    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1983
1984    ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1985        stream_id, num_buffers, (uint32_t)registeringBuffers);
1986
1987    if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
1988        targetStream = m_streamThreads[0].get();
1989        targetStreamParms = &(m_streamThreads[0]->m_parameters);
1990
1991    }
1992    else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
1993        substream_parameters_t  *targetParms;
1994        targetParms = &m_subStreams[stream_id];
1995
1996        targetParms->numSvcBuffers = num_buffers;
1997
1998        for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
1999            ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
2000                stream_id, i, (uint32_t)(registeringBuffers[i]));
2001            if (m_grallocHal) {
2002                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
2003                       targetParms->usage, 0, 0,
2004                       targetParms->width, targetParms->height, virtAddr) != 0) {
2005                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2006                }
2007                else {
2008                    ExynosBuffer currentBuf;
2009                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2010                    if (targetParms->svcPlanes == 1) {
2011                        currentBuf.fd.extFd[0] = priv_handle->fd;
2012                        currentBuf.size.extS[0] = priv_handle->size;
2013                        currentBuf.size.extS[1] = 0;
2014                        currentBuf.size.extS[2] = 0;
2015                    } else if (targetParms->svcPlanes == 2) {
2016                        currentBuf.fd.extFd[0] = priv_handle->fd;
2017                        currentBuf.fd.extFd[1] = priv_handle->fd1;
2018
2019                    } else if (targetParms->svcPlanes == 3) {
2020                        currentBuf.fd.extFd[0] = priv_handle->fd;
2021                        currentBuf.fd.extFd[1] = priv_handle->fd1;
2022                        currentBuf.fd.extFd[2] = priv_handle->fd2;
2023                    }
2024                    for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
2025                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
2026                        CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
2027                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
2028                             (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
2029                    }
2030                    targetParms->svcBufStatus[i]  = ON_SERVICE;
2031                    targetParms->svcBuffers[i]    = currentBuf;
2032                    targetParms->svcBufHandle[i]  = registeringBuffers[i];
2033                }
2034            }
2035        }
2036        targetParms->needBufferInit = true;
2037        return 0;
2038    }
2039    else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
2040        targetStream = m_streamThreads[1].get();
2041        targetStreamParms = &(m_streamThreads[1]->m_parameters);
2042    }
2043    else {
2044        ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
2045        return 1;
2046    }
2047
2048    if (targetStream->streamType == STREAM_TYPE_DIRECT) {
2049        if (num_buffers < targetStreamParms->numHwBuffers) {
2050            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
2051                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
2052            return 1;
2053        }
2054    }
2055    CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) planes(%d)",
2056            __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
2057            targetStreamParms->height, targetStreamParms->planes);
2058    targetStreamParms->numSvcBuffers = num_buffers;
2059    currentNode = targetStreamParms->node;
2060    currentNode->width      = targetStreamParms->width;
2061    currentNode->height     = targetStreamParms->height;
2062    currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
2063    currentNode->planes     = targetStreamParms->planes;
2064    currentNode->buffers    = targetStreamParms->numHwBuffers;
2065    cam_int_s_input(currentNode, m_camera_info.sensor_id);
2066    cam_int_s_fmt(currentNode);
2067    cam_int_reqbufs(currentNode);
2068    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
2069        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
2070            i, (uint32_t)(registeringBuffers[i]));
2071        v4l2_buf.m.planes   = planes;
2072        v4l2_buf.type       = currentNode->type;
2073        v4l2_buf.memory     = currentNode->memory;
2074        v4l2_buf.index      = i;
2075        v4l2_buf.length     = currentNode->planes;
2076
2077        ExynosBuffer currentBuf;
2078        ExynosBuffer metaBuf;
2079        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2080
2081        m_getAlignedYUVSize(currentNode->format,
2082            currentNode->width, currentNode->height, &currentBuf);
2083
2084        ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
2085        if (currentNode->planes == 1) {
2086            v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2087            currentBuf.fd.extFd[0] = priv_handle->fd;
2088            currentBuf.size.extS[0] = priv_handle->size;
2089            currentBuf.size.extS[1] = 0;
2090            currentBuf.size.extS[2] = 0;
2091        } else if (currentNode->planes == 2) {
2092            v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2093            v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
2094            currentBuf.fd.extFd[0] = priv_handle->fd;
2095            currentBuf.fd.extFd[1] = priv_handle->fd1;
2096
2097        } else if (currentNode->planes == 3) {
2098            v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2099            v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
2100            v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
2101            currentBuf.fd.extFd[0] = priv_handle->fd;
2102            currentBuf.fd.extFd[2] = priv_handle->fd1;
2103            currentBuf.fd.extFd[1] = priv_handle->fd2;
2104        }
2105
2106        for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
2107            if (targetStreamParms->needsIonMap)
2108                currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
2109            v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
2110            ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
2111                 __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2112                 (unsigned int)currentBuf.virt.extP[plane_index],
2113                 v4l2_buf.m.planes[plane_index].length);
2114        }
2115
2116        if (i < currentNode->buffers) {
2119#ifdef ENABLE_FRAME_SYNC
2120            /* add plane for metadata */
2121            metaBuf.size.extS[0] = 4*1024;
2122            allocCameraMemory(m_ionCameraClient, &metaBuf, 1, 1<<0);
2123
2124            v4l2_buf.length += targetStreamParms->metaPlanes;
2125            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
2126            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
2127
2128            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
2129#endif
2130            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2131                ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
2132                    __FUNCTION__, stream_id, currentNode->fd);
2133            }
2134            ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
2135                    __FUNCTION__, stream_id, currentNode->fd);
2136            targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
2137        }
2138        else {
2139            targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
2140        }
2141
2142        targetStreamParms->svcBuffers[i]       = currentBuf;
2143        targetStreamParms->metaBuffers[i] = metaBuf;
2144        targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
2145    }
2146
2147    ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
2148    cam_int_streamon(targetStreamParms->node);
2149    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
2150    currentNode->status = true;
2151    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
2152
2153    return 0;
2154}
2155
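/*
 * releaseStream: detaches a substream (JPEG/RECORD/PRVCB) from its parent, or
 * tears down a main stream. For direct streams the ion mappings are undone
 * when needsIonMap is set; stream thread 1 is released once it has no
 * registered streams left, and releasing the preview stream also stops the
 * sensor thread, terminates stream thread 0 and marks the ISP as stopped,
 * setting m_scpForceSuspended when the capture node was still streaming.
 */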
2156int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
2157{
2158    StreamThread *targetStream;
2159    status_t res = NO_ERROR;
2160    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2161    bool releasingScpMain = false;
2162
2163    if (stream_id == STREAM_ID_PREVIEW) {
2164        targetStream = (StreamThread*)(m_streamThreads[0].get());
2165        if (!targetStream) {
2166            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2167            return NO_ERROR;
2168        }
2169        targetStream->m_numRegisteredStream--;
2170        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2171        releasingScpMain = true;
2172        if (targetStream->m_parameters.needsIonMap) {
2173            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2174                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2175                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2176                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2177                    ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2178                                  targetStream->m_parameters.svcBuffers[i].fd.extFd[j], targetStream->m_parameters.svcBuffers[i].virt.extP[j]);
2179                }
2180            }
2181        }
2182    } else if (stream_id == STREAM_ID_JPEG) {
2183        if (m_resizeBuf.size.s != 0) {
2184            freeCameraMemory(&m_resizeBuf, 1);
2185        }
2186        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2187
2188        targetStream = (StreamThread*)(m_streamThreads[1].get());
2189        if (!targetStream) {
2190            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2191            return NO_ERROR;
2192        }
2193
2194        if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2195            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2196            return 1;
2197        }
2198        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2199        return 0;
2200    } else if (stream_id == STREAM_ID_RECORD) {
2201        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2202
2203        targetStream = (StreamThread*)(m_streamThreads[0].get());
2204        if (!targetStream) {
2205            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2206            return NO_ERROR;
2207        }
2208
2209        if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2210            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2211            return 1;
2212        }
2213
2214        if (targetStream->m_numRegisteredStream != 0)
2215            return 0;
2216    } else if (stream_id == STREAM_ID_PRVCB) {
2217        if (m_previewCbBuf.size.s != 0) {
2218            freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
2219        }
2220        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2221
2222        targetStream = (StreamThread*)(m_streamThreads[0].get());
2223        if (!targetStream) {
2224            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2225            return NO_ERROR;
2226        }
2227
2228        if ((res = targetStream->detachSubStream(stream_id)) != NO_ERROR) {
2229            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2230            return 1;
2231        }
2232
2233        if (targetStream->m_numRegisteredStream != 0)
2234            return 0;
2235    } else if (stream_id == STREAM_ID_ZSL) {
2236        targetStream = (StreamThread*)(m_streamThreads[1].get());
2237        if (!targetStream) {
2238            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2239            return NO_ERROR;
2240        }
2241
2242        targetStream->m_numRegisteredStream--;
2243        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2244        if (targetStream->m_parameters.needsIonMap) {
2245            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2246                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2247                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2248                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2249                    ALOGV("(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2250                                  targetStream->m_parameters.svcBuffers[i].fd.extFd[j], targetStream->m_parameters.svcBuffers[i].virt.extP[j]);
2251                }
2252            }
2253        }
2254    } else {
2255        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2256        return 1;
2257    }
2258
2259    if (m_sensorThread != NULL && releasingScpMain) {
2260        m_sensorThread->release();
2261        ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
2262        while (!m_sensorThread->IsTerminated())
2263            usleep(SIG_WAITING_TICK);
2264        ALOGD("(%s): END   Waiting for (indirect) sensor thread termination", __FUNCTION__);
2265    }
2266
2267    if (m_streamThreads[1].get() != NULL && m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2268        ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2269        targetStream = (StreamThread*)(m_streamThreads[1].get());
2270        targetStream->m_releasing = true;
2271        ALOGD("START stream thread release %d", __LINE__);
2272        do {
2273            targetStream->release();
2274            usleep(SIG_WAITING_TICK);
2275        } while (targetStream->m_releasing);
2276        m_camera_info.capture.status = false;
2277        ALOGD("END   stream thread release %d", __LINE__);
2278    }
2279
2280    if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2281        ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2282        targetStream = (StreamThread*)(m_streamThreads[0].get());
2283        targetStream->m_releasing = true;
2284        ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2285        do {
2286            targetStream->release();
2287            usleep(SIG_WAITING_TICK);
2288        } while (targetStream->m_releasing);
2289        ALOGD("(%s): END   Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2290        targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2291
2292        if (targetStream != NULL) {
2293            ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
2294            while (!targetStream->IsTerminated())
2295                usleep(SIG_WAITING_TICK);
2296            ALOGD("(%s): END   Waiting for (indirect) stream thread termination", __FUNCTION__);
2297            m_streamThreads[0] = NULL;
2298        }
2299        if (m_camera_info.capture.status == true) {
2300            m_scpForceSuspended = true;
2301        }
2302        m_isIspStarted = false;
2303    }
2304    ALOGV("(%s): END", __FUNCTION__);
2305    return 0;
2306}
2307
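/*
 * Reprocess stream support: allocateReprocessStream() is an empty stub;
 * allocateReprocessStreamFromStream() records the output stream id and the
 * reprocess ops and always reports STREAM_ID_JPEG_REPROCESS, which
 * releaseReprocessStream() clears again.
 */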
2308int ExynosCameraHWInterface2::allocateReprocessStream(
2309    uint32_t width, uint32_t height, uint32_t format,
2310    const camera2_stream_in_ops_t *reprocess_stream_ops,
2311    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
2312{
2313    ALOGV("DEBUG(%s):", __FUNCTION__);
2314    return 0;
2315}
2316
2317int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2318            uint32_t output_stream_id,
2319            const camera2_stream_in_ops_t *reprocess_stream_ops,
2320            // outputs
2321            uint32_t *stream_id)
2322{
2323    ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2324    *stream_id = STREAM_ID_JPEG_REPROCESS;
2325
2326    m_reprocessStreamId = *stream_id;
2327    m_reprocessOps = reprocess_stream_ops;
2328    m_reprocessOutputStreamId = output_stream_id;
2329    return 0;
2330}
2331
2332int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2333{
2334    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2335    if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2336        m_reprocessStreamId = 0;
2337        m_reprocessOps = NULL;
2338        m_reprocessOutputStreamId = 0;
2339        return 0;
2340    }
2341    return 1;
2342}
2343
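/*
 * triggerAction: dispatches framework triggers. CAMERA2_TRIGGER_AUTOFOCUS and
 * CANCEL_AUTOFOCUS are routed to the AF state machine (OnAfTriggerStart /
 * OnAfCancel), PRECAPTURE_METERING to OnPrecaptureMeteringTriggerStart;
 * ext1 carries the framework-supplied trigger id.
 */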
2344int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2345{
2346    ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2347
2348    switch (trigger_id) {
2349    case CAMERA2_TRIGGER_AUTOFOCUS:
2350        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2351        OnAfTriggerStart(ext1);
2352        break;
2353
2354    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2355        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2356        OnAfCancel(ext1);
2357        break;
2358    case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2359        ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2360        OnPrecaptureMeteringTriggerStart(ext1);
2361        break;
2362    default:
2363        break;
2364    }
2365    return 0;
2366}
2367
2368int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2369{
2370    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2371    m_notifyCb = notify_cb;
2372    m_callbackCookie = user;
2373    return 0;
2374}
2375
2376int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2377{
2378    ALOGV("DEBUG(%s):", __FUNCTION__);
2379    return 0;
2380}
2381
2382int ExynosCameraHWInterface2::dump(int fd)
2383{
2384    ALOGV("DEBUG(%s):", __FUNCTION__);
2385    return 0;
2386}
2387
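/*
 * m_getAlignedYUVSize: fills buf->size.extS[] with the per-plane byte sizes
 * required for the given V4L2 color format, applying the alignment rules used
 * by this HAL. Illustrative example (values follow directly from the formulas
 * below): V4L2_PIX_FMT_NV12M at 1920x1080 gives
 *   extS[0] = ALIGN(1920,16) * ALIGN(1080,16) = 1920 * 1088 = 2088960
 *   extS[1] = ALIGN(2088960 / 2, 256)         = 1044480
 *   extS[2] = 0
 */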
2388void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2389{
2390    switch (colorFormat) {
2391    // 1p
2392    case V4L2_PIX_FMT_RGB565 :
2393    case V4L2_PIX_FMT_YUYV :
2394    case V4L2_PIX_FMT_UYVY :
2395    case V4L2_PIX_FMT_VYUY :
2396    case V4L2_PIX_FMT_YVYU :
2397        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2398        buf->size.extS[1] = 0;
2399        buf->size.extS[2] = 0;
2400        break;
2401    // 2p
2402    case V4L2_PIX_FMT_NV12 :
2403    case V4L2_PIX_FMT_NV12T :
2404    case V4L2_PIX_FMT_NV21 :
2405        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2406        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2407        buf->size.extS[2] = 0;
2408        break;
2409    case V4L2_PIX_FMT_NV12M :
2410    case V4L2_PIX_FMT_NV12MT_16X16 :
2411    case V4L2_PIX_FMT_NV21M:
2412        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2413        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2414        buf->size.extS[2] = 0;
2415        break;
2416    case V4L2_PIX_FMT_NV16 :
2417    case V4L2_PIX_FMT_NV61 :
2418        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2419        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2420        buf->size.extS[2] = 0;
2421        break;
2422     // 3p
2423    case V4L2_PIX_FMT_YUV420 :
2424    case V4L2_PIX_FMT_YVU420 :
2425        buf->size.extS[0] = (w * h);
2426        buf->size.extS[1] = (w * h) >> 2;
2427        buf->size.extS[2] = (w * h) >> 2;
2428        break;
2429    case V4L2_PIX_FMT_YUV420M:
2430    case V4L2_PIX_FMT_YVU420M :
2431        buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
2432        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2433        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2434        break;
2435    case V4L2_PIX_FMT_YUV422P :
2436        buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
2437        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2438        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2439        break;
2440    default:
2441        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2442        return;
2444    }
2445}
2446
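/*
 * m_getRatioSize: computes a centered crop of the source that matches the
 * destination aspect ratio, optionally narrowed further by the zoom factor,
 * with the crop width, height and offsets forced to even values.
 * Illustrative example: src 1920x1080 (16:9) -> dst 720x480 (3:2), zoom 0:
 *   dst_ratio(1.5) <= src_ratio(1.78), so crop_w = 1080 * 1.5 = 1620,
 *   crop_h = 1080, crop_x = (1920 - 1620) / 2 = 150, crop_y = 0.
 */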
2447bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2448                                             int  dst_w,  int   dst_h,
2449                                             int *crop_x, int *crop_y,
2450                                             int *crop_w, int *crop_h,
2451                                             int zoom)
2452{
2453    *crop_w = src_w;
2454    *crop_h = src_h;
2455
2456    if (   src_w != dst_w
2457        || src_h != dst_h) {
2458        float src_ratio = 1.0f;
2459        float dst_ratio = 1.0f;
2460
2461        // ex : 1024 / 768
2462        src_ratio = (float)src_w / (float)src_h;
2463
2464        // ex : 352  / 288
2465        dst_ratio = (float)dst_w / (float)dst_h;
2466
2468        if (dst_ratio <= src_ratio) {
2469            // shrink w
2470            *crop_w = src_h * dst_ratio;
2471            *crop_h = src_h;
2472        } else {
2473            // shrink h
2474            *crop_w = src_w;
2475            *crop_h = src_w / dst_ratio;
2476        }
2488    }
2489
2490    if (zoom != 0) {
2491        float zoomLevel = ((float)zoom + 10.0) / 10.0;
2492        *crop_w = (int)((float)*crop_w / zoomLevel);
2493        *crop_h = (int)((float)*crop_h / zoomLevel);
2494    }
2495
2496    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2497    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2498    if (w_align != 0) {
2499        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2500            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
2501            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2502        }
2503        else
2504            *crop_w -= w_align;
2505    }
2506
2507    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2508    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2509    if (h_align != 0) {
2510        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2511            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
2512            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2513        }
2514        else
2515            *crop_h -= h_align;
2516    }
2517
2518    *crop_x = (src_w - *crop_w) >> 1;
2519    *crop_y = (src_h - *crop_h) >> 1;
2520
2521    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2522        *crop_x -= 1;
2523
2524    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2525        *crop_y -= 1;
2526
2527    return true;
2528}
2529
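/*
 * BayerBufManager: bookkeeping for the NUM_BAYER_BUFFERS raw buffers that
 * circulate between the HAL, the sensor node and the ISP node. Each entry
 * walks the cycle
 *   BAYER_ON_HAL_EMPTY -> BAYER_ON_SENSOR -> BAYER_ON_HAL_FILLED
 *     -> BAYER_ON_ISP -> BAYER_ON_HAL_EMPTY
 * with separate ring heads for sensor enqueue and ISP enqueue/dequeue.
 * Typical sequence (sketch; the Get* helpers return -1 when no buffer is in
 * the required state):
 *   idx = GetIndexForSensorEnqueue();   MarkSensorEnqueue(idx);
 *   // sensor fills the buffer
 *   MarkSensorDequeue(idx, frameCnt, &ts);
 *   idx = GetIndexForIspEnqueue(&cnt);  MarkIspEnqueue(idx);
 *   idx = GetIndexForIspDequeue(&cnt);  MarkIspDequeue(idx);
 */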
2530BayerBufManager::BayerBufManager()
2531{
2532    ALOGV("DEBUG(%s): ", __FUNCTION__);
2533    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2534        entries[i].status = BAYER_ON_HAL_EMPTY;
2535        entries[i].reqFrameCnt = 0;
2536    }
2537    sensorEnqueueHead = 0;
2538    sensorDequeueHead = 0;
2539    ispEnqueueHead = 0;
2540    ispDequeueHead = 0;
2541    numOnSensor = 0;
2542    numOnIsp = 0;
2543    numOnHalFilled = 0;
2544    numOnHalEmpty = NUM_BAYER_BUFFERS;
2545}
2546
2547BayerBufManager::~BayerBufManager()
2548{
2549    ALOGV("%s", __FUNCTION__);
2550}
2551
2552int     BayerBufManager::GetIndexForSensorEnqueue()
2553{
2554    int ret = 0;
2555    if (numOnHalEmpty == 0)
2556        ret = -1;
2557    else
2558        ret = sensorEnqueueHead;
2559    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2560    return ret;
2561}
2562
2563int    BayerBufManager::MarkSensorEnqueue(int index)
2564{
2565    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2566
2567    // sanity check
2568    if (index != sensorEnqueueHead) {
2569        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2570        return -1;
2571    }
2572    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2573        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2574            index, entries[index].status, BAYER_ON_HAL_EMPTY);
2575        return -1;
2576    }
2577
2578    entries[index].status = BAYER_ON_SENSOR;
2579    entries[index].reqFrameCnt = 0;
2580    numOnHalEmpty--;
2581    numOnSensor++;
2582    sensorEnqueueHead = GetNextIndex(index);
2583    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2584        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2585    return 0;
2586}
2587
2588int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
2589{
2590    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2591
2592    if (entries[index].status != BAYER_ON_SENSOR) {
2593        ALOGE("ERR(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2594            index, entries[index].status, BAYER_ON_SENSOR);
2595        return -1;
2596    }
2597
2598    entries[index].status = BAYER_ON_HAL_FILLED;
2599    numOnHalFilled++;
2600    numOnSensor--;
2601
2602    return 0;
2603}
2604
2605int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2606{
2607    int ret = 0;
2608    if (numOnHalFilled == 0)
2609        ret = -1;
2610    else {
2611        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2612        ret = ispEnqueueHead;
2613    }
2614    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2615    return ret;
2616}
2617
2618int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2619{
2620    int ret = 0;
2621    if (numOnIsp == 0)
2622        ret = -1;
2623    else {
2624        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2625        ret = ispDequeueHead;
2626    }
2627    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2628    return ret;
2629}
2630
2631int    BayerBufManager::MarkIspEnqueue(int index)
2632{
2633    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2634
2635    // sanity check
2636    if (index != ispEnqueueHead) {
2637        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2638        return -1;
2639    }
2640    if (entries[index].status != BAYER_ON_HAL_FILLED) {
2641        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2642            index, entries[index].status, BAYER_ON_HAL_FILLED);
2643        return -1;
2644    }
2645
2646    entries[index].status = BAYER_ON_ISP;
2647    numOnHalFilled--;
2648    numOnIsp++;
2649    ispEnqueueHead = GetNextIndex(index);
2650    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2651        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2652    return 0;
2653}
2654
2655int    BayerBufManager::MarkIspDequeue(int index)
2656{
2657    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2658
2659    // sanity check
2660    if (index != ispDequeueHead) {
2661        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2662        return -1;
2663    }
2664    if (entries[index].status != BAYER_ON_ISP) {
2665        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2666            index, entries[index].status, BAYER_ON_ISP);
2667        return -1;
2668    }
2669
2670    entries[index].status = BAYER_ON_HAL_EMPTY;
2671    entries[index].reqFrameCnt = 0;
2672    numOnHalEmpty++;
2673    numOnIsp--;
2674    ispDequeueHead = GetNextIndex(index);
2675    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2676        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2677    return 0;
2678}
2679
2680int BayerBufManager::GetNumOnSensor()
2681{
2682    return numOnSensor;
2683}
2684
2685int BayerBufManager::GetNumOnHalFilled()
2686{
2687    return numOnHalFilled;
2688}
2689
2690int BayerBufManager::GetNumOnIsp()
2691{
2692    return numOnIsp;
2693}
2694
2695int     BayerBufManager::GetNextIndex(int index)
2696{
2697    index++;
2698    if (index >= NUM_BAYER_BUFFERS)
2699        index = 0;
2700
2701    return index;
2702}
2703
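/*
 * m_mainThreadFunc: body of the signal-driven main thread.
 *   SIGNAL_MAIN_REQ_Q_NOT_EMPTY   - dequeue one request from the service
 *     queue, register it with the request manager and wake the sensor thread;
 *     re-signal itself while the internal queue still has room.
 *   SIGNAL_MAIN_STREAM_OUTPUT_DONE - prepare the result metadata for the
 *     oldest completed request, free the original request, dequeue an empty
 *     frame from the service, append the prepared metadata and enqueue it.
 *   SIGNAL_THREAD_RELEASE          - terminate the thread.
 */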
2704void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2705{
2706    camera_metadata_t *currentRequest = NULL;
2707    camera_metadata_t *currentFrame = NULL;
2708    size_t numEntries = 0;
2709    size_t frameSize = 0;
2710    camera_metadata_t * preparedFrame = NULL;
2711    camera_metadata_t *deregisteredRequest = NULL;
2712    uint32_t currentSignal = self->GetProcessingSignal();
2713    MainThread *  selfThread      = ((MainThread*)self);
2714    int res = 0;
2715
2716    int ret;
2717
2718    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2719
2720    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2721        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2722
2723        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2724        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2725        return;
2726    }
2727
2728    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2729        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2730        if (m_requestManager->IsRequestQueueFull()==false) {
2731            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2732            if (NULL == currentRequest) {
2733                ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
2734                m_isRequestQueueNull = true;
2735                if (m_requestManager->IsVdisEnable())
2736                    m_vdisBubbleCnt = 1;
2737            }
2738            else {
2739                m_requestManager->RegisterRequest(currentRequest);
2740
2741                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2742                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2743                if (m_requestManager->IsRequestQueueFull()==false)
2744                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2745
2746                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2747            }
2748        }
2749        else {
2750            m_isRequestQueuePending = true;
2751        }
2752    }
2753
2754    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2755        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2756        /*while (1)*/ {
2757            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2758            if (ret == false)
2759                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2760
2761            m_requestManager->DeregisterRequest(&deregisteredRequest);
2762
2763            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2764            if (ret < 0)
2765                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2766
2767            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2768            if (ret < 0)
2769                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2770
2771            if (currentFrame==NULL) {
2772                ALOGE("ERR(%s): frame dequeue returned NULL, dropping frame metadata",__FUNCTION__ );
2773            }
2774            else {
2775                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2776                res = append_camera_metadata(currentFrame, preparedFrame);
2777                if (res==0) {
2778                    ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2779                    m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2780                }
2781                else {
2782                    ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2783                }
2784            }
2785        }
2786        if (!m_isRequestQueueNull) {
2787            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2788        }
2789
2790        if (getInProgressCount()>0) {
2791            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2792            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2793        }
2794    }
2795    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2796    return;
2797}
2798
2799void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2800{
2801    ALOGD("####  common Section");
2802    ALOGD("####                 magic(%x) ",
2803        shot_ext->shot.magicNumber);
2804    ALOGD("####  ctl Section");
2805    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2806        shot_ext->shot.ctl.request.metadataMode,
2807        shot_ext->shot.ctl.lens.aperture,
2808        shot_ext->shot.ctl.sensor.exposureTime,
2809        shot_ext->shot.ctl.sensor.frameDuration,
2810        shot_ext->shot.ctl.sensor.sensitivity,
2811        shot_ext->shot.ctl.aa.awbMode);
2812
2813    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2814        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2815        shot_ext->shot.ctl.request.outputStreams[0]);
2816
2817    ALOGD("####  DM Section");
2818    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2819        shot_ext->shot.dm.request.metadataMode,
2820        shot_ext->shot.dm.lens.aperture,
2821        shot_ext->shot.dm.sensor.exposureTime,
2822        shot_ext->shot.dm.sensor.frameDuration,
2823        shot_ext->shot.dm.sensor.sensitivity,
2824        shot_ext->shot.dm.sensor.timeStamp,
2825        shot_ext->shot.dm.aa.awbMode,
2826        shot_ext->shot.dm.request.frameCount );
2827}
2828
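/*
 * Pre-capture flash state machine (setter side): advances m_ctlInfo.flash.m_flashCnt
 * through the IS_FLASH_STATE_* sequence and, for each state, programs aeflashMode,
 * the AWB lock, and the SCC/SCP request flags of the outgoing shot.
 */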
2829void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2830{
2831    // Flash
2832    switch (m_ctlInfo.flash.m_flashCnt) {
2833    case IS_FLASH_STATE_ON:
2834        ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2835        // check AF locked
2836        if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
2837            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2838                if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2839                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2840                    m_ctlInfo.flash.m_flashTimeOut = 5;
2841                } else
2842                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2843                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2844            } else {
2845                m_ctlInfo.flash.m_flashTimeOut--;
2846            }
2847        } else {
2848            if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2849                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2850                m_ctlInfo.flash.m_flashTimeOut = 5;
2851            } else
2852                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2853            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2854        }
2855        break;
2856    case IS_FLASH_STATE_ON_WAIT:
2857        break;
2858    case IS_FLASH_STATE_ON_DONE:
2859        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2860            // auto transition at pre-capture trigger
2861            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2862        break;
2863    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2864        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2865        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2866        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2867        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2868        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2869        break;
2870    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2871    case IS_FLASH_STATE_AUTO_WAIT:
2872        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2873        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2874        break;
2875    case IS_FLASH_STATE_AUTO_DONE:
2876        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2877        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2878        break;
2879    case IS_FLASH_STATE_AUTO_OFF:
2880        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2881        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2882        m_ctlInfo.flash.m_flashEnableFlg = false;
2883        break;
2884    case IS_FLASH_STATE_CAPTURE:
2885        ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2886        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2887        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2888        shot_ext->request_scc = 0;
2889        shot_ext->request_scp = 0;
2890        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2891        break;
2892    case IS_FLASH_STATE_CAPTURE_WAIT:
2893        shot_ext->request_scc = 0;
2894        shot_ext->request_scp = 0;
2895        break;
2896    case IS_FLASH_STATE_CAPTURE_JPEG:
2897        ALOGV("(%s): [Flash] Flash Capture  (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2898        shot_ext->request_scc = 1;
2899        shot_ext->request_scp = 1;
2900        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
2901        break;
2902    case IS_FLASH_STATE_CAPTURE_END:
2903        ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2904        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2905        shot_ext->request_scc = 0;
2906        shot_ext->request_scp = 0;
2907        m_ctlInfo.flash.m_flashEnableFlg = false;
2908        m_ctlInfo.flash.m_flashCnt = 0;
2909        m_ctlInfo.flash.m_afFlashDoneFlg= false;
2910        break;
2911    case IS_FLASH_STATE_NONE:
2912        break;
2913    default:
2914        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2915    }
2916}
2917
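/*
 * Flash listener on the sensor-side metadata: while in IS_FLASH_STATE_AUTO_WAIT it
 * moves to AUTO_DONE once the DM reports the flash back off, or immediately when the
 * earlier flash decision said no flash was needed.
 */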
2918void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2919{
2920    // Flash
2921    switch (m_ctlInfo.flash.m_flashCnt) {
2922    case IS_FLASH_STATE_AUTO_WAIT:
2923        if (m_ctlInfo.flash.m_flashDecisionResult) {
2924            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2925                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2926                ALOGV("(%s): [Flash] Lis :  AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2927            } else {
2928                ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2929            }
2930        } else {
2931            // If flash wasn't activated in auto flash mode, skip the flash auto control sequence
2932            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2933            ALOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
2934        }
2935        break;
2936    }
2937}
2938
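/*
 * Flash listener on the ISP-side metadata: consumes the flash decision, the AWB lock
 * state and firingStable from the DM to advance the flash state machine, falling
 * through on a time-out so the capture sequence cannot stall on the flash.
 */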
2939void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
2940{
2941    // Flash
2942    switch (m_ctlInfo.flash.m_flashCnt) {
2943    case IS_FLASH_STATE_ON_WAIT:
2944        if (shot_ext->shot.dm.flash.decision > 0) {
2945            // store decision result so the capture sequence can be skipped when flash is not needed
2946            ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
2947            if (shot_ext->shot.dm.flash.decision == 2)
2948                m_ctlInfo.flash.m_flashDecisionResult = false;
2949            else
2950                m_ctlInfo.flash.m_flashDecisionResult = true;
2951            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2952        } else {
2953            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2954                ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
2955                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2956                m_ctlInfo.flash.m_flashDecisionResult = false;
2957            } else {
2958                m_ctlInfo.flash.m_flashTimeOut--;
2959            }
2960        }
2961        break;
2962    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2963        if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
2964            ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
2965            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
2966        } else {
2967            ALOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
2968        }
2969        break;
2970    case IS_FLASH_STATE_CAPTURE_WAIT:
2971        if (m_ctlInfo.flash.m_flashDecisionResult) {
2972            if (shot_ext->shot.dm.flash.firingStable) {
2973                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2974            } else {
2975                if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2976                    ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
2977                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2978                } else {
2979                    ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
2980                    m_ctlInfo.flash.m_flashTimeOut--;
2981                }
2982            }
2983        } else {
2984            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2985        }
2986        break;
2987    }
2988}
2989
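/*
 * Reports a pre-capture AE state to the framework based on the active flash mode:
 * converged when flash is off, or FLASH_REQUIRED when the auto-flash decision and
 * pre-flash have already completed.
 */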
2990void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext)
2991{
2992    switch (m_ctlInfo.flash.i_flashMode) {
2993    case AA_AEMODE_ON:
2994        // In flash-off mode, capture can be done as a ZSL capture
2995        shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
2996        break;
2997    case AA_AEMODE_ON_AUTO_FLASH:
2998        // In auto-flash mode, the main flash has to fire if the pre-flash was done.
2999        if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg)
3000            shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED;
3001        break;
3002    }
3003}
3004
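/*
 * Tracks the AF region requested by the framework: an all-zero region resets the cached
 * region, CAF modes force the region back to default, and any other change re-triggers
 * AF with the current mode.
 */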
3005void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
3006{
3007    if (0 == shot_ext->shot.ctl.aa.afRegions[0] && 0 == shot_ext->shot.ctl.aa.afRegions[1]
3008            && 0 == shot_ext->shot.ctl.aa.afRegions[2] && 0 == shot_ext->shot.ctl.aa.afRegions[3]) {
3009        ALOGV("(%s): AF region resetting", __FUNCTION__);
3010        lastAfRegion[0] = 0;
3011        lastAfRegion[1] = 0;
3012        lastAfRegion[2] = 0;
3013        lastAfRegion[3] = 0;
3014    } else {
3015        // clear region info in case of CAF modes
3016        if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3017            shot_ext->shot.ctl.aa.afRegions[0] = shot_ext->shot.ctl.aa.aeRegions[0] = lastAfRegion[0] = 0;
3018            shot_ext->shot.ctl.aa.afRegions[1] = shot_ext->shot.ctl.aa.aeRegions[1] = lastAfRegion[1] = 0;
3019            shot_ext->shot.ctl.aa.afRegions[2] = shot_ext->shot.ctl.aa.aeRegions[2] = lastAfRegion[2] = 0;
3020            shot_ext->shot.ctl.aa.afRegions[3] = shot_ext->shot.ctl.aa.aeRegions[3] = lastAfRegion[3] = 0;
3021        } else if (!(lastAfRegion[0] == shot_ext->shot.ctl.aa.afRegions[0] && lastAfRegion[1] == shot_ext->shot.ctl.aa.afRegions[1]
3022                && lastAfRegion[2] == shot_ext->shot.ctl.aa.afRegions[2] && lastAfRegion[3] == shot_ext->shot.ctl.aa.afRegions[3])) {
3023            ALOGD("(%s): AF region changed : triggering (%d)", __FUNCTION__, m_afMode);
3024            shot_ext->shot.ctl.aa.afTrigger = 1;
3025            shot_ext->shot.ctl.aa.afMode = m_afMode;
3026            m_afState = HAL_AFSTATE_STARTED;
3027            lastAfRegion[0] = shot_ext->shot.ctl.aa.afRegions[0];
3028            lastAfRegion[1] = shot_ext->shot.ctl.aa.afRegions[1];
3029            lastAfRegion[2] = shot_ext->shot.ctl.aa.afRegions[2];
3030            lastAfRegion[3] = shot_ext->shot.ctl.aa.afRegions[3];
3031            m_IsAfTriggerRequired = false;
3032        }
3033    }
3034}
3035
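/*
 * Fires an AF trigger in the outgoing shot; a non-zero mode skips the state check
 * (used by the flash sequence to force a restart), while mode == 0 expects the state
 * machine to be in HAL_AFSTATE_NEEDS_COMMAND.
 */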
3036void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
3037{
3038    if (m_afState == HAL_AFSTATE_SCANNING) {
3039        ALOGD("(%s): restarting trigger ", __FUNCTION__);
3040    } else if (!mode) {
3041        if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
3042            ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
3043        else
3044            m_afState = HAL_AFSTATE_STARTED;
3045    }
3046    ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState);
3047    shot_ext->shot.ctl.aa.afTrigger = 1;
3048    shot_ext->shot.ctl.aa.afMode = m_afMode;
3049    m_IsAfTriggerRequired = false;
3050}
3051
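/*
 * Sensor thread loop: dequeues a bayer buffer from the sensor node, matches it to a
 * pending request, patches the embedded camera2_shot_ext (crop/zoom, AF mode and
 * trigger, flash and night-capture sequencing), queues it to the ISP, then dequeues
 * the processed metadata, signals the stream threads for SCC/SCP output and finally
 * re-queues the sensor buffer. Unmatched frames are pushed through as "bubble" shots
 * so the pipeline keeps running.
 */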
3052void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
3053{
3054    uint32_t        currentSignal = self->GetProcessingSignal();
3055    SensorThread *  selfThread      = ((SensorThread*)self);
3056    int index;
3057    int index_isp;
3058    status_t res;
3059    nsecs_t frameTime;
3060    int bayersOnSensor = 0, bayersOnIsp = 0;
3061    int j = 0;
3062    bool isCapture = false;
3063    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
3064
3065    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3066        CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3067
3068        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
3069        cam_int_streamoff(&(m_camera_info.sensor));
3070        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
3071
3072        m_camera_info.sensor.buffers = 0;
3073        ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
3074        cam_int_reqbufs(&(m_camera_info.sensor));
3075        ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
3076        m_camera_info.sensor.status = false;
3077
3078        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
3079        isp_int_streamoff(&(m_camera_info.isp));
3080        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
3081
3082        m_camera_info.isp.buffers = 0;
3083        ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
3084        cam_int_reqbufs(&(m_camera_info.isp));
3085        ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
3086
3087        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
3088
3089        m_requestManager->releaseSensorQ();
3090        m_requestManager->ResetEntry();
3091        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3092        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
3093        return;
3094    }
3095
3096    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
3097    {
3098        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
3099        int targetStreamIndex = 0, i=0;
3100        int matchedFrameCnt = -1, processingReqIndex;
3101        struct camera2_shot_ext *shot_ext;
3102        struct camera2_shot_ext *shot_ext_capture;
3103        bool triggered = false;
3104        int afMode;
3105
3106        /* dqbuf from sensor */
3107        ALOGV("Sensor DQbuf start");
3108        index = cam_int_dqbuf(&(m_camera_info.sensor));
3109        m_requestManager->pushSensorQ(index);
3110        ALOGV("Sensor DQbuf done(%d)", index);
3111        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3112
3113        if (m_nightCaptureCnt != 0) {
3114            matchedFrameCnt = m_nightCaptureFrameCnt;
3115        } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
3116            matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
3117            ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
3118        } else {
3119            matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
3120        }
3121
3122        if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
3123            matchedFrameCnt = m_vdisDupFrame;
3124        }
3125
3126        if (matchedFrameCnt != -1) {
3127            if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) {
3128                frameTime = systemTime();
3129                m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
3130                m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
3131            } else {
3132                ALOGV("bubble for vdis: m_vdisDupFrame %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt);
3133            }
3134
3135            // track scene mode changes; face AF mode for the face priority scene mode is applied below
3136            if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
3137                ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode);
3138                m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3139            }
3140
3141            if (m_afModeWaitingCnt != 0) {
3142                ALOGV("### Af Trigger pulled, waiting for mode change cnt(%d) ", m_afModeWaitingCnt);
3143                m_afModeWaitingCnt --;
3144                if (m_afModeWaitingCnt == 1) {
3145                    m_afModeWaitingCnt = 0;
3146                    OnAfTrigger(m_afPendingTriggerId);
3147                }
3148            }
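            /*
             * Digital zoom: derive the zoom ratio from the requested crop width vs. the
             * full sensor width, rebuild an aspect-correct crop rectangle for the preview
             * stream centered in the sensor output, and compensate the width against the
             * 4-pixel aligned crop before writing it back into the shot.
             */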
3149            m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
3150            float zoomLeft, zoomTop, zoomWidth, zoomHeight;
3151            int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
3152
3153            m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
3154                           m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
3155                           &crop_x, &crop_y,
3156                           &crop_w, &crop_h,
3157                           0);
3158
3159            if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
3160                zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
3161                zoomHeight = zoomWidth *
3162                        m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
3163            } else {
3164                zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
3165                zoomWidth = zoomHeight *
3166                        m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
3167            }
3168            zoomLeft = (crop_w - zoomWidth) / 2;
3169            zoomTop = (crop_h - zoomHeight) / 2;
3170
3171            int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth };
3172
3173            int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4);
3174            if (cropCompensation)
3175                new_cropRegion[2] -= cropCompensation;
3176
3177            shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
3178            shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
3179            shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
3180            if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
3181                ALOGD("### AF Mode change(Mode %d) ", m_afMode);
3182                shot_ext->shot.ctl.aa.afMode = m_afMode;
3183                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3184                    ALOGD("### With automatic trigger for continuous modes");
3185                    m_afState = HAL_AFSTATE_STARTED;
3186                    shot_ext->shot.ctl.aa.afTrigger = 1;
3187                    triggered = true;
3188                    if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) ||
3189                            (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) {
3190                        switch (m_afMode) {
3191                        case AA_AFMODE_CONTINUOUS_PICTURE:
3192                            shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE;
3193                            ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode);
3194                            break;
3195                        }
3196                    }
3197                    // reset flash result
3198                    if (m_ctlInfo.flash.m_afFlashDoneFlg) {
3199                        m_ctlInfo.flash.m_flashEnableFlg = false;
3200                        m_ctlInfo.flash.m_afFlashDoneFlg = false;
3201                        m_ctlInfo.flash.m_flashDecisionResult = false;
3202                        m_ctlInfo.flash.m_flashCnt = 0;
3203                    }
3204                    m_ctlInfo.af.m_afTriggerTimeOut = 1;
3205                }
3206
3207                m_IsAfModeUpdateRequired = false;
3208                // support infinity focus mode
3209                if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
3210                    shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
3211                    shot_ext->shot.ctl.aa.afTrigger = 1;
3212                    triggered = true;
3213                }
3214                if (m_afMode2 != NO_CHANGE) {
3215                    enum aa_afmode tempAfMode = m_afMode2;
3216                    m_afMode2 = NO_CHANGE;
3217                    SetAfMode(tempAfMode);
3218                }
3219            }
3220            else {
3221                shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
3222            }
3223            if (m_IsAfTriggerRequired) {
3224                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
3225                    // flash case
3226                    if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
3227                        if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3228                            // Flash is enabled and start AF
3229                            m_afTrigger(shot_ext, 1);
3230                        } else {
3231                            if (m_ctlInfo.af.m_afTriggerTimeOut == 0)
3232                                m_afTrigger(shot_ext, 0);
3233                            else
3234                                m_ctlInfo.af.m_afTriggerTimeOut--;
3235                        }
3236                    }
3237                } else {
3238                    // non-flash case
3239                    if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3240                        m_afTrigger(shot_ext, 0);
3241                    } else {
3242                        if (m_ctlInfo.af.m_afTriggerTimeOut == 0)
3243                            m_afTrigger(shot_ext, 0);
3244                        else
3245                            m_ctlInfo.af.m_afTriggerTimeOut--;
3246                    }
3247                }
3248            } else {
3249                shot_ext->shot.ctl.aa.afTrigger = 0;
3250            }
3251
3252            if (m_wideAspect) {
3253                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3254                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3255                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3256            } else {
3257                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3258            }
3259            if (triggered)
3260                shot_ext->shot.ctl.aa.afTrigger = 1;
3261
3262            // TODO : check collision with AFMode Update
3263            if (m_IsAfLockRequired) {
3264                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
3265                m_IsAfLockRequired = false;
3266            }
3267            ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
3268                index,
3269                shot_ext->shot.ctl.request.frameCount,
3270                shot_ext->request_scp,
3271                shot_ext->request_scc,
3272                shot_ext->dis_bypass, sizeof(camera2_shot));
3273
3274            // update AF region
3275            m_updateAfRegion(shot_ext);
3276
3277            m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3278            if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
3279                    && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
3280                shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
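            /*
             * Night-shot handling: a still capture requested in NIGHT scene mode starts a
             * four-frame AA_SCENE_MODE_NIGHT_CAPTURE sequence (m_nightCaptureCnt 4..1);
             * SCC output is suppressed until the final frame of the sequence.
             */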
3281            if (m_nightCaptureCnt == 0) {
3282                if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
3283                        && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3284                    shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3285                    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3286                    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3287                    m_nightCaptureCnt = 4;
3288                    m_nightCaptureFrameCnt = matchedFrameCnt;
3289                    shot_ext->request_scc = 0;
3290                }
3291            }
3292            else if (m_nightCaptureCnt == 1) {
3293                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3294                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3295                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3296                m_nightCaptureCnt--;
3297                m_nightCaptureFrameCnt = 0;
3298                shot_ext->request_scc = 1;
3299            }
3300            else if (m_nightCaptureCnt == 2) {
3301                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3302                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3303                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3304                m_nightCaptureCnt--;
3305                shot_ext->request_scc = 0;
3306            }
3307            else if (m_nightCaptureCnt == 3) {
3308                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3309                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3310                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3311                m_nightCaptureCnt--;
3312                shot_ext->request_scc = 0;
3313            }
3314            else if (m_nightCaptureCnt == 4) {
3315                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3316                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3317                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3318                m_nightCaptureCnt--;
3319                shot_ext->request_scc = 0;
3320            }
3321
3322            // Flash mode
3323            // Skip request_scc = 1 while flash is enabled so the flash state machine can drive the capture sequence itself
3324            if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
3325                    && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3326                    && (m_cameraId == 0)) {
3327                if (!m_ctlInfo.flash.m_flashDecisionResult) {
3328                    m_ctlInfo.flash.m_flashEnableFlg = false;
3329                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3330                    m_ctlInfo.flash.m_flashCnt = 0;
3331                } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) ||
3332                                          (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3333                    ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3334                    shot_ext->request_scc = 0;
3335                    m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3336                    m_ctlInfo.flash.m_flashEnableFlg = true;
3337                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3338                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3339                } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
3340                    ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
3341                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3342                    m_ctlInfo.flash.m_flashEnableFlg = false;
3343                    m_ctlInfo.flash.m_afFlashDoneFlg= false;
3344                    m_ctlInfo.flash.m_flashCnt = 0;
3345                }
3346            } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
3347                m_ctlInfo.flash.m_flashDecisionResult = false;
3348            }
3349
3350            // TODO : set torch mode for video recording. need to find proper position.
3351            // m_wideAspect will be changed to a recording hint
3352            if ((shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_SINGLE) && m_wideAspect) {
3353                shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_TORCH;
3354                shot_ext->shot.ctl.flash.firingPower = 10;
3355                m_ctlInfo.flash.m_flashTorchMode = true;
3356            } else if (m_wideAspect){
3357                shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3358                shot_ext->shot.ctl.flash.firingPower = 0;
3359                m_ctlInfo.flash.m_flashTorchMode = false;
3360            } else {
3361                if (m_ctlInfo.flash.m_flashTorchMode) {
3362                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3363                    shot_ext->shot.ctl.flash.firingPower = 0;
3364                    m_ctlInfo.flash.m_flashTorchMode = false;
3365                } else {
3366                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3367                }
3368            }
3369
3370            if (shot_ext->isReprocessing) {
3371                ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__);
3372                m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3373                shot_ext->request_scp = 0;
3374                shot_ext->request_scc = 0;
3375                m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3376                m_ctlInfo.flash.m_flashDecisionResult = false;
3377                memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)),
3378                    sizeof(struct camera2_shot_ext));
3379                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3380                m_ctlInfo.flash.m_flashEnableFlg = false;
3381            }
3382
3383            if (m_ctlInfo.flash.m_flashEnableFlg) {
3384                m_preCaptureListenerSensor(shot_ext);
3385                m_preCaptureSetter(shot_ext);
3386            }
3387
3388            ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3389            (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3390            (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3391            (int)(shot_ext->shot.ctl.aa.afTrigger));
3392
3393            if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
3394                shot_ext->dis_bypass = 1;
3395                shot_ext->request_scp = 0;
3396                shot_ext->request_scc = 0;
3397                m_vdisBubbleCnt--;
3398                matchedFrameCnt = -1;
3399            } else {
3400                m_vdisDupFrame = matchedFrameCnt;
3401            }
3402            if (m_scpForceSuspended)
3403                shot_ext->request_scc = 0;
3404
3405            uint32_t current_scp = shot_ext->request_scp;
3406            uint32_t current_scc = shot_ext->request_scc;
3407
3408            if (shot_ext->shot.dm.request.frameCount == 0) {
3409                CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3410            }
3411
3412            cam_int_qbuf(&(m_camera_info.isp), index);
3413
3414            ALOGV("### isp DQBUF start");
3415            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3416
3417            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3418
3419            if (m_ctlInfo.flash.m_flashEnableFlg)
3420                m_preCaptureListenerISP(shot_ext);
3421
3422            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
3423                index,
3424                shot_ext->shot.ctl.request.frameCount,
3425                shot_ext->request_scp,
3426                shot_ext->request_scc,
3427                shot_ext->dis_bypass,
3428                shot_ext->dnr_bypass, sizeof(camera2_shot));
3429
3430            ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3431                (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3432                (int)(shot_ext->shot.dm.aa.awbMode),
3433                (int)(shot_ext->shot.dm.aa.afMode));
3434
3435#ifndef ENABLE_FRAME_SYNC
3436            m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3437#endif
3438
3439            if (!shot_ext->fd_bypass) {
3440                /* scale FD face rectangles from preview stream coordinates to sensor array coordinates */
3441                for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3442                    if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3443                        shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
3444                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][0])
3445                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3446                    if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3447                        shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
3448                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][1])
3449                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3450                    if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3451                        shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
3452                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][2])
3453                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3454                    if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3455                        shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
3456                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][3])
3457                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3458                }
3459            }
3460            // aeState control
3461            if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT)
3462                m_preCaptureAeState(shot_ext);
3463
3464            // In face-priority scene mode, report the base continuous-picture AF mode back to the framework
3465            if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
3466                shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3467
3468            if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3469                m_requestManager->ApplyDynamicMetadata(shot_ext);
3470            }
3471
3472            if (current_scc != shot_ext->request_scc) {
3473                ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
3474                                __FUNCTION__, current_scc, shot_ext->request_scc);
3475                m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3476            }
3477            if (shot_ext->request_scc) {
3478                ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
3479                if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) {
3480                    if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)
3481                        memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)),
3482                            sizeof(struct camera2_shot_ext));
3483                    else
3484                        memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext));
3485                }
3486                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3487            }
3488            if (current_scp != shot_ext->request_scp) {
3489                ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
3490                                __FUNCTION__, current_scp, shot_ext->request_scp);
3491                m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3492            }
3493            if (shot_ext->request_scp) {
3494                ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
3495                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3496            }
3497
3498            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3499               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3500            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3501                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3502                m_scp_closed = true;
3503            }
3504            else
3505                m_scp_closed = false;
3506
3507            OnAfNotification(shot_ext->shot.dm.aa.afState);
3508            OnPrecaptureMeteringNotificationISP();
3509        }   else {
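            /*
             * No matching request ("bubble" frame): queue a dummy shot copied from
             * dummy_shot with every bypass enabled and no SCC/SCP output requested,
             * just to keep the sensor/ISP pipeline fed.
             */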
3510            memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl));
3511            shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
3512            shot_ext->request_sensor = 1;
3513            shot_ext->dis_bypass = 1;
3514            shot_ext->dnr_bypass = 1;
3515            shot_ext->fd_bypass = 1;
3516            shot_ext->drc_bypass = 1;
3517            shot_ext->request_scc = 0;
3518            shot_ext->request_scp = 0;
3519            if (m_wideAspect) {
3520                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3521                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3522                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3523            } else {
3524                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3525            }
3526            shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode;
3527            if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3528                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3529                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3530            }
3531            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3532            shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3533            ALOGV("### isp QBUF start (bubble)");
3534            ALOGV("bubble: queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
3535                (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3536                (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3537                (int)(shot_ext->shot.ctl.aa.afTrigger));
3538
3539            cam_int_qbuf(&(m_camera_info.isp), index);
3540            ALOGV("### isp DQBUF start (bubble)");
3541            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3542            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3543            ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
3544                (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3545                (int)(shot_ext->shot.dm.aa.awbMode),
3546                (int)(shot_ext->shot.dm.aa.afMode));
3547
3548            OnAfNotification(shot_ext->shot.dm.aa.afState);
3549        }
3550
3551        index = m_requestManager->popSensorQ();
3552        if(index < 0){
3553            ALOGE("sensorQ is empty");
3554            return;
3555        }
3556
3557        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]), &afMode);
3558        if (processingReqIndex != -1)
3559            SetAfMode((enum aa_afmode)afMode);
3560
3561
3562        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3563        if (m_scp_closing || m_scp_closed) {
3564            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3565            shot_ext->request_scc = 0;
3566            shot_ext->request_scp = 0;
3567            shot_ext->request_sensor = 0;
3568        }
3569        cam_int_qbuf(&(m_camera_info.sensor), index);
3570        ALOGV("Sensor Qbuf done(%d)", index);
3571
3572        if (!m_scp_closing
3573            && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3574            ALOGV("make bubble shot: matchedFrameCnt(%d) processingReqIndex(%d)",
3575                                    matchedFrameCnt, processingReqIndex);
3576            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3577        }
3578    }
3579    return;
3580}
3581
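/*
 * Lazily dequeues the gralloc buffers that the service pre-registered for the main
 * stream and each attached substream, reconciles their ownership state
 * (ON_SERVICE / REQUIRES_DQ_FROM_SVC -> ON_HAL / ON_DRIVER), and allocates the ION
 * scratch buffers used for JPEG resizing and the preview-callback copy.
 */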
3582void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3583{
3584    uint32_t                currentSignal   = self->GetProcessingSignal();
3585    StreamThread *          selfThread      = ((StreamThread*)self);
3586    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3587    node_info_t             *currentNode    = selfStreamParms->node;
3588    substream_parameters_t  *subParms;
3589    buffer_handle_t * buf = NULL;
3590    status_t res;
3591    void *virtAddr[3];
3592    int i, j;
3593    int index;
3594    nsecs_t timestamp;
3595
3596    if (!(selfThread->m_isBufferInit))
3597    {
3598        for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3599            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3600            if (res != NO_ERROR || buf == NULL) {
3601                ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3602                return;
3603            }
3604            ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3605               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3606
3607            index = selfThread->findBufferIndex(buf);
3608            if (index == -1) {
3609                ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3610            }
3611            else {
3612                ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3613                    __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3614                if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3615                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3616                else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3617                    selfStreamParms->svcBufStatus[index] = ON_HAL;
3618                else {
3619                    ALOGV("DBG(%s): buffer status abnormal (%d) "
3620                        , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3621                }
3622                selfStreamParms->numSvcBufsInHal++;
3623            }
3624            selfStreamParms->bufIndex = 0;
3625        }
3626        selfThread->m_isBufferInit = true;
3627    }
3628    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3629        if (selfThread->m_attachedSubStreams[i].streamId == -1)
3630            continue;
3631
3632        subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3633        if (subParms->type && subParms->needBufferInit) {
3634            ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3635                __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3636            int checkingIndex = 0;
3637            bool found = false;
3638            for (j = 0 ; j < subParms->numSvcBuffers; j++) { // use j here so the outer substream index i is not clobbered
3639                res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3640                if (res != NO_ERROR || buf == NULL) {
3641                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3642                    return;
3643                }
3644                subParms->numSvcBufsInHal++;
3645                ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3646                   subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3647
3648                if (m_grallocHal->lock(m_grallocHal, *buf,
3649                       subParms->usage, 0, 0,
3650                       subParms->width, subParms->height, virtAddr) != 0) {
3651                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3652                }
3653                else {
3654                      ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3655                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3656                }
3657                found = false;
3658                for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3659                    if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3660                        found = true;
3661                        break;
3662                    }
3663                }
3664                ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3665                if (!found) break;
3666
3667                index = checkingIndex;
3668
3669                if (index == -1) {
3670                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3671                }
3672                else {
3673                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3674                        __FUNCTION__, index, subParms->svcBufStatus[index]);
3675                    if (subParms->svcBufStatus[index]== ON_SERVICE)
3676                        subParms->svcBufStatus[index] = ON_HAL;
3677                    else {
3678                        ALOGV("DBG(%s): buffer status abnormal (%d) "
3679                            , __FUNCTION__, subParms->svcBufStatus[index]);
3680                    }
3681                    if (*buf != subParms->svcBufHandle[index])
3682                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3683                    else
3684                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3685                }
3686                subParms->svcBufIndex = 0;
3687            }
3688            if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3689                m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3690                m_resizeBuf.size.extS[1] = 0;
3691                m_resizeBuf.size.extS[2] = 0;
3692
3693                if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3694                    ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3695                }
3696            }
3697            if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3698                m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3699                subParms->height, &m_previewCbBuf);
3700
3701                if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3702                    ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3703                }
3704            }
3705            subParms->needBufferInit= false;
3706        }
3707    }
3708}
3709
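/* Resets a stream thread's parameters and detaches all of its substreams. */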
3710void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3711{
3712    StreamThread *          selfThread      = ((StreamThread*)self);
3713    ALOGV("DEBUG(%s): ", __FUNCTION__ );
3714    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3715    selfThread->m_isBufferInit = false;
3716    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3717        selfThread->m_attachedSubStreams[i].streamId    = -1;
3718        selfThread->m_attachedSubStreams[i].priority    = 0;
3719    }
3720    return;
3721}
3722
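/*
 * Routes a captured image buffer to the handler for the given substream id
 * (JPEG encode, video recording, or preview callback).
 */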
3723int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3724    int stream_id, nsecs_t frameTimeStamp)
3725{
3726    substream_parameters_t  *subParms = &m_subStreams[stream_id];
3727
3728    switch (stream_id) {
3729
3730    case STREAM_ID_JPEG:
3731        return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3732
3733    case STREAM_ID_RECORD:
3734        return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3735
3736    case STREAM_ID_PRVCB:
3737        return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3738
3739    default:
3740        return 0;
3741    }
3742}
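/*
 * Direct stream handler. SIGNAL_THREAD_RELEASE tears the node down (streamoff,
 * reqbufs 0, free meta buffers); SIGNAL_STREAM_REPROCESSING_START pulls a buffer from
 * the reprocess stream and runs the attached substreams on it;
 * SIGNAL_STREAM_DATA_COMING dequeues driver output, fans it out to the substreams,
 * and enqueues or cancels the service buffer depending on the requested output streams.
 */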
3743void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3744{
3745    uint32_t                currentSignal   = self->GetProcessingSignal();
3746    StreamThread *          selfThread      = ((StreamThread*)self);
3747    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3748    node_info_t             *currentNode    = selfStreamParms->node;
3749    int i = 0;
3750    nsecs_t frameTimeStamp;
3751
3752    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3753        CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3754
3755        if (selfThread->m_isBufferInit) {
3756            if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
3757                ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3758                    selfThread->m_index, currentNode->fd);
3759                if (cam_int_streamoff(currentNode) < 0 ) {
3760                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3761                }
3762                ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3763                        selfThread->m_index, currentNode->fd);
3764                currentNode->buffers = 0;
3765                cam_int_reqbufs(currentNode);
3766                ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3767                        selfThread->m_index, currentNode->fd);
3768            }
3769        }
3770#ifdef ENABLE_FRAME_SYNC
3771        // free metabuffers
3772        for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3773            if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3774                freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3775                selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3776                selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3777            }
3778#endif
3779        selfThread->m_isBufferInit = false;
3780        selfThread->m_releasing = false;
3781        selfThread->m_activated = false;
3782        ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3783        return;
3784    }
3785    if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3786        status_t    res;
3787        buffer_handle_t * buf = NULL;
3788        bool found = false;
3789        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3790            __FUNCTION__, selfThread->m_index);
3791        res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3792        if (res != NO_ERROR || buf == NULL) {
3793            ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3794            return;
3795        }
3796        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3797        int checkingIndex = 0;
3798        for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3799            if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3800                found = true;
3801                break;
3802            }
3803        }
3804        ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3805            __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3806
3807        if (!found) return;
3808
3809        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3810            if (selfThread->m_attachedSubStreams[i].streamId == -1)
3811                continue;
3812
3813#ifdef ENABLE_FRAME_SYNC
3814            frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
3815            m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3816#else
3817            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3818#endif
3819            if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3820                m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3821                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3822        }
3823
3824        res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3825        if (res != NO_ERROR) {
3826            ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3827            return;
3828        }
3829        ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
3830            __FUNCTION__,selfThread->m_index);
3831
3832        return;
3833    }
3834    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3835        buffer_handle_t * buf = NULL;
3836        status_t res = 0;
3837        int i, j;
3838        int index;
3839        nsecs_t timestamp;
3840#ifdef ENABLE_FRAME_SYNC
3841        camera2_stream *frame;
3842        uint8_t currentOutputStreams;
3843        bool directOutputEnabled = false;
3844#endif
3845        int numOfUndqbuf = 0;
3846
3847        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3848
3849        m_streamBufferInit(self);
3850
3851        do {
3852            ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3853                selfThread->m_index, selfThread->streamType);
3854
3855#ifdef ENABLE_FRAME_SYNC
3856            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3857            frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3858            frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3859            currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
3860            ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
3861            if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
3862                 ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
3863                directOutputEnabled = true;
3864            }
3865            if (!directOutputEnabled) {
3866                if (!m_nightCaptureFrameCnt)
3867                    m_requestManager->NotifyStreamOutput(frame->rcount);
3868            }
3869#else
3870            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3871            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3872#endif
3873            ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
3874                selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3875
3876            if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
3877                ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3878                       __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3879            selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3880
3881            for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3882                if (selfThread->m_attachedSubStreams[i].streamId == -1)
3883                    continue;
3884#ifdef ENABLE_FRAME_SYNC
3885                if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3886                    m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3887                        selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3888                }
3889#else
3890                if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3891                    m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3892                        selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3893                }
3894#endif
3895            }
3896
3897            if (m_requestManager->GetSkipCnt() <= 0) {
3898#ifdef ENABLE_FRAME_SYNC
3899                if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3900                    ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3901                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3902                            frameTimeStamp,
3903                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3904                }
3905                else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3906                    ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
3907                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3908                                frameTimeStamp,
3909                                &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3910                }
3911                else {
3912                    res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3913                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3914                    ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3915                }
3916#else
3917                if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3918                    ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3919                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3920                            frameTimeStamp,
3921                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3922                }
3923                else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3924                    ALOGV("** SCC output (frameCnt:%d)", m_requestManager->GetFrameIndex());
3925                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3926                                frameTimeStamp,
3927                                &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3928                }
3929#endif
3930                ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3931            }
3932            else {
3933                res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3934                        &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3935                ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3936            }
3937#ifdef ENABLE_FRAME_SYNC
3938            if (directOutputEnabled) {
3939                if (!m_nightCaptureFrameCnt)
3940                     m_requestManager->NotifyStreamOutput(frame->rcount);
3941            }
3942#endif
3943            if (res == 0) {
3944                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3945                selfStreamParms->numSvcBufsInHal--;
3946            }
3947            else {
3948                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3949            }
3950
3951        } while (0);
3952
3953
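        /*
         * Refill loop: keep dequeuing gralloc buffers back from the service while
         * the HAL holds fewer than the required minimum, match each returned handle
         * to a known service buffer by fd, and QBUF it to the driver (adding the
         * extra metadata plane when ENABLE_FRAME_SYNC is set).
         */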
3954        while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
3955                    < selfStreamParms->minUndequedBuffer) {
3956            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3957            if (res != NO_ERROR || buf == NULL) {
3958                ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index,  res, selfStreamParms->numSvcBufsInHal);
3959                break;
3960            }
3961            selfStreamParms->numSvcBufsInHal++;
3962            ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3963                selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3964               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3965            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3966
3967            bool found = false;
3968            int checkingIndex = 0;
3969            for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3970                if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3971                    found = true;
3972                    break;
3973                }
3974            }
3975            if (!found) break;
3976            selfStreamParms->bufIndex = checkingIndex;
3977            if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
3978                uint32_t    plane_index = 0;
3979                ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
3980                struct v4l2_buffer v4l2_buf;
3981                struct v4l2_plane  planes[VIDEO_MAX_PLANES];
3982
3983                v4l2_buf.m.planes   = planes;
3984                v4l2_buf.type       = currentNode->type;
3985                v4l2_buf.memory     = currentNode->memory;
3986                v4l2_buf.index      = selfStreamParms->bufIndex;
3987                v4l2_buf.length     = currentNode->planes;
3988
3989                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
3990                v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
3991                v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
3992                for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
3993                    v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
3994                }
3995#ifdef ENABLE_FRAME_SYNC
3996                /* add plane for metadata*/
3997                v4l2_buf.length += selfStreamParms->metaPlanes;
3998                v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
3999                v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
4000#endif
4001                if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
4002                    ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
4003                        __FUNCTION__, selfThread->m_index);
4004                    return;
4005                }
4006                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
4007                ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
4008                    __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
4009            }
4010        }
4011
4012        ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
4013    }
4014    return;
4015}
4016
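/*
 * Stream handler for STREAM_TYPE_INDIRECT nodes (e.g. the capture/SCC node),
 * whose buffers stay owned by the HAL: on SIGNAL_STREAM_DATA_COMING a buffer is
 * DQBUFed, handed to the attached substreams, and QBUFed straight back to the
 * driver; SIGNAL_THREAD_RELEASE streams the node off and releases its buffers.
 */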
4017void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
4018{
4019    uint32_t                currentSignal   = self->GetProcessingSignal();
4020    StreamThread *          selfThread      = ((StreamThread*)self);
4021    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4022    node_info_t             *currentNode    = selfStreamParms->node;
4023
4024
4025    if (currentSignal & SIGNAL_THREAD_RELEASE) {
4026        CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4027
4028        if (selfThread->m_isBufferInit) {
4029            if (currentNode->fd == m_camera_info.capture.fd) {
4030                if (m_camera_info.capture.status == true) {
4031                    ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4032                    selfThread->m_index, currentNode->fd);
4033                    if (cam_int_streamoff(currentNode) < 0 ){
4034                        ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4035                    } else {
4036                        m_camera_info.capture.status = false;
4037                    }
4038                }
4039            } else {
4040                ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4041                selfThread->m_index, currentNode->fd);
4042                if (cam_int_streamoff(currentNode) < 0 ){
4043                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4044                }
4045            }
4046            ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
4047            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
4048                    selfThread->m_index, currentNode->fd);
4049            currentNode->buffers = 0;
4050            cam_int_reqbufs(currentNode);
4051            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
4052                    selfThread->m_index, currentNode->fd);
4053        }
4054
4055        selfThread->m_isBufferInit = false;
4056        selfThread->m_releasing = false;
4057        selfThread->m_activated = false;
4058        ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4059        return;
4060    }
4061
4062    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
4063#ifdef ENABLE_FRAME_SYNC
4064        camera2_stream *frame;
4065        uint8_t currentOutputStreams;
4066#endif
4067        nsecs_t frameTimeStamp;
4068
4069        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
4070            __FUNCTION__,selfThread->m_index);
4071
4072        m_streamBufferInit(self);
4073
4074        ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
4075        selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
4076        ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
4077            selfThread->m_index, selfStreamParms->bufIndex);
4078
4079#ifdef ENABLE_FRAME_SYNC
4080        frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
4081        frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
4082        currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
4083        ALOGV("frame count(SCC) : %d outputStream(%x)",  frame->rcount, currentOutputStreams);
4084#else
4085        frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
4086#endif
4087
4088        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
4089            if (selfThread->m_attachedSubStreams[i].streamId == -1)
4090                continue;
4091#ifdef ENABLE_FRAME_SYNC
4092            if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4093                m_requestManager->NotifyStreamOutput(frame->rcount);
4094                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4095                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4096            }
4097#else
4098            if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4099                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4100                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4101            }
4102#endif
4103        }
4104        cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
4105        ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
4106
4107
4108
4109        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
4110            __FUNCTION__, selfThread->m_index);
4111    }
4112
4113
4114    return;
4115}
4116
4117void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
4118{
4119    uint32_t                currentSignal   = self->GetProcessingSignal();
4120    StreamThread *          selfThread      = ((StreamThread*)self);
4121    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
4122    node_info_t             *currentNode    = selfStreamParms->node;
4123
4124    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
4125
4126    // Do something in Child thread handler
4127    // Should change function to class that inherited StreamThread class to support dynamic stream allocation
4128    if (selfThread->streamType == STREAM_TYPE_DIRECT) {
4129        m_streamFunc_direct(self);
4130    } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
4131        m_streamFunc_indirect(self);
4132    }
4133
4134    return;
4135}
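
/*
 * JPEG substream producer: picks a free service buffer, crops/scales the source
 * frame into m_resizeBuf (NV16) through the picture CSC, encodes it with
 * yuv2Jpeg(), writes the camera2_jpeg_blob trailer at the end of the gralloc
 * buffer, enqueues the result to the JPEG stream, and then re-acquires buffers
 * from the service.
 */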
4136int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4137{
4138    Mutex::Autolock lock(m_jpegEncoderLock);
4139    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4140    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
4141    status_t    res;
4142    ExynosRect jpegRect;
4143    bool found = false;
4144    int srcW, srcH, srcCropX, srcCropY;
4145    int pictureW, pictureH, pictureFramesize = 0;
4146    int pictureFormat;
4147    int cropX, cropY, cropW, cropH = 0;
4148    ExynosBuffer resizeBufInfo;
4149    ExynosRect   m_jpegPictureRect;
4150    buffer_handle_t * buf = NULL;
4151    camera2_jpeg_blob * jpegBlob = NULL;
4152    int jpegBufSize = 0;
4153
4154    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4155    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4156        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4157            found = true;
4158            break;
4159        }
4160        subParms->svcBufIndex++;
4161        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4162            subParms->svcBufIndex = 0;
4163    }
4164    if (!found) {
4165        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4166        subParms->svcBufIndex++;
4167        return 1;
4168    }
4169
4170    m_jpegEncodingCount++;
4171
4172    m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
4173                    m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
4174                    &srcCropX, &srcCropY,
4175                    &srcW, &srcH,
4176                    0);
4177
4178    m_jpegPictureRect.w = subParms->width;
4179    m_jpegPictureRect.h = subParms->height;
4180
4181    ALOGV("DEBUG(%s): stream w/h = %d x %d, jpeg w/h = %d x %d",
4182              __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
4183                   m_jpegPictureRect.w, m_jpegPictureRect.h);
4184
4185    m_getRatioSize(srcW, srcH,
4186                   m_jpegPictureRect.w, m_jpegPictureRect.h,
4187                   &cropX, &cropY,
4188                   &pictureW, &pictureH,
4189                   0);
4190    pictureFormat = V4L2_PIX_FMT_YUYV;
4191    pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
4192
4193    if (m_exynosPictureCSC) {
4194        float zoom_w = 0, zoom_h = 0;
4195        if (m_zoomRatio == 0)
4196            m_zoomRatio = 1;
4197
4198        if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
4199            zoom_w =  pictureW / m_zoomRatio;
4200            zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
4201        } else {
4202            zoom_h = pictureH / m_zoomRatio;
4203            zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
4204        }
4205        cropX = (srcW - zoom_w) / 2;
4206        cropY = (srcH - zoom_h) / 2;
4207        cropW = zoom_w;
4208        cropH = zoom_h;
4209
4210        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4211              __FUNCTION__, cropX, cropY, cropW, cropH);
4212
4213        csc_set_src_format(m_exynosPictureCSC,
4214                           ALIGN(srcW, 16), ALIGN(srcH, 16),
4215                           cropX, cropY, cropW, cropH,
4216                           V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
4217                           0);
4218
4219        csc_set_dst_format(m_exynosPictureCSC,
4220                           m_jpegPictureRect.w, m_jpegPictureRect.h,
4221                           0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
4222                           V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
4223                           0);
4224        for (int i = 0 ; i < 3 ; i++)
4225            ALOGV("DEBUG(%s): srcImageBuf->fd.extFd[%d]=%d ",
4226                __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
4227        csc_set_src_buffer(m_exynosPictureCSC,
4228                           (void **)&srcImageBuf->fd.fd);
4229
4230        csc_set_dst_buffer(m_exynosPictureCSC,
4231                           (void **)&m_resizeBuf.fd.fd);
4232        for (int i = 0 ; i < 3 ; i++)
4233            ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
4234                __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
4235
4236        if (csc_convert(m_exynosPictureCSC) != 0)
4237            ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
4238
4239    }
4240    else {
4241        ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
4242    }
4243
4244    resizeBufInfo = m_resizeBuf;
4245
4246    m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
4247
4248    for (int i = 1; i < 3; i++) {
4249        if (m_resizeBuf.size.extS[i] != 0)
4250            m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
4251
4252        ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
4253    }
4254
4255    jpegRect.w = m_jpegPictureRect.w;
4256    jpegRect.h = m_jpegPictureRect.h;
4257    jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
4258
4259    for (int j = 0 ; j < 3 ; j++)
4260        ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
4261            __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
4262            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
4263            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
4264
4265    jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0];
4266    if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) {
4267        ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
4268    } else {
4269        m_resizeBuf = resizeBufInfo;
4270
4271        int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s;
4272        ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__,
4273            m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize);
4274        char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4275        jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]);
4276
4277        if (jpegBuffer[jpegSize-1] == 0)
4278            jpegSize--;
4279        jpegBlob->jpeg_size = jpegSize;
4280        jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID;
4281    }
4282    subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize;
4283    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4284
4285    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4286            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4287    if (res == 0) {
4288        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4289        subParms->numSvcBufsInHal--;
4290    }
4291    else {
4292        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4293    }
4294
4295    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4296    {
4297        bool found = false;
4298        int checkingIndex = 0;
4299
4300        ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4301
4302        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4303        if (res != NO_ERROR || buf == NULL) {
4304            ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4305            break;
4306        }
4307        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4308        subParms->numSvcBufsInHal ++;
4309        ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4310           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4311
4312
4313        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4314            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4315                found = true;
4316                break;
4317            }
4318        }
4319        ALOGV("DEBUG(%s): jpeg dequeued_buffer found(%d) index(%d)", __FUNCTION__, found, checkingIndex);
4320
4321        if (!found) {
4322             break;
4323        }
4324
4325        subParms->svcBufIndex = checkingIndex;
4326        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4327            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4328        }
4329        else {
4330            ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4331                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4332        }
4333    }
4334    m_jpegEncodingCount--;
4335    return 0;
4336}
4337
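/*
 * Record substream producer: converts the source frame to the record stream
 * size/format with the video CSC, writing directly into the selected service
 * buffer, then enqueues it with the frame timestamp.
 */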
4338int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4339{
4340    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4341    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
4342    status_t    res;
4343    ExynosRect jpegRect;
4344    bool found = false;
4345    int cropX, cropY, cropW, cropH = 0;
4346    buffer_handle_t * buf = NULL;
4347
4348    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4349    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4350        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4351            found = true;
4352            break;
4353        }
4354        subParms->svcBufIndex++;
4355        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4356            subParms->svcBufIndex = 0;
4357    }
4358    if (!found) {
4359        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4360        subParms->svcBufIndex++;
4361        return 1;
4362    }
4363
4364    if (m_exynosVideoCSC) {
4365        int videoW = subParms->width, videoH = subParms->height;
4366        int cropX, cropY, cropW, cropH = 0;
4367        int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4368        m_getRatioSize(previewW, previewH,
4369                       videoW, videoH,
4370                       &cropX, &cropY,
4371                       &cropW, &cropH,
4372                       0);
4373
4374        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4375                 __FUNCTION__, cropX, cropY, cropW, cropH);
4376
4377        csc_set_src_format(m_exynosVideoCSC,
4378                           ALIGN(previewW, 32), previewH,
4379                           cropX, cropY, cropW, cropH,
4380                           selfStreamParms->format,
4381                           0);
4382
4383        csc_set_dst_format(m_exynosVideoCSC,
4384                           videoW, videoH,
4385                           0, 0, videoW, videoH,
4386                           subParms->format,
4387                           1);
4388
4389        csc_set_src_buffer(m_exynosVideoCSC,
4390                        (void **)&srcImageBuf->fd.fd);
4391
4392        csc_set_dst_buffer(m_exynosVideoCSC,
4393            (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
4394
4395        if (csc_convert(m_exynosVideoCSC) != 0) {
4396            ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
4397        }
4398        else {
4399            ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
4400        }
4401    }
4402    else {
4403        ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4404    }
4405
4406    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4407
4408    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4409            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4410    if (res == 0) {
4411        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4412        subParms->numSvcBufsInHal--;
4413    }
4414    else {
4415        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4416    }
4417
4418    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4419    {
4420        bool found = false;
4421        int checkingIndex = 0;
4422
4423        ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4424
4425        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4426        if (res != NO_ERROR || buf == NULL) {
4427            ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4428            break;
4429        }
4430        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4431        subParms->numSvcBufsInHal ++;
4432        ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4433           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4434
4435        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4436            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4437                found = true;
4438                break;
4439            }
4440        }
4441        ALOGV("DEBUG(%s): record dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4442
4443        if (!found) {
4444             break;
4445        }
4446
4447        subParms->svcBufIndex = checkingIndex;
4448        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4449            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4450        }
4451        else {
4452            ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4453                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4454        }
4455    }
4456    return 0;
4457}
4458
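/*
 * Preview-callback substream producer: for YCrCb_420_SP the frame is converted
 * through the video CSC into m_previewCbBuf and copied into the service buffer
 * (the unaligned-width case is still a TODO); for YV12 the planes are copied
 * directly, line by line when the width is not 32-pixel aligned.
 */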
4459int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4460{
4461    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
4462    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
4463    status_t    res;
4464    bool found = false;
4465    int cropX, cropY, cropW, cropH = 0;
4466    buffer_handle_t * buf = NULL;
4467
4468    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4469    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
4470        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4471            found = true;
4472            break;
4473        }
4474        subParms->svcBufIndex++;
4475        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4476            subParms->svcBufIndex = 0;
4477    }
4478    if (!found) {
4479        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4480        subParms->svcBufIndex++;
4481        return 1;
4482    }
4483
4484    if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4485        if (m_exynosVideoCSC) {
4486            int previewCbW = subParms->width, previewCbH = subParms->height;
4487            int cropX, cropY, cropW, cropH = 0;
4488            int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4489            m_getRatioSize(previewW, previewH,
4490                           previewCbW, previewCbH,
4491                           &cropX, &cropY,
4492                           &cropW, &cropH,
4493                           0);
4494
4495            ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4496                     __FUNCTION__, cropX, cropY, cropW, cropH);
4497            csc_set_src_format(m_exynosVideoCSC,
4498                               ALIGN(previewW, 32), previewH,
4499                               cropX, cropY, cropW, cropH,
4500                               selfStreamParms->format,
4501                               0);
4502
4503            csc_set_dst_format(m_exynosVideoCSC,
4504                               previewCbW, previewCbH,
4505                               0, 0, previewCbW, previewCbH,
4506                               subParms->internalFormat,
4507                               1);
4508
4509            csc_set_src_buffer(m_exynosVideoCSC,
4510                        (void **)&srcImageBuf->fd.fd);
4511
4512            csc_set_dst_buffer(m_exynosVideoCSC,
4513                (void **)(&(m_previewCbBuf.fd.fd)));
4514
4515            if (csc_convert(m_exynosVideoCSC) != 0) {
4516                ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4517            }
4518            else {
4519                ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4520            }
4521            if (previewCbW == ALIGN(previewCbW, 16)) {
4522                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4523                    m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4524                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4525                    m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4526            }
4527            else {
4528                // TODO : copy line by line ?
4529            }
4530        }
4531        else {
4532            ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4533        }
4534    }
4535    else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
4536        int previewCbW = subParms->width, previewCbH = subParms->height;
4537        int stride = ALIGN(previewCbW, 16);
4538        int uv_stride = ALIGN(previewCbW/2, 16);
4539        int c_stride = ALIGN(stride / 2, 16);
4540
4541        if (previewCbW == ALIGN(previewCbW, 32)) {
4542            memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4543                srcImageBuf->virt.extP[0], stride * previewCbH);
4544            memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4545                srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4546            memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4547                srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4548        } else {
4549            char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4550            char * srcAddr = (char *)(srcImageBuf->virt.extP[0]);
4551            for (int i = 0 ; i < previewCbH ; i++) {
4552                memcpy(dstAddr, srcAddr, previewCbW);
4553                dstAddr += stride;
4554                srcAddr += ALIGN(stride, 32);
4555            }
4556            dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH);
4557            srcAddr = (char *)(srcImageBuf->virt.extP[1]);
4558            for (int i = 0 ; i < previewCbH/2 ; i++) {
4559                memcpy(dstAddr, srcAddr, previewCbW/2);
4560                dstAddr += c_stride;
4561                srcAddr += uv_stride;
4562            }
4563            srcAddr = (char *)(srcImageBuf->virt.extP[2]);
4564            for (int i = 0 ; i < previewCbH/2 ; i++) {
4565                memcpy(dstAddr, srcAddr, previewCbW/2);
4566                dstAddr += c_stride;
4567                srcAddr += uv_stride;
4568            }
4569        }
4570    }
4571    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4572
4573    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4574            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4575    if (res == 0) {
4576        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4577        subParms->numSvcBufsInHal--;
4578    }
4579    else {
4580        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4581    }
4582
4583    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4584    {
4585        bool found = false;
4586        int checkingIndex = 0;
4587
4588        ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4589
4590        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4591        if (res != NO_ERROR || buf == NULL) {
4592            ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4593            break;
4594        }
4595        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4596        subParms->numSvcBufsInHal ++;
4597        ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4598           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4599
4600
4601        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4602            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4603                found = true;
4604                break;
4605            }
4606        }
4607        ALOGV("DEBUG(%s): prvcb dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4608
4609        if (!found) {
4610             break;
4611        }
4612
4613        subParms->svcBufIndex = checkingIndex;
4614        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4615            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4616        }
4617        else {
4618            ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4619                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4620        }
4621    }
4622    return 0;
4623}
4624
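/*
 * Returns true if (w, h) matches an entry in the per-camera thumbnail size
 * table: SUPPORT_THUMBNAIL_REAR_SIZE for camera 0, SUPPORT_THUMBNAIL_FRONT_SIZE
 * otherwise.
 */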
4625bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4626{
4627    int sizeOfSupportList;
4628
4629    //REAR Camera
4630    if(this->getCameraId() == 0) {
4631        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
4632
4633        for(int i = 0; i < sizeOfSupportList; i++) {
4634            if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4635                return true;
4636        }
4637
4638    }
4639    else {
4640        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
4641
4642        for(int i = 0; i < sizeOfSupportList; i++) {
4643            if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4644                return true;
4645        }
4646    }
4647
4648    return false;
4649}
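
/*
 * Encodes yuvBuf (NV16) into jpegBuf with ExynosJpegEncoderForCamera: applies
 * quality, size, color/JPEG formats and thumbnail settings from m_jpegMetadata,
 * updates the EXIF attributes, and returns true on success.
 */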
4650bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4651                            ExynosBuffer *jpegBuf,
4652                            ExynosRect *rect)
4653{
4654    unsigned char *addr;
4655
4656    ExynosJpegEncoderForCamera jpegEnc;
4657    bool ret = false;
4658    int res = 0;
4659
4660    unsigned int *yuvSize = yuvBuf->size.extS;
4661
4662    if (jpegEnc.create()) {
4663        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4664        goto jpeg_encode_done;
4665    }
4666
4667    if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) {
4668        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4669        goto jpeg_encode_done;
4670    }
4671
4672    if (jpegEnc.setSize(rect->w, rect->h)) {
4673        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4674        goto jpeg_encode_done;
4675    }
4676    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4677
4678    if (jpegEnc.setColorFormat(rect->colorFormat)) {
4679        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4680        goto jpeg_encode_done;
4681    }
4682
4683    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4684        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4685        goto jpeg_encode_done;
4686    }
4687
4688    if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) {
4689        mExifInfo.enableThumb = true;
4690        if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) {
4691            // in the case of unsupported parameter, disable thumbnail
4692            mExifInfo.enableThumb = false;
4693        } else {
4694            m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0];
4695            m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1];
4696        }
4697
4698        ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4699
4700    } else {
4701        mExifInfo.enableThumb = false;
4702    }
4703
4704    if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4705        ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4706        goto jpeg_encode_done;
4707    }
4708
4709    ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4710    if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) {
4711        ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4712        goto jpeg_encode_done;
4713    }
4714
4715    m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4716    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4717    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4718        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4719        goto jpeg_encode_done;
4720    }
4721    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4722        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4723        goto jpeg_encode_done;
4724    }
4725
4726    if (jpegEnc.updateConfig()) {
4727        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4728        goto jpeg_encode_done;
4729    }
4730
4731    if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) != 0) {
4732        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4733        goto jpeg_encode_done;
4734    }
4735
4736    ret = true;
4737
4738jpeg_encode_done:
4739
4740    if (jpegEnc.flagCreate() == true)
4741        jpegEnc.destroy();
4742
4743    return ret;
4744}
4745
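/*
 * Precapture metering trigger: on the rear camera with an auto-flash AE mode
 * this arms the full flash sequence (IS_FLASH_STATE_ON) unless an AF flash has
 * already run; otherwise flash stays disabled. In either case
 * OnPrecaptureMeteringNotificationSensor() reports the precapture AE state.
 */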
4746void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4747{
4748    m_ctlInfo.flash.m_precaptureTriggerId = id;
4749    m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
4750    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4751        // flash is required
4752        switch (m_ctlInfo.flash.m_flashCnt) {
4753        case IS_FLASH_STATE_AUTO_DONE:
4754        case IS_FLASH_STATE_AUTO_OFF:
4755            // Flash capture sequence, AF flash was executed before
4756            break;
4757        default:
4758            // Full flash sequence
4759            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4760            m_ctlInfo.flash.m_flashEnableFlg = true;
4761            m_ctlInfo.flash.m_flashTimeOut = 0;
4762        }
4763    } else {
4764        // Skip pre-capture in case of non-flash.
4765        ALOGV("[PreCap] Flash OFF mode ");
4766        m_ctlInfo.flash.m_flashEnableFlg = false;
4767        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4768    }
4769    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4770    OnPrecaptureMeteringNotificationSensor();
4771}
4772void ExynosCameraHWInterface2::OnAfTriggerStart(int id)
4773{
4774    m_afPendingTriggerId = id;
4775    m_afModeWaitingCnt = 6;
4776}
4777
4778void ExynosCameraHWInterface2::OnAfTrigger(int id)
4779{
4780    m_afTriggerId = id;
4781
4782    switch (m_afMode) {
4783    case AA_AFMODE_AUTO:
4784    case AA_AFMODE_MACRO:
4785    case AA_AFMODE_OFF:
4786        ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
4787        // If flash is enable, Flash operation is executed before triggering AF
4788        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4789                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4790                && (m_cameraId == 0)) {
4791            ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
4792            m_ctlInfo.flash.m_flashEnableFlg = true;
4793            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4794            m_ctlInfo.flash.m_flashDecisionResult = false;
4795            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4796        }
4797        OnAfTriggerAutoMacro(id);
4798        break;
4799    case AA_AFMODE_CONTINUOUS_VIDEO:
4800        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4801        OnAfTriggerCAFVideo(id);
4802        break;
4803    case AA_AFMODE_CONTINUOUS_PICTURE:
4804        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4805        OnAfTriggerCAFPicture(id);
4806        break;
4807
4808    default:
4809        break;
4810    }
4811}
4812
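/*
 * AF trigger handling for AUTO/MACRO: a trigger received in INACTIVE,
 * PASSIVE_FOCUSED, SCANNING, LOCKED or FAILED schedules a new AF command
 * (m_IsAfTriggerRequired); triggers in NEEDS_COMMAND or STARTED leave the
 * state unchanged.
 */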
4813void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
4814{
4815    int nextState = NO_TRANSITION;
4816
4817    switch (m_afState) {
4818    case HAL_AFSTATE_INACTIVE:
4819    case HAL_AFSTATE_PASSIVE_FOCUSED:
4820    case HAL_AFSTATE_SCANNING:
4821        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4822        m_IsAfTriggerRequired = true;
4823        m_ctlInfo.af.m_afTriggerTimeOut = 4;
4824        break;
4825    case HAL_AFSTATE_NEEDS_COMMAND:
4826        nextState = NO_TRANSITION;
4827        break;
4828    case HAL_AFSTATE_STARTED:
4829        nextState = NO_TRANSITION;
4830        break;
4831    case HAL_AFSTATE_LOCKED:
4832        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4833        m_IsAfTriggerRequired = true;
4834        break;
4835    case HAL_AFSTATE_FAILED:
4836        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4837        m_IsAfTriggerRequired = true;
4838        m_ctlInfo.af.m_afTriggerTimeOut = 4;
4839        break;
4840    default:
4841        break;
4842    }
4843    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4844    if (nextState != NO_TRANSITION)
4845        m_afState = nextState;
4846}
4847
4848void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4849{
4850    int nextState = NO_TRANSITION;
4851
4852    switch (m_afState) {
4853    case HAL_AFSTATE_INACTIVE:
4854        nextState = HAL_AFSTATE_FAILED;
4855        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4856        break;
4857    case HAL_AFSTATE_NEEDS_COMMAND:
4858        // not used
4859        break;
4860    case HAL_AFSTATE_STARTED:
4861        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4862        m_AfHwStateFailed = false;
4863        break;
4864    case HAL_AFSTATE_SCANNING:
4865        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4866        m_AfHwStateFailed = false;
4867        // If flash is enable, Flash operation is executed before triggering AF
4868        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4869                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4870                && (m_cameraId == 0)) {
4871            ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4872            m_ctlInfo.flash.m_flashEnableFlg = true;
4873            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4874            m_ctlInfo.flash.m_flashDecisionResult = false;
4875            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4876        }
4877        break;
4878    case HAL_AFSTATE_NEEDS_DETERMINATION:
4879        nextState = NO_TRANSITION;
4880        break;
4881    case HAL_AFSTATE_PASSIVE_FOCUSED:
4882        m_IsAfLockRequired = true;
4883        if (m_AfHwStateFailed) {
4884            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4885            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4886            nextState = HAL_AFSTATE_FAILED;
4887        }
4888        else {
4889            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4890            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4891            nextState = HAL_AFSTATE_LOCKED;
4892        }
4893        m_AfHwStateFailed = false;
4894        break;
4895    case HAL_AFSTATE_LOCKED:
4896        nextState = NO_TRANSITION;
4897        break;
4898    case HAL_AFSTATE_FAILED:
4899        nextState = NO_TRANSITION;
4900        break;
4901    default:
4902        break;
4903    }
4904    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4905    if (nextState != NO_TRANSITION)
4906        m_afState = nextState;
4907}
4908
4909
4910void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
4911{
4912    int nextState = NO_TRANSITION;
4913
4914    switch (m_afState) {
4915    case HAL_AFSTATE_INACTIVE:
4916        nextState = HAL_AFSTATE_FAILED;
4917        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4918        break;
4919    case HAL_AFSTATE_NEEDS_COMMAND:
4920        // not used
4921        break;
4922    case HAL_AFSTATE_STARTED:
4923        m_IsAfLockRequired = true;
4924        nextState = HAL_AFSTATE_FAILED;
4925        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4926        break;
4927    case HAL_AFSTATE_SCANNING:
4928        m_IsAfLockRequired = true;
4929        nextState = HAL_AFSTATE_FAILED;
4930        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4931        break;
4932    case HAL_AFSTATE_NEEDS_DETERMINATION:
4933        // not used
4934        break;
4935    case HAL_AFSTATE_PASSIVE_FOCUSED:
4936        m_IsAfLockRequired = true;
4937        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4938        nextState = HAL_AFSTATE_LOCKED;
4939        break;
4940    case HAL_AFSTATE_LOCKED:
4941        nextState = NO_TRANSITION;
4942        break;
4943    case HAL_AFSTATE_FAILED:
4944        nextState = NO_TRANSITION;
4945        break;
4946    default:
4947        break;
4948    }
4949    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4950    if (nextState != NO_TRANSITION)
4951        m_afState = nextState;
4952}
4953
4954void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
4955{
4956    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4957        // Just noti of pre-capture start
4958        if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
4959            m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4960                        ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4961                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4962            ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4963            m_notifyCb(CAMERA2_MSG_AUTOWB,
4964                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
4965                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4966            m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4967        }
4968    }
4969}
4970
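/*
 * ISP-side precapture notification: once the flash sequence reaches
 * AUTO_DONE/AUTO_OFF (or immediately in the non-flash case) AE is reported as
 * CONVERGED and the pending trigger id is cleared; until then PRECAPTURE keeps
 * being reported. Unexpected capture-phase states are recovered by forcing
 * AUTO_DONE.
 */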
4971void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
4972{
4973    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4974        if (m_ctlInfo.flash.m_flashEnableFlg) {
4975            // flash case
4976            switch (m_ctlInfo.flash.m_flashCnt) {
4977            case IS_FLASH_STATE_AUTO_DONE:
4978            case IS_FLASH_STATE_AUTO_OFF:
4979                if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
4980                    // End notification
4981                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4982                                    ANDROID_CONTROL_AE_STATE_CONVERGED,
4983                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4984                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4985                    m_notifyCb(CAMERA2_MSG_AUTOWB,
4986                                    ANDROID_CONTROL_AWB_STATE_CONVERGED,
4987                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4988                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
4989                } else {
4990                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4991                                    ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4992                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4993                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4994                    m_notifyCb(CAMERA2_MSG_AUTOWB,
4995                                    ANDROID_CONTROL_AWB_STATE_CONVERGED,
4996                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4997                    m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4998                }
4999                break;
5000            case IS_FLASH_STATE_CAPTURE:
5001            case IS_FLASH_STATE_CAPTURE_WAIT:
5002            case IS_FLASH_STATE_CAPTURE_JPEG:
5003            case IS_FLASH_STATE_CAPTURE_END:
5004                ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
5005                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
5006                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5007                        ANDROID_CONTROL_AE_STATE_CONVERGED,
5008                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5009                m_notifyCb(CAMERA2_MSG_AUTOWB,
5010                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
5011                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5012                m_ctlInfo.flash.m_precaptureTriggerId = 0;
5013                break;
5014            }
5015        } else {
5016            // non-flash case
5017            if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5018                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5019                                ANDROID_CONTROL_AE_STATE_CONVERGED,
5020                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5021                ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5022                m_notifyCb(CAMERA2_MSG_AUTOWB,
5023                                ANDROID_CONTROL_AWB_STATE_CONVERGED,
5024                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5025                m_ctlInfo.flash.m_precaptureTriggerId = 0;
5026            }
5027        }
5028    }
5029}
5030
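/*
 * AF hardware notification entry point: routes the aa_afstate reported by the
 * ISP to the state machine matching the current AF mode (auto/macro,
 * continuous-video or continuous-picture).
 */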
5031void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
5032{
5033    switch (m_afMode) {
5034    case AA_AFMODE_AUTO:
5035    case AA_AFMODE_MACRO:
5036        OnAfNotificationAutoMacro(noti);
5037        break;
5038    case AA_AFMODE_CONTINUOUS_VIDEO:
5039        OnAfNotificationCAFVideo(noti);
5040        break;
5041    case AA_AFMODE_CONTINUOUS_PICTURE:
5042        OnAfNotificationCAFPicture(noti);
5043        break;
5044    case AA_AFMODE_OFF:
5045    default:
5046        break;
5047    }
5048}
5049
5050void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
5051{
5052    int nextState = NO_TRANSITION;
5053    bool bWrongTransition = false;
5054
5055    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
5056        switch (noti) {
5057        case AA_AFSTATE_INACTIVE:
5058        case AA_AFSTATE_ACTIVE_SCAN:
5059        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5060        case AA_AFSTATE_AF_FAILED_FOCUS:
5061        default:
5062            nextState = NO_TRANSITION;
5063            break;
5064        }
5065    }
5066    else if (m_afState == HAL_AFSTATE_STARTED) {
5067        switch (noti) {
5068        case AA_AFSTATE_INACTIVE:
5069            nextState = NO_TRANSITION;
5070            break;
5071        case AA_AFSTATE_ACTIVE_SCAN:
5072            nextState = HAL_AFSTATE_SCANNING;
5073            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
5074            break;
5075        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5076            nextState = NO_TRANSITION;
5077            break;
5078        case AA_AFSTATE_AF_FAILED_FOCUS:
5079            nextState = NO_TRANSITION;
5080            break;
5081        default:
5082            bWrongTransition = true;
5083            break;
5084        }
5085    }
5086    else if (m_afState == HAL_AFSTATE_SCANNING) {
5087        switch (noti) {
5088        case AA_AFSTATE_INACTIVE:
5089            bWrongTransition = true;
5090            break;
5091        case AA_AFSTATE_ACTIVE_SCAN:
5092            nextState = NO_TRANSITION;
5093            break;
5094        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5095            // If Flash mode is enable, after AF execute pre-capture metering
5096            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5097                switch (m_ctlInfo.flash.m_flashCnt) {
5098                case IS_FLASH_STATE_ON_DONE:
5099                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5100                    nextState = NO_TRANSITION;
5101                    break;
5102                case IS_FLASH_STATE_AUTO_DONE:
5103                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5104                    nextState = HAL_AFSTATE_LOCKED;
5105                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5106                    break;
5107                default:
5108                    nextState = NO_TRANSITION;
5109                }
5110            } else {
5111                nextState = HAL_AFSTATE_LOCKED;
5112                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5113            }
5114            break;
5115        case AA_AFSTATE_AF_FAILED_FOCUS:
5116            // If flash is enabled, pre-capture metering is executed after AF
5117            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5118                switch (m_ctlInfo.flash.m_flashCnt) {
5119                case IS_FLASH_STATE_ON_DONE:
5120                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5121                    nextState = NO_TRANSITION;
5122                    break;
5123                case IS_FLASH_STATE_AUTO_DONE:
5124                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5125                    nextState = HAL_AFSTATE_FAILED;
5126                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5127                    break;
5128                default:
5129                    nextState = NO_TRANSITION;
5130                }
5131            } else {
5132                nextState = HAL_AFSTATE_FAILED;
5133                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5134            }
5135            break;
5136        default:
5137            bWrongTransition = true;
5138            break;
5139        }
5140    }
5141    else if (m_afState == HAL_AFSTATE_LOCKED) {
5142        switch (noti) {
5143            case AA_AFSTATE_INACTIVE:
5144            case AA_AFSTATE_ACTIVE_SCAN:
5145                bWrongTransition = true;
5146                break;
5147            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5148                nextState = NO_TRANSITION;
5149                break;
5150            case AA_AFSTATE_AF_FAILED_FOCUS:
5151            default:
5152                bWrongTransition = true;
5153                break;
5154        }
5155    }
5156    else if (m_afState == HAL_AFSTATE_FAILED) {
5157        switch (noti) {
5158            case AA_AFSTATE_INACTIVE:
5159            case AA_AFSTATE_ACTIVE_SCAN:
5160            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5161                bWrongTransition = true;
5162                break;
5163            case AA_AFSTATE_AF_FAILED_FOCUS:
5164                nextState = NO_TRANSITION;
5165                break;
5166            default:
5167                bWrongTransition = true;
5168                break;
5169        }
5170    }
5171    if (bWrongTransition) {
5172        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5173        return;
5174    }
5175    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5176    if (nextState != NO_TRANSITION)
5177        m_afState = nextState;
5178}
5179
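// AF state machine for AA_AFMODE_CONTINUOUS_PICTURE.
// Passive scans report PASSIVE_SCAN / PASSIVE_FOCUSED; a failed hardware scan is only
// latched in m_AfHwStateFailed, and the final focused/failed answer is produced in
// HAL_AFSTATE_NEEDS_DETERMINATION once an AF trigger (and any AF flash) completes.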
5180void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
5181{
5182    int nextState = NO_TRANSITION;
5183    bool bWrongTransition = false;
5184
5185    if (m_afState == HAL_AFSTATE_INACTIVE) {
5186        switch (noti) {
5187        case AA_AFSTATE_INACTIVE:
5188        case AA_AFSTATE_ACTIVE_SCAN:
5189        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5190        case AA_AFSTATE_AF_FAILED_FOCUS:
5191        default:
5192            nextState = NO_TRANSITION;
5193            break;
5194        }
5195        // Check AF notification after triggering
5196        if (m_ctlInfo.af.m_afTriggerTimeOut > 0) {
5197            if (m_ctlInfo.af.m_afTriggerTimeOut > 5) {
5198                ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__, m_afMode);
5199                ALOGE("(%s) AF notification error - try to re-trigger mode (%d)", __FUNCTION__, m_afMode);
5200                SetAfMode(AA_AFMODE_OFF);
5201                SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE); // SetAfMode(AA_AFMODE_OFF) overwrites m_afMode, so restore CAF-picture explicitly
5202            } else {
5203                m_ctlInfo.af.m_afTriggerTimeOut++;
5204            }
5205        }
5206    }
5207    else if (m_afState == HAL_AFSTATE_STARTED) {
5208        switch (noti) {
5209        case AA_AFSTATE_INACTIVE:
5210            nextState = NO_TRANSITION;
5211            break;
5212        case AA_AFSTATE_ACTIVE_SCAN:
5213            nextState = HAL_AFSTATE_SCANNING;
5214            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5215            m_ctlInfo.af.m_afTriggerTimeOut = 0;
5216            break;
5217        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5218            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5219            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5220            m_ctlInfo.af.m_afTriggerTimeOut = 0;
5221            break;
5222        case AA_AFSTATE_AF_FAILED_FOCUS:
5223            //nextState = HAL_AFSTATE_FAILED;
5224            //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5225            nextState = NO_TRANSITION;
5226            break;
5227        default:
5228            bWrongTransition = true;
5229            break;
5230        }
5231    }
5232    else if (m_afState == HAL_AFSTATE_SCANNING) {
5233        switch (noti) {
5234        case AA_AFSTATE_INACTIVE:
5235            nextState = NO_TRANSITION;
5236            break;
5237        case AA_AFSTATE_ACTIVE_SCAN:
5238            nextState = NO_TRANSITION;
5239            m_AfHwStateFailed = false;
5240            break;
5241        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5242            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5243            m_AfHwStateFailed = false;
5244            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5245            break;
5246        case AA_AFSTATE_AF_FAILED_FOCUS:
5247            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5248            m_AfHwStateFailed = true;
5249            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5250            break;
5251        default:
5252            bWrongTransition = true;
5253            break;
5254        }
5255    }
5256    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5257        switch (noti) {
5258        case AA_AFSTATE_INACTIVE:
5259            nextState = NO_TRANSITION;
5260            break;
5261        case AA_AFSTATE_ACTIVE_SCAN:
5262            nextState = HAL_AFSTATE_SCANNING;
5263            m_AfHwStateFailed = false;
5264            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5265            break;
5266        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5267            nextState = NO_TRANSITION;
5268            m_AfHwStateFailed = false;
5269            break;
5270        case AA_AFSTATE_AF_FAILED_FOCUS:
5271            nextState = NO_TRANSITION;
5272            m_AfHwStateFailed = true;
5273            break;
5274        default:
5275            bWrongTransition = true;
5276            break;
5277        }
5278    }
5279    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5280        // In the AF-flash case, skip this notification until the flash-on stage has completed
5281        if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5282            if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
5283                return;
5284        }
5285        switch (noti) {
5286        case AA_AFSTATE_INACTIVE:
5287            nextState = NO_TRANSITION;
5288            break;
5289        case AA_AFSTATE_ACTIVE_SCAN:
5290            nextState = NO_TRANSITION;
5291            break;
5292        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5293            // If flash is enabled, pre-capture metering is executed after AF
5294            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5295                switch (m_ctlInfo.flash.m_flashCnt) {
5296                case IS_FLASH_STATE_ON_DONE:
5297                    ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5298                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5299                    nextState = NO_TRANSITION;
5300                    break;
5301                case IS_FLASH_STATE_AUTO_DONE:
5302                    ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5303                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5304                    m_IsAfLockRequired = true;
5305                    nextState = HAL_AFSTATE_LOCKED;
5306                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5307                    break;
5308                default:
5309                    nextState = NO_TRANSITION;
5310                }
5311            } else {
5312                m_IsAfLockRequired = true;
5313                nextState = HAL_AFSTATE_LOCKED;
5314                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5315            }
5316            break;
5317        case AA_AFSTATE_AF_FAILED_FOCUS:
5318            // If flash is enabled, pre-capture metering is executed after AF
5319            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5320                switch (m_ctlInfo.flash.m_flashCnt) {
5321                case IS_FLASH_STATE_ON_DONE:
5322                    ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5323                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5324                    nextState = NO_TRANSITION;
5325                    break;
5326                case IS_FLASH_STATE_AUTO_DONE:
5327                    ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5328                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5329                    m_IsAfLockRequired = true;
5330                    nextState = HAL_AFSTATE_FAILED;
5331                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5332                    break;
5333                default:
5334                    nextState = NO_TRANSITION;
5335                }
5336            } else {
5337                m_IsAfLockRequired = true;
5338                nextState = HAL_AFSTATE_FAILED;
5339                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5340            }
5341            break;
5342        default:
5343            bWrongTransition = true;
5344            break;
5345        }
5346    }
5347    else if (m_afState == HAL_AFSTATE_LOCKED) {
5348        switch (noti) {
5349            case AA_AFSTATE_INACTIVE:
5350                nextState = NO_TRANSITION;
5351                break;
5352            case AA_AFSTATE_ACTIVE_SCAN:
5353                bWrongTransition = true;
5354                break;
5355            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5356                nextState = NO_TRANSITION;
5357                break;
5358            case AA_AFSTATE_AF_FAILED_FOCUS:
5359            default:
5360                bWrongTransition = true;
5361                break;
5362        }
5363    }
5364    else if (m_afState == HAL_AFSTATE_FAILED) {
5365        switch (noti) {
5366            case AA_AFSTATE_INACTIVE:
5367                bWrongTransition = true;
5368                break;
5369            case AA_AFSTATE_ACTIVE_SCAN:
5370                nextState = HAL_AFSTATE_SCANNING;
5371                break;
5372            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5373                bWrongTransition = true;
5374                break;
5375            case AA_AFSTATE_AF_FAILED_FOCUS:
5376                nextState = NO_TRANSITION;
5377                break;
5378            default:
5379                bWrongTransition = true;
5380                break;
5381        }
5382    }
5383    if (bWrongTransition) {
5384        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5385        return;
5386    }
5387    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5388    if (nextState != NO_TRANSITION)
5389        m_afState = nextState;
5390}
5391
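// AF state machine for AA_AFMODE_CONTINUOUS_VIDEO.
// Unlike the CAF-picture case, scan failures transition straight to HAL_AFSTATE_FAILED and
// are reported as NOT_FOCUSED_LOCKED instead of being latched, and there is no AF-flash hand-off.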
5392void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
5393{
5394    int nextState = NO_TRANSITION;
5395    bool bWrongTransition = false;
5396
5397    if (m_afState == HAL_AFSTATE_INACTIVE) {
5398        switch (noti) {
5399        case AA_AFSTATE_INACTIVE:
5400        case AA_AFSTATE_ACTIVE_SCAN:
5401        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5402        case AA_AFSTATE_AF_FAILED_FOCUS:
5403        default:
5404            nextState = NO_TRANSITION;
5405            break;
5406        }
5407    }
5408    else if (m_afState == HAL_AFSTATE_STARTED) {
5409        switch (noti) {
5410        case AA_AFSTATE_INACTIVE:
5411            nextState = NO_TRANSITION;
5412            break;
5413        case AA_AFSTATE_ACTIVE_SCAN:
5414            nextState = HAL_AFSTATE_SCANNING;
5415            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5416            break;
5417        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5418            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5419            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5420            break;
5421        case AA_AFSTATE_AF_FAILED_FOCUS:
5422            nextState = HAL_AFSTATE_FAILED;
5423            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5424            break;
5425        default:
5426            bWrongTransition = true;
5427            break;
5428        }
5429    }
5430    else if (m_afState == HAL_AFSTATE_SCANNING) {
5431        switch (noti) {
5432        case AA_AFSTATE_INACTIVE:
5433            bWrongTransition = true;
5434            break;
5435        case AA_AFSTATE_ACTIVE_SCAN:
5436            nextState = NO_TRANSITION;
5437            break;
5438        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5439            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5440            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5441            break;
5442        case AA_AFSTATE_AF_FAILED_FOCUS:
5443            nextState = NO_TRANSITION;
5444            break;
5445        default:
5446            bWrongTransition = true;
5447            break;
5448        }
5449    }
5450    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5451        switch (noti) {
5452        case AA_AFSTATE_INACTIVE:
5453            bWrongTransition = true;
5454            break;
5455        case AA_AFSTATE_ACTIVE_SCAN:
5456            nextState = HAL_AFSTATE_SCANNING;
5457            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5458            break;
5459        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5460            nextState = NO_TRANSITION;
5461            break;
5462        case AA_AFSTATE_AF_FAILED_FOCUS:
5463            nextState = HAL_AFSTATE_FAILED;
5464            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5465            // TODO : needs NO_TRANSITION ?
5466            break;
5467        default:
5468            bWrongTransition = true;
5469            break;
5470        }
5471    }
5472    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5473        switch (noti) {
5474        case AA_AFSTATE_INACTIVE:
5475            bWrongTransition = true;
5476            break;
5477        case AA_AFSTATE_ACTIVE_SCAN:
5478            nextState = NO_TRANSITION;
5479            break;
5480        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5481            m_IsAfLockRequired = true;
5482            nextState = HAL_AFSTATE_LOCKED;
5483            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5484            break;
5485        case AA_AFSTATE_AF_FAILED_FOCUS:
5486            nextState = HAL_AFSTATE_FAILED;
5487            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5488            break;
5489        default:
5490            bWrongTransition = true;
5491            break;
5492        }
5493    }
5494    else if (m_afState == HAL_AFSTATE_LOCKED) {
5495        switch (noti) {
5496            case AA_AFSTATE_INACTIVE:
5497                nextState = NO_TRANSITION;
5498                break;
5499            case AA_AFSTATE_ACTIVE_SCAN:
5500                bWrongTransition = true;
5501                break;
5502            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5503                nextState = NO_TRANSITION;
5504                break;
5505            case AA_AFSTATE_AF_FAILED_FOCUS:
5506            default:
5507                bWrongTransition = true;
5508                break;
5509        }
5510    }
5511    else if (m_afState == HAL_AFSTATE_FAILED) {
5512        switch (noti) {
5513            case AA_AFSTATE_INACTIVE:
5514            case AA_AFSTATE_ACTIVE_SCAN:
5515            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5516                bWrongTransition = true;
5517                break;
5518            case AA_AFSTATE_AF_FAILED_FOCUS:
5519                nextState = NO_TRANSITION;
5520                break;
5521            default:
5522                bWrongTransition = true;
5523                break;
5524        }
5525    }
5526    if (bWrongTransition) {
5527        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5528        return;
5529    }
5530    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5531    if (nextState != NO_TRANSITION)
5532        m_afState = nextState;
5533}
5534
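// CANCEL_AUTOFOCUS handling: resets the AF state machine to INACTIVE for the active mode.
// The CAF variants additionally switch the firmware to AA_AFMODE_OFF and then re-arm
// continuous AF in the same mode.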
5535void ExynosCameraHWInterface2::OnAfCancel(int id)
5536{
5537    m_afTriggerId = id;
5538
5539    switch (m_afMode) {
5540    case AA_AFMODE_AUTO:
5541    case AA_AFMODE_MACRO:
5542    case AA_AFMODE_OFF:
5543        OnAfCancelAutoMacro(id);
5544        break;
5545    case AA_AFMODE_CONTINUOUS_VIDEO:
5546        OnAfCancelCAFVideo(id);
5547        break;
5548    case AA_AFMODE_CONTINUOUS_PICTURE:
5549        OnAfCancelCAFPicture(id);
5550        break;
5551    default:
5552        break;
5553    }
5554}
5555
5556void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
5557{
5558    int nextState = NO_TRANSITION;
5559    m_afTriggerId = id;
5560
5561    if (m_ctlInfo.flash.m_flashEnableFlg  && m_ctlInfo.flash.m_afFlashDoneFlg) {
5562        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5563    }
5564    switch (m_afState) {
5565    case HAL_AFSTATE_INACTIVE:
5566        nextState = NO_TRANSITION;
5567        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5568        break;
5569    case HAL_AFSTATE_NEEDS_COMMAND:
5570    case HAL_AFSTATE_STARTED:
5571    case HAL_AFSTATE_SCANNING:
5572    case HAL_AFSTATE_LOCKED:
5573    case HAL_AFSTATE_FAILED:
5574        SetAfMode(AA_AFMODE_OFF);
5575        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5576        nextState = HAL_AFSTATE_INACTIVE;
5577        break;
5578    default:
5579        break;
5580    }
5581    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5582    if (nextState != NO_TRANSITION)
5583        m_afState = nextState;
5584}
5585
5586void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
5587{
5588    int nextState = NO_TRANSITION;
5589    m_afTriggerId = id;
5590
5591    switch (m_afState) {
5592    case HAL_AFSTATE_INACTIVE:
5593        nextState = NO_TRANSITION;
5594        break;
5595    case HAL_AFSTATE_NEEDS_COMMAND:
5596    case HAL_AFSTATE_STARTED:
5597    case HAL_AFSTATE_SCANNING:
5598    case HAL_AFSTATE_LOCKED:
5599    case HAL_AFSTATE_FAILED:
5600    case HAL_AFSTATE_NEEDS_DETERMINATION:
5601    case HAL_AFSTATE_PASSIVE_FOCUSED:
5602        SetAfMode(AA_AFMODE_OFF);
5603        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5604        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5605        nextState = HAL_AFSTATE_INACTIVE;
5606        break;
5607    default:
5608        break;
5609    }
5610    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5611    if (nextState != NO_TRANSITION)
5612        m_afState = nextState;
5613}
5614
5615void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
5616{
5617    int nextState = NO_TRANSITION;
5618    m_afTriggerId = id;
5619
5620    switch (m_afState) {
5621    case HAL_AFSTATE_INACTIVE:
5622        nextState = NO_TRANSITION;
5623        break;
5624    case HAL_AFSTATE_NEEDS_COMMAND:
5625    case HAL_AFSTATE_STARTED:
5626    case HAL_AFSTATE_SCANNING:
5627    case HAL_AFSTATE_LOCKED:
5628    case HAL_AFSTATE_FAILED:
5629    case HAL_AFSTATE_NEEDS_DETERMINATION:
5630    case HAL_AFSTATE_PASSIVE_FOCUSED:
5631        SetAfMode(AA_AFMODE_OFF);
5632        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5633        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5634        nextState = HAL_AFSTATE_INACTIVE;
5635        break;
5636    default:
5637        break;
5638    }
5639    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5640    if (nextState != NO_TRANSITION)
5641        m_afState = nextState;
5642}
5643
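// Reports an AF state to the framework, but only when it differs from the last state sent
// (a state of 0, INACTIVE, is always re-sent).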
5644void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5645{
5646    if (m_serviceAfState != newState || newState == 0)
5647        m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5648    m_serviceAfState = newState;
5649}
5650
5651int ExynosCameraHWInterface2::GetAfStateForService()
5652{
5653   return m_serviceAfState;
5654}
5655
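// Requests a new firmware AF mode. If a previous mode change is still pending
// (m_IsAfModeUpdateRequired), the new mode is parked in m_afMode2; otherwise the mode is
// updated and any waiting AF trigger (m_afModeWaitingCnt != 0) is fired immediately.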
5656void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5657{
5658    if (m_afMode != afMode) {
5659        if (m_IsAfModeUpdateRequired) {
5660            m_afMode2 = afMode;
5661            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5662        }
5663        else {
5664            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5665            m_IsAfModeUpdateRequired = true;
5666            m_afMode = afMode;
5667            if (m_afModeWaitingCnt != 0) {
5668                m_afModeWaitingCnt = 0;
5669                m_afState = HAL_AFSTATE_INACTIVE;
5670                OnAfTrigger(m_afPendingTriggerId);
5671            }
5672        }
5673    }
5674}
5675
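// Fills the EXIF fields that do not change per capture: maker/model/software from system
// properties, lens constants from the static camera info, and default resolution and
// compression values for the thumbnail IFD.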
5676void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5677{
5678    char property[PROPERTY_VALUE_MAX];
5679
5680    //2 0th IFD TIFF Tags
5681    //3 Maker
5682    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5683    strncpy((char *)mExifInfo.maker, property,
5684                sizeof(mExifInfo.maker) - 1);
5685    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5686    //3 Model
5687    property_get("ro.product.model", property, EXIF_DEF_MODEL);
5688    strncpy((char *)mExifInfo.model, property,
5689                sizeof(mExifInfo.model) - 1);
5690    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5691    //3 Software
5692    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5693    strncpy((char *)mExifInfo.software, property,
5694                sizeof(mExifInfo.software) - 1);
5695    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5696
5697    //3 YCbCr Positioning
5698    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5699
5700    //2 0th IFD Exif Private Tags
5701    //3 F Number
5702    mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5703    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5704    //3 Exposure Program
5705    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5706    //3 Exif Version
5707    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5708    //3 Aperture
5709    double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5710    mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5711    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5712    //3 Maximum lens aperture
5713    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5714    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5715    //3 Lens Focal Length
5716    mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5717
5718    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5719    //3 User Comments
5720    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5721    //3 Color Space information
5722    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5723    //3 Exposure Mode
5724    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5725
5726    //2 0th IFD GPS Info Tags
5727    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5728    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5729
5730    //2 1st IFD TIFF Tags
5731    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5732    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5733    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5734    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5735    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5736    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5737}
5738
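// Fills the per-capture EXIF fields from the frame's dynamic metadata (camera2_shot_ext):
// dimensions, orientation, timestamps, exposure/ISO-derived APEX values, flash, white
// balance, scene type and the optional GPS tags.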
5739void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5740	camera2_shot_ext *currentEntry)
5741{
5742    camera2_dm *dm = &(currentEntry->shot.dm);
5743    camera2_ctl *ctl = &(currentEntry->shot.ctl);
5744
5745    ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime, dm->aa.isoValue);
5746    if (!ctl->request.frameCount)
5747       return;
5748    //2 0th IFD TIFF Tags
5749    //3 Width
5750    exifInfo->width = rect->w;
5751    //3 Height
5752    exifInfo->height = rect->h;
5753    //3 Orientation
5754    switch (ctl->jpeg.orientation) {
5755    case 90:
5756        exifInfo->orientation = EXIF_ORIENTATION_90;
5757        break;
5758    case 180:
5759        exifInfo->orientation = EXIF_ORIENTATION_180;
5760        break;
5761    case 270:
5762        exifInfo->orientation = EXIF_ORIENTATION_270;
5763        break;
5764    case 0:
5765    default:
5766        exifInfo->orientation = EXIF_ORIENTATION_UP;
5767        break;
5768    }
5769
5770    //3 Date time
5771    time_t rawtime;
5772    struct tm *timeinfo;
5773    time(&rawtime);
5774    timeinfo = localtime(&rawtime);
5775    strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5776
5777    //2 0th IFD Exif Private Tags
5778    //3 Exposure Time
5779    int shutterSpeed = (dm->sensor.exposureTime/1000);
5780
5781    // To display exposure time just above 500ms as 1/2sec, not 1 sec.
5782    if (shutterSpeed > 500000)
5783        shutterSpeed -=  100000;
5784
5785    if (shutterSpeed < 0) {
5786        shutterSpeed = 100;
5787    }
5788
5789    exifInfo->exposure_time.num = 1;
5790    // x us -> 1/x s
5791    //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5792    exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5793
5794    //3 ISO Speed Rating
5795    exifInfo->iso_speed_rating = dm->aa.isoValue;
5796
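    // Standard APEX relations (EXIF 2.2): Av = 2*log2(FNumber), Tv = -log2(exposure time in s),
    // Sv = log2(ISO/3.125). Since Ev = Av + Tv = Bv + Sv, brightness is Bv = Av + Tv - Sv below.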
5797    uint32_t av, tv, bv, sv, ev;
5798    av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5799    tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5800    sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5801    bv = av + tv - sv;
5802    ev = av + tv;
5803    //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5804    ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv);
5805
5806    //3 Shutter Speed
5807    exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5808    exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5809    //3 Brightness
5810    exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5811    exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5812    //3 Exposure Bias
5813    if (ctl->aa.sceneMode == AA_SCENE_MODE_BEACH ||
5814        ctl->aa.sceneMode == AA_SCENE_MODE_SNOW) {
5815        exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5816        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5817    } else {
5818        exifInfo->exposure_bias.num = 0;
5819        exifInfo->exposure_bias.den = 0;
5820    }
5821    //3 Metering Mode
5822    /*switch (m_curCameraInfo->metering) {
5823    case METERING_MODE_CENTER:
5824        exifInfo->metering_mode = EXIF_METERING_CENTER;
5825        break;
5826    case METERING_MODE_MATRIX:
5827        exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5828        break;
5829    case METERING_MODE_SPOT:
5830        exifInfo->metering_mode = EXIF_METERING_SPOT;
5831        break;
5832    case METERING_MODE_AVERAGE:
5833    default:
5834        exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5835        break;
5836    }*/
5837    exifInfo->metering_mode = EXIF_METERING_CENTER;
5838
5839    //3 Flash
5840    if (m_ctlInfo.flash.m_flashDecisionResult)
5841        exifInfo->flash = 1;
5842    else
5843        exifInfo->flash = EXIF_DEF_FLASH;
5844
5845    //3 White Balance
5846    if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO)
5847        exifInfo->white_balance = EXIF_WB_AUTO;
5848    else
5849        exifInfo->white_balance = EXIF_WB_MANUAL;
5850
5851    //3 Scene Capture Type
5852    switch (ctl->aa.sceneMode) {
5853    case AA_SCENE_MODE_PORTRAIT:
5854        exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5855        break;
5856    case AA_SCENE_MODE_LANDSCAPE:
5857        exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5858        break;
5859    case AA_SCENE_MODE_NIGHT_PORTRAIT:
5860        exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5861        break;
5862    default:
5863        exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5864        break;
5865    }
5866
5867    //2 0th IFD GPS Info Tags
5868    if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5869
5870        if (ctl->jpeg.gpsCoordinates[0] > 0)
5871            strcpy((char *)exifInfo->gps_latitude_ref, "N");
5872        else
5873            strcpy((char *)exifInfo->gps_latitude_ref, "S");
5874
5875        if (ctl->jpeg.gpsCoordinates[1] > 0)
5876            strcpy((char *)exifInfo->gps_longitude_ref, "E");
5877        else
5878            strcpy((char *)exifInfo->gps_longitude_ref, "W");
5879
5880        if (ctl->jpeg.gpsCoordinates[2] > 0)
5881            exifInfo->gps_altitude_ref = 0;
5882        else
5883            exifInfo->gps_altitude_ref = 1;
5884
5885        double latitude = fabs(ctl->jpeg.gpsCoordinates[0]);
5886        double longitude = fabs(ctl->jpeg.gpsCoordinates[1]);
5887        double altitude = fabs(ctl->jpeg.gpsCoordinates[2]);
5888
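        // EXIF stores coordinates as degrees/minutes/seconds rationals; split the decimal
        // value by repeated *60, e.g. 37.4275 deg -> 37 deg 25' 39".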
5889        exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5890        exifInfo->gps_latitude[0].den = 1;
5891        exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5892        exifInfo->gps_latitude[1].den = 1;
5893        exifInfo->gps_latitude[2].num = (uint32_t)((((latitude - exifInfo->gps_latitude[0].num) * 60)
5894                                        - exifInfo->gps_latitude[1].num) * 60);
5895        exifInfo->gps_latitude[2].den = 1;
5896
5897        exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5898        exifInfo->gps_longitude[0].den = 1;
5899        exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5900        exifInfo->gps_longitude[1].den = 1;
5901        exifInfo->gps_longitude[2].num = (uint32_t)((((longitude - exifInfo->gps_longitude[0].num) * 60)
5902                                        - exifInfo->gps_longitude[1].num) * 60);
5903        exifInfo->gps_longitude[2].den = 1;
5904
5905        exifInfo->gps_altitude.num = (uint32_t)altitude;
5906        exifInfo->gps_altitude.den = 1;
5907
5908        struct tm tm_data;
5909        long timestamp;
5910        timestamp = (long)ctl->jpeg.gpsTimestamp;
5911        gmtime_r(&timestamp, &tm_data);
5912        exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5913        exifInfo->gps_timestamp[0].den = 1;
5914        exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5915        exifInfo->gps_timestamp[1].den = 1;
5916        exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5917        exifInfo->gps_timestamp[2].den = 1;
5918        snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5919                "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5920
5921        memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32);
5922        exifInfo->enableGps = true;
5923    } else {
5924        exifInfo->enableGps = false;
5925    }
5926
5927    //2 1st IFD TIFF Tags
5928    exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5929    exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5930}
5931
5932ExynosCameraHWInterface2::MainThread::~MainThread()
5933{
5934    ALOGV("(%s):", __FUNCTION__);
5935}
5936
5937void ExynosCameraHWInterface2::MainThread::release()
5938{
5939    ALOGV("(%s):", __func__);
5940    SetSignal(SIGNAL_THREAD_RELEASE);
5941}
5942
5943ExynosCameraHWInterface2::SensorThread::~SensorThread()
5944{
5945    ALOGV("(%s):", __FUNCTION__);
5946}
5947
5948void ExynosCameraHWInterface2::SensorThread::release()
5949{
5950    ALOGV("(%s):", __func__);
5951    SetSignal(SIGNAL_THREAD_RELEASE);
5952}
5953
5954ExynosCameraHWInterface2::StreamThread::~StreamThread()
5955{
5956    ALOGV("(%s):", __FUNCTION__);
5957}
5958
5959void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
5960{
5961    ALOGV("DEBUG(%s):", __FUNCTION__);
5962    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
5963}
5964
5965void ExynosCameraHWInterface2::StreamThread::release()
5966{
5967    ALOGV("(%s):", __func__);
5968    SetSignal(SIGNAL_THREAD_RELEASE);
5969}
5970
5971int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5972{
5973    int index;
5974    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5975        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
5976            return index;
5977    }
5978    return -1;
5979}
5980
5981int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
5982{
5983    int index;
5984    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5985        if (m_parameters.svcBufHandle[index] == *bufHandle)
5986            return index;
5987    }
5988    return -1;
5989}
5990
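// Records a substream id in the first vacant slot; returns BAD_VALUE if that id is already
// attached and NO_MEMORY if all NUM_MAX_SUBSTREAM slots are in use.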
5991status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
5992{
5993    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5994    int index, vacantIndex;
5995    bool vacancy = false;
5996
5997    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5998        if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
5999            vacancy = true;
6000            vacantIndex = index;
6001        } else if (m_attachedSubStreams[index].streamId == stream_id) {
6002            return BAD_VALUE;
6003        }
6004    }
6005    if (!vacancy)
6006        return NO_MEMORY;
6007    m_attachedSubStreams[vacantIndex].streamId = stream_id;
6008    m_attachedSubStreams[vacantIndex].priority = priority;
6009    m_numRegisteredStream++;
6010    return NO_ERROR;
6011}
6012
6013status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
6014{
6015    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6016    int index;
6017    bool found = false;
6018
6019    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6020        if (m_attachedSubStreams[index].streamId == stream_id) {
6021            found = true;
6022            break;
6023        }
6024    }
6025    if (!found)
6026        return BAD_VALUE;
6027    m_attachedSubStreams[index].streamId = -1;
6028    m_attachedSubStreams[index].priority = 0;
6029    m_numRegisteredStream--;
6030    return NO_ERROR;
6031}
6032
6033int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
6034{
6035    if (ionClient == 0) {
6036        ionClient = ion_client_create();
6037        if (ionClient < 0) {
6038            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
6039            return 0;
6040        }
6041    }
6042    return ionClient;
6043}
6044
6045int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
6046{
6047    if (ionClient != 0) {
6048        if (ionClient > 0) {
6049            ion_client_destroy(ionClient);
6050        }
6051        ionClient = 0;
6052    }
6053    return ionClient;
6054}
6055
6056int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
6057{
6058    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
6059}
6060
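// Allocates one ION buffer per plane (buf->size.extS[i] != 0) and maps it into the HAL.
// cacheFlag is a per-plane bitmask, e.g. cacheFlag = 1 requests ION_FLAG_CACHED for plane 0
// only. On any failure the planes allocated so far are released via freeCameraMemory().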
6061int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
6062{
6063    int ret = 0;
6064    int i = 0;
6065    int flag = 0;
6066
6067    if (ionClient == 0) {
6068        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
6069        return -1;
6070    }
6071
6072    for (i = 0 ; i < iMemoryNum ; i++) {
6073        if (buf->size.extS[i] == 0) {
6074            break;
6075        }
6076        if (1 << i & cacheFlag)
6077            flag = ION_FLAG_CACHED;
6078        else
6079            flag = 0;
6080        buf->fd.extFd[i] = ion_alloc(ionClient, \
6081                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK, flag);
6082        if ((buf->fd.extFd[i] == -1) || (buf->fd.extFd[i] == 0)) {
6083            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
6084            buf->fd.extFd[i] = -1;
6085            freeCameraMemory(buf, iMemoryNum);
6086            return -1;
6087        }
6088
6089        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
6090                                        buf->size.extS[i], 0);
6091        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
6092            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
6093            buf->virt.extP[i] = (char *)MAP_FAILED;
6094            freeCameraMemory(buf, iMemoryNum);
6095            return -1;
6096        }
6097        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
6098    }
6099
6100    return ret;
6101}
6102
6103void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6104{
6105
6106    int i = 0 ;
6107    int ret = 0;
6108
6109    for (i=0;i<iMemoryNum;i++) {
6110        if (buf->fd.extFd[i] != -1) {
6111            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
6112                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
6113                if (ret < 0)
6114                    ALOGE("ERR(%s)", __FUNCTION__);
6115            }
6116            ion_free(buf->fd.extFd[i]);
6117            ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
6118        }
6119        buf->fd.extFd[i] = -1;
6120        buf->virt.extP[i] = (char *)MAP_FAILED;
6121        buf->size.extS[i] = 0;
6122    }
6123}
6124
6125void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6126{
6127    int i =0 ;
6128    for (i=0;i<iMemoryNum;i++) {
6129        buf->virt.extP[i] = (char *)MAP_FAILED;
6130        buf->fd.extFd[i] = -1;
6131        buf->size.extS[i] = 0;
6132    }
6133}
6134
6135
6136
6137
6138static camera2_device_t *g_cam2_device = NULL;
6139static bool g_camera_vaild = false;
6140static Mutex g_camera_mutex;
6141ExynosCamera2 * g_camera2[2] = { NULL, NULL };
6142
6143static int HAL2_camera_device_close(struct hw_device_t* device)
6144{
6145    Mutex::Autolock lock(g_camera_mutex);
6146    ALOGD("(%s): ENTER", __FUNCTION__);
6147    if (device) {
6148
6149        camera2_device_t *cam_device = (camera2_device_t *)device;
6150        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
6151        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
6152        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
6153        free(cam_device);
6154        g_camera_vaild = false;
6155        g_cam2_device = NULL;
6156    }
6157
6158    ALOGD("(%s): EXIT", __FUNCTION__);
6159    return 0;
6160}
6161
6162static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
6163{
6164    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
6165}
6166
6167static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
6168            const camera2_request_queue_src_ops_t *request_src_ops)
6169{
6170    ALOGV("DEBUG(%s):", __FUNCTION__);
6171    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
6172}
6173
6174static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
6175{
6176    ALOGV("DEBUG(%s):", __FUNCTION__);
6177    return obj(dev)->notifyRequestQueueNotEmpty();
6178}
6179
6180static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
6181            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
6182{
6183    ALOGV("DEBUG(%s):", __FUNCTION__);
6184    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
6185}
6186
6187static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
6188{
6189    ALOGV("DEBUG(%s):", __FUNCTION__);
6190    return obj(dev)->getInProgressCount();
6191}
6192
6193static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
6194{
6195    ALOGV("DEBUG(%s):", __FUNCTION__);
6196    return obj(dev)->flushCapturesInProgress();
6197}
6198
6199static int HAL2_device_construct_default_request(const struct camera2_device *dev,
6200            int request_template, camera_metadata_t **request)
6201{
6202    ALOGV("DEBUG(%s):", __FUNCTION__);
6203    return obj(dev)->constructDefaultRequest(request_template, request);
6204}
6205
6206static int HAL2_device_allocate_stream(
6207            const struct camera2_device *dev,
6208            // inputs
6209            uint32_t width,
6210            uint32_t height,
6211            int      format,
6212            const camera2_stream_ops_t *stream_ops,
6213            // outputs
6214            uint32_t *stream_id,
6215            uint32_t *format_actual,
6216            uint32_t *usage,
6217            uint32_t *max_buffers)
6218{
6219    ALOGV("(%s): ", __FUNCTION__);
6220    return obj(dev)->allocateStream(width, height, format, stream_ops,
6221                                    stream_id, format_actual, usage, max_buffers);
6222}
6223
6224static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
6225            uint32_t stream_id,
6226            int num_buffers,
6227            buffer_handle_t *buffers)
6228{
6229    ALOGV("DEBUG(%s):", __FUNCTION__);
6230    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
6231}
6232
6233static int HAL2_device_release_stream(
6234        const struct camera2_device *dev,
6235            uint32_t stream_id)
6236{
6237    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
6238    if (!g_camera_vaild)
6239        return 0;
6240    return obj(dev)->releaseStream(stream_id);
6241}
6242
6243static int HAL2_device_allocate_reprocess_stream(
6244           const struct camera2_device *dev,
6245            uint32_t width,
6246            uint32_t height,
6247            uint32_t format,
6248            const camera2_stream_in_ops_t *reprocess_stream_ops,
6249            // outputs
6250            uint32_t *stream_id,
6251            uint32_t *consumer_usage,
6252            uint32_t *max_buffers)
6253{
6254    ALOGV("DEBUG(%s):", __FUNCTION__);
6255    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
6256                                    stream_id, consumer_usage, max_buffers);
6257}
6258
6259static int HAL2_device_allocate_reprocess_stream_from_stream(
6260           const struct camera2_device *dev,
6261            uint32_t output_stream_id,
6262            const camera2_stream_in_ops_t *reprocess_stream_ops,
6263            // outputs
6264            uint32_t *stream_id)
6265{
6266    ALOGV("DEBUG(%s):", __FUNCTION__);
6267    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
6268                                    reprocess_stream_ops, stream_id);
6269}
6270
6271static int HAL2_device_release_reprocess_stream(
6272        const struct camera2_device *dev,
6273            uint32_t stream_id)
6274{
6275    ALOGV("DEBUG(%s):", __FUNCTION__);
6276    return obj(dev)->releaseReprocessStream(stream_id);
6277}
6278
6279static int HAL2_device_trigger_action(const struct camera2_device *dev,
6280           uint32_t trigger_id,
6281            int ext1,
6282            int ext2)
6283{
6284    ALOGV("DEBUG(%s):", __FUNCTION__);
6285    if (!g_camera_vaild)
6286        return 0;
6287    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
6288}
6289
6290static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
6291            camera2_notify_callback notify_cb,
6292            void *user)
6293{
6294    ALOGV("DEBUG(%s):", __FUNCTION__);
6295    return obj(dev)->setNotifyCallback(notify_cb, user);
6296}
6297
6298static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
6299            vendor_tag_query_ops_t **ops)
6300{
6301    ALOGV("DEBUG(%s):", __FUNCTION__);
6302    return obj(dev)->getMetadataVendorTagOps(ops);
6303}
6304
6305static int HAL2_device_dump(const struct camera2_device *dev, int fd)
6306{
6307    ALOGV("DEBUG(%s):", __FUNCTION__);
6308    return obj(dev)->dump(fd);
6309}
6310
6311
6312
6313
6314
6315static int HAL2_getNumberOfCameras()
6316{
6317    ALOGV("(%s): returning 2", __FUNCTION__);
6318    return 2;
6319}
6320
6321
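// Returns facing/orientation and the static metadata for camera 0 (back) or 1 (front).
// The metadata is built once per camera and cached: constructStaticInfo() is called first
// to allocate the buffer and then a second time to fill it in.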
6322static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
6323{
6324    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
6325    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
6326
6327    status_t res;
6328
6329    if (cameraId == 0) {
6330        info->facing = CAMERA_FACING_BACK;
6331        if (!g_camera2[0])
6332            g_camera2[0] = new ExynosCamera2(0);
6333    }
6334    else if (cameraId == 1) {
6335        info->facing = CAMERA_FACING_FRONT;
6336        if (!g_camera2[1])
6337            g_camera2[1] = new ExynosCamera2(1);
6338    }
6339    else
6340        return BAD_VALUE;
6341
6342    info->orientation = 0;
6343    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
6344    if (mCameraInfo[cameraId] == NULL) {
6345        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
6346        if (res != OK) {
6347            ALOGE("%s: Unable to allocate static info: %s (%d)",
6348                    __FUNCTION__, strerror(-res), res);
6349            return res;
6350        }
6351        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
6352        if (res != OK) {
6353            ALOGE("%s: Unable to fill in static info: %s (%d)",
6354                    __FUNCTION__, strerror(-res), res);
6355            return res;
6356        }
6357    }
6358    info->static_camera_characteristics = mCameraInfo[cameraId];
6359    return NO_ERROR;
6360}
6361
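// Table of camera2 HAL entry points. SET_METHOD(m) expands to a designated initializer,
// e.g. SET_METHOD(dump) -> "dump : HAL2_device_dump"; each wrapper simply forwards to the
// ExynosCameraHWInterface2 instance stored in dev->priv.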
6362#define SET_METHOD(m) m : HAL2_device_##m
6363
6364static camera2_device_ops_t camera2_device_ops = {
6365        SET_METHOD(set_request_queue_src_ops),
6366        SET_METHOD(notify_request_queue_not_empty),
6367        SET_METHOD(set_frame_queue_dst_ops),
6368        SET_METHOD(get_in_progress_count),
6369        SET_METHOD(flush_captures_in_progress),
6370        SET_METHOD(construct_default_request),
6371        SET_METHOD(allocate_stream),
6372        SET_METHOD(register_stream_buffers),
6373        SET_METHOD(release_stream),
6374        SET_METHOD(allocate_reprocess_stream),
6375        SET_METHOD(allocate_reprocess_stream_from_stream),
6376        SET_METHOD(release_reprocess_stream),
6377        SET_METHOD(trigger_action),
6378        SET_METHOD(set_notify_callback),
6379        SET_METHOD(get_metadata_vendor_tag_ops),
6380        SET_METHOD(dump),
6381};
6382
6383#undef SET_METHOD
6384
6385
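// Opens the camera device. Only one camera may be open at a time (guarded by g_camera_mutex
// and g_camera_vaild); if a device for the same id already exists it is reused, otherwise the
// call waits for the previous instance to be closed before allocating a new one.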
6386static int HAL2_camera_device_open(const struct hw_module_t* module,
6387                                  const char *id,
6388                                  struct hw_device_t** device)
6389{
6390    int cameraId = atoi(id);
6391    int openInvalid = 0;
6392
6393    Mutex::Autolock lock(g_camera_mutex);
6394    if (g_camera_vaild) {
6395        ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
6396        return -EBUSY;
6397    }
6398    g_camera_vaild = false;
6399    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
6400    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
6401        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
6402        return -EINVAL;
6403    }
6404
6405    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
6406    if (g_cam2_device) {
6407        if (obj(g_cam2_device)->getCameraId() == cameraId) {
6408            ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
6409            goto done;
6410        } else {
6411            ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
6412            while (g_cam2_device)
6413                usleep(SIG_WAITING_TICK);
6414            ALOGD("(%s): END   waiting for cam device free", __FUNCTION__);
6415        }
6416    }
6417
6418    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
6419    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
6420
6421    if (!g_cam2_device)
6422        return -ENOMEM;
6423
6424    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
6425    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
6426    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
6427    g_cam2_device->common.close   = HAL2_camera_device_close;
6428
6429    g_cam2_device->ops = &camera2_device_ops;
6430
6431    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
6432
6433    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
6434    if (!openInvalid) {
6435        ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
6436        return -ENODEV;
6437    }
6438done:
6439    *device = (hw_device_t *)g_cam2_device;
6440    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
6441    g_camera_vaild = true;
6442
6443    return 0;
6444}
6445
6446
6447static hw_module_methods_t camera_module_methods = {
6448            open : HAL2_camera_device_open
6449};
6450
6451extern "C" {
6452    struct camera_module HAL_MODULE_INFO_SYM = {
6453      common : {
6454          tag                : HARDWARE_MODULE_TAG,
6455          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
6456          hal_api_version    : HARDWARE_HAL_API_VERSION,
6457          id                 : CAMERA_HARDWARE_MODULE_ID,
6458          name               : "Exynos Camera HAL2",
6459          author             : "Samsung Corporation",
6460          methods            : &camera_module_methods,
6461          dso:                NULL,
6462          reserved:           {0},
6463      },
6464      get_number_of_cameras : HAL2_getNumberOfCameras,
6465      get_camera_info       : HAL2_getCameraInfo
6466    };
6467}
6468
6469}; // namespace android
6470