1/*
2**
3** Copyright 2008, The Android Open Source Project
4** Copyright 2012, Samsung Electronics Co. LTD
5**
6** Licensed under the Apache License, Version 2.0 (the "License");
7** you may not use this file except in compliance with the License.
8** You may obtain a copy of the License at
9**
10**     http://www.apache.org/licenses/LICENSE-2.0
11**
12** Unless required by applicable law or agreed to in writing, software
13** distributed under the License is distributed on an "AS IS" BASIS,
14** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15** See the License for the specific language governing permissions and
16** limitations under the License.
17*/
18
19/*!
20 * \file      ExynosCameraHWInterface2.cpp
21 * \brief     source file for Android Camera API 2.0 HAL
22 * \author    Sungjoong Kang(sj3.kang@samsung.com)
23 * \date      2012/07/10
24 *
25 * <b>Revision History: </b>
26 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27 *   Initial Release
28 *
29 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30 *   2nd Release
31 *
32 */
33
34//#define LOG_NDEBUG 0
35#define LOG_TAG "ExynosCameraHAL2"
36#include <utils/Log.h>
37
38#include "ExynosCameraHWInterface2.h"
39#include "exynos_format.h"
40
41
42
43namespace android {
44
45
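/* Debug helper: dumps 'size' bytes of a (postview) image buffer to the file 'fname'. */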
46void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
47{
48    int nw;
49    int cnt = 0;
50    uint32_t written = 0;
51
52    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
53    int fd = open(fname, O_RDWR | O_CREAT, 0644);
54    if (fd < 0) {
55        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
56        return;
57    }
58
59    ALOGV("writing %d bytes to file [%s]", size, fname);
60    while (written < size) {
61        nw = ::write(fd, buf + written, size - written);
62        if (nw < 0) {
63            ALOGE("failed to write to file [%s] at offset %d: %s", fname, written, strerror(errno));
64            break;
65        }
66        written += nw;
67        cnt++;
68    }
69    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
70    ::close(fd);
71}
72
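/* Returns the bit depth (bits per pixel) of the given V4L2 pixel format. */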
73int get_pixel_depth(uint32_t fmt)
74{
75    int depth = 0;
76
77    switch (fmt) {
78    case V4L2_PIX_FMT_JPEG:
79        depth = 8;
80        break;
81
82    case V4L2_PIX_FMT_NV12:
83    case V4L2_PIX_FMT_NV21:
84    case V4L2_PIX_FMT_YUV420:
85    case V4L2_PIX_FMT_YVU420M:
86    case V4L2_PIX_FMT_NV12M:
87    case V4L2_PIX_FMT_NV12MT:
88        depth = 12;
89        break;
90
91    case V4L2_PIX_FMT_RGB565:
92    case V4L2_PIX_FMT_YUYV:
93    case V4L2_PIX_FMT_YVYU:
94    case V4L2_PIX_FMT_UYVY:
95    case V4L2_PIX_FMT_VYUY:
96    case V4L2_PIX_FMT_NV16:
97    case V4L2_PIX_FMT_NV61:
98    case V4L2_PIX_FMT_YUV422P:
99    case V4L2_PIX_FMT_SBGGR10:
100    case V4L2_PIX_FMT_SBGGR12:
101    case V4L2_PIX_FMT_SBGGR16:
102        depth = 16;
103        break;
104
105    case V4L2_PIX_FMT_RGB32:
106        depth = 32;
107        break;
108    default:
109        ALOGE("Get depth failed(format : %d)", fmt);
110        break;
111    }
112
113    return depth;
114}
115
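/* Thin wrappers around the exynos_v4l2_* helpers for the multi-planar V4L2
 * camera nodes: S_FMT, REQBUFS, QBUF, DQBUF, STREAMON/STREAMOFF and S_INPUT. */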
116int cam_int_s_fmt(node_info_t *node)
117{
118    struct v4l2_format v4l2_fmt;
119    unsigned int framesize;
120    int ret;
121
122    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
123
124    v4l2_fmt.type = node->type;
125    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
126
127    if (node->planes >= 1) {
128        v4l2_fmt.fmt.pix_mp.width       = node->width;
129        v4l2_fmt.fmt.pix_mp.height      = node->height;
130        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
131        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
132    } else {
133        ALOGE("%s: S_FMT, invalid number of planes (%d)", __FUNCTION__, node->planes);
134    }
135
136    /* Set up for capture */
137    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
138
139    if (ret < 0)
140        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
141
142
143    return ret;
144}
145
146int cam_int_reqbufs(node_info_t *node)
147{
148    struct v4l2_requestbuffers req;
149    int ret;
150
151    req.count = node->buffers;
152    req.type = node->type;
153    req.memory = node->memory;
154
155    ret = exynos_v4l2_reqbufs(node->fd, &req);
156
157    if (ret < 0)
158        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
159
160    return req.count;
161}
162
163int cam_int_qbuf(node_info_t *node, int index)
164{
165    struct v4l2_buffer v4l2_buf;
166    struct v4l2_plane planes[VIDEO_MAX_PLANES];
167    int i;
168    int ret = 0;
169
170    v4l2_buf.m.planes   = planes;
171    v4l2_buf.type       = node->type;
172    v4l2_buf.memory     = node->memory;
173    v4l2_buf.index      = index;
174    v4l2_buf.length     = node->planes;
175
176    for(i = 0; i < node->planes; i++){
177        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
178        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
179    }
180
181    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
182
183    if (ret < 0)
184        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
185
186    return ret;
187}
188
189int cam_int_streamon(node_info_t *node)
190{
191    enum v4l2_buf_type type = node->type;
192    int ret;
193
194
195    ret = exynos_v4l2_streamon(node->fd, type);
196
197    if (ret < 0)
198        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
199
200    ALOGV("On streaming I/O... ... fd(%d)", node->fd);
201
202    return ret;
203}
204
205int cam_int_streamoff(node_info_t *node)
206{
207    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
208    int ret;
209
210
211    ALOGV("Off streaming I/O... fd(%d)", node->fd);
212    ret = exynos_v4l2_streamoff(node->fd, type);
213
214    if (ret < 0)
215        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
216
217    return ret;
218}
219
220int isp_int_streamoff(node_info_t *node)
221{
222    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
223    int ret;
224
225    ALOGV("Off streaming I/O... fd(%d)", node->fd);
226    ret = exynos_v4l2_streamoff(node->fd, type);
227
228    if (ret < 0)
229        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
230
231    return ret;
232}
233
234int cam_int_dqbuf(node_info_t *node)
235{
236    struct v4l2_buffer v4l2_buf;
237    struct v4l2_plane planes[VIDEO_MAX_PLANES];
238    int ret;
239
240    v4l2_buf.type       = node->type;
241    v4l2_buf.memory     = node->memory;
242    v4l2_buf.m.planes   = planes;
243    v4l2_buf.length     = node->planes;
244
245    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
246    if (ret < 0)
247        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
248
249    return v4l2_buf.index;
250}
251
252int cam_int_dqbuf(node_info_t *node, int num_plane)
253{
254    struct v4l2_buffer v4l2_buf;
255    struct v4l2_plane planes[VIDEO_MAX_PLANES];
256    int ret;
257
258    v4l2_buf.type       = node->type;
259    v4l2_buf.memory     = node->memory;
260    v4l2_buf.m.planes   = planes;
261    v4l2_buf.length     = num_plane;
262
263    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
264    if (ret < 0)
265        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
266
267    return v4l2_buf.index;
268}
269
270int cam_int_s_input(node_info_t *node, int index)
271{
272    int ret;
273
274    ret = exynos_v4l2_s_input(node->fd, index);
275    if (ret < 0)
276        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
277
278    return ret;
279}
280
281
282gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
283
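/* RequestManager tracks capture requests in a fixed-size circular buffer of
 * NUM_MAX_REQUEST_MGR_ENTRY entries, following each entry from registration
 * through sensor/ISP processing (REQUESTED/CAPTURED) to metadata completion
 * (METADONE) and final deregistration. */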
284RequestManager::RequestManager(SignalDrivenThread* main_thread):
285    m_lastAeMode(0),
286    m_lastAaMode(0),
287    m_lastAwbMode(0),
288    m_lastAeComp(0),
289    m_frameIndex(-1)
290{
291    m_metadataConverter = new MetadataConverter;
292    m_mainThread = main_thread;
293    ResetEntry();
294    m_sensorPipelineSkipCnt = 0;
295    return;
296}
297
298RequestManager::~RequestManager()
299{
300    ALOGV("%s", __FUNCTION__);
301    if (m_metadataConverter != NULL) {
302        delete m_metadataConverter;
303        m_metadataConverter = NULL;
304    }
305
306    releaseSensorQ();
307    return;
308}
309
310void RequestManager::ResetEntry()
311{
312    Mutex::Autolock lock(m_requestMutex);
313    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
314        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
315        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
316    }
317    m_numOfEntries = 0;
318    m_entryInsertionIndex = -1;
319    m_entryProcessingIndex = -1;
320    m_entryFrameOutputIndex = -1;
321}
322
323int RequestManager::GetNumEntries()
324{
325    return m_numOfEntries;
326}
327
328void RequestManager::SetDefaultParameters(int cropX)
329{
330    m_cropX = cropX;
331}
332
333bool RequestManager::IsRequestQueueFull()
334{
335    Mutex::Autolock lock(m_requestMutex);
336    if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
337        return true;
338    else
339        return false;
340}
341
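/* Stores a new framework request in the next free circular-buffer slot and
 * converts its camera_metadata into the internal camera2_shot_ext form. */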
342void RequestManager::RegisterRequest(camera_metadata_t * new_request)
343{
344    ALOGV("DEBUG(%s):", __FUNCTION__);
345
346    Mutex::Autolock lock(m_requestMutex);
347
348    request_manager_entry * newEntry = NULL;
349    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
350    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );
351
352
353    newEntry = &(entries[newInsertionIndex]);
354
355    if (newEntry->status!=EMPTY) {
356        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
357        return;
358    }
359    newEntry->status = REGISTERED;
360    newEntry->original_request = new_request;
361    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
362    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
363    newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];
364
365    m_numOfEntries++;
366    m_entryInsertionIndex = newInsertionIndex;
367
368
369    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
370    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
371}
372
373void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
374{
375    ALOGV("DEBUG(%s):", __FUNCTION__);
376    int frame_index;
377    request_manager_entry * currentEntry;
378
379    Mutex::Autolock lock(m_requestMutex);
380
381    frame_index = GetFrameIndex();
382    currentEntry =  &(entries[frame_index]);
383    if (currentEntry->status != METADONE) {
384        ALOGV("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
385        , m_entryProcessingIndex, m_entryFrameOutputIndex,(int)(currentEntry->status));
386        return;
387    }
388    if (deregistered_request)  *deregistered_request = currentEntry->original_request;
389
390    currentEntry->status = EMPTY;
391    currentEntry->original_request = NULL;
392    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
393    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
394    currentEntry->output_stream_count = 0;
395    m_numOfEntries--;
396    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
397     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
398
399    return;
400}
401
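/* Assembles the dynamic (result) metadata of the entry selected by
 * GetFrameIndex() into a temporary metadata buffer handed back to the framework. */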
402bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
403                camera_metadata_t ** prepared_frame, int afState)
404{
405    ALOGV("DEBUG(%s):", __FUNCTION__);
406    Mutex::Autolock lock(m_requestMutex);
407    status_t res = NO_ERROR;
408    int tempFrameOutputIndex = GetFrameIndex();
409    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
410    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
411        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
412
413    if (currentEntry->status != METADONE) {
414        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
415
416        return false;
417    }
418    m_entryFrameOutputIndex = tempFrameOutputIndex;
419    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 20, 500); //estimated
420    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
421    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
422                m_tempFrameMetadata);
423    if (res!=NO_ERROR) {
424        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
425        return false;
426    }
427    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
428    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
429    *prepared_frame = m_tempFrameMetadata;
430    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
431        currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
432    // Dump();
433    return true;
434}
435
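/* Writes a camera2_shot_ext for the next registered entry into the metadata
 * plane of the given sensor buffer and marks that entry as REQUESTED. */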
436int RequestManager::MarkProcessingRequest(ExynosBuffer* buf, int *afMode)
437{
438    struct camera2_shot_ext * shot_ext;
439    struct camera2_shot_ext * request_shot;
440    int targetStreamIndex = 0;
441    request_manager_entry * newEntry = NULL;
442    static int count = 0;
443
444    Mutex::Autolock lock(m_requestMutex);
445    if (m_numOfEntries == 0)  {
446        CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
447        return -1;
448    }
449
450    if ((m_entryProcessingIndex == m_entryInsertionIndex)
451        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
452        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
453         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
454        return -1;
455    }
456
457    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
458    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
459
460    newEntry = &(entries[newProcessingIndex]);
461    request_shot = &(newEntry->internal_shot);
462    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
463    if (newEntry->status != REGISTERED) {
464        CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal ", __FUNCTION__, newProcessingIndex);
465        return -1;
466    }
467
468    newEntry->status = REQUESTED;
469
470    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
471
472    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
473    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
474    shot_ext->request_sensor = 1;
475    shot_ext->dis_bypass = 1;
476    shot_ext->dnr_bypass = 1;
477    shot_ext->fd_bypass = 1;
478    shot_ext->setfile = 0;
479
480    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
481    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
482    if (targetStreamIndex & MASK_OUTPUT_SCP)
483        shot_ext->request_scp = 1;
484
485    if (targetStreamIndex & MASK_OUTPUT_SCC)
486        shot_ext->request_scc = 1;
487
488    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
489        shot_ext->fd_bypass = 0;
490
491    if (count == 0){
492        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
493    } else
494        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
495
496    count++;
497    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
498    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
499    shot_ext->shot.magicNumber = 0x23456789;
500    shot_ext->shot.ctl.sensor.exposureTime = 0;
501    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
502    shot_ext->shot.ctl.sensor.sensitivity = 0;
503
504
505    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
506    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
507    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
508
509    m_entryProcessingIndex = newProcessingIndex;
510    return newProcessingIndex;
511}
512
513void RequestManager::NotifyStreamOutput(int frameCnt)
514{
515    int index;
516
517    Mutex::Autolock lock(m_requestMutex);
518    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
519
520    index = FindEntryIndexByFrameCnt(frameCnt);
521    if (index == -1) {
522        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
523        return;
524    }
525    ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt,   entries[index].output_stream_count);
526
527    entries[index].output_stream_count--;  //TODO : match stream id also
528    CheckCompleted(index);
529}
530
531void RequestManager::CheckCompleted(int index)
532{
533    if((entries[index].status == METADONE) && (entries[index].output_stream_count <= 0)){
534        ALOGV("send SIGNAL_MAIN_STREAM_OUTPUT_DONE(index:%d)(frameCnt:%d)",
535                index, entries[index].internal_shot.shot.ctl.request.frameCount );
536        SetFrameIndex(index);
537        m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
538    }
539}
540
541void RequestManager::SetFrameIndex(int index)
542{
543    m_frameIndex = index;
544}
545
546int RequestManager::GetFrameIndex()
547{
548    return m_frameIndex;
549}
550
551void  RequestManager::pushSensorQ(int index)
552{
553    Mutex::Autolock lock(m_requestMutex);
554    m_sensorQ.push_back(index);
555}
556
557int RequestManager::popSensorQ()
558{
559   List<int>::iterator sensor_token;
560   int index;
561
562    Mutex::Autolock lock(m_requestMutex);
563
564    if(m_sensorQ.size() == 0)
565        return -1;
566
567    sensor_token = m_sensorQ.begin();
568    index = *sensor_token;
569    m_sensorQ.erase(sensor_token);
570
571    return (index);
572}
573
574void RequestManager::releaseSensorQ()
575{
576    List<int>::iterator r;
577
578    Mutex::Autolock lock(m_requestMutex);
579    ALOGV("(%s) m_sensorQ.size : %d", __FUNCTION__, (int)m_sensorQ.size());
580
581    while(m_sensorQ.size() > 0){
582        r = m_sensorQ.begin();
583        m_sensorQ.erase(r);
584    }
585    return;
586}
587
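/* Copies the dynamic metadata produced by the ISP back into the matching
 * CAPTURED entry, preserving the sensor timestamp, then checks whether the
 * request is now complete. */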
588void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
589{
590    int index;
591    struct camera2_shot_ext * request_shot;
592    nsecs_t timeStamp;
593    int i;
594
595    Mutex::Autolock lock(m_requestMutex);
596    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
597
598    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
599        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
600            && (entries[i].status == CAPTURED)){
601            entries[i].status = METADONE;
602            break;
603        }
604    }
605
606    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
607        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
608        return;
609    }
610
611    request_manager_entry * newEntry = &(entries[i]);
612    request_shot = &(newEntry->internal_shot);
613
614    timeStamp = request_shot->shot.dm.sensor.timeStamp;
615    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
616    request_shot->shot.dm.sensor.timeStamp = timeStamp;
617    m_lastTimeStamp = timeStamp;
618    CheckCompleted(i);
619}
620
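/* Prepares the shot about to be queued to the ISP for frameCnt: copies the
 * stored per-request controls, updates flash/AE/AWB bookkeeping, and zeroes
 * control fields that are unchanged since the previous frame. */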
621void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
622{
623    int index, targetStreamIndex;
624    struct camera2_shot_ext * request_shot;
625
626    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
627    if (frameCnt < 0)
628        return;
629
630    index = FindEntryIndexByFrameCnt(frameCnt);
631    if (index == -1) {
632        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
633        return;
634    }
635
636    request_manager_entry * newEntry = &(entries[index]);
637    request_shot = &(newEntry->internal_shot);
638    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
639    shot_ext->shot.ctl.request.frameCount = frameCnt;
640    shot_ext->request_sensor = 1;
641    shot_ext->dis_bypass = 1;
642    shot_ext->dnr_bypass = 1;
643    shot_ext->fd_bypass = 1;
644    shot_ext->setfile = 0;
645
646    shot_ext->request_scc = 0;
647    shot_ext->request_scp = 0;
648
649    shot_ext->isReprocessing = request_shot->isReprocessing;
650    shot_ext->reprocessInput = request_shot->reprocessInput;
651    shot_ext->shot.ctl.request.outputStreams[0] = 0;
652
653    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
654    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
655    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
656
657    // mapping flash UI mode from aeMode
658    if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
659        ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
660        request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
661    }
662    // mapping awb UI mode from awbMode
663    ctl_info->awb.i_awbMode = request_shot->shot.ctl.aa.awbMode;
664
665    // Apply ae/awb lock or unlock
666    if (request_shot->ae_lock == AEMODE_LOCK_ON)
667            request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
668    if (request_shot->awb_lock == AWBMODE_LOCK_ON)
669            request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
670
671    // This is for pre-capture metering
672    if (ctl_info->ae.m_aeCnt >= IS_COMMAND_EXECUTION) {
673        if (ctl_info->ae.m_aeCnt == IS_COMMAND_CLEAR) {
674            ALOGV("(%s) [Capture] m_aeCnt :  CLEAR -> NONE", __FUNCTION__);
675            ctl_info->ae.m_aeCnt = IS_COMMAND_NONE;
676        } else {
677            ctl_info->ae.m_aeCnt = IS_COMMAND_CLEAR;
678            ALOGV("(%s) [Capture] m_aeCnt :  EXECUTION -> CLEAR", __FUNCTION__);
679        }
680    }
681    if (ctl_info->awb.m_awbCnt >= IS_COMMAND_EXECUTION) {
682        if (ctl_info->awb.m_awbCnt == IS_COMMAND_CLEAR)
683            ctl_info->awb.m_awbCnt = IS_COMMAND_NONE;
684        else
685            ctl_info->awb.m_awbCnt = IS_COMMAND_CLEAR;
686    }
687
688    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
689        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
690    }
691    else {
692        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
693        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
694    }
695    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
696        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
697    }
698    else {
699        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
700        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
701    }
702    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
703        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
704    }
705    else {
706        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
707        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
708    }
709    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
710        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
711    }
712    else {
713        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
714        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
715    }
716
717    shot_ext->shot.ctl.aa.afTrigger = 0;
718
719    targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
720    shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
721    if (targetStreamIndex & MASK_OUTPUT_SCP)
722        shot_ext->request_scp = 1;
723
724    if (targetStreamIndex & MASK_OUTPUT_SCC)
725        shot_ext->request_scc = 1;
726
727    if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
728        shot_ext->fd_bypass = 0;
729
730    if (targetStreamIndex & STREAM_MASK_RECORD) {
731        shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
732        shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
733    }
734
735    ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
736    (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
737    (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
738    (int)(shot_ext->shot.ctl.aa.afMode));
739}
740
741int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
742{
743    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
744        if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
745            return i;
746    }
747    return -1;
748}
749
750void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
751{
752    int index = FindEntryIndexByFrameCnt(frameCnt);
753    if (index == -1) {
754        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
755        return;
756    }
757
758    request_manager_entry * currentEntry = &(entries[index]);
759    currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
760    ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
761        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
762}
763
764
765nsecs_t  RequestManager::GetTimestampByFrameCnt(int frameCnt)
766{
767    int index = FindEntryIndexByFrameCnt(frameCnt);
768    if (index == -1) {
769        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
770        return m_lastTimeStamp;
771    }
772    else
773        return GetTimestamp(index);
774}
775
776nsecs_t  RequestManager::GetTimestamp(int index)
777{
778    Mutex::Autolock lock(m_requestMutex);
779    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
780        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
781        return 0;
782    }
783
784    request_manager_entry * currentEntry = &(entries[index]);
785    nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
786    if (frameTime == 0) {
787        ALOGV("DEBUG(%s): timestamp null,  returning saved value", __FUNCTION__);
788        frameTime = m_lastTimeStamp;
789    }
790    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
791    return frameTime;
792}
793
794int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
795{
796    int i;
797
798    if (m_numOfEntries == 0) {
799        ALOGV("(%s): No Entry found", __FUNCTION__);
800        return -1;
801    }
802
803    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
804        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
805            continue;
806
807        if (entries[i].status == REQUESTED) {
808            entries[i].status = CAPTURED;
809            return entries[i].internal_shot.shot.ctl.request.frameCount;
810        }
811
812    }
813    CAM_LOGD("(%s): No Entry found", __FUNCTION__);
814
815    return -1;
816}
817
818void     RequestManager::SetInitialSkip(int count)
819{
820    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
821    if (count > m_sensorPipelineSkipCnt)
822        m_sensorPipelineSkipCnt = count;
823}
824
825int     RequestManager::GetSkipCnt()
826{
827    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
828    if (m_sensorPipelineSkipCnt == 0)
829        return m_sensorPipelineSkipCnt;
830    else
831        return --m_sensorPipelineSkipCnt;
832}
833
834void RequestManager::Dump(void)
835{
836    int i = 0;
837    request_manager_entry * currentEntry;
838    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
839    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
840
841    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
842        currentEntry =  &(entries[i]);
843        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
844        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
845            currentEntry->output_stream_count,
846            currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
847    }
848}
849
850int     RequestManager::GetNextIndex(int index)
851{
852    index++;
853    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
854        index = 0;
855
856    return index;
857}
858
859ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
860            m_requestQueueOps(NULL),
861            m_frameQueueOps(NULL),
862            m_callbackCookie(NULL),
863            m_numOfRemainingReqInSvc(0),
864            m_isRequestQueuePending(false),
865            m_isRequestQueueNull(true),
866            m_isIspStarted(false),
867            m_ionCameraClient(0),
868            m_zoomRatio(1),
869            m_scp_closing(false),
870            m_scp_closed(false),
871            m_afState(HAL_AFSTATE_INACTIVE),
872            m_afMode(NO_CHANGE),
873            m_afMode2(NO_CHANGE),
874            m_IsAfModeUpdateRequired(false),
875            m_IsAfTriggerRequired(false),
876            m_IsAfLockRequired(false),
877            m_wideAspect(false),
878            m_scpOutputSignalCnt(0),
879            m_scpOutputImageCnt(0),
880            m_afTriggerId(0),
881            m_afPendingTriggerId(0),
882            m_afModeWaitingCnt(0),
883            m_halDevice(dev),
884            m_nightCaptureCnt(0),
885            m_cameraId(cameraId),
886            m_thumbNailW(160),
887            m_thumbNailH(120)
888{
889    ALOGV("DEBUG(%s):", __FUNCTION__);
890    int ret = 0;
891    int res = 0;
892
893    m_exynosPictureCSC = NULL;
894    m_exynosVideoCSC = NULL;
895
896    if (!m_grallocHal) {
897        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
898        if (ret)
899            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
900    }
901
902    m_camera2 = camera;
903    m_ionCameraClient = createIonClient(m_ionCameraClient);
904    if(m_ionCameraClient == 0)
905        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
906
907
908    m_BayerManager = new BayerBufManager();
909    m_mainThread    = new MainThread(this);
910    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
911    *openInvalid = InitializeISPChain();
912    if (*openInvalid < 0) {
913        // clean process
914        // 1. close video nodes
915        // SCP
916        res = exynos_v4l2_close(m_camera_info.scp.fd);
917        if (res != NO_ERROR ) {
918            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
919        }
920        // SCC
921        res = exynos_v4l2_close(m_camera_info.capture.fd);
922        if (res != NO_ERROR ) {
923            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
924        }
925        // Sensor
926        res = exynos_v4l2_close(m_camera_info.sensor.fd);
927        if (res != NO_ERROR ) {
928            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
929        }
930        // ISP
931        res = exynos_v4l2_close(m_camera_info.isp.fd);
932        if (res != NO_ERROR ) {
933            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
934        }
935    } else {
936        m_sensorThread  = new SensorThread(this);
937        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
938        ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);
939
940        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
941            m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
942        CSC_METHOD cscMethod = CSC_METHOD_HW;
943        m_exynosPictureCSC = csc_init(cscMethod);
944        if (m_exynosPictureCSC == NULL)
945            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
946        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
947
948        m_exynosVideoCSC = csc_init(cscMethod);
949        if (m_exynosVideoCSC == NULL)
950            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
951        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
952
953        m_setExifFixedAttribute();
954
955        // control information clear
956        // flash
957        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
958        m_ctlInfo.flash.m_afFlashDoneFlg= false;
959        m_ctlInfo.flash.m_flashEnableFlg = false;
960        m_ctlInfo.flash.m_flashFrameCount = 0;
961        m_ctlInfo.flash.m_flashCnt = 0;
962        m_ctlInfo.flash.m_flashTimeOut = 0;
963        m_ctlInfo.flash.m_flashDecisionResult = false;
964        m_ctlInfo.flash.m_flashTorchMode = false;
965        m_ctlInfo.flash.m_precaptureState = 0;
966        m_ctlInfo.flash.m_precaptureTriggerId = 0;
967        //ae
968        m_ctlInfo.ae.m_aeCnt = IS_COMMAND_NONE;
969        // awb
970        m_ctlInfo.awb.i_awbMode = AA_AWBMODE_OFF;
971        m_ctlInfo.awb.m_awbCnt = IS_COMMAND_NONE;
972    }
973}
974
975ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
976{
977    ALOGV("%s: ENTER", __FUNCTION__);
978    this->release();
979    ALOGV("%s: EXIT", __FUNCTION__);
980}
981
982void ExynosCameraHWInterface2::release()
983{
984    int i, res;
985    CAM_LOGD("%s: ENTER", __func__);
986
987    if (m_streamThreads[1] != NULL) {
988        m_streamThreads[1]->release();
989        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
990    }
991
992    if (m_streamThreads[0] != NULL) {
993        m_streamThreads[0]->release();
994        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
995    }
996
997    if (m_sensorThread != NULL) {
998        m_sensorThread->release();
999    }
1000
1001    if (m_mainThread != NULL) {
1002        m_mainThread->release();
1003    }
1004
1005    if (m_exynosPictureCSC)
1006        csc_deinit(m_exynosPictureCSC);
1007    m_exynosPictureCSC = NULL;
1008
1009    if (m_exynosVideoCSC)
1010        csc_deinit(m_exynosVideoCSC);
1011    m_exynosVideoCSC = NULL;
1012
1013    if (m_streamThreads[1] != NULL) {
1014        while (!m_streamThreads[1]->IsTerminated())
1015        {
1016            CAM_LOGD("Waiting for stream thread 1 to terminate");
1017            usleep(100000);
1018        }
1019        m_streamThreads[1] = NULL;
1020    }
1021
1022    if (m_streamThreads[0] != NULL) {
1023        while (!m_streamThreads[0]->IsTerminated())
1024        {
1025            CAM_LOGD("Waiting for stream thread 0 to terminate");
1026            usleep(100000);
1027        }
1028        m_streamThreads[0] = NULL;
1029    }
1030
1031    if (m_sensorThread != NULL) {
1032        while (!m_sensorThread->IsTerminated())
1033        {
1034            CAM_LOGD("Waiting for sensor thread to terminate");
1035            usleep(100000);
1036        }
1037        m_sensorThread = NULL;
1038    }
1039
1040    if (m_mainThread != NULL) {
1041        while (!m_mainThread->IsTerminated())
1042        {
1043            CAM_LOGD("Waiting for main thread to terminate");
1044            usleep(100000);
1045        }
1046        m_mainThread = NULL;
1047    }
1048
1049    if (m_requestManager != NULL) {
1050        delete m_requestManager;
1051        m_requestManager = NULL;
1052    }
1053
1054    if (m_BayerManager != NULL) {
1055        delete m_BayerManager;
1056        m_BayerManager = NULL;
1057    }
1058    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
1059        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1060
1061    for(i = 0; i < m_camera_info.capture.buffers; i++)
1062        freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1063
1064    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
1065    res = exynos_v4l2_close(m_camera_info.sensor.fd);
1066    if (res != NO_ERROR ) {
1067        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1068    }
1069
1070    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
1071    res = exynos_v4l2_close(m_camera_info.isp.fd);
1072    if (res != NO_ERROR ) {
1073        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1074    }
1075
1076    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
1077    res = exynos_v4l2_close(m_camera_info.capture.fd);
1078    if (res != NO_ERROR ) {
1079        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1080    }
1081
1082    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
1083    res = exynos_v4l2_close(m_camera_info.scp.fd);
1084    if (res != NO_ERROR ) {
1085        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1086    }
1087    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
1088    deleteIonClient(m_ionCameraClient);
1089
1090    ALOGV("%s: EXIT", __func__);
1091}
1092
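/* Opens the sensor, ISP, capture (SCC) and preview (SCP) video nodes,
 * allocates the bayer buffers shared between sensor and ISP, and starts
 * streaming on the sensor node. */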
1093int ExynosCameraHWInterface2::InitializeISPChain()
1094{
1095    char node_name[30];
1096    int fd = 0;
1097    int i;
1098    int ret = 0;
1099
1100    /* Open Sensor */
1101    memset(&node_name, 0x00, sizeof(char[30]));
1102    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1103    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1104
1105    if (fd < 0) {
1106        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1107    }
1108    else {
1109        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1110    }
1111    m_camera_info.sensor.fd = fd;
1112
1113    /* Open ISP */
1114    memset(&node_name, 0x00, sizeof(char[30]));
1115    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1116    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1117
1118    if (fd < 0) {
1119        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1120    }
1121    else {
1122        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1123    }
1124    m_camera_info.isp.fd = fd;
1125
1126    /* Open ScalerC */
1127    memset(&node_name, 0x00, sizeof(char[30]));
1128    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1129    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1130
1131    if (fd < 0) {
1132        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1133    }
1134    else {
1135        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1136    }
1137    m_camera_info.capture.fd = fd;
1138
1139    /* Open ScalerP */
1140    memset(&node_name, 0x00, sizeof(char[30]));
1141    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1142    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1143    if (fd < 0) {
1144        ALOGE("ERR(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1145    }
1146    else {
1147        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1148    }
1149    m_camera_info.scp.fd = fd;
1150
1151    if(m_cameraId == 0)
1152        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1153    else
1154        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1155
1156    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1157    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1158    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1159
1160    m_camera_info.dummy_shot.dis_bypass = 1;
1161    m_camera_info.dummy_shot.dnr_bypass = 1;
1162    m_camera_info.dummy_shot.fd_bypass = 1;
1163
1164    /*sensor setting*/
1165    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1166    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1167    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1168
1169    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1170    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1171
1172    /*request setting*/
1173    m_camera_info.dummy_shot.request_sensor = 1;
1174    m_camera_info.dummy_shot.request_scc = 0;
1175    m_camera_info.dummy_shot.request_scp = 0;
1176    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1177
1178    m_camera_info.sensor.width = m_camera2->getSensorRawW();
1179    m_camera_info.sensor.height = m_camera2->getSensorRawH();
1180
1181    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1182    m_camera_info.sensor.planes = 2;
1183    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1184    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1185    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1186
1187    for(i = 0; i < m_camera_info.sensor.buffers; i++){
1188        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1189        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1190        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: driver uses 8*1024; should use a predefined value
1191        allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
1192    }
1193
1194    m_camera_info.isp.width = m_camera_info.sensor.width;
1195    m_camera_info.isp.height = m_camera_info.sensor.height;
1196    m_camera_info.isp.format = m_camera_info.sensor.format;
1197    m_camera_info.isp.planes = m_camera_info.sensor.planes;
1198    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1199    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1200    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1201
1202    for(i = 0; i < m_camera_info.isp.buffers; i++){
1203        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1204        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1205        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1206        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1207        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1208        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1209        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
1210    }
1211
1212    /* init ISP */
1213    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1214    if (ret < 0) {
1215        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
1216        return -1;
1217    }
1218    cam_int_s_fmt(&(m_camera_info.isp));
1219    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1220    cam_int_reqbufs(&(m_camera_info.isp));
1221    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1222    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1223
1224    /* init Sensor */
1225    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1226    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1227    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1228        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1229    }
1230    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1231    cam_int_reqbufs(&(m_camera_info.sensor));
1232    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1233    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1234        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1235        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1236        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1237        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1238                sizeof(struct camera2_shot_ext));
1239    }
1240
1241    for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1242        cam_int_qbuf(&(m_camera_info.sensor), i);
1243
1244    for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1245        m_requestManager->pushSensorQ(i);
1246
1247    ALOGV("== stream_on :: sensor");
1248    cam_int_streamon(&(m_camera_info.sensor));
1249    m_camera_info.sensor.status = true;
1250
1251    /* init Capture */
1252    m_camera_info.capture.width = m_camera2->getSensorW();
1253    m_camera_info.capture.height = m_camera2->getSensorH();
1254    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1255#ifdef ENABLE_FRAME_SYNC
1256    m_camera_info.capture.planes = 2;
1257#else
1258    m_camera_info.capture.planes = 1;
1259#endif
1260    m_camera_info.capture.buffers = 8;
1261    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1262    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1263
1264    m_camera_info.capture.status = false;
1265
1266    return true;
1267}
1268
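/* Creates (or re-activates) stream thread 1, which services the SCC capture
 * node: buffers are allocated on first use, queued to the driver, and the
 * node is set streaming. */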
1269void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
1270{
1271    ALOGV("(%s)", __FUNCTION__);
1272    StreamThread *AllocatedStream;
1273    stream_parameters_t newParameters;
1274    uint32_t format_actual;
1275    int numRegisteredStream = 0;
1276
1277
1278    if (!threadExists) {
1279        m_streamThreads[1]  = new StreamThread(this, 1);
1280    }
1281    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1282    if (!threadExists)
1283        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1284    AllocatedStream->m_index        = 1;
1285
1286    format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1287
1288    newParameters.width             = m_camera2->getSensorW();
1289    newParameters.height            = m_camera2->getSensorH();
1290    newParameters.format            = format_actual;
1291    newParameters.streamOps         = NULL;
1292    newParameters.numHwBuffers      = 8;
1293#ifdef ENABLE_FRAME_SYNC
1294    newParameters.planes            = 2;
1295#else
1296    newParameters.planes            = 1;
1297#endif
1298
1299    newParameters.numSvcBufsInHal   = 0;
1300
1301    newParameters.node              = &m_camera_info.capture;
1302
1303    AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
1304    AllocatedStream->m_numRegisteredStream = numRegisteredStream;
1305    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1306
1307    for (int i = 0; i < m_camera_info.capture.buffers; i++){
1308        if (!threadExists) {
1309            initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
1310            m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1311#ifdef ENABLE_FRAME_SYNC
1312            m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK: driver uses 4*1024; should use a predefined value
1313            allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
1314#else
1315            allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1316#endif
1317        }
1318    }
1319    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
1320    m_camera_info.capture.buffers = 8;
1321    cam_int_s_fmt(newParameters.node);
1322    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1323    cam_int_reqbufs(newParameters.node);
1324    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1325
1326    for (int i = 0; i < newParameters.node->buffers; i++) {
1327        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1328        cam_int_qbuf(newParameters.node, i);
1329        newParameters.svcBufStatus[i] = ON_DRIVER;
1330    }
1331
1332    ALOGV("== stream_on :: capture");
1333    if (cam_int_streamon(newParameters.node) < 0) {
1334        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1335    } else {
1336        m_camera_info.capture.status = true;
1337    }
1338
1339    AllocatedStream->setParameter(&newParameters);
1340    AllocatedStream->m_activated    = true;
1341    AllocatedStream->m_isBufferInit = true;
1342}
1343
1344void ExynosCameraHWInterface2::StartISP()
1345{
1346    ALOGV("== stream_on :: isp");
1347    cam_int_streamon(&(m_camera_info.isp));
1348    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1349}
1350
1351int ExynosCameraHWInterface2::getCameraId() const
1352{
1353    return m_cameraId;
1354}
1355
1356int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1357{
1358    ALOGV("DEBUG(%s):", __FUNCTION__);
1359    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1360            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1361        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1362        return 0;
1363    }
1364    else {
1365        ALOGE("ERR(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
1366        return 1;
1367    }
1368}
1369
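/* Called by the framework when the request queue becomes non-empty: restarts
 * the sensor/ISP/SCC pipeline if it is not running and signals the main
 * thread to start dequeuing requests. */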
1370int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1371{
1372    int i = 0;
1373
1374    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1375    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1376        ALOGE("ERR(%s):queue ops NULL. ignoring request", __FUNCTION__);
1377        return 0;
1378    }
1379    m_isRequestQueueNull = false;
1380    if (m_requestManager->GetNumEntries() == 0)
1381        m_requestManager->SetInitialSkip(5);
1382
1383    if (m_isIspStarted == false) {
1384        /* isp */
1385        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1386        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1387        cam_int_s_fmt(&(m_camera_info.isp));
1388        cam_int_reqbufs(&(m_camera_info.isp));
1389
1390        /* sensor */
1391        if (m_camera_info.sensor.status == false) {
1392            cam_int_s_fmt(&(m_camera_info.sensor));
1393            cam_int_reqbufs(&(m_camera_info.sensor));
1394
1395            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1396                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1397                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1398                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1399                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1400                        sizeof(struct camera2_shot_ext));
1401            }
1402            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1403                cam_int_qbuf(&(m_camera_info.sensor), i);
1404
1405            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1406                m_requestManager->pushSensorQ(i);
1407            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
1408            cam_int_streamon(&(m_camera_info.sensor));
1409            m_camera_info.sensor.status = true;
1410        }
1411    }
1412    if (!(m_streamThreads[1].get())) {
1413        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
1414        StartSCCThread(false);
1415    } else {
1416        if (m_streamThreads[1]->m_activated ==  false) {
1417            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1418            StartSCCThread(true);
1419        } else {
1420            if (m_camera_info.capture.status == false) {
1421                m_camera_info.capture.buffers = 8;
1422                cam_int_s_fmt(&(m_camera_info.capture));
1423                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1424                cam_int_reqbufs(&(m_camera_info.capture));
1425                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1426
1427                for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1428                    ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1429                    cam_int_qbuf(&(m_camera_info.capture), i);
1430                }
1431                ALOGV("== stream_on :: capture");
1432                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1433                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1434                } else {
1435                    m_camera_info.capture.status = true;
1436                }
1437            }
1438        }
1439    }
1440    if (m_isIspStarted == false) {
1441        StartISP();
1442        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1443        m_requestManager->SetInitialSkip(5);
1444        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1445        m_isIspStarted = true;
1446    }
1447    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1448    return 0;
1449}
1450
1451int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1452{
1453    ALOGV("DEBUG(%s):", __FUNCTION__);
1454    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1455            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1456        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1457        return 0;
1458    }
1459    else {
1460        ALOGE("ERR(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1461        return 1;
1462    }
1463}
1464
1465int ExynosCameraHWInterface2::getInProgressCount()
1466{
1467    int inProgressCount = m_requestManager->GetNumEntries();
1468    ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
1469    return inProgressCount;
1470}
1471
1472int ExynosCameraHWInterface2::flushCapturesInProgress()
1473{
1474    return 0;
1475}
1476
1477int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1478{
1479    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1480
1481    if (request == NULL) return BAD_VALUE;
1482    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1483        return BAD_VALUE;
1484    }
1485    status_t res;
1486    // Pass 1, calculate size and allocate
1487    res = m_camera2->constructDefaultRequest(request_template,
1488            request,
1489            true);
1490    if (res != OK) {
1491        return res;
1492    }
1493    // Pass 2, build request
1494    res = m_camera2->constructDefaultRequest(request_template,
1495            request,
1496            false);
1497    if (res != OK) {
1498        ALOGE("Unable to populate new request for template %d",
1499                request_template);
1500    }
1501
1502    return res;
1503}
1504
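/* Framework stream allocation. The first implementation-defined stream becomes
 * the direct SCP preview stream (stream thread 0); a request made while that
 * stream is active is treated as the record substream. */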
1505int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1506                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1507{
1508    ALOGV("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1509    bool useDirectOutput = false;
1510    StreamThread *AllocatedStream;
1511    stream_parameters_t newParameters;
1512    substream_parameters_t *subParameters;
1513    StreamThread *parentStream;
1514    status_t res;
1515    int allocCase = 0;
1516
1517    if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)  &&
1518            m_camera2->isSupportedResolution(width, height)) {
1519        if (!(m_streamThreads[0].get())) {
1520            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1521            allocCase = 0;
1522        }
1523        else {
1524            if ((m_streamThreads[0].get())->m_activated == true) {
1525                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1526                allocCase = 1;
1527            }
1528            else {
1529                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1530                allocCase = 2;
1531            }
1532        }
1533
1534        // TODO : instead of this hard-coded size list, calculate the aspect ratio and select based on it.
1535        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1536                    || (width == 720 && height == 480) || (width == 1440 && height == 960)
1537                    || (width == 1344 && height == 896)) {
1538            m_wideAspect = true;
1539        } else {
1540            m_wideAspect = false;
1541        }
1542        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1543
1544        if (allocCase == 0 || allocCase == 2) {
1545            *stream_id = STREAM_ID_PREVIEW;
1546
1547            m_streamThreads[0]  = new StreamThread(this, *stream_id);
1548
1549            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1550            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1551
1552            *format_actual                      = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1553            *usage                              = GRALLOC_USAGE_SW_WRITE_OFTEN;
1554            *max_buffers                        = 6;
1555
1556            newParameters.width                 = width;
1557            newParameters.height                = height;
1558            newParameters.format                = *format_actual;
1559            newParameters.streamOps             = stream_ops;
1560            newParameters.usage                 = *usage;
1561            newParameters.numHwBuffers          = 8;
1562            newParameters.numOwnSvcBuffers      = *max_buffers;
1563            newParameters.planes                = NUM_PLANES(*format_actual);
1564            newParameters.metaPlanes            = 1;
1565            newParameters.numSvcBufsInHal       = 0;
1566            newParameters.minUndequedBuffer     = 4;
1567
1568            newParameters.node                  = &m_camera_info.scp;
1569            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1570            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1571
1572            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1573            AllocatedStream->m_index            = 0;
1574            AllocatedStream->setParameter(&newParameters);
1575            AllocatedStream->m_activated = true;
1576            AllocatedStream->m_numRegisteredStream = 1;
1577            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1578            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1579            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1580            if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1581                AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1582            if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1583                AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1584            return 0;
1585        } else if (allocCase == 1) {
1586            *stream_id = STREAM_ID_RECORD;
1587
1588            subParameters = &m_subStreams[STREAM_ID_RECORD];
1589            memset(subParameters, 0, sizeof(substream_parameters_t));
1590
1591            parentStream = (StreamThread*)(m_streamThreads[0].get());
1592            if (!parentStream) {
1593                return 1;
1594            }
1595
1596            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1597            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1598            *max_buffers = 6;
1599
1600            subParameters->type         = SUBSTREAM_TYPE_RECORD;
1601            subParameters->width        = width;
1602            subParameters->height       = height;
1603            subParameters->format       = *format_actual;
1604            subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1605            subParameters->streamOps     = stream_ops;
1606            subParameters->usage         = *usage;
1607            subParameters->numOwnSvcBuffers = *max_buffers;
1608            subParameters->numSvcBufsInHal  = 0;
1609            subParameters->needBufferInit    = false;
1610            subParameters->minUndequedBuffer = 2;
1611
1612            res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1613            if (res != NO_ERROR) {
1614                ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1615                return 1;
1616            }
1617            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1618            ALOGV("(%s): Enabling Record", __FUNCTION__);
1619            return 0;
1620        }
1621    }
1622    else if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED|| format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)
1623            && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) {
1624
1625        if (!(m_streamThreads[1].get())) {
1626            ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
1627            useDirectOutput = true;
1628//            createThread = true;
1629        }
1630        else {
1631            ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1632//            useDirectOutput = true;
1633//            createThread = false;
1634        }
1635        if (useDirectOutput) {
1636            *stream_id = STREAM_ID_ZSL;
1637
1638            /*if (createThread)*/ {
1639                m_streamThreads[1]  = new StreamThread(this, *stream_id);
1640            }
1641            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1642            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1643
1647            *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1648            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1649            *max_buffers = 6;
1650
1651            newParameters.width                 = width;
1652            newParameters.height                = height;
1653            newParameters.format                = *format_actual;
1654            newParameters.streamOps             = stream_ops;
1655            newParameters.usage                 = *usage;
1656            newParameters.numHwBuffers          = 8;
1657            newParameters.numOwnSvcBuffers      = *max_buffers;
1658            newParameters.planes                = NUM_PLANES(*format_actual);
1659            newParameters.metaPlanes            = 1;
1660
1661            newParameters.numSvcBufsInHal       = 0;
1662            newParameters.minUndequedBuffer     = 4;
1663
1664            newParameters.node                  = &m_camera_info.capture;
1665            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1666            newParameters.node->memory          = V4L2_MEMORY_DMABUF;
1667
1668            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
1669            AllocatedStream->m_index            = 1;
1670            AllocatedStream->setParameter(&newParameters);
1671            AllocatedStream->m_activated = true;
1672            AllocatedStream->m_numRegisteredStream++;
1673            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1674            return 0;
1675        }
1676    }
1677    else if (format == HAL_PIXEL_FORMAT_BLOB
1678            && m_camera2->isSupportedJpegResolution(width, height)) {
1679        *stream_id = STREAM_ID_JPEG;
1680
1681        subParameters = &m_subStreams[*stream_id];
1682        memset(subParameters, 0, sizeof(substream_parameters_t));
1683
1684        if (!(m_streamThreads[1].get())) {
1685            ALOGV("DEBUG(%s): stream thread 1 does not exist", __FUNCTION__);
1686            StartSCCThread(false);
1687        }
1688        else if (m_streamThreads[1]->m_activated ==  false) {
1689            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1690            StartSCCThread(true);
1691        }
1692        parentStream = (StreamThread*)(m_streamThreads[1].get());
1693
1694        *format_actual = HAL_PIXEL_FORMAT_BLOB;
1695        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1696        *max_buffers = 4;
1697
1698        subParameters->type          = SUBSTREAM_TYPE_JPEG;
1699        subParameters->width         = width;
1700        subParameters->height        = height;
1701        subParameters->format        = *format_actual;
1702        subParameters->svcPlanes     = 1;
1703        subParameters->streamOps     = stream_ops;
1704        subParameters->usage         = *usage;
1705        subParameters->numOwnSvcBuffers = *max_buffers;
1706        subParameters->numSvcBufsInHal  = 0;
1707        subParameters->needBufferInit    = false;
1708        subParameters->minUndequedBuffer = 2;
1709
1710        res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1711        if (res != NO_ERROR) {
1712            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1713            return 1;
1714        }
1715        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1716        ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1717        return 0;
1718    }
1719    else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1720        *stream_id = STREAM_ID_PRVCB;
1721
1722        subParameters = &m_subStreams[STREAM_ID_PRVCB];
1723        memset(subParameters, 0, sizeof(substream_parameters_t));
1724
1725        parentStream = (StreamThread*)(m_streamThreads[0].get());
1726        if (!parentStream) {
1727            return 1;
1728        }
1729
1730        *format_actual = format;
1731        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1732        *max_buffers = 6;
1733
1734        subParameters->type         = SUBSTREAM_TYPE_PRVCB;
1735        subParameters->width        = width;
1736        subParameters->height       = height;
1737        subParameters->format       = *format_actual;
1738        subParameters->svcPlanes     = NUM_PLANES(*format_actual);
1739        subParameters->streamOps     = stream_ops;
1740        subParameters->usage         = *usage;
1741        subParameters->numOwnSvcBuffers = *max_buffers;
1742        subParameters->numSvcBufsInHal  = 0;
1743        subParameters->needBufferInit    = false;
1744        subParameters->minUndequedBuffer = 2;
1745
1746        if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1747            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1748            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1749        }
1750        else {
1751            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1752            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
1753        }
1754
1755        res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
1756        if (res != NO_ERROR) {
1757            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1758            return 1;
1759        }
1760        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1761        ALOGV("(%s): Enabling previewcb", __FUNCTION__);
1762        return 0;
1763    }
1764    ALOGE("ERR(%s): unsupported pixel format (%x)", __FUNCTION__, format);
1765    return 1;
1766}
1767
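/*
 * registerStreamBuffers() records the gralloc buffers handed over by the
 * service. For substreams (JPEG/RECORD/PRVCB) it only locks each handle and
 * keeps the fds and virtual addresses. For direct streams (PREVIEW/ZSL) it
 * also configures the V4L2 node (s_input/s_fmt/reqbufs), maps every plane via
 * ion_map, queues the first numHwBuffers buffers (with an extra metadata plane
 * when ENABLE_FRAME_SYNC is set) and finally starts streaming on the node.
 */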
1768int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1769        int num_buffers, buffer_handle_t *registeringBuffers)
1770{
1771    int                     i,j;
1772    void                    *virtAddr[3];
1773    int                     plane_index = 0;
1774    StreamThread *          targetStream;
1775    stream_parameters_t     *targetStreamParms;
1776    node_info_t             *currentNode;
1777
1778    struct v4l2_buffer v4l2_buf;
1779    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1780
1781    CAM_LOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1782        stream_id, num_buffers, (uint32_t)registeringBuffers);
1783
1784    if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
1785        targetStream = m_streamThreads[0].get();
1786        targetStreamParms = &(m_streamThreads[0]->m_parameters);
1787    }
1788    else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
1789        substream_parameters_t  *targetParms;
1790        targetParms = &m_subStreams[stream_id];
1791
1792        targetParms->numSvcBuffers = num_buffers;
1793
1794        for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
1795            ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
1796                stream_id, i, (uint32_t)(registeringBuffers[i]));
1797            if (m_grallocHal) {
1798                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1799                       targetParms->usage, 0, 0,
1800                       targetParms->width, targetParms->height, virtAddr) != 0) {
1801                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1802                }
1803                else {
1804                    ExynosBuffer currentBuf;
1805                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1806                    if (targetParms->svcPlanes == 1) {
1807                        currentBuf.fd.extFd[0] = priv_handle->fd;
1808                        currentBuf.size.extS[0] = priv_handle->size;
1809                        currentBuf.size.extS[1] = 0;
1810                        currentBuf.size.extS[2] = 0;
1811                    } else if (targetParms->svcPlanes == 2) {
1812                        currentBuf.fd.extFd[0] = priv_handle->fd;
1813                        currentBuf.fd.extFd[1] = priv_handle->fd1;
1814
1815                    } else if (targetParms->svcPlanes == 3) {
1816                        currentBuf.fd.extFd[0] = priv_handle->fd;
1817                        currentBuf.fd.extFd[1] = priv_handle->fd1;
1818                        currentBuf.fd.extFd[2] = priv_handle->fd2;
1819                    }
1820                    for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
1821                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1822                        CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
1823                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
1824                             (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
1825                    }
1826                    targetParms->svcBufStatus[i]  = ON_SERVICE;
1827                    targetParms->svcBuffers[i]    = currentBuf;
1828                    targetParms->svcBufHandle[i]  = registeringBuffers[i];
1829                }
1830            }
1831        }
1832        targetParms->needBufferInit = true;
1833        return 0;
1834    }
1835    else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
1836        targetStream = m_streamThreads[1].get();
1837        targetStreamParms = &(m_streamThreads[1]->m_parameters);
1838    }
1839    else {
1840        ALOGE("ERR(%s) unregistered stream id (%d)", __FUNCTION__, stream_id);
1841        return 1;
1842    }
1843
1844    if (targetStream->streamType == STREAM_TYPE_DIRECT) {
1845        if (num_buffers < targetStreamParms->numHwBuffers) {
1846            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
1847                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
1848            return 1;
1849        }
1850    }
1851    CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) planes(%d)",
1852            __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
1853            targetStreamParms->height, targetStreamParms->planes);
1854    targetStreamParms->numSvcBuffers = num_buffers;
1855    currentNode = targetStreamParms->node;
1856    currentNode->width      = targetStreamParms->width;
1857    currentNode->height     = targetStreamParms->height;
1858    currentNode->format     = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
1859    currentNode->planes     = targetStreamParms->planes;
1860    currentNode->buffers    = targetStreamParms->numHwBuffers;
1861    cam_int_s_input(currentNode, m_camera_info.sensor_id);
1862    cam_int_s_fmt(currentNode);
1863    cam_int_reqbufs(currentNode);
1864    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
1865        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1866            i, (uint32_t)(registeringBuffers[i]));
1867                v4l2_buf.m.planes   = planes;
1868                v4l2_buf.type       = currentNode->type;
1869                v4l2_buf.memory     = currentNode->memory;
1870                v4l2_buf.index      = i;
1871                v4l2_buf.length     = currentNode->planes;
1872
1873                ExynosBuffer currentBuf;
1874                ExynosBuffer metaBuf;
1875                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1876
1877                m_getAlignedYUVSize(currentNode->format,
1878                    currentNode->width, currentNode->height, &currentBuf);
1879
1880                ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
1881                if (currentNode->planes == 1) {
1882                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1883                    currentBuf.fd.extFd[0] = priv_handle->fd;
1884                    currentBuf.size.extS[0] = priv_handle->size;
1885                    currentBuf.size.extS[1] = 0;
1886                    currentBuf.size.extS[2] = 0;
1887                } else if (currentNode->planes == 2) {
1888                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1889                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
1890                    currentBuf.fd.extFd[0] = priv_handle->fd;
1891                    currentBuf.fd.extFd[1] = priv_handle->fd1;
1892
1893                } else if (currentNode->planes == 3) {
1894                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1895                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
1896                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
1897                    currentBuf.fd.extFd[0] = priv_handle->fd;
1898                    currentBuf.fd.extFd[2] = priv_handle->fd1;
1899                    currentBuf.fd.extFd[1] = priv_handle->fd2;
1900                }
1901
1902                for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
1903                    currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
1904                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
1905                    CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
1906                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
1907                         (unsigned int)currentBuf.virt.extP[plane_index],
1908                         v4l2_buf.m.planes[plane_index].length);
1909                }
1910
1911                if (i < currentNode->buffers) {
1912
1913
1914#ifdef ENABLE_FRAME_SYNC
1915                    /* add plane for metadata*/
1916                    metaBuf.size.extS[0] = 4*1024;
1917                    allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
1918
1919                    v4l2_buf.length += targetStreamParms->metaPlanes;
1920                    v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
1921                    v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
1922
1923                    ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1924#endif
1925                    if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1926                        ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
1927                            __FUNCTION__, stream_id, currentNode->fd);
1928                    }
1929                    ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
1930                            __FUNCTION__, stream_id, currentNode->fd);
1931                    targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1932                }
1933                else {
1934                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1935                }
1936
1937                targetStreamParms->svcBuffers[i]       = currentBuf;
1938                targetStreamParms->metaBuffers[i] = metaBuf;
1939                targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
1940            }
1941
1942    ALOGV("DEBUG(%s): calling  streamon stream id = %d", __FUNCTION__, stream_id);
1943    cam_int_streamon(targetStreamParms->node);
1944    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
1945    currentNode->status = true;
1946    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
1947
1948    return 0;
1949}
1950
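/*
 * releaseStream() detaches a substream or unregisters a direct stream.
 * When the main SCP (preview) stream or the last user of stream thread 1
 * goes away, the corresponding stream thread is drained and released, the
 * sensor thread is stopped, and the capture node is streamed off.
 */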
1951int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
1952{
1953    StreamThread *targetStream;
1954    status_t res = NO_ERROR;
1955    ALOGV("DEBUG(%s):stream id %d", __FUNCTION__, stream_id);
1956    bool releasingScpMain = false;
1957
1958    if (stream_id == STREAM_ID_PREVIEW) {
1959        targetStream = (StreamThread*)(m_streamThreads[0].get());
1960        targetStream->m_numRegisteredStream--;
1961        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
1962        releasingScpMain = true;
1963        for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
1964            for (int j = 0; j < targetStream->m_parameters.planes; j++) {
1965                ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
1966                                targetStream->m_parameters.svcBuffers[i].size.extS[j]);
1967                CAM_LOGD("DBG(%s) unmap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
1968                              targetStream->m_parameters.svcBuffers[i].fd.extFd[j], targetStream->m_parameters.svcBuffers[i].virt.extP[j]);
1969            }
1970        }
1971    } else if (stream_id == STREAM_ID_JPEG) {
1972        targetStream = (StreamThread*)(m_streamThreads[1].get());
1973        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
1974        if (m_resizeBuf.size.s != 0) {
1975            freeCameraMemory(&m_resizeBuf, 1);
1976        }
1977        if (targetStream)
1978            res = targetStream->detachSubStream(stream_id);
1979        if (res != NO_ERROR) {
1980            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
1981            return 1;
1982        }
1983        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
1984        return 0;
1985    } else if (stream_id == STREAM_ID_RECORD) {
1986        targetStream = (StreamThread*)(m_streamThreads[0].get());
1987        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
1988        if (targetStream)
1989            res = targetStream->detachSubStream(stream_id);
1990        else
1991            return 0;
1992    } else if (stream_id == STREAM_ID_PRVCB) {
1993        targetStream = (StreamThread*)(m_streamThreads[0].get());
1994        if (m_previewCbBuf.size.s != 0) {
1995            freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
1996        }
1997        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
1998        if (targetStream)
1999            res = targetStream->detachSubStream(stream_id);
2000        else
2001            return 0;
2002    } else if (stream_id == STREAM_ID_ZSL) {
2003        targetStream = (StreamThread*)(m_streamThreads[1].get());
2004        targetStream->m_numRegisteredStream--;
2005        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2006    } else {
2007        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2008        return 1;
2009    }
2010
2011    if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2012        ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2013        targetStream = (StreamThread*)(m_streamThreads[1].get());
2014        targetStream->m_releasing = true;
2015        do {
2016            CAM_LOGD("stream thread release %d", __LINE__);
2017            targetStream->release();
2018            usleep(33000);
2019        } while (targetStream->m_releasing);
2020    }
2021
2022    if (releasingScpMain || (m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2023        ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2024        targetStream = (StreamThread*)(m_streamThreads[0].get());
2025        targetStream->m_releasing = true;
2026        do {
2027            ALOGD("stream thread release %d", __LINE__);
2028            targetStream->release();
2029            usleep(33000);
2030        } while (targetStream->m_releasing);
2031        targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2032
2033        if (targetStream != NULL) {
2034            while (!targetStream->IsTerminated())
2035            {
2036                ALOGD("Waiting for stream thread to terminate");
2037                usleep(10000);
2038            }
2039            m_streamThreads[0] = NULL;
2040        }
2041
2042        if (m_sensorThread != NULL) {
2043            m_sensorThread->release();
2044            while (!m_sensorThread->IsTerminated()){
2045                ALOGD("Waiting for sensor thread to terminate");
2046                usleep(10000);
2047            }
2048        }
2049        else {
2050            ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
2051        }
2052
2053        if (m_camera_info.capture.status == true) {
2054            if (cam_int_streamoff(&(m_camera_info.capture)) < 0) {
2055                ALOGE("ERR(%s): capture stream off fail", __FUNCTION__);
2056            } else {
2057                m_camera_info.capture.status = false;
2058            }
2059            ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
2060            if (m_streamThreads[1]->streamType == STREAM_TYPE_INDIRECT) {
2061                m_camera_info.capture.buffers = 0;
2062                ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
2063                cam_int_reqbufs(&(m_camera_info.capture));
2064                ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
2065            }
2066        }
2067        m_isIspStarted = false;
2068    }
2069    ALOGV("(%s): END", __FUNCTION__);
2070    return 0;
2071}
2072
2073int ExynosCameraHWInterface2::allocateReprocessStream(
2074    uint32_t width, uint32_t height, uint32_t format,
2075    const camera2_stream_in_ops_t *reprocess_stream_ops,
2076    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
2077{
2078    ALOGV("DEBUG(%s):", __FUNCTION__);
2079    return 0;
2080}
2081
2082int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2083            uint32_t output_stream_id,
2084            const camera2_stream_in_ops_t *reprocess_stream_ops,
2085            // outputs
2086            uint32_t *stream_id)
2087{
2088    ALOGV("DEBUG(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2089    *stream_id = STREAM_ID_JPEG_REPROCESS;
2090
2091    m_reprocessStreamId = *stream_id;
2092    m_reprocessOps = reprocess_stream_ops;
2093    m_reprocessOutputStreamId = output_stream_id;
2094    return 0;
2095}
2096
2097int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2098{
2099    ALOGV("DEBUG(%s):", __FUNCTION__);
2100    if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2101        m_reprocessStreamId = 0;
2102        m_reprocessOps = NULL;
2103        m_reprocessOutputStreamId = 0;
2104        return 0;
2105    }
2106    return 1;
2107}
2108
2109int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2110{
2111    ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2112
2113    switch (trigger_id) {
2114    case CAMERA2_TRIGGER_AUTOFOCUS:
2115        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2116        OnAfTriggerStart(ext1);
2117        break;
2118
2119    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2120        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2121        OnAfCancel(ext1);
2122        break;
2123    case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2124        ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2125        OnPrecaptureMeteringTriggerStart(ext1);
2126        break;
2127    default:
2128        break;
2129    }
2130    return 0;
2131}
2132
2133int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2134{
2135    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2136    m_notifyCb = notify_cb;
2137    m_callbackCookie = user;
2138    return 0;
2139}
2140
2141int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2142{
2143    ALOGV("DEBUG(%s):", __FUNCTION__);
2144    return 0;
2145}
2146
2147int ExynosCameraHWInterface2::dump(int fd)
2148{
2149    ALOGV("DEBUG(%s):", __FUNCTION__);
2150    return 0;
2151}
2152
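/*
 * m_getAlignedYUVSize() fills in the per-plane buffer sizes required by the
 * hardware for a given V4L2 color format, applying the alignment rules used
 * elsewhere in the HAL. For example, NV12M at 1920x1080 yields
 * extS[0] = ALIGN(1920,16) * ALIGN(1080,16) = 1920 * 1088 bytes and
 * extS[1] = ALIGN(extS[0] / 2, 256) for the interleaved chroma plane.
 */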
2153void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2154{
2155    switch (colorFormat) {
2156    // 1p
2157    case V4L2_PIX_FMT_RGB565 :
2158    case V4L2_PIX_FMT_YUYV :
2159    case V4L2_PIX_FMT_UYVY :
2160    case V4L2_PIX_FMT_VYUY :
2161    case V4L2_PIX_FMT_YVYU :
2162        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2163        buf->size.extS[1] = 0;
2164        buf->size.extS[2] = 0;
2165        break;
2166    // 2p
2167    case V4L2_PIX_FMT_NV12 :
2168    case V4L2_PIX_FMT_NV12T :
2169    case V4L2_PIX_FMT_NV21 :
2170        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2171        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2172        buf->size.extS[2] = 0;
2173        break;
2174    case V4L2_PIX_FMT_NV12M :
2175    case V4L2_PIX_FMT_NV12MT_16X16 :
2176    case V4L2_PIX_FMT_NV21M:
2177        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2178        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2179        buf->size.extS[2] = 0;
2180        break;
2181    case V4L2_PIX_FMT_NV16 :
2182    case V4L2_PIX_FMT_NV61 :
2183        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2184        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2185        buf->size.extS[2] = 0;
2186        break;
2187     // 3p
2188    case V4L2_PIX_FMT_YUV420 :
2189    case V4L2_PIX_FMT_YVU420 :
2190        buf->size.extS[0] = (w * h);
2191        buf->size.extS[1] = (w * h) >> 2;
2192        buf->size.extS[2] = (w * h) >> 2;
2193        break;
2194    case V4L2_PIX_FMT_YUV420M:
2195    case V4L2_PIX_FMT_YVU420M :
2196    case V4L2_PIX_FMT_YUV422P :
2197        buf->size.extS[0] = ALIGN(w,  16) * ALIGN(h,  16);
2198        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2199        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2200        break;
2201    default:
2202        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2203        return;
2205    }
2206}
2207
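/*
 * m_getRatioSize() computes a center crop of the source that matches the
 * destination aspect ratio (shrinking either width or height), optionally
 * scaled down further by the zoom level, and then rounds the crop size and
 * offsets so that both stay even as the hardware requires.
 */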
2208bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2209                                             int  dst_w,  int   dst_h,
2210                                             int *crop_x, int *crop_y,
2211                                             int *crop_w, int *crop_h,
2212                                             int zoom)
2213{
2214    *crop_w = src_w;
2215    *crop_h = src_h;
2216
2217    if (   src_w != dst_w
2218        || src_h != dst_h) {
2219        float src_ratio = 1.0f;
2220        float dst_ratio = 1.0f;
2221
2222        // ex : 1024 / 768
2223        src_ratio = (float)src_w / (float)src_h;
2224
2225        // ex : 352  / 288
2226        dst_ratio = (float)dst_w / (float)dst_h;
2227
2228        if (dst_w * dst_h < src_w * src_h) {
2229            if (dst_ratio <= src_ratio) {
2230                // shrink w
2231                *crop_w = src_h * dst_ratio;
2232                *crop_h = src_h;
2233            } else {
2234                // shrink h
2235                *crop_w = src_w;
2236                *crop_h = src_w / dst_ratio;
2237            }
2238        } else {
2239            if (dst_ratio <= src_ratio) {
2240                // shrink w
2241                *crop_w = src_h * dst_ratio;
2242                *crop_h = src_h;
2243            } else {
2244                // shrink h
2245                *crop_w = src_w;
2246                *crop_h = src_w / dst_ratio;
2247            }
2248        }
2249    }
2250
2251    if (zoom != 0) {
2252        float zoomLevel = ((float)zoom + 10.0) / 10.0;
2253        *crop_w = (int)((float)*crop_w / zoomLevel);
2254        *crop_h = (int)((float)*crop_h / zoomLevel);
2255    }
2256
2257    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2258    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2259    if (w_align != 0) {
2260        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2261            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
2262            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2263        }
2264        else
2265            *crop_w -= w_align;
2266    }
2267
2268    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2269    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2270    if (h_align != 0) {
2271        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2272            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
2273            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2274        }
2275        else
2276            *crop_h -= h_align;
2277    }
2278
2279    *crop_x = (src_w - *crop_w) >> 1;
2280    *crop_y = (src_h - *crop_h) >> 1;
2281
2282    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2283        *crop_x -= 1;
2284
2285    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2286        *crop_y -= 1;
2287
2288    return true;
2289}
2290
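/*
 * BayerBufManager tracks a fixed ring of NUM_BAYER_BUFFERS raw buffers as
 * they cycle through BAYER_ON_HAL_EMPTY -> BAYER_ON_SENSOR ->
 * BAYER_ON_HAL_FILLED -> BAYER_ON_ISP -> back to BAYER_ON_HAL_EMPTY.
 * Separate enqueue/dequeue heads are kept for the sensor and ISP sides, and
 * the numOn* counters mirror how many buffers sit in each state.
 */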
2291BayerBufManager::BayerBufManager()
2292{
2293    ALOGV("DEBUG(%s): ", __FUNCTION__);
2294    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2295        entries[i].status = BAYER_ON_HAL_EMPTY;
2296        entries[i].reqFrameCnt = 0;
2297    }
2298    sensorEnqueueHead = 0;
2299    sensorDequeueHead = 0;
2300    ispEnqueueHead = 0;
2301    ispDequeueHead = 0;
2302    numOnSensor = 0;
2303    numOnIsp = 0;
2304    numOnHalFilled = 0;
2305    numOnHalEmpty = NUM_BAYER_BUFFERS;
2306}
2307
2308BayerBufManager::~BayerBufManager()
2309{
2310    ALOGV("%s", __FUNCTION__);
2311}
2312
2313int     BayerBufManager::GetIndexForSensorEnqueue()
2314{
2315    int ret = 0;
2316    if (numOnHalEmpty == 0)
2317        ret = -1;
2318    else
2319        ret = sensorEnqueueHead;
2320    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2321    return ret;
2322}
2323
2324int    BayerBufManager::MarkSensorEnqueue(int index)
2325{
2326    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2327
2328    // sanity check
2329    if (index != sensorEnqueueHead) {
2330        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2331        return -1;
2332    }
2333    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2334        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2335            index, entries[index].status, BAYER_ON_HAL_EMPTY);
2336        return -1;
2337    }
2338
2339    entries[index].status = BAYER_ON_SENSOR;
2340    entries[index].reqFrameCnt = 0;
2341    numOnHalEmpty--;
2342    numOnSensor++;
2343    sensorEnqueueHead = GetNextIndex(index);
2344    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2345        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2346    return 0;
2347}
2348
2349int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
2350{
2351    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2352
2353    if (entries[index].status != BAYER_ON_SENSOR) {
2354        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2355            index, entries[index].status, BAYER_ON_SENSOR);
2356        return -1;
2357    }
2358
2359    entries[index].status = BAYER_ON_HAL_FILLED;
2360    numOnHalFilled++;
2361    numOnSensor--;
2362
2363    return 0;
2364}
2365
2366int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2367{
2368    int ret = 0;
2369    if (numOnHalFilled == 0)
2370        ret = -1;
2371    else {
2372        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2373        ret = ispEnqueueHead;
2374    }
2375    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2376    return ret;
2377}
2378
2379int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2380{
2381    int ret = 0;
2382    if (numOnIsp == 0)
2383        ret = -1;
2384    else {
2385        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2386        ret = ispDequeueHead;
2387    }
2388    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2389    return ret;
2390}
2391
2392int    BayerBufManager::MarkIspEnqueue(int index)
2393{
2394    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2395
2396    // sanity check
2397    if (index != ispEnqueueHead) {
2398        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2399        return -1;
2400    }
2401    if (entries[index].status != BAYER_ON_HAL_FILLED) {
2402        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2403            index, entries[index].status, BAYER_ON_HAL_FILLED);
2404        return -1;
2405    }
2406
2407    entries[index].status = BAYER_ON_ISP;
2408    numOnHalFilled--;
2409    numOnIsp++;
2410    ispEnqueueHead = GetNextIndex(index);
2411    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2412        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2413    return 0;
2414}
2415
2416int    BayerBufManager::MarkIspDequeue(int index)
2417{
2418    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2419
2420    // sanity check
2421    if (index != ispDequeueHead) {
2422        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2423        return -1;
2424    }
2425    if (entries[index].status != BAYER_ON_ISP) {
2426        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2427            index, entries[index].status, BAYER_ON_ISP);
2428        return -1;
2429    }
2430
2431    entries[index].status = BAYER_ON_HAL_EMPTY;
2432    entries[index].reqFrameCnt = 0;
2433    numOnHalEmpty++;
2434    numOnIsp--;
2435    ispDequeueHead = GetNextIndex(index);
2436    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2437        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2438    return 0;
2439}
2440
2441int BayerBufManager::GetNumOnSensor()
2442{
2443    return numOnSensor;
2444}
2445
2446int BayerBufManager::GetNumOnHalFilled()
2447{
2448    return numOnHalFilled;
2449}
2450
2451int BayerBufManager::GetNumOnIsp()
2452{
2453    return numOnIsp;
2454}
2455
2456int     BayerBufManager::GetNextIndex(int index)
2457{
2458    index++;
2459    if (index >= NUM_BAYER_BUFFERS)
2460        index = 0;
2461
2462    return index;
2463}
2464
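/*
 * Main thread handler. SIGNAL_MAIN_REQ_Q_NOT_EMPTY dequeues requests from the
 * service request queue into the request manager and kicks the sensor thread;
 * SIGNAL_MAIN_STREAM_OUTPUT_DONE prepares the result metadata, frees the
 * finished request and enqueues the completed frame back to the service
 * through the frame queue ops.
 */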
2465void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2466{
2467    camera_metadata_t *currentRequest = NULL;
2468    camera_metadata_t *currentFrame = NULL;
2469    size_t numEntries = 0;
2470    size_t frameSize = 0;
2471    camera_metadata_t * preparedFrame = NULL;
2472    camera_metadata_t *deregisteredRequest = NULL;
2473    uint32_t currentSignal = self->GetProcessingSignal();
2474    MainThread *  selfThread      = ((MainThread*)self);
2475    int res = 0;
2476
2477    int ret;
2478
2479    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2480
2481    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2482        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2483
2484        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2485        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2486        return;
2487    }
2488
2489    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2490        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2491        if (m_requestManager->IsRequestQueueFull()==false) {
2492            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2493            if (NULL == currentRequest) {
2494                ALOGE("DEBUG(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
2495                m_isRequestQueueNull = true;
2496            }
2497            else {
2498                m_requestManager->RegisterRequest(currentRequest);
2499
2500                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2501                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2502                if (m_requestManager->IsRequestQueueFull()==false)
2503                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2504
2505                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2506            }
2507        }
2508        else {
2509            m_isRequestQueuePending = true;
2510        }
2511    }
2512
2513    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2514        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2515        /*while (1)*/ {
2516            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2517            if (ret == false)
2518                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2519
2520            m_requestManager->DeregisterRequest(&deregisteredRequest);
2521
2522            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2523            if (ret < 0)
2524                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2525
2526            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2527            if (ret < 0)
2528                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2529
2530            if (currentFrame==NULL) {
2531                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2532            }
2533            else {
2534                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2535            }
2536            res = append_camera_metadata(currentFrame, preparedFrame);
2537            if (res==0) {
2538                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2539                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2540            }
2541            else {
2542                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2543            }
2544        }
2545        if (!m_isRequestQueueNull) {
2546            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2547        }
2548
2549        if (getInProgressCount()>0) {
2550            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2551            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2552        }
2553    }
2554    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2555    return;
2556}
2557
2558void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
2559{
2560    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2561    /* will add */
2562    return;
2563}
2564
2565
2566void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2567{
2568    ALOGD("####  common Section");
2569    ALOGD("####                 magic(%x) ",
2570        shot_ext->shot.magicNumber);
2571    ALOGD("####  ctl Section");
2572    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2573        shot_ext->shot.ctl.request.metadataMode,
2574        shot_ext->shot.ctl.lens.aperture,
2575        shot_ext->shot.ctl.sensor.exposureTime,
2576        shot_ext->shot.ctl.sensor.frameDuration,
2577        shot_ext->shot.ctl.sensor.sensitivity,
2578        shot_ext->shot.ctl.aa.awbMode);
2579
2580    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2581        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2582        shot_ext->shot.ctl.request.outputStreams[0]);
2583
2584    ALOGD("####  DM Section");
2585    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2586        shot_ext->shot.dm.request.metadataMode,
2587        shot_ext->shot.dm.lens.aperture,
2588        shot_ext->shot.dm.sensor.exposureTime,
2589        shot_ext->shot.dm.sensor.frameDuration,
2590        shot_ext->shot.dm.sensor.sensitivity,
2591        shot_ext->shot.dm.sensor.timeStamp,
2592        shot_ext->shot.dm.aa.awbMode,
2593        shot_ext->shot.dm.request.frameCount );
2594}
2595
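/*
 * m_preCaptureSetter() drives the flash pre-capture state machine for the
 * shot being submitted: it programs aeflashMode for the current state, locks
 * AWB while waiting for the flash decision, suppresses SCC/SCP output while
 * the flash stabilizes, and re-enables them for the actual capture.
 */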
2596void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2597{
2598    // Flash
2599    switch (m_ctlInfo.flash.m_flashCnt) {
2600    case IS_FLASH_STATE_ON:
2601        CAM_LOGV("(%s): [Flash] Flash ON for Capture", __FUNCTION__);
2602        if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2603            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2604            m_ctlInfo.flash.m_flashTimeOut = 5;
2605        } else
2606            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2607        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2608        break;
2609    case IS_FLASH_STATE_ON_WAIT:
2610        break;
2611    case IS_FLASH_STATE_ON_DONE:
2612        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2613            // auto transition at pre-capture trigger
2614            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2615        break;
2616    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2617        CAM_LOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK", __FUNCTION__);
2618        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2619        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2620        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2621        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2622        break;
2623    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2624    case IS_FLASH_STATE_AUTO_WAIT:
2625        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2626        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2627        break;
2628    case IS_FLASH_STATE_AUTO_DONE:
2629        CAM_LOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE", __FUNCTION__);
2630        break;
2631    case IS_FLASH_STATE_AUTO_OFF:
2632        CAM_LOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear", __FUNCTION__);
2633        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2634        m_ctlInfo.flash.m_afFlashDoneFlg = false;
2635        m_ctlInfo.flash.m_flashEnableFlg = false;
2636        break;
2637    case IS_FLASH_STATE_CAPTURE:
2638        CAM_LOGV("(%s): [Flash] IS_FLASH_CAPTURE", __FUNCTION__);
2639        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2640        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2641        shot_ext->request_scc = 0;
2642        shot_ext->request_scp = 0;
2643        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2644        break;
2645    case IS_FLASH_STATE_CAPTURE_WAIT:
2646        shot_ext->request_scc = 0;
2647        shot_ext->request_scp = 0;
2648        break;
2649    case IS_FLASH_STATE_CAPTURE_JPEG:
2650        CAM_LOGE("(%s): [Flash] Flash Capture  (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2651        shot_ext->request_scc = 1;
2652        shot_ext->request_scp = 1;
2653        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
2654        break;
2655    case IS_FLASH_STATE_CAPTURE_END:
2656        CAM_LOGV("(%s): [Flash] Flash Capture END", __FUNCTION__);
2657        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2658        shot_ext->request_scc = 0;
2659        shot_ext->request_scp = 0;
2660        m_ctlInfo.flash.m_flashEnableFlg = false;
2661        m_ctlInfo.flash.m_flashCnt = 0;
2662        m_ctlInfo.flash.m_afFlashDoneFlg= false;
2663        break;
2664    default:
2665        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2666    }
2667}
2668
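/*
 * The two pre-capture listeners below advance the flash state machine based
 * on metadata reported back from the sensor and the ISP respectively
 * (flash mode, flash decision, AWB lock and firing stability).
 */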
2669void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2670{
2671    // Flash
2672    switch (m_ctlInfo.flash.m_flashCnt) {
2673    case IS_FLASH_STATE_AUTO_WAIT:
2674        if (m_ctlInfo.flash.m_flashDecisionResult) {
2675            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2676                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2677                CAM_LOGV("(%s): [Flash] Lis :  AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2678            } else {
2679                CAM_LOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2680            }
2681        } else {
2682            // If the flash wasn't activated in flash auto mode, skip flash auto control
2683            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2684            CAM_LOGV("(%s): [Flash] Skip :  AUTO -> OFF", __FUNCTION__);
2685        }
2686        break;
2687    }
2688}
2689
2690void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
2691{
2692    // Flash
2693    switch (m_ctlInfo.flash.m_flashCnt) {
2694    case IS_FLASH_STATE_ON_WAIT:
2695        if (shot_ext->shot.dm.flash.decision > 0) {
2696            // store decision result to skip capture sequence
2697            CAM_LOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
2698            if (shot_ext->shot.dm.flash.decision == 2)
2699                m_ctlInfo.flash.m_flashDecisionResult = false;
2700            else
2701                m_ctlInfo.flash.m_flashDecisionResult = true;
2702            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2703        } else {
2704            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2705                CAM_LOGV("(%s): [Flash] Timeout in IS_FLASH_ON, forcing decision to false", __FUNCTION__);
2706                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
2707                m_ctlInfo.flash.m_flashDecisionResult = false;
2708            } else {
2709                m_ctlInfo.flash.m_flashTimeOut--;
2710            }
2711        }
2712        break;
2713    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2714        if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
2715            CAM_LOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
2716            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
2717        } else {
2718            CAM_LOGV("(%s):  [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
2719        }
2720        break;
2721    case IS_FLASH_STATE_CAPTURE_WAIT:
2722        if (m_ctlInfo.flash.m_flashDecisionResult) {
2723            if (shot_ext->shot.dm.flash.firingStable) {
2724                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2725            } else {
2726                if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2727                    ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
2728                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2729                } else {
2730                    ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
2731                    m_ctlInfo.flash.m_flashTimeOut--;
2732                }
2733            }
2734        } else {
2735            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
2736        }
2737        break;
2738    }
2739}
2740
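/*
 * Sensor thread handler. SIGNAL_THREAD_RELEASE streams off and releases the
 * sensor and ISP nodes. SIGNAL_SENSOR_START_REQ_PROCESSING dequeues a bayer
 * buffer from the sensor, matches it to a pending request, updates the ISP
 * parameters for that request and applies per-frame controls such as the
 * digital zoom crop region and AF mode/trigger changes.
 */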
2741void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
2742{
2743    uint32_t        currentSignal = self->GetProcessingSignal();
2744    SensorThread *  selfThread      = ((SensorThread*)self);
2745    int index;
2746    int index_isp;
2747    status_t res;
2748    nsecs_t frameTime;
2749    int bayersOnSensor = 0, bayersOnIsp = 0;
2750    int j = 0;
2751    bool isCapture = false;
2752    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
2753
2754    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2755        CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2756
2757        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
2758        cam_int_streamoff(&(m_camera_info.sensor));
2759        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
2760
2761        m_camera_info.sensor.buffers = 0;
2762        ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
2763        cam_int_reqbufs(&(m_camera_info.sensor));
2764        ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
2765        m_camera_info.sensor.status = false;
2766
2767        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
2768        isp_int_streamoff(&(m_camera_info.isp));
2769        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
2770
2771        m_camera_info.isp.buffers = 0;
2772        ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
2773        cam_int_reqbufs(&(m_camera_info.isp));
2774        ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
2775
2776        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
2777
2778        m_requestManager->releaseSensorQ();
2779        m_requestManager->ResetEntry();
2780        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2781        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2782        return;
2783    }
2784
2785    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
2786    {
2787        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
2788        int targetStreamIndex = 0, i=0;
2789        int matchedFrameCnt = -1, processingReqIndex;
2790        struct camera2_shot_ext *shot_ext;
2791        struct camera2_shot_ext *shot_ext_capture;
2792        bool triggered = false;
2793        int afMode;
2794
2795        /* dqbuf from sensor */
2796        ALOGV("Sensor DQbuf start");
2797        index = cam_int_dqbuf(&(m_camera_info.sensor));
2798        m_requestManager->pushSensorQ(index);
2799        ALOGV("Sensor DQbuf done(%d)", index);
2800        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2801
2802        if (m_nightCaptureCnt != 0) {
2803            matchedFrameCnt = m_nightCaptureFrameCnt;
2804        } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
2805            matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
2806            ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
2807        } else {
2808            matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
2809        }
2810
2811        if (matchedFrameCnt != -1) {
2812            frameTime = systemTime();
2813            m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
2814            m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
2815
2816            if (m_afModeWaitingCnt != 0) {
2817                ALOGV("### Af Trigger pulled, waiting for mode change cnt(%d) ", m_afModeWaitingCnt);
2818                m_afModeWaitingCnt --;
2819                if (m_afModeWaitingCnt == 1) {
2820                    m_afModeWaitingCnt = 0;
2821                    OnAfTrigger(m_afPendingTriggerId);
2822                }
2823            }
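            // Recompute the ISP crop region from the requested digital zoom, preserving the
            // preview stream's aspect ratio and nudging the width so the crop stays centered
            // within the sensor width.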
2824            m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
2825            float zoomLeft, zoomTop, zoomWidth, zoomHeight;
2826            int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
2827
2828            m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
2829                           m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
2830                           &crop_x, &crop_y,
2831                           &crop_w, &crop_h,
2832                           0);
2833
2834            if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
2835                zoomWidth =  m_camera2->getSensorW() / m_zoomRatio;
2836                zoomHeight = zoomWidth *
2837                        m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
2838            } else {
2839                zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
2840                zoomWidth = zoomHeight *
2841                        m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
2842            }
2843            zoomLeft = (crop_w - zoomWidth) / 2;
2844            zoomTop = (crop_h - zoomHeight) / 2;
2845
2846            int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth };
2847
2848            if (new_cropRegion[0] * 2 + new_cropRegion[2] > (int32_t)m_camera2->getSensorW())
2849                new_cropRegion[2]--;
2850            else if (new_cropRegion[0] * 2 + new_cropRegion[2] < (int32_t)m_camera2->getSensorW())
2851                new_cropRegion[2]++;
2852
2853            shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
2854            shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
2855            shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
2856            if (m_IsAfModeUpdateRequired) {
2857                ALOGE("### AF Mode change(Mode %d) ", m_afMode);
2858                shot_ext->shot.ctl.aa.afMode = m_afMode;
2859                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2860                    ALOGE("### With automatic trigger for continuous modes");
2861                    m_afState = HAL_AFSTATE_STARTED;
2862                    shot_ext->shot.ctl.aa.afTrigger = 1;
2863                    triggered = true;
2864                }
2865                m_IsAfModeUpdateRequired = false;
2866                if (m_afMode2 != NO_CHANGE) {
2867                    enum aa_afmode tempAfMode = m_afMode2;
2868                    m_afMode2 = NO_CHANGE;
2869                    SetAfMode(tempAfMode);
2870                }
2871            }
2872            else {
2873                shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
2874            }
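            // Issue a pending AF trigger. When the AF-assist flash is in use, wait until the
            // flash reaches IS_FLASH_STATE_ON_DONE before triggering.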
2875            if (m_IsAfTriggerRequired) {
2876                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
2877                    if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
2878                        // Flash is enabled and start AF
2879                        if (m_afState == HAL_AFSTATE_SCANNING) {
2880                            ALOGE("(%s): restarting trigger ", __FUNCTION__);
2881                        } else {
2882                            if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2883                                ALOGE("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2884                            else
2885                                m_afState = HAL_AFSTATE_STARTED;
2886                        }
2887                        ALOGE("### AF Triggering with mode (%d)", m_afMode);
2888                        shot_ext->shot.ctl.aa.afTrigger = 1;
2889                        shot_ext->shot.ctl.aa.afMode = m_afMode;
2890                        m_IsAfTriggerRequired = false;
2891                    }
2892                } else {
2893                    ALOGE("### AF Triggering with mode (%d)", m_afMode);
2894                    if (m_afState == HAL_AFSTATE_SCANNING) {
2895                        ALOGE("(%s): restarting trigger ", __FUNCTION__);
2896                    } else {
2897                        if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2898                            ALOGE("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2899                        else
2900                            m_afState = HAL_AFSTATE_STARTED;
2901                    }
2902                    shot_ext->shot.ctl.aa.afTrigger = 1;
2903                    shot_ext->shot.ctl.aa.afMode = m_afMode;
2904                    m_IsAfTriggerRequired = false;
2905                }
2906            }
2907            else {
2908                shot_ext->shot.ctl.aa.afTrigger = 0;
2909            }
2910
2911            if (m_wideAspect) {
2912                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
2913                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
2914                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2915            } else {
2916                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
2917            }
2918            if (triggered)
2919                shot_ext->shot.ctl.aa.afTrigger = 1;
2920
2921            // TODO : check collision with AFMode Update
2922            if (m_IsAfLockRequired) {
2923                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
2924                m_IsAfLockRequired = false;
2925            }
2926            ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
2927                index,
2928                shot_ext->shot.ctl.request.frameCount,
2929                shot_ext->request_scp,
2930                shot_ext->request_scc,
2931                shot_ext->dis_bypass, sizeof(camera2_shot));
2932            if (0 == shot_ext->shot.ctl.aa.afRegions[0] && 0 == shot_ext->shot.ctl.aa.afRegions[1]
2933                && 0 == shot_ext->shot.ctl.aa.afRegions[2] && 0 == shot_ext->shot.ctl.aa.afRegions[3]) {
2934                ALOGV("(%s): AF region resetting", __FUNCTION__);
2935                lastAfRegion[0] = 0;
2936                lastAfRegion[1] = 0;
2937                lastAfRegion[2] = 0;
2938                lastAfRegion[3] = 0;
2939            }
2940            else {
2941                if (!(lastAfRegion[0] == shot_ext->shot.ctl.aa.afRegions[0] && lastAfRegion[1] == shot_ext->shot.ctl.aa.afRegions[1]
2942                        && lastAfRegion[2] == shot_ext->shot.ctl.aa.afRegions[2] && lastAfRegion[3] == shot_ext->shot.ctl.aa.afRegions[3])) {
2943                    ALOGE("(%s): AF region changed : triggering", __FUNCTION__);
2944                    shot_ext->shot.ctl.aa.afTrigger = 1;
2945                    shot_ext->shot.ctl.aa.afMode = m_afMode;
2946                    m_afState = HAL_AFSTATE_STARTED;
2947                    lastAfRegion[0] = shot_ext->shot.ctl.aa.afRegions[0];
2948                    lastAfRegion[1] = shot_ext->shot.ctl.aa.afRegions[1];
2949                    lastAfRegion[2] = shot_ext->shot.ctl.aa.afRegions[2];
2950                    lastAfRegion[3] = shot_ext->shot.ctl.aa.afRegions[3];
2951                }
2952                // clear region infos in case of CAF mode
2953                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2954                    shot_ext->shot.ctl.aa.afRegions[0] = lastAfRegion[0] = 0;
2955                    shot_ext->shot.ctl.aa.afRegions[1] = lastAfRegion[1] = 0;
2956                    shot_ext->shot.ctl.aa.afRegions[2] = lastAfRegion[2] = 0;
2957                    shot_ext->shot.ctl.aa.afRegions[3] = lastAfRegion[3] = 0;
2958                }
2959            }
2960            if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
2961                    && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
2962                shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
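            // Night-shot capture: when a still capture is requested in night scene mode, hold
            // the request for four frames at a reduced frame rate and only request SCC output
            // on the final frame (m_nightCaptureCnt counts down from 4 to 1).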
2963            if (m_nightCaptureCnt == 0) {
2964                if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
2965                        && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
2966                    shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
2967                    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
2968                    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2969                    m_nightCaptureCnt = 4;
2970                    m_nightCaptureFrameCnt = matchedFrameCnt;
2971                    shot_ext->request_scc = 0;
2972                }
2973            }
2974            else if (m_nightCaptureCnt == 1) {
2975                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
2976                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
2977                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2978                m_nightCaptureCnt--;
2979                shot_ext->request_scc = 1;
2980            }
2981            else if (m_nightCaptureCnt == 2) {
2982                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
2983                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
2984                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2985                m_nightCaptureCnt--;
2986                shot_ext->request_scc = 0;
2987            }
2988            else if (m_nightCaptureCnt == 3 || m_nightCaptureCnt == 4) {
2989                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
2990                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
2991                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2992                m_nightCaptureCnt--;
2993                shot_ext->request_scc = 0;
2994            }
2995
2996            // Flash mode
2997            // While flash is enabled, keep skipping request_scc = 1 so the flash sequence can finish before the still capture
2998            if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
2999                    && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3000                    && (m_cameraId == 0)) {
3001                if (!m_ctlInfo.flash.m_flashDecisionResult) {
3002                    m_ctlInfo.flash.m_flashEnableFlg = false;
3003                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3004                    m_ctlInfo.flash.m_flashCnt = 0;
3005                } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) || (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3006                    ALOGE("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3007                    shot_ext->request_scc = 0;
3008                    m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3009                    m_ctlInfo.flash.m_flashEnableFlg = true;
3010                    m_ctlInfo.flash.m_afFlashDoneFlg = false;
3011                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3012                }
3013            }
3014
3015            // TODO : set torch mode for video recording. need to find proper position.
3016            // m_wideAspect will be replaced by a recording hint
3017            if ((shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_SINGLE) && m_wideAspect) {
3018                shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_TORCH;
3019                shot_ext->shot.ctl.flash.firingPower = 10;
3020                m_ctlInfo.flash.m_flashTorchMode = true;
3021            } else if (m_wideAspect){
3022                shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3023                shot_ext->shot.ctl.flash.firingPower = 0;
3024                m_ctlInfo.flash.m_flashTorchMode = false;
3025            } else {
3026                if (m_ctlInfo.flash.m_flashTorchMode) {
3027                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3028                    shot_ext->shot.ctl.flash.firingPower = 0;
3029                    m_ctlInfo.flash.m_flashTorchMode = false;
3030                } else {
3031                    shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3032                }
3033            }
3034
3035            if (m_ctlInfo.flash.m_flashEnableFlg) {
3036                m_preCaptureListenerSensor(shot_ext);
3037                m_preCaptureSetter(shot_ext);
3038            }
3039
3040            if (shot_ext->isReprocessing) {
3041                ALOGE("(%s): Reprocess request ", __FUNCTION__);
3042                m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3043                shot_ext->request_scp = 0;
3044                shot_ext->request_scc = 0;
3045                m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3046                memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
3047                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3048            }
3049            ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3050            (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3051            (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3052            (int)(shot_ext->shot.ctl.aa.afTrigger));
3053
3054            uint32_t current_scp = shot_ext->request_scp;
3055
3056            if (shot_ext->shot.dm.request.frameCount == 0) {
3057                CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3058            }
3059
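            // Queue the shot to the ISP, wait briefly, then dequeue the processed result,
            // which carries the dynamic metadata (dm) used below.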
3060            cam_int_qbuf(&(m_camera_info.isp), index);
3061
3062            usleep(10000);
3063
3064            ALOGV("### isp DQBUF start");
3065            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3066
3067            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3068
3069            if (m_ctlInfo.flash.m_flashEnableFlg)
3070                m_preCaptureListenerISP(shot_ext);
3071
3072            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) shot_size(%d)",
3073                index,
3074                shot_ext->shot.ctl.request.frameCount,
3075                shot_ext->request_scp,
3076                shot_ext->request_scc,
3077                shot_ext->dis_bypass, sizeof(camera2_shot));
3078            ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3079                (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3080                (int)(shot_ext->shot.dm.aa.awbMode),
3081                (int)(shot_ext->shot.dm.aa.afMode));
3082
3083            m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3084
3085            if (current_scp) {
3086                ALOGV("send SIGNAL_STREAM_DATA_COMING(return scp : %d)", shot_ext->request_scp);
3087                m_scpOutputSignalCnt++;
3088                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3089            }
3090
3091            if (current_scp != shot_ext->request_scp) {
3092                CAM_LOGW("WARN(%s): scp frame drop1 request_scp(%d to %d)",
3093                                __FUNCTION__, current_scp, shot_ext->request_scp);
3094            }
3095            if (shot_ext->request_scc) {
3096                memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
3097                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3098            }
3099
3100            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3101               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3102            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3103                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3104                m_scp_closed = true;
3105            }
3106            else
3107                m_scp_closed = false;
3108
3109            if (!shot_ext->fd_bypass) {
3110                /* FD coordinate transformation : scale face rectangles from stream coordinates to sensor coordinates */
3111                for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3112                    if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3113                        shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
3114                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][0])
3115                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3116                    if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3117                        shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
3118                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][1])
3119                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3120                    if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3121                        shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
3122                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][2])
3123                                                                                                / m_streamThreads[0].get()->m_parameters.width;
3124                    if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3125                        shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
3126                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][3])
3127                                                                                                / m_streamThreads[0].get()->m_parameters.height;
3128                }
3129            }
3130            if (m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3131                m_requestManager->ApplyDynamicMetadata(shot_ext);
3132            }
3133            OnAfNotification(shot_ext->shot.dm.aa.afState);
3134            OnPrecaptureMeteringNotification();
3135        }
3136
3137        index = m_requestManager->popSensorQ();
3138        if (index < 0) {
3139            ALOGE("sensorQ is empty");
3140            return;
3141        }
3142
3143        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]), &afMode);
3144        if (processingReqIndex != -1)
3145            SetAfMode((enum aa_afmode)afMode);
3146
3147
3148        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3149        if (m_scp_closing || m_scp_closed) {
3150            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3151            shot_ext->request_scc = 0;
3152            shot_ext->request_scp = 0;
3153            shot_ext->request_sensor = 0;
3154        }
3155        cam_int_qbuf(&(m_camera_info.sensor), index);
3156        ALOGV("Sensor Qbuf done(%d)", index);
3157
3158        if (!m_scp_closing
3159            && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3160            ALOGV("make bubble shot: matchedFrameCnt(%d) processingReqIndex(%d)",
3161                                    matchedFrameCnt, processingReqIndex);
3162            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3163        }
3164    }
3165    return;
3166}
3167
3168void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3169{
3170    uint32_t                currentSignal   = self->GetProcessingSignal();
3171    StreamThread *          selfThread      = ((StreamThread*)self);
3172    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3173    node_info_t             *currentNode    = selfStreamParms->node;
3174    substream_parameters_t  *subParms;
3175    buffer_handle_t * buf = NULL;
3176    status_t res;
3177    void *virtAddr[3];
3178    int i, j;
3179    int index;
3180    nsecs_t timestamp;
3181
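    // First pass: pull every service buffer for the main stream into the HAL once and
    // record its ownership state (ON_DRIVER / ON_HAL).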
3182    if (!(selfThread->m_isBufferInit))
3183    {
3184        for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3185            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3186            if (res != NO_ERROR || buf == NULL) {
3187                ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3188                return;
3189            }
3190            ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3191               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3192
3193            index = selfThread->findBufferIndex(buf);
3194            if (index == -1) {
3195                ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3196            }
3197            else {
3198                ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3199                    __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3200                if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3201                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3202                else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3203                    selfStreamParms->svcBufStatus[index] = ON_HAL;
3204                else {
3205                    ALOGV("DBG(%s): buffer status abnormal (%d) "
3206                        , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3207                }
3208                selfStreamParms->numSvcBufsInHal++;
3209            }
3210            selfStreamParms->bufIndex = 0;
3211        }
3212        selfThread->m_isBufferInit = true;
3213    }
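    // Second pass: for each attached substream, dequeue and lock its service buffers and
    // allocate the scratch buffers it needs (JPEG resize buffer, preview-callback buffer).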
3214    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3215        if (selfThread->m_attachedSubStreams[i].streamId == -1)
3216            continue;
3217
3218        subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3219        if (subParms->type && subParms->needBufferInit) {
3220            ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3221                __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3222            int checkingIndex = 0;
3223            bool found = false;
3224            for ( i = 0 ; i < subParms->numSvcBuffers; i++) {
3225                res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3226                if (res != NO_ERROR || buf == NULL) {
3227                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3228                    return;
3229                }
3230                subParms->numSvcBufsInHal++;
3231                ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3232                   subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3233
3234                if (m_grallocHal->lock(m_grallocHal, *buf,
3235                       subParms->usage, 0, 0,
3236                       subParms->width, subParms->height, virtAddr) != 0) {
3237                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3238                }
3239                else {
3240                      ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3241                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3242                }
3243                found = false;
3244                for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3245                    if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3246                        found = true;
3247                        break;
3248                    }
3249                }
3250                ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3251                if (!found) break;
3252
3253                index = checkingIndex;
3254
3255                if (index == -1) {
3256                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3257                }
3258                else {
3259                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3260                        __FUNCTION__, index, subParms->svcBufStatus[index]);
3261                    if (subParms->svcBufStatus[index]== ON_SERVICE)
3262                        subParms->svcBufStatus[index] = ON_HAL;
3263                    else {
3264                        ALOGV("DBG(%s): buffer status abnormal (%d) "
3265                            , __FUNCTION__, subParms->svcBufStatus[index]);
3266                    }
3267                    if (*buf != subParms->svcBufHandle[index])
3268                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3269                    else
3270                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3271                }
3272                subParms->svcBufIndex = 0;
3273            }
3274            if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3275                m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3276                m_resizeBuf.size.extS[1] = 0;
3277                m_resizeBuf.size.extS[2] = 0;
3278
3279                if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3280                    ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3281                }
3282            }
3283            if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3284                m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3285                subParms->height, &m_previewCbBuf);
3286
3287                if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3288                    ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3289                }
3290            }
3291            subParms->needBufferInit= false;
3292        }
3293    }
3294}
3295
3296void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3297{
3298    StreamThread *          selfThread      = ((StreamThread*)self);
3299    ALOGV("DEBUG(%s): ", __FUNCTION__ );
3300    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3301    selfThread->m_isBufferInit = false;
3302    for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3303        selfThread->m_attachedSubStreams[i].streamId    = -1;
3304        selfThread->m_attachedSubStreams[i].priority    = 0;
3305    }
3306    return;
3307}
3308
3309int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3310    int stream_id, nsecs_t frameTimeStamp)
3311{
3312    substream_parameters_t  *subParms = &m_subStreams[stream_id];
3313
3314    switch (stream_id) {
3315
3316    case STREAM_ID_JPEG:
3317        return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3318
3319    case STREAM_ID_RECORD:
3320        return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3321
3322    case STREAM_ID_PRVCB:
3323        return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3324
3325    default:
3326        return 0;
3327    }
3328}
3329void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3330{
3331    uint32_t                currentSignal   = self->GetProcessingSignal();
3332    StreamThread *          selfThread      = ((StreamThread*)self);
3333    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3334    node_info_t             *currentNode    = selfStreamParms->node;
3335    int i = 0;
3336    nsecs_t frameTimeStamp;
3337
3338    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3339        CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3340
3341        if (selfThread->m_isBufferInit) {
3342            ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3343                selfThread->m_index, currentNode->fd);
3344            if (cam_int_streamoff(currentNode) < 0 ) {
3345                ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3346            }
3347            ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3348                    selfThread->m_index, currentNode->fd);
3349            currentNode->buffers = 0;
3350            cam_int_reqbufs(currentNode);
3351            ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3352                    selfThread->m_index, currentNode->fd);
3353        }
3354#ifdef ENABLE_FRAME_SYNC
3355        // free metabuffers
3356        for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3357            if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3358                freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3359                selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3360                selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3361            }
3362#endif
3363        selfThread->m_isBufferInit = false;
3364        selfThread->m_releasing = false;
3365        selfThread->m_activated = false;
3366        ALOGV("(%s): [%d] END  SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3367        return;
3368    }
3369    if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3370        status_t    res;
3371        buffer_handle_t * buf = NULL;
3372        bool found = false;
3373        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3374            __FUNCTION__, selfThread->m_index);
3375        res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3376        if (res != NO_ERROR || buf == NULL) {
3377            ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3378            return;
3379        }
3380        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3381        int checkingIndex = 0;
3382        for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3383            if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3384                found = true;
3385                break;
3386            }
3387        }
3388        ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3389            __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3390
3391        if (!found) return;
3392
3393        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3394            if (selfThread->m_attachedSubStreams[i].streamId == -1)
3395                continue;
3396
3397#ifdef ENABLE_FRAME_SYNC
3398            // TODO: check real timestamp
3399            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3400            m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3401#else
3402            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3403#endif
3404            if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3405                m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3406                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3407        }
3408
3409        res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3410        if (res != NO_ERROR) {
3411            ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3412            return;
3413        }
3414        ALOGV("(%s): streamthread[%d] END   SIGNAL_STREAM_REPROCESSING_START",
3415            __FUNCTION__,selfThread->m_index);
3416
3417        return;
3418    }
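    // SIGNAL_STREAM_DATA_COMING: dequeue a filled buffer from the driver, run the attached
    // substreams on it, hand it to (or cancel it back to) the service, then refill the driver.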
3419    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3420        buffer_handle_t * buf = NULL;
3421        status_t res = 0;
3422        int i, j;
3423        int index;
3424        nsecs_t timestamp;
3425#ifdef ENABLE_FRAME_SYNC
3426        camera2_stream *frame;
3427#endif
3428        int numOfUndqbuf = 0;
3429
3430        ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3431
3432        m_streamBufferInit(self);
3433
3434        do {
3435            ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3436                selfThread->m_index, selfThread->streamType);
3437
3438#ifdef ENABLE_FRAME_SYNC
3439            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3440            frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3441            ALOGV("frame count streamthread[%d] : %d", selfThread->m_index, frame->rcount);
3442            frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3443#else
3444            selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3445            frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3446#endif
3447            ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)  sigcnt(%d)",__FUNCTION__,
3448                selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3449
3450            if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] !=  ON_DRIVER)
3451                ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3452                       __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3453            selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3454
3455            for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3456                if (selfThread->m_attachedSubStreams[i].streamId == -1)
3457                    continue;
3458                if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3459#ifdef ENABLE_FRAME_SYNC
3460                    m_requestManager->NotifyStreamOutput(frame->rcount);
3461#endif
3462                    m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3463                        selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3464                }
3465            }
3466
3467#ifdef ENABLE_FRAME_SYNC
3468            m_requestManager->NotifyStreamOutput(frame->rcount);
3469#endif
3470            if (m_requestManager->GetSkipCnt() <= 0) {
3471                if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3472#ifdef ENABLE_FRAME_SYNC
3473                    ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3474#else
3475                    ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3476#endif
3477                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3478                            frameTimeStamp,
3479                            &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3480                }
3481                else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3482#ifdef ENABLE_FRAME_SYNC
3483                    ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
3484#else
3485                    ALOGV("** SCC output (frameCnt:%d)", m_requestManager->GetFrameIndex());
3486#endif
3487                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3488                                frameTimeStamp,
3489                                &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3490                }
3491                ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3492            }
3493            else {
3494                res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3495                        &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3496                ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3497            }
3498            if (res == 0) {
3499                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3500                selfStreamParms->numSvcBufsInHal--;
3501            }
3502            else {
3503                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3504            }
3505
3506        }
3507        while(0);
3508
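        // Refill: dequeue buffers back from the service until the HAL holds all but one of
        // its own service buffers, queueing each recovered buffer back to the capture node.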
3509        while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers - 1) {
3510            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3511            if (res != NO_ERROR || buf == NULL) {
3512                ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3513                break;
3514            }
3515            selfStreamParms->numSvcBufsInHal++;
3516            ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3517                selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3518               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3519            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3520
3521            bool found = false;
3522            int checkingIndex = 0;
3523            for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3524                if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3525                    found = true;
3526                    break;
3527                }
3528            }
3529            if (!found) break;
3530            selfStreamParms->bufIndex = checkingIndex;
3531            if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
3532                uint32_t    plane_index = 0;
3533                ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
3534                struct v4l2_buffer v4l2_buf;
3535                struct v4l2_plane  planes[VIDEO_MAX_PLANES];
3536
3537                v4l2_buf.m.planes   = planes;
3538                v4l2_buf.type       = currentNode->type;
3539                v4l2_buf.memory     = currentNode->memory;
3540                v4l2_buf.index      = selfStreamParms->bufIndex;
3541                v4l2_buf.length     = currentNode->planes;
3542
3543                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
3544                v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
3545                v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
3546                for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
3547                    v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
3548                }
3549#ifdef ENABLE_FRAME_SYNC
3550                /* add plane for metadata*/
3551                v4l2_buf.length += selfStreamParms->metaPlanes;
3552                v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
3553                v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
3554#endif
3555                if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
3556                    ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
3557                        __FUNCTION__, selfThread->m_index);
3558                    return;
3559                }
3560                selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
3561                ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
3562                    __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
3563            }
3564        }
3565
3566        ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3567    }
3568    return;
3569}
3570
3571void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
3572{
3573    uint32_t                currentSignal   = self->GetProcessingSignal();
3574    StreamThread *          selfThread      = ((StreamThread*)self);
3575    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3576    node_info_t             *currentNode    = selfStreamParms->node;
3577
3578
3579    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3580        CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3581
3582        if (selfThread->m_isBufferInit) {
3583            if (currentNode->fd == m_camera_info.capture.fd) {
3584                if (m_camera_info.capture.status == true) {
3585                    ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
3586                    selfThread->m_index, currentNode->fd);
3587                    if (cam_int_streamoff(currentNode) < 0 ){
3588                        ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3589                    } else {
3590                        m_camera_info.capture.status = false;
3591                    }
3592                }
3593            } else {
3594                ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
3595                selfThread->m_index, currentNode->fd);
3596                if (cam_int_streamoff(currentNode) < 0 ){
3597                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3598                }
3599            }
3600            ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
3601            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
3602                    selfThread->m_index, currentNode->fd);
3603            currentNode->buffers = 0;
3604            cam_int_reqbufs(currentNode);
3605            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
3606                    selfThread->m_index, currentNode->fd);
3607        }
3608
3609        selfThread->m_isBufferInit = false;
3610        selfThread->m_releasing = false;
3611        selfThread->m_activated = false;
3612        ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3613        return;
3614    }
3615
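    // Indirect streams (e.g. the SCC capture node) do not hand the driver buffer to the
    // service; the dequeued frame is only fed to the attached substreams and then re-queued.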
3616    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3617#ifdef ENABLE_FRAME_SYNC
3618        camera2_stream *frame;
3619#endif
3620        nsecs_t frameTimeStamp;
3621
3622        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
3623            __FUNCTION__,selfThread->m_index);
3624
3625        m_streamBufferInit(self);
3626
3627        ALOGD("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
3628        selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3629        ALOGD("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
3630            selfThread->m_index, selfStreamParms->bufIndex);
3631
3632#ifdef ENABLE_FRAME_SYNC
3633        frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
3634        ALOGV("frame count(SCC) : %d",  frame->rcount);
3635        frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3636#else
3637        frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3638#endif
3639
3640        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3641            if (selfThread->m_attachedSubStreams[i].streamId == -1)
3642                continue;
3643            if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3644#ifdef ENABLE_FRAME_SYNC
3645                m_requestManager->NotifyStreamOutput(frame->rcount);
3646#endif
3647                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
3648                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3649            }
3650        }
3651        cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
3652        ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
3653
3654
3655
3656        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
3657            __FUNCTION__, selfThread->m_index);
3658    }
3659
3660
3661    return;
3662}
3663
3664void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
3665{
3666    uint32_t                currentSignal   = self->GetProcessingSignal();
3667    StreamThread *          selfThread      = ((StreamThread*)self);
3668    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3669    node_info_t             *currentNode    = selfStreamParms->node;
3670
3671    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
3672
3673    // Do something in Child thread handler
3674    // Should change function to class that inherited StreamThread class to support dynamic stream allocation
3675    if (selfThread->streamType == STREAM_TYPE_DIRECT) {
3676        m_streamFunc_direct(self);
3677    } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
3678        m_streamFunc_indirect(self);
3679    }
3680
3681    return;
3682}
3683int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
3684{
3685    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
3686    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_JPEG];
3687    status_t    res;
3688    ExynosRect jpegRect;
3689    bool found = false;
3690    int pictureW, pictureH, pictureFramesize = 0;
3691    int pictureFormat;
3692    int cropX, cropY, cropW, cropH = 0;
3693    ExynosBuffer resizeBufInfo;
3694    ExynosRect   m_jpegPictureRect;
3695    buffer_handle_t * buf = NULL;
3696
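    // Find a JPEG substream buffer currently owned by the HAL to receive the encoded image.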
3697    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
3698    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
3699        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
3700            found = true;
3701            break;
3702        }
3703        subParms->svcBufIndex++;
3704        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
3705            subParms->svcBufIndex = 0;
3706    }
3707    if (!found) {
3708        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
3709        subParms->svcBufIndex++;
3710        return 1;
3711    }
3712
3713    m_jpegPictureRect.w = subParms->width;
3714    m_jpegPictureRect.h = subParms->height;
3715
3716    ALOGV("DEBUG(%s): stream w = %d, h = %d, jpeg w = %d, h = %d",
3717              __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
3718                   m_jpegPictureRect.w, m_jpegPictureRect.h);
3719
3720    m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
3721                   m_jpegPictureRect.w, m_jpegPictureRect.h,
3722                   &cropX, &cropY,
3723                   &pictureW, &pictureH,
3724                   0);
3725    pictureFormat = V4L2_PIX_FMT_YUYV;
3726    pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
3727
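    // Crop the source image according to the current digital zoom and scale it to the
    // requested JPEG dimensions with the picture CSC, producing an NV16 intermediate in m_resizeBuf.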
3728    if (m_exynosPictureCSC) {
3729        float zoom_w = 0, zoom_h = 0;
3730        if (m_zoomRatio == 0)
3731            m_zoomRatio = 1;
3732
3733        if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
3734            zoom_w =  pictureW / m_zoomRatio;
3735            zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
3736        } else {
3737            zoom_h = pictureH / m_zoomRatio;
3738            zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
3739        }
3740        cropX = (pictureW - zoom_w) / 2;
3741        cropY = (pictureH - zoom_h) / 2;
3742        cropW = zoom_w;
3743        cropH = zoom_h;
3744
3745        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
3746              __FUNCTION__, cropX, cropY, cropW, cropH);
3747
3748        csc_set_src_format(m_exynosPictureCSC,
3749                           ALIGN(pictureW, 16), ALIGN(pictureH, 16),
3750                           cropX, cropY, cropW, cropH,
3751                           V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
3752                           0);
3753
3754        csc_set_dst_format(m_exynosPictureCSC,
3755                           m_jpegPictureRect.w, m_jpegPictureRect.h,
3756                           0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
3757                           V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
3758                           0);
3759        for (int i = 0 ; i < 3 ; i++)
3760            ALOGV("DEBUG(%s): srcImageBuf->fd.extFd[%d]=%d ",
3761                __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
3762        csc_set_src_buffer(m_exynosPictureCSC,
3763                           (void **)&srcImageBuf->fd.fd);
3764
3765        csc_set_dst_buffer(m_exynosPictureCSC,
3766                           (void **)&m_resizeBuf.fd.fd);
3767        for (int i = 0 ; i < 3 ; i++)
3768            ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
3769                __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
3770
3771        if (csc_convert(m_exynosPictureCSC) != 0)
3772            ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
3773
3774    }
3775    else {
3776        ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
3777    }
3778
3779    resizeBufInfo = m_resizeBuf;
3780
3781    m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
3782
3783    for (int i = 1; i < 3; i++) {
3784        if (m_resizeBuf.size.extS[i] != 0)
3785            m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
3786
3787        ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
3788    }
3789
3790    jpegRect.w = m_jpegPictureRect.w;
3791    jpegRect.h = m_jpegPictureRect.h;
3792    jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
3793
3794    for (int j = 0 ; j < 3 ; j++)
3795        ALOGV("DEBUG(%s): dest buf node  fd.extFd[%d]=%d size=%d virt=%x ",
3796            __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
3797            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
3798            (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
3799
3800    if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false)
3801        ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
3802
3803    m_resizeBuf = resizeBufInfo;
3804
3805    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
3806
3807    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
3808            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
3809    if (res == 0) {
3810        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
3811        subParms->numSvcBufsInHal--;
3812    }
3813    else {
3814        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
3815    }
3816
3817    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
3818    {
3819        bool found = false;
3820        int checkingIndex = 0;
3821
3822        ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
3823
3824        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3825        if (res != NO_ERROR || buf == NULL) {
3826            ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3827            break;
3828        }
3829        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3830        subParms->numSvcBufsInHal ++;
3831        ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3832           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3833
3834
3835        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3836            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3837                found = true;
3838                break;
3839            }
3840        }
3841        ALOGV("DEBUG(%s): jpeg dequeued_buffer found(%d) index(%d)", __FUNCTION__, found, checkingIndex);
3842
3843        if (!found) {
3844             break;
3845        }
3846
3847        subParms->svcBufIndex = checkingIndex;
3848        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
3849            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
3850        }
3851        else {
3852            ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d]  status = %d", __FUNCTION__,
3853                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
3854        }
3855    }
3856    return 0;
3857}
3858
3859int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
3860{
3861    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
3862    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_RECORD];
3863    status_t    res;
3864    ExynosRect jpegRect;
3865    bool found = false;
3866    int cropX, cropY, cropW, cropH = 0;
3867    buffer_handle_t * buf = NULL;
3868
3869    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
3870    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
3871        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
3872            found = true;
3873            break;
3874        }
3875        subParms->svcBufIndex++;
3876        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
3877            subParms->svcBufIndex = 0;
3878    }
3879    if (!found) {
3880        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
3881        subParms->svcBufIndex++;
3882        return 1;
3883    }
3884
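    // Convert the preview frame to the recording stream's size and format with the video CSC,
    // writing directly into the selected service buffer.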
3885    if (m_exynosVideoCSC) {
3886        int videoW = subParms->width, videoH = subParms->height;
3887        int cropX, cropY, cropW, cropH = 0;
3888        int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
3889        m_getRatioSize(previewW, previewH,
3890                       videoW, videoH,
3891                       &cropX, &cropY,
3892                       &cropW, &cropH,
3893                       0);
3894
3895        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
3896                 __FUNCTION__, cropX, cropY, cropW, cropH);
3897
3898        csc_set_src_format(m_exynosVideoCSC,
3899                           previewW, previewH,
3900                           cropX, cropY, cropW, cropH,
3901                           selfStreamParms->format,
3902                           0);
3903
3904        csc_set_dst_format(m_exynosVideoCSC,
3905                           videoW, videoH,
3906                           0, 0, videoW, videoH,
3907                           subParms->format,
3908                           1);
3909
3910        csc_set_src_buffer(m_exynosVideoCSC,
3911                        (void **)&srcImageBuf->fd.fd);
3912
3913        csc_set_dst_buffer(m_exynosVideoCSC,
3914            (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
3915
3916        if (csc_convert(m_exynosVideoCSC) != 0) {
3917            ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
3918        }
3919        else {
3920            ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
3921        }
3922    }
3923    else {
3924        ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
3925    }
3926
3927    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
3928
3929    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
3930            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
3931    if (res == 0) {
3932        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
3933        subParms->numSvcBufsInHal--;
3934    }
3935    else {
3936        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
3937    }
3938
3939    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
3940    {
3941        bool found = false;
3942        int checkingIndex = 0;
3943
3944        ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
3945
3946        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3947        if (res != NO_ERROR || buf == NULL) {
3948            ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3949            break;
3950        }
3951        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3952        subParms->numSvcBufsInHal ++;
3953        ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3954           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3955
3956        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3957            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3958                found = true;
3959                break;
3960            }
3961        }
3962        ALOGV("DEBUG(%s): record dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
3963
3964        if (!found) {
3965             break;
3966        }
3967
3968        subParms->svcBufIndex = checkingIndex;
3969        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
3970            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
3971        }
3972        else {
3973            ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
3974                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
3975        }
3976    }
3977    return 0;
3978}
3979
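/*
 * m_prvcbCreator: fills one preview-callback stream buffer from the current
 * preview frame. For HAL_PIXEL_FORMAT_YCrCb_420_SP the frame is converted
 * through m_exynosVideoCSC into m_previewCbBuf and then copied into the
 * service buffer (only when the width is 16-aligned; the unaligned case is
 * still TODO). For HAL_PIXEL_FORMAT_YV12 the planes are copied directly.
 * Buffer bookkeeping against the service mirrors m_recordCreator above.
 */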
3980int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
3981{
3982    stream_parameters_t     *selfStreamParms = &(selfThread->m_parameters);
3983    substream_parameters_t  *subParms        = &m_subStreams[STREAM_ID_PRVCB];
3984    status_t    res;
3985    bool found = false;
3986    int cropX, cropY, cropW, cropH = 0;
3987    buffer_handle_t * buf = NULL;
3988
3989    ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
3990    for (int i = 0 ; i < subParms->numSvcBuffers ; i++) {
3991        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
3992            found = true;
3993            break;
3994        }
3995        subParms->svcBufIndex++;
3996        if (subParms->svcBufIndex >= subParms->numSvcBuffers)
3997            subParms->svcBufIndex = 0;
3998    }
3999    if (!found) {
4000        ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4001        subParms->svcBufIndex++;
4002        return 1;
4003    }
4004
4005    if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4006        if (m_exynosVideoCSC) {
4007            int previewCbW = subParms->width, previewCbH = subParms->height;
4008            int cropX, cropY, cropW, cropH = 0;
4009            int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4010            m_getRatioSize(previewW, previewH,
4011                           previewCbW, previewCbH,
4012                           &cropX, &cropY,
4013                           &cropW, &cropH,
4014                           0);
4015
4016            ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4017                     __FUNCTION__, cropX, cropY, cropW, cropH);
4018            csc_set_src_format(m_exynosVideoCSC,
4019                               previewW, previewH,
4020                               cropX, cropY, cropW, cropH,
4021                               selfStreamParms->format,
4022                               0);
4023
4024            csc_set_dst_format(m_exynosVideoCSC,
4025                               previewCbW, previewCbH,
4026                               0, 0, previewCbW, previewCbH,
4027                               subParms->internalFormat,
4028                               1);
4029
4030            csc_set_src_buffer(m_exynosVideoCSC,
4031                        (void **)&srcImageBuf->fd.fd);
4032
4033            csc_set_dst_buffer(m_exynosVideoCSC,
4034                (void **)(&(m_previewCbBuf.fd.fd)));
4035
4036            if (csc_convert(m_exynosVideoCSC) != 0) {
4037                ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4038            }
4039            else {
4040                ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4041            }
4042            if (previewCbW == ALIGN(previewCbW, 16)) {
4043                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4044                    m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4045                memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4046                    m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4047            }
4048            else {
4049                // TODO : copy line by line ?
4050            }
4051        }
4052        else {
4053            ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4054        }
4055    }
4056    else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
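        // YV12 layout used here: a 16-pixel-aligned luma stride followed by two
        // chroma planes whose stride is ALIGN(stride / 2, 16); the three source
        // planes are copied back-to-back into the single service buffer.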
4057        int previewCbW = subParms->width, previewCbH = subParms->height;
4058        int stride = ALIGN(previewCbW, 16);
4059        int c_stride = ALIGN(stride / 2, 16);
4060        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4061            srcImageBuf->virt.extP[0], stride * previewCbH);
4062        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4063            srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4064        memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4065            srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4066    }
4067    res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4068
4069    ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4070            __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4071    if (res == 0) {
4072        subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4073        subParms->numSvcBufsInHal--;
4074    }
4075    else {
4076        subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4077    }
4078
4079    while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4080    {
4081        bool found = false;
4082        int checkingIndex = 0;
4083
4084        ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4085
4086        res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4087        if (res != NO_ERROR || buf == NULL) {
4088            ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
4089            break;
4090        }
4091        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4092        subParms->numSvcBufsInHal ++;
4093        ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4094           subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4095
4096
4097        for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4098            if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4099                found = true;
4100                break;
4101            }
4102        }
4103        ALOGV("DEBUG(%s): prvcb dequeued_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4104
4105        if (!found) {
4106             break;
4107        }
4108
4109        subParms->svcBufIndex = checkingIndex;
4110        if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4111            subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4112        }
4113        else {
4114            ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
4115                subParms->svcBufIndex,  subParms->svcBufStatus[subParms->svcBufIndex]);
4116        }
4117    }
4118    return 0;
4119}
4120
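/*
 * m_checkThumbnailSize: returns true if (w, h) appears in the supported
 * thumbnail size table for the current camera (SUPPORT_THUMBNAIL_REAR_SIZE
 * for camera 0, SUPPORT_THUMBNAIL_FRONT_SIZE otherwise).
 */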
4121bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4122{
4123    int sizeOfSupportList;
4124
4125    //REAR Camera
4126    if(this->getCameraId() == 0) {
4127        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
4128
4129        for(int i = 0; i < sizeOfSupportList; i++) {
4130            if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4131                return true;
4132        }
4133
4134    }
4135    else {
4136        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
4137
4138        for(int i = 0; i < sizeOfSupportList; i++) {
4139            if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4140                return true;
4141        }
4142    }
4143
4144    return false;
4145}
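
/*
 * yuv2Jpeg: encodes one YUV buffer into jpegBuf using
 * ExynosJpegEncoderForCamera. Quality is fixed at 100 (thumbnail at 50),
 * the thumbnail size is taken from the request metadata when it matches a
 * supported size (falling back to a default otherwise), and EXIF data is
 * built via m_setExifChangedAttribute and passed to encode().
 * Returns true on success.
 */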
4146bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4147                            ExynosBuffer *jpegBuf,
4148                            ExynosRect *rect)
4149{
4150    unsigned char *addr;
4151
4152    ExynosJpegEncoderForCamera jpegEnc;
4153    bool ret = false;
4154    int res = 0;
4155
4156    unsigned int *yuvSize = yuvBuf->size.extS;
4157
4158    if (jpegEnc.create()) {
4159        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4160        goto jpeg_encode_done;
4161    }
4162
4163    if (jpegEnc.setQuality(100)) {
4164        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4165        goto jpeg_encode_done;
4166    }
4167
4168    if (jpegEnc.setSize(rect->w, rect->h)) {
4169        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4170        goto jpeg_encode_done;
4171    }
4172    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4173
4174    if (jpegEnc.setColorFormat(rect->colorFormat)) {
4175        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4176        goto jpeg_encode_done;
4177    }
4178
4179    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4180        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4181        goto jpeg_encode_done;
4182    }
4183
4184    if((m_jpegMetadata.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.ctl.jpeg.thumbnailSize[1] != 0)) {
4185        mExifInfo.enableThumb = true;
4186        if(!m_checkThumbnailSize(m_jpegMetadata.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.ctl.jpeg.thumbnailSize[1])) {
4187            //default value
4188            m_thumbNailW = SUPPORT_THUMBNAIL_REAR_SIZE[0][0];
4189            m_thumbNailH = SUPPORT_THUMBNAIL_REAR_SIZE[0][1];
4190        } else {
4191            m_thumbNailW = m_jpegMetadata.ctl.jpeg.thumbnailSize[0];
4192            m_thumbNailH = m_jpegMetadata.ctl.jpeg.thumbnailSize[1];
4193        }
4194
4195        ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4196
4197    } else {
4198        mExifInfo.enableThumb = false;
4199    }
4200
4201    if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4202        ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4203        goto jpeg_encode_done;
4204    }
4205
4206    ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d)", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4207    if (jpegEnc.setThumbnailQuality(50)) {
4208        ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4209        goto jpeg_encode_done;
4210    }
4211
4212    m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4213    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4214    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4215        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4216        goto jpeg_encode_done;
4217    }
4218    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4219        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4220        goto jpeg_encode_done;
4221    }
4222
4223    if (jpegEnc.updateConfig()) {
4224        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4225        goto jpeg_encode_done;
4226    }
4227
4228    if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) != 0) {
4229        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4230        goto jpeg_encode_done;
4231    }
4232
4233    ret = true;
4234
4235jpeg_encode_done:
4236
4237    if (jpegEnc.flagCreate() == true)
4238        jpegEnc.destroy();
4239
4240    return ret;
4241}
4242
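/*
 * OnPrecaptureMeteringTriggerStart: records the precapture trigger id and,
 * on the rear camera with a flash-enabled AE mode, starts the full flash
 * sequence (unless an AF flash sequence has already completed). Otherwise it
 * sets the AE/AWB command counters to IS_COMMAND_EXECUTION for the
 * non-flash precapture path.
 */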
4243void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4244{
4245    m_ctlInfo.flash.m_precaptureTriggerId = id;
4246    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4247        // flash is required
4248        switch (m_ctlInfo.flash.m_flashCnt) {
4249        case IS_FLASH_STATE_AUTO_DONE:
4250            // Flash capture sequence, AF flash was executed before
4251            break;
4252        default:
4253            // Full flash sequence
4254            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4255            m_ctlInfo.flash.m_flashEnableFlg = true;
4256        }
4257    } else {
4258        // Apply AE & AWB lock
4259        ALOGV("[PreCap] Flash OFF mode ");
4260        m_ctlInfo.flash.m_flashEnableFlg = false;
4261        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4262        m_ctlInfo.ae.m_aeCnt = IS_COMMAND_EXECUTION;
4263        m_ctlInfo.awb.m_awbCnt = IS_COMMAND_EXECUTION;
4264    }
4265    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4266}
4267void ExynosCameraHWInterface2::OnAfTriggerStart(int id)
4268{
4269    m_afPendingTriggerId = id;
4270    m_afModeWaitingCnt = 6;
4271}
4272
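/*
 * OnAfTrigger: dispatches an AF trigger to the handler for the current AF
 * mode. For AUTO/MACRO/OFF it may also start an AF pre-flash sequence first
 * (rear camera, flash-enabled AE mode, flash not already running).
 */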
4273void ExynosCameraHWInterface2::OnAfTrigger(int id)
4274{
4275    m_afTriggerId = id;
4276
4277    switch (m_afMode) {
4278    case AA_AFMODE_AUTO:
4279    case AA_AFMODE_MACRO:
4280    case AA_AFMODE_OFF:
4281        ALOGE("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
4282        // If flash is enabled, the flash operation is executed before triggering AF
4283        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4284                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4285                && (m_cameraId == 0)) {
4286            ALOGE("[Flash] AF Flash start with Mode (%d)", m_afMode);
4287            m_ctlInfo.flash.m_flashEnableFlg = true;
4288            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4289            m_ctlInfo.flash.m_flashDecisionResult = false;
4290            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4291        }
4292        OnAfTriggerAutoMacro(id);
4293        break;
4294    case AA_AFMODE_CONTINUOUS_VIDEO:
4295        ALOGE("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4296        OnAfTriggerCAFVideo(id);
4297        break;
4298    case AA_AFMODE_CONTINUOUS_PICTURE:
4299        ALOGE("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4300        OnAfTriggerCAFPicture(id);
4301        break;
4302
4303    default:
4304        break;
4305    }
4306}
4307
4308void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
4309{
4310    int nextState = NO_TRANSITION;
4311
4312    switch (m_afState) {
4313    case HAL_AFSTATE_INACTIVE:
4314        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4315        m_IsAfTriggerRequired = true;
4316        break;
4317    case HAL_AFSTATE_NEEDS_COMMAND:
4318        nextState = NO_TRANSITION;
4319        break;
4320    case HAL_AFSTATE_STARTED:
4321        nextState = NO_TRANSITION;
4322        break;
4323    case HAL_AFSTATE_SCANNING:
4324        nextState = NO_TRANSITION;
4325        break;
4326    case HAL_AFSTATE_LOCKED:
4327        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4328        m_IsAfTriggerRequired = true;
4329        break;
4330    case HAL_AFSTATE_FAILED:
4331        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4332        m_IsAfTriggerRequired = true;
4333        break;
4334    default:
4335        break;
4336    }
4337    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4338    if (nextState != NO_TRANSITION)
4339        m_afState = nextState;
4340}
4341
4342void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4343{
4344    int nextState = NO_TRANSITION;
4345
4346    switch (m_afState) {
4347    case HAL_AFSTATE_INACTIVE:
4348        nextState = HAL_AFSTATE_FAILED;
4349        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4350        break;
4351    case HAL_AFSTATE_NEEDS_COMMAND:
4352        // not used
4353        break;
4354    case HAL_AFSTATE_STARTED:
4355        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4356        m_AfHwStateFailed = false;
4357        // If flash is enabled, the flash operation is executed before triggering AF
4358        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4359                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4360                && (m_cameraId == 0)) {
4361            ALOGE("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4362            m_ctlInfo.flash.m_flashEnableFlg = true;
4363            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4364            m_ctlInfo.flash.m_flashDecisionResult = false;
4365            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4366        }
4367        break;
4368    case HAL_AFSTATE_SCANNING:
4369        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4370        m_AfHwStateFailed = false;
4371        // If flash is enabled, the flash operation is executed before triggering AF
4372        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4373                && (m_ctlInfo.flash.m_flashEnableFlg == false)
4374                && (m_cameraId == 0)) {
4375            ALOGE("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4376            m_ctlInfo.flash.m_flashEnableFlg = true;
4377            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4378            m_ctlInfo.flash.m_flashDecisionResult = false;
4379            m_ctlInfo.flash.m_afFlashDoneFlg = true;
4380        }
4381        break;
4382    case HAL_AFSTATE_NEEDS_DETERMINATION:
4383        nextState = NO_TRANSITION;
4384        break;
4385    case HAL_AFSTATE_PASSIVE_FOCUSED:
4386        m_IsAfLockRequired = true;
4387        if (m_AfHwStateFailed) {
4388            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4389            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4390            nextState = HAL_AFSTATE_FAILED;
4391        }
4392        else {
4393            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4394            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4395            nextState = HAL_AFSTATE_LOCKED;
4396        }
4397        m_AfHwStateFailed = false;
4398        break;
4399    case HAL_AFSTATE_LOCKED:
4400        nextState = NO_TRANSITION;
4401        break;
4402    case HAL_AFSTATE_FAILED:
4403        nextState = NO_TRANSITION;
4404        break;
4405    default:
4406        break;
4407    }
4408    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4409    if (nextState != NO_TRANSITION)
4410        m_afState = nextState;
4411}
4412
4413
4414void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
4415{
4416    int nextState = NO_TRANSITION;
4417
4418    switch (m_afState) {
4419    case HAL_AFSTATE_INACTIVE:
4420        nextState = HAL_AFSTATE_FAILED;
4421        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4422        break;
4423    case HAL_AFSTATE_NEEDS_COMMAND:
4424        // not used
4425        break;
4426    case HAL_AFSTATE_STARTED:
4427        m_IsAfLockRequired = true;
4428        nextState = HAL_AFSTATE_FAILED;
4429        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4430        break;
4431    case HAL_AFSTATE_SCANNING:
4432        m_IsAfLockRequired = true;
4433        nextState = HAL_AFSTATE_FAILED;
4434        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4435        break;
4436    case HAL_AFSTATE_NEEDS_DETERMINATION:
4437        // not used
4438        break;
4439    case HAL_AFSTATE_PASSIVE_FOCUSED:
4440        m_IsAfLockRequired = true;
4441        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4442        nextState = HAL_AFSTATE_LOCKED;
4443        break;
4444    case HAL_AFSTATE_LOCKED:
4445        nextState = NO_TRANSITION;
4446        break;
4447    case HAL_AFSTATE_FAILED:
4448        nextState = NO_TRANSITION;
4449        break;
4450    default:
4451        break;
4452    }
4453    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4454    if (nextState != NO_TRANSITION)
4455        m_afState = nextState;
4456}
4457
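/*
 * OnPrecaptureMeteringNotification: while a precapture trigger is pending,
 * reports AE/AWB state to the framework. In the flash case the reported
 * states follow m_flashCnt (PRECAPTURE/CONVERGED until
 * IS_FLASH_STATE_AUTO_DONE, then LOCKED); in the non-flash case they follow
 * the AE/AWB command counters.
 */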
4458void ExynosCameraHWInterface2::OnPrecaptureMeteringNotification()
4459{
4460    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4461        if (m_ctlInfo.flash.m_flashEnableFlg) {
4462            // flash case
4463            switch (m_ctlInfo.flash.m_flashCnt) {
4464            case IS_FLASH_STATE_AUTO_DONE:
4465                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4466                                ANDROID_CONTROL_AE_STATE_LOCKED,
4467                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4468                m_notifyCb(CAMERA2_MSG_AUTOWB,
4469                                ANDROID_CONTROL_AWB_STATE_LOCKED,
4470                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4471                m_ctlInfo.flash.m_precaptureTriggerId = 0;
4472                break;
4473            default:
4474                if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
4475                    ALOGE("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
4476                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
4477                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4478                                    ANDROID_CONTROL_AE_STATE_LOCKED,
4479                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4480                    m_notifyCb(CAMERA2_MSG_AUTOWB,
4481                                    ANDROID_CONTROL_AWB_STATE_LOCKED,
4482                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4483                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
4484                } else {
4485                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4486                                    ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4487                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4488                    m_notifyCb(CAMERA2_MSG_AUTOWB,
4489                                    ANDROID_CONTROL_AWB_STATE_CONVERGED,
4490                                    m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4491                }
4492            }
4493        } else {
4494            // non-flash case
4495            // AE
4496            switch (m_ctlInfo.ae.m_aeCnt) {
4497            case IS_COMMAND_EXECUTION:
4498                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4499                                ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4500                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4501                ALOGE("[PreCap] OnPrecaptureMeteringNotification (ID %d) CAMERA2_MSG_AUTOEXPOSURE, ANDROID_CONTROL_AE_STATE_PRECAPTURE", m_ctlInfo.flash.m_precaptureTriggerId);
4502                break;
4503            case IS_COMMAND_NONE:
4504            case IS_COMMAND_CLEAR:
4505                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4506                                ANDROID_CONTROL_AE_STATE_LOCKED,
4507                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4508                ALOGE("[PreCap] OnPrecaptureMeteringNotification (ID %d) CAMERA2_MSG_AUTOEXPOSURE, ANDROID_CONTROL_AE_STATE_LOCKED", m_ctlInfo.flash.m_precaptureTriggerId);
4509                m_ctlInfo.flash.m_precaptureTriggerId = 0;
4510                break;
4511            }
4512            // AWB
4513            switch (m_ctlInfo.awb.m_awbCnt) {
4514            case IS_COMMAND_EXECUTION:
4515                m_notifyCb(CAMERA2_MSG_AUTOWB,
4516                                ANDROID_CONTROL_AWB_STATE_CONVERGED,
4517                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4518                break;
4519            case IS_COMMAND_NONE:
4520            case IS_COMMAND_CLEAR:
4521                m_notifyCb(CAMERA2_MSG_AUTOWB,
4522                                ANDROID_CONTROL_AWB_STATE_LOCKED,
4523                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4524                break;
4525            }
4526        }
4527    }
4528}
4529
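/*
 * OnAfNotification: feeds an ISP AF state notification (aa_afstate) into the
 * HAL AF state machine for the current AF mode. Each per-mode handler below
 * maps (current HAL state, notification) to the next HAL state and the
 * corresponding ANDROID_CONTROL_AF_STATE reported to the service.
 */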
4530void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
4531{
4532    switch (m_afMode) {
4533    case AA_AFMODE_AUTO:
4534    case AA_AFMODE_MACRO:
4535        OnAfNotificationAutoMacro(noti);
4536        break;
4537    case AA_AFMODE_CONTINUOUS_VIDEO:
4538        OnAfNotificationCAFVideo(noti);
4539        break;
4540    case AA_AFMODE_CONTINUOUS_PICTURE:
4541        OnAfNotificationCAFPicture(noti);
4542        break;
4543    case AA_AFMODE_OFF:
4544    default:
4545        break;
4546    }
4547}
4548
4549void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
4550{
4551    int nextState = NO_TRANSITION;
4552    bool bWrongTransition = false;
4553
4554    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
4555        switch (noti) {
4556        case AA_AFSTATE_INACTIVE:
4557        case AA_AFSTATE_ACTIVE_SCAN:
4558        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4559        case AA_AFSTATE_AF_FAILED_FOCUS:
4560        default:
4561            nextState = NO_TRANSITION;
4562            break;
4563        }
4564    }
4565    else if (m_afState == HAL_AFSTATE_STARTED) {
4566        switch (noti) {
4567        case AA_AFSTATE_INACTIVE:
4568            nextState = NO_TRANSITION;
4569            break;
4570        case AA_AFSTATE_ACTIVE_SCAN:
4571            nextState = HAL_AFSTATE_SCANNING;
4572            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
4573            break;
4574        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4575            nextState = NO_TRANSITION;
4576            break;
4577        case AA_AFSTATE_AF_FAILED_FOCUS:
4578            nextState = NO_TRANSITION;
4579            break;
4580        default:
4581            bWrongTransition = true;
4582            break;
4583        }
4584    }
4585    else if (m_afState == HAL_AFSTATE_SCANNING) {
4586        switch (noti) {
4587        case AA_AFSTATE_INACTIVE:
4588            bWrongTransition = true;
4589            break;
4590        case AA_AFSTATE_ACTIVE_SCAN:
4591            nextState = NO_TRANSITION;
4592            break;
4593        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4594            // If flash mode is enabled, pre-capture metering is executed after AF
4595            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4596                switch (m_ctlInfo.flash.m_flashCnt) {
4597                case IS_FLASH_STATE_ON_DONE:
4598                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4599                    break;
4600                case IS_FLASH_STATE_AUTO_DONE:
4601                    nextState = HAL_AFSTATE_LOCKED;
4602                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4603                    break;
4604                default:
4605                    nextState = NO_TRANSITION;
4606                }
4607            } else {
4608                nextState = HAL_AFSTATE_LOCKED;
4609                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4610            }
4611            break;
4612        case AA_AFSTATE_AF_FAILED_FOCUS:
4613            // If flash mode is enabled, pre-capture metering is executed after AF
4614            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4615                switch (m_ctlInfo.flash.m_flashCnt) {
4616                case IS_FLASH_STATE_ON_DONE:
4617                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4618                    break;
4619                case IS_FLASH_STATE_AUTO_DONE:
4620                    nextState = HAL_AFSTATE_FAILED;
4621                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4622                    break;
4623                default:
4624                    nextState = NO_TRANSITION;
4625                }
4626            } else {
4627                nextState = HAL_AFSTATE_FAILED;
4628                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4629            }
4630            break;
4631        default:
4632            bWrongTransition = true;
4633            break;
4634        }
4635    }
4636    else if (m_afState == HAL_AFSTATE_LOCKED) {
4637        switch (noti) {
4638            case AA_AFSTATE_INACTIVE:
4639            case AA_AFSTATE_ACTIVE_SCAN:
4640                bWrongTransition = true;
4641                break;
4642            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4643                // Flash off if flash mode is available.
4644                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg)
4645                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4646                nextState = NO_TRANSITION;
4647                break;
4648            case AA_AFSTATE_AF_FAILED_FOCUS:
4649            default:
4650                bWrongTransition = true;
4651                break;
4652        }
4653    }
4654    else if (m_afState == HAL_AFSTATE_FAILED) {
4655        switch (noti) {
4656            case AA_AFSTATE_INACTIVE:
4657            case AA_AFSTATE_ACTIVE_SCAN:
4658            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4659                bWrongTransition = true;
4660                break;
4661            case AA_AFSTATE_AF_FAILED_FOCUS:
4662                // Flash off if flash mode is available.
4663                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg)
4664                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4665                nextState = NO_TRANSITION;
4666                break;
4667            default:
4668                bWrongTransition = true;
4669                break;
4670        }
4671    }
4672    if (bWrongTransition) {
4673        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
4674        return;
4675    }
4676    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
4677    if (nextState != NO_TRANSITION)
4678        m_afState = nextState;
4679}
4680
4681void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
4682{
4683    int nextState = NO_TRANSITION;
4684    bool bWrongTransition = false;
4685
4686    if (m_afState == HAL_AFSTATE_INACTIVE) {
4687        switch (noti) {
4688        case AA_AFSTATE_INACTIVE:
4689        case AA_AFSTATE_ACTIVE_SCAN:
4690        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4691        case AA_AFSTATE_AF_FAILED_FOCUS:
4692        default:
4693            nextState = NO_TRANSITION;
4694            break;
4695        }
4696    }
4697    else if (m_afState == HAL_AFSTATE_STARTED) {
4698        switch (noti) {
4699        case AA_AFSTATE_INACTIVE:
4700            nextState = NO_TRANSITION;
4701            break;
4702        case AA_AFSTATE_ACTIVE_SCAN:
4703            nextState = HAL_AFSTATE_SCANNING;
4704            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4705            break;
4706        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4707            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4708            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4709            break;
4710        case AA_AFSTATE_AF_FAILED_FOCUS:
4711            //nextState = HAL_AFSTATE_FAILED;
4712            //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4713            nextState = NO_TRANSITION;
4714            break;
4715        default:
4716            bWrongTransition = true;
4717            break;
4718        }
4719    }
4720    else if (m_afState == HAL_AFSTATE_SCANNING) {
4721        switch (noti) {
4722        case AA_AFSTATE_INACTIVE:
4723            nextState = NO_TRANSITION;
4724            break;
4725        case AA_AFSTATE_ACTIVE_SCAN:
4726            nextState = NO_TRANSITION;
4727            m_AfHwStateFailed = false;
4728            break;
4729        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4730            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4731            m_AfHwStateFailed = false;
4732            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4733            break;
4734        case AA_AFSTATE_AF_FAILED_FOCUS:
4735            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4736            m_AfHwStateFailed = true;
4737            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4738            break;
4739        default:
4740            bWrongTransition = true;
4741            break;
4742        }
4743    }
4744    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
4745        switch (noti) {
4746        case AA_AFSTATE_INACTIVE:
4747            nextState = NO_TRANSITION;
4748            break;
4749        case AA_AFSTATE_ACTIVE_SCAN:
4750            nextState = HAL_AFSTATE_SCANNING;
4751            m_AfHwStateFailed = false;
4752            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4753            break;
4754        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4755            nextState = NO_TRANSITION;
4756            m_AfHwStateFailed = false;
4757            // Flash off if flash mode is available.
4758            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4759                ALOGD("[AF Flash] Off with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4760                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4761            }
4762            break;
4763        case AA_AFSTATE_AF_FAILED_FOCUS:
4764            nextState = NO_TRANSITION;
4765            m_AfHwStateFailed = true;
4766            // Flash off if flash mode is available.
4767            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4768                ALOGD("[AF Flash] Off with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4769                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4770            }
4771            break;
4772        default:
4773            bWrongTransition = true;
4774            break;
4775        }
4776    }
4777    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
4778        switch (noti) {
4779        case AA_AFSTATE_INACTIVE:
4780            nextState = NO_TRANSITION;
4781            break;
4782        case AA_AFSTATE_ACTIVE_SCAN:
4783            nextState = NO_TRANSITION;
4784            break;
4785        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4786            // If flash mode is enabled, pre-capture metering is executed after AF
4787            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4788                ALOGD("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4789                switch (m_ctlInfo.flash.m_flashCnt) {
4790                case IS_FLASH_STATE_ON_DONE:
4791                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4792                    break;
4793                case IS_FLASH_STATE_AUTO_DONE:
4794                    m_IsAfLockRequired = true;
4795                    nextState = HAL_AFSTATE_LOCKED;
4796                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4797                    break;
4798                default:
4799                    nextState = NO_TRANSITION;
4800                }
4801            } else {
4802                m_IsAfLockRequired = true;
4803                nextState = HAL_AFSTATE_LOCKED;
4804                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4805            }
4806            break;
4807        case AA_AFSTATE_AF_FAILED_FOCUS:
4808            // If flash mode is enabled, pre-capture metering is executed after AF
4809            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4810                ALOGD("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4811                switch (m_ctlInfo.flash.m_flashCnt) {
4812                case IS_FLASH_STATE_ON_DONE:
4813                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
4814                    break;
4815                case IS_FLASH_STATE_AUTO_DONE:
4816                    m_IsAfLockRequired = true;
4817                    nextState = HAL_AFSTATE_FAILED;
4818                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4819                    break;
4820                default:
4821                    nextState = NO_TRANSITION;
4822                }
4823            } else {
4824                m_IsAfLockRequired = true;
4825                nextState = HAL_AFSTATE_FAILED;
4826                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4827            }
4828            break;
4829        default:
4830            bWrongTransition = true;
4831            break;
4832        }
4833    }
4834    else if (m_afState == HAL_AFSTATE_LOCKED) {
4835        switch (noti) {
4836            case AA_AFSTATE_INACTIVE:
4837                nextState = NO_TRANSITION;
4838                break;
4839            case AA_AFSTATE_ACTIVE_SCAN:
4840                bWrongTransition = true;
4841                break;
4842            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4843                nextState = NO_TRANSITION;
4844                // Flash off if flash mode is available.
4845                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4846                    ALOGD("[AF Flash] Off with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4847                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4848                }
4849                break;
4850            case AA_AFSTATE_AF_FAILED_FOCUS:
4851            default:
4852                bWrongTransition = true;
4853                break;
4854        }
4855    }
4856    else if (m_afState == HAL_AFSTATE_FAILED) {
4857        switch (noti) {
4858            case AA_AFSTATE_INACTIVE:
4859                bWrongTransition = true;
4860                break;
4861            case AA_AFSTATE_ACTIVE_SCAN:
4862                nextState = HAL_AFSTATE_SCANNING;
4863                break;
4864            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4865                bWrongTransition = true;
4866                break;
4867            case AA_AFSTATE_AF_FAILED_FOCUS:
4868                // Flash off if flash mode is available.
4869                if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
4870                    ALOGD("[AF Flash] Off with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
4871                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
4872                }
4873                nextState = NO_TRANSITION;
4874                break;
4875            default:
4876                bWrongTransition = true;
4877                break;
4878        }
4879    }
4880    if (bWrongTransition) {
4881        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
4882        return;
4883    }
4884    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
4885    if (nextState != NO_TRANSITION)
4886        m_afState = nextState;
4887}
4888
4889void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
4890{
4891    int nextState = NO_TRANSITION;
4892    bool bWrongTransition = false;
4893
4894    if (m_afState == HAL_AFSTATE_INACTIVE) {
4895        switch (noti) {
4896        case AA_AFSTATE_INACTIVE:
4897        case AA_AFSTATE_ACTIVE_SCAN:
4898        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4899        case AA_AFSTATE_AF_FAILED_FOCUS:
4900        default:
4901            nextState = NO_TRANSITION;
4902            break;
4903        }
4904    }
4905    else if (m_afState == HAL_AFSTATE_STARTED) {
4906        switch (noti) {
4907        case AA_AFSTATE_INACTIVE:
4908            nextState = NO_TRANSITION;
4909            break;
4910        case AA_AFSTATE_ACTIVE_SCAN:
4911            nextState = HAL_AFSTATE_SCANNING;
4912            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4913            break;
4914        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4915            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4916            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4917            break;
4918        case AA_AFSTATE_AF_FAILED_FOCUS:
4919            nextState = HAL_AFSTATE_FAILED;
4920            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4921            break;
4922        default:
4923            bWrongTransition = true;
4924            break;
4925        }
4926    }
4927    else if (m_afState == HAL_AFSTATE_SCANNING) {
4928        switch (noti) {
4929        case AA_AFSTATE_INACTIVE:
4930            bWrongTransition = true;
4931            break;
4932        case AA_AFSTATE_ACTIVE_SCAN:
4933            nextState = NO_TRANSITION;
4934            break;
4935        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4936            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4937            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4938            break;
4939        case AA_AFSTATE_AF_FAILED_FOCUS:
4940            nextState = NO_TRANSITION;
4941            m_IsAfTriggerRequired = true;
4942            break;
4943        default:
4944            bWrongTransition = true;
4945            break;
4946        }
4947    }
4948    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
4949        switch (noti) {
4950        case AA_AFSTATE_INACTIVE:
4951            bWrongTransition = true;
4952            break;
4953        case AA_AFSTATE_ACTIVE_SCAN:
4954            nextState = HAL_AFSTATE_SCANNING;
4955            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4956            break;
4957        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4958            nextState = NO_TRANSITION;
4959            break;
4960        case AA_AFSTATE_AF_FAILED_FOCUS:
4961            nextState = HAL_AFSTATE_FAILED;
4962            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4963            // TODO : needs NO_TRANSITION ?
4964            break;
4965        default:
4966            bWrongTransition = true;
4967            break;
4968        }
4969    }
4970    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
4971        switch (noti) {
4972        case AA_AFSTATE_INACTIVE:
4973            bWrongTransition = true;
4974            break;
4975        case AA_AFSTATE_ACTIVE_SCAN:
4976            nextState = NO_TRANSITION;
4977            break;
4978        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4979            m_IsAfLockRequired = true;
4980            nextState = HAL_AFSTATE_LOCKED;
4981            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4982            break;
4983        case AA_AFSTATE_AF_FAILED_FOCUS:
4984            nextState = HAL_AFSTATE_FAILED;
4985            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4986            break;
4987        default:
4988            bWrongTransition = true;
4989            break;
4990        }
4991    }
4992    else if (m_afState == HAL_AFSTATE_LOCKED) {
4993        switch (noti) {
4994            case AA_AFSTATE_INACTIVE:
4995                nextState = NO_TRANSITION;
4996                break;
4997            case AA_AFSTATE_ACTIVE_SCAN:
4998                bWrongTransition = true;
4999                break;
5000            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5001                nextState = NO_TRANSITION;
5002                break;
5003            case AA_AFSTATE_AF_FAILED_FOCUS:
5004            default:
5005                bWrongTransition = true;
5006                break;
5007        }
5008    }
5009    else if (m_afState == HAL_AFSTATE_FAILED) {
5010        switch (noti) {
5011            case AA_AFSTATE_INACTIVE:
5012            case AA_AFSTATE_ACTIVE_SCAN:
5013            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5014                bWrongTransition = true;
5015                break;
5016            case AA_AFSTATE_AF_FAILED_FOCUS:
5017                nextState = NO_TRANSITION;
5018                break;
5019            default:
5020                bWrongTransition = true;
5021                break;
5022        }
5023    }
5024    if (bWrongTransition) {
5025        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5026        return;
5027    }
5028    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5029    if (nextState != NO_TRANSITION)
5030        m_afState = nextState;
5031}
5032
5033void ExynosCameraHWInterface2::OnAfCancel(int id)
5034{
5035    m_afTriggerId = id;
5036
5037    switch (m_afMode) {
5038    case AA_AFMODE_AUTO:
5039    case AA_AFMODE_MACRO:
5040    case AA_AFMODE_OFF:
5041        OnAfCancelAutoMacro(id);
5042        break;
5043    case AA_AFMODE_CONTINUOUS_VIDEO:
5044        OnAfCancelCAFVideo(id);
5045        break;
5046    case AA_AFMODE_CONTINUOUS_PICTURE:
5047        OnAfCancelCAFPicture(id);
5048        break;
5049    default:
5050        break;
5051    }
5052}
5053
5054void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
5055{
5056    int nextState = NO_TRANSITION;
5057    m_afTriggerId = id;
5058
5059    if (m_ctlInfo.flash.m_flashEnableFlg  && m_ctlInfo.flash.m_afFlashDoneFlg) {
5060        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5061    }
5062    switch (m_afState) {
5063    case HAL_AFSTATE_INACTIVE:
5064        nextState = NO_TRANSITION;
5065        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5066        break;
5067    case HAL_AFSTATE_NEEDS_COMMAND:
5068    case HAL_AFSTATE_STARTED:
5069    case HAL_AFSTATE_SCANNING:
5070    case HAL_AFSTATE_LOCKED:
5071    case HAL_AFSTATE_FAILED:
5072        SetAfMode(AA_AFMODE_OFF);
5073        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5074        nextState = HAL_AFSTATE_INACTIVE;
5075        break;
5076    default:
5077        break;
5078    }
5079    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5080    if (nextState != NO_TRANSITION)
5081        m_afState = nextState;
5082}
5083
5084void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
5085{
5086    int nextState = NO_TRANSITION;
5087    m_afTriggerId = id;
5088
5089    switch (m_afState) {
5090    case HAL_AFSTATE_INACTIVE:
5091        nextState = NO_TRANSITION;
5092        break;
5093    case HAL_AFSTATE_NEEDS_COMMAND:
5094    case HAL_AFSTATE_STARTED:
5095    case HAL_AFSTATE_SCANNING:
5096    case HAL_AFSTATE_LOCKED:
5097    case HAL_AFSTATE_FAILED:
5098    case HAL_AFSTATE_NEEDS_DETERMINATION:
5099    case HAL_AFSTATE_PASSIVE_FOCUSED:
5100        SetAfMode(AA_AFMODE_OFF);
5101        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5102        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5103        nextState = HAL_AFSTATE_INACTIVE;
5104        break;
5105    default:
5106        break;
5107    }
5108    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5109    if (nextState != NO_TRANSITION)
5110        m_afState = nextState;
5111}
5112
5113void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
5114{
5115    int nextState = NO_TRANSITION;
5116    m_afTriggerId = id;
5117
5118    switch (m_afState) {
5119    case HAL_AFSTATE_INACTIVE:
5120        nextState = NO_TRANSITION;
5121        break;
5122    case HAL_AFSTATE_NEEDS_COMMAND:
5123    case HAL_AFSTATE_STARTED:
5124    case HAL_AFSTATE_SCANNING:
5125    case HAL_AFSTATE_LOCKED:
5126    case HAL_AFSTATE_FAILED:
5127    case HAL_AFSTATE_NEEDS_DETERMINATION:
5128    case HAL_AFSTATE_PASSIVE_FOCUSED:
5129        SetAfMode(AA_AFMODE_OFF);
5130        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5131        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5132        nextState = HAL_AFSTATE_INACTIVE;
5133        break;
5134    default:
5135        break;
5136    }
5137    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5138    if (nextState != NO_TRANSITION)
5139        m_afState = nextState;
5140}
5141
5142void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5143{
5144    if (m_serviceAfState != newState || newState == 0)
5145        m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5146    m_serviceAfState = newState;
5147}
5148
5149int ExynosCameraHWInterface2::GetAfStateForService()
5150{
5151   return m_serviceAfState;
5152}
5153
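/*
 * SetAfMode: requests an AF mode change. If a previous mode update is still
 * pending (m_IsAfModeUpdateRequired), the new mode is parked in m_afMode2;
 * otherwise the mode is applied and, if a trigger was waiting for the mode
 * switch (m_afModeWaitingCnt != 0), the pending trigger is fired immediately.
 */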
5154void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5155{
5156    if (m_afMode != afMode) {
5157        if (m_IsAfModeUpdateRequired) {
5158            m_afMode2 = afMode;
5159            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5160        }
5161        else {
5162            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5163            m_IsAfModeUpdateRequired = true;
5164            m_afMode = afMode;
5165            if (m_afModeWaitingCnt != 0) {
5166                m_afModeWaitingCnt = 0;
5167                m_afState = HAL_AFSTATE_INACTIVE;
5168                OnAfTrigger(m_afPendingTriggerId);
5169            }
5170        }
5171    }
5172}
5173
5174void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5175{
5176    char property[PROPERTY_VALUE_MAX];
5177
5178    //2 0th IFD TIFF Tags
5179#if 0 // STOPSHIP TODO(aray): remove before launch, but for now don't leak product data
5180    //3 Maker
5181    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5182    strncpy((char *)mExifInfo.maker, property,
5183                sizeof(mExifInfo.maker) - 1);
5184    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5185    //3 Model
5186    property_get("ro.product.model", property, EXIF_DEF_MODEL);
5187    strncpy((char *)mExifInfo.model, property,
5188                sizeof(mExifInfo.model) - 1);
5189    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5190    //3 Software
5191    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5192    strncpy((char *)mExifInfo.software, property,
5193                sizeof(mExifInfo.software) - 1);
5194    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5195#endif
5196
5197    //3 YCbCr Positioning
5198    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5199
5200    //2 0th IFD Exif Private Tags
5201    //3 F Number
5202    mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5203    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5204    //3 Exposure Program
5205    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5206    //3 Exif Version
5207    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5208    //3 Aperture
5209    double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5210    mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5211    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5212    //3 Maximum lens aperture
5213    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5214    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5215    //3 Lens Focal Length
5216    mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5217
5218    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5219    //3 User Comments
5220    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5221    //3 Color Space information
5222    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5223    //3 Exposure Mode
5224    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5225
5226    //2 0th IFD GPS Info Tags
5227    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5228    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5229
5230    //2 1st IFD TIFF Tags
5231    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5232    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5233    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5234    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5235    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5236    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5237}
5238
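/*
 * m_setExifChangedAttribute: fills the per-capture EXIF fields (dimensions,
 * orientation, timestamp, exposure, ISO, APEX values, flash, white balance,
 * scene type and GPS tags) from the shot's dm/ctl metadata and the JPEG
 * rectangle. Skipped when the frame count is zero.
 */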
5239void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5240	camera2_shot *currentEntry)
5241{
5242    camera2_dm *dm = &(currentEntry->dm);
5243    camera2_ctl *ctl = &(currentEntry->ctl);
5244
5245    ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime, dm->aa.isoValue);
5246    if (!ctl->request.frameCount)
5247       return;
5248    //2 0th IFD TIFF Tags
5249    //3 Width
5250    exifInfo->width = rect->w;
5251    //3 Height
5252    exifInfo->height = rect->h;
5253    //3 Orientation
5254    switch (ctl->jpeg.orientation) {
5255    case 90:
5256        exifInfo->orientation = EXIF_ORIENTATION_90;
5257        break;
5258    case 180:
5259        exifInfo->orientation = EXIF_ORIENTATION_180;
5260        break;
5261    case 270:
5262        exifInfo->orientation = EXIF_ORIENTATION_270;
5263        break;
5264    case 0:
5265    default:
5266        exifInfo->orientation = EXIF_ORIENTATION_UP;
5267        break;
5268    }
5269
5270    //3 Date time
5271    time_t rawtime;
5272    struct tm *timeinfo;
5273    time(&rawtime);
5274    timeinfo = localtime(&rawtime);
5275    strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5276
5277    //2 0th IFD Exif Private Tags
5278    //3 Exposure Time
5279    int shutterSpeed = (dm->sensor.exposureTime/1000);
5280
5281    if (shutterSpeed <= 0) {  // guard against a zero/negative exposure time (avoids divide-by-zero below)
5282        shutterSpeed = 100;
5283    }
5284
5285    exifInfo->exposure_time.num = 1;
5286    // x us -> 1/x s
5287    //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5288    exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5289
5290    //3 ISO Speed Rating
5291    exifInfo->iso_speed_rating = dm->aa.isoValue;
5292
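    // APEX values, assuming the usual definitions behind these macros:
    // AV = 2*log2(f-number), TV = log2(1/exposure_time), SV = log2(ISO/3.125).
    // Example (approximate): f/2.6, 1/50 s, ISO 100 -> AV ~= 2.8, TV ~= 5.6,
    // SV ~= 5, so BV = AV + TV - SV ~= 3.4 and EV = AV + TV ~= 8.4. Note the
    // values are truncated to integers here before being scaled by EXIF_DEF_APEX_DEN.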
5293    uint32_t av, tv, bv, sv, ev;
5294    av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5295    tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5296    sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5297    bv = av + tv - sv;
5298    ev = av + tv;
5299    //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5300    ALOGD("AV=%u, TV=%u, SV=%u", av, tv, sv);
5301
5302    //3 Shutter Speed
5303    exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5304    exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5305    //3 Brightness
5306    exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5307    exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5308    //3 Exposure Bias
5309    if (ctl->aa.sceneMode == AA_SCENE_MODE_BEACH ||
5310        ctl->aa.sceneMode == AA_SCENE_MODE_SNOW) {
5311        exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5312        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5313    } else {
5314        exifInfo->exposure_bias.num = 0;
5315        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;    // a 0/0 rational is not valid EXIF
5316    }
5317    //3 Metering Mode
5318    /*switch (m_curCameraInfo->metering) {
5319    case METERING_MODE_CENTER:
5320        exifInfo->metering_mode = EXIF_METERING_CENTER;
5321        break;
5322    case METERING_MODE_MATRIX:
5323        exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5324        break;
5325    case METERING_MODE_SPOT:
5326        exifInfo->metering_mode = EXIF_METERING_SPOT;
5327        break;
5328    case METERING_MODE_AVERAGE:
5329    default:
5330        exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5331        break;
5332    }*/
5333    exifInfo->metering_mode = EXIF_METERING_CENTER;
5334
5335    //3 Flash
5336    if (m_ctlInfo.flash.m_flashDecisionResult)
5337        exifInfo->flash = 1;
5338    else
5339        exifInfo->flash = EXIF_DEF_FLASH;
5340
5341    //3 White Balance
5342    if (m_ctlInfo.awb.i_awbMode == AA_AWBMODE_WB_AUTO)
5343        exifInfo->white_balance = EXIF_WB_AUTO;
5344    else
5345        exifInfo->white_balance = EXIF_WB_MANUAL;
5346
5347    //3 Scene Capture Type
5348    switch (ctl->aa.sceneMode) {
5349    case AA_SCENE_MODE_PORTRAIT:
5350        exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5351        break;
5352    case AA_SCENE_MODE_LANDSCAPE:
5353        exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5354        break;
5355    case AA_SCENE_MODE_NIGHT_PORTRAIT:
5356        exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5357        break;
5358    default:
5359        exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5360        break;
5361    }
5362
5363    //2 0th IFD GPS Info Tags
5364    if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5365
5366        if (ctl->jpeg.gpsCoordinates[0] > 0)
5367            strcpy((char *)exifInfo->gps_latitude_ref, "N");
5368        else
5369            strcpy((char *)exifInfo->gps_latitude_ref, "S");
5370
5371        if (ctl->jpeg.gpsCoordinates[1] > 0)
5372            strcpy((char *)exifInfo->gps_longitude_ref, "E");
5373        else
5374            strcpy((char *)exifInfo->gps_longitude_ref, "W");
5375
5376        if (ctl->jpeg.gpsCoordinates[2] > 0)
5377            exifInfo->gps_altitude_ref = 0;
5378        else
5379            exifInfo->gps_altitude_ref = 1;
5380
5381        double latitude = fabs(ctl->jpeg.gpsCoordinates[0] / 10000.0);
5382        double longitude = fabs(ctl->jpeg.gpsCoordinates[1] / 10000.0);
5383        double altitude = fabs(ctl->jpeg.gpsCoordinates[2] / 100.0);
5384
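        // Convert the absolute coordinates into EXIF degree/minute/second
        // rationals: the integer part gives degrees, the remainder scaled by 60
        // gives minutes, and the remainder of that scaled by 60 gives seconds.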
5385        exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5386        exifInfo->gps_latitude[0].den = 1;
5387        exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5388        exifInfo->gps_latitude[1].den = 1;
5389        exifInfo->gps_latitude[2].num = (uint32_t)((((latitude - exifInfo->gps_latitude[0].num) * 60)
5390                                        - exifInfo->gps_latitude[1].num) * 60);
5391        exifInfo->gps_latitude[2].den = 1;
5392
5393        exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5394        exifInfo->gps_longitude[0].den = 1;
5395        exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5396        exifInfo->gps_longitude[1].den = 1;
5397        exifInfo->gps_longitude[2].num = (uint32_t)((((longitude - exifInfo->gps_longitude[0].num) * 60)
5398                                        - exifInfo->gps_longitude[1].num) * 60);
5399        exifInfo->gps_longitude[2].den = 1;
5400
5401        exifInfo->gps_altitude.num = (uint32_t)altitude;
5402        exifInfo->gps_altitude.den = 1;
5403
5404        struct tm tm_data;
5405        time_t timestamp;
5406        timestamp = (time_t)ctl->jpeg.gpsTimestamp;
5407        gmtime_r(&timestamp, &tm_data);
5408        exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5409        exifInfo->gps_timestamp[0].den = 1;
5410        exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5411        exifInfo->gps_timestamp[1].den = 1;
5412        exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5413        exifInfo->gps_timestamp[2].den = 1;
5414        snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5415                "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5416
5417        exifInfo->enableGps = true;
5418    } else {
5419        exifInfo->enableGps = false;
5420    }
5421
5422    //2 1st IFD TIFF Tags
5423    exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5424    exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5425}
5426
5427ExynosCameraHWInterface2::MainThread::~MainThread()
5428{
5429    ALOGV("(%s):", __FUNCTION__);
5430}
5431
5432void ExynosCameraHWInterface2::MainThread::release()
5433{
5434    ALOGV("(%s):", __func__);
5435    SetSignal(SIGNAL_THREAD_RELEASE);
5436}
5437
5438ExynosCameraHWInterface2::SensorThread::~SensorThread()
5439{
5440    ALOGV("(%s):", __FUNCTION__);
5441}
5442
5443void ExynosCameraHWInterface2::SensorThread::release()
5444{
5445    ALOGV("(%s):", __func__);
5446    SetSignal(SIGNAL_THREAD_RELEASE);
5447}
5448
5449ExynosCameraHWInterface2::StreamThread::~StreamThread()
5450{
5451    ALOGV("(%s):", __FUNCTION__);
5452}
5453
5454void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
5455{
5456    ALOGV("DEBUG(%s):", __FUNCTION__);
5457    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
5458}
5459
5460void ExynosCameraHWInterface2::StreamThread::release()
5461{
5462    ALOGV("(%s):", __func__);
5463    SetSignal(SIGNAL_THREAD_RELEASE);
5464}
5465
5466int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5467{
5468    int index;
5469    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5470        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
5471            return index;
5472    }
5473    return -1;
5474}
5475
5476int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
5477{
5478    int index;
5479    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5480        if (m_parameters.svcBufHandle[index] == *bufHandle)
5481            return index;
5482    }
5483    return -1;
5484}
5485
5486status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
5487{
5488    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5489    int index, vacantIndex;
5490    bool vacancy = false;
5491
5492    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5493        if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
5494            vacancy = true;
5495            vacantIndex = index;
5496        } else if (m_attachedSubStreams[index].streamId == stream_id) {
5497            return BAD_VALUE;
5498        }
5499    }
5500    if (!vacancy)
5501        return NO_MEMORY;
5502    m_attachedSubStreams[vacantIndex].streamId = stream_id;
5503    m_attachedSubStreams[vacantIndex].priority = priority;
5504    m_numRegisteredStream++;
5505    return NO_ERROR;
5506}
5507
5508status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
5509{
5510    ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
5511    int index;
5512    bool found = false;
5513
5514    for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
5515        if (m_attachedSubStreams[index].streamId == stream_id) {
5516            found = true;
5517            break;
5518        }
5519    }
5520    if (!found)
5521        return BAD_VALUE;
5522    m_attachedSubStreams[index].streamId = -1;
5523    m_attachedSubStreams[index].priority = 0;
5524    m_numRegisteredStream--;
5525    return NO_ERROR;
5526}
5527
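// Thin wrappers around the ION client handle: createIonClient() opens a client
// only when none exists yet (returning 0 on failure), and deleteIonClient()
// destroys a valid client and reports the cleared handle.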
5528int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
5529{
5530    if (ionClient == 0) {
5531        ionClient = ion_client_create();
5532        if (ionClient < 0) {
5533            ALOGE("[%s] ion client create failed, value = %d\n", __FUNCTION__, ionClient);
5534            return 0;
5535        }
5536    }
5537    return ionClient;
5538}
5539
5540int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
5541{
5542    if (ionClient != 0) {
5543        if (ionClient > 0) {
5544            ion_client_destroy(ionClient);
5545        }
5546        ionClient = 0;
5547    }
5548    return ionClient;
5549}
5550
5551int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
5552{
5553    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
5554}
5555
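// Allocates and mmaps up to iMemoryNum planes through ION. Bit i of cacheFlag
// requests a cached mapping for plane i; on any failure the planes allocated so
// far are released via freeCameraMemory() and -1 is returned.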
5556int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
5557{
5558    int ret = 0;
5559    int i = 0;
5560    int flag = 0;
5561
5562    if (ionClient == 0) {
5563        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
5564        return -1;
5565    }
5566
5567    for (i = 0 ; i < iMemoryNum ; i++) {
5568        if (buf->size.extS[i] == 0) {
5569            break;
5570        }
5571        if (1 << i & cacheFlag)
5572            flag = ION_FLAG_CACHED;
5573        else
5574            flag = 0;
5575        buf->fd.extFd[i] = ion_alloc(ionClient, \
5576                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK, flag);
5577        if ((buf->fd.extFd[i] == -1) || (buf->fd.extFd[i] == 0)) {
5578            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
5579            buf->fd.extFd[i] = -1;
5580            freeCameraMemory(buf, iMemoryNum);
5581            return -1;
5582        }
5583
5584        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
5585                                        buf->size.extS[i], 0);
5586        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
5587            ALOGE("[%s] ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
5588            buf->virt.extP[i] = (char *)MAP_FAILED;
5589            freeCameraMemory(buf, iMemoryNum);
5590            return -1;
5591        }
5592        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
5593    }
5594
5595    return ret;
5596}
5597
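// Unmaps and frees every plane previously obtained from allocCameraMemory(),
// then resets the ExynosBuffer bookkeeping to its unallocated state.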
5598void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
5599{
5600
5601    int i = 0;
5602    int ret = 0;
5603
5604    for (i = 0; i < iMemoryNum; i++) {
5605        if (buf->fd.extFd[i] != -1) {
5606            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
5607                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
5608                if (ret < 0)
5609                    ALOGE("ERR(%s)", __FUNCTION__);
5610            }
5611            ion_free(buf->fd.extFd[i]);
5612        }
5613        buf->fd.extFd[i] = -1;
5614        buf->virt.extP[i] = (char *)MAP_FAILED;
5615        buf->size.extS[i] = 0;
5616    }
5617}
5618
5619void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
5620{
5621    int i = 0;
5622    for (i = 0; i < iMemoryNum; i++) {
5623        buf->virt.extP[i] = (char *)MAP_FAILED;
5624        buf->fd.extFd[i] = -1;
5625        buf->size.extS[i] = 0;
5626    }
5627}
5628
5629
5630
5631
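// Module-level state for the camera2 HAL: only one camera2_device_t instance is
// kept alive at a time in g_cam2_device, and g_camera_vaild flags whether stream
// calls from the framework may still reach it during open/close transitions.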
5632static camera2_device_t *g_cam2_device = NULL;
5633static bool g_camera_vaild = false;
5634ExynosCamera2 * g_camera2[2] = { NULL, NULL };
5635
5636static int HAL2_camera_device_close(struct hw_device_t* device)
5637{
5638    ALOGV("%s: ENTER", __FUNCTION__);
5639    if (device) {
5640
5641        camera2_device_t *cam_device = (camera2_device_t *)device;
5642        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
5643        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
5644        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
5645        g_cam2_device = NULL;
5646        free(cam_device);
5647        g_camera_vaild = false;
5648    }
5649
5650    ALOGV("%s: EXIT", __FUNCTION__);
5651    return 0;
5652}
5653
5654static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
5655{
5656    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
5657}
5658
5659static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
5660            const camera2_request_queue_src_ops_t *request_src_ops)
5661{
5662    ALOGV("DEBUG(%s):", __FUNCTION__);
5663    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
5664}
5665
5666static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
5667{
5668    ALOGV("DEBUG(%s):", __FUNCTION__);
5669    return obj(dev)->notifyRequestQueueNotEmpty();
5670}
5671
5672static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
5673            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
5674{
5675    ALOGV("DEBUG(%s):", __FUNCTION__);
5676    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
5677}
5678
5679static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
5680{
5681    ALOGV("DEBUG(%s):", __FUNCTION__);
5682    return obj(dev)->getInProgressCount();
5683}
5684
5685static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
5686{
5687    ALOGV("DEBUG(%s):", __FUNCTION__);
5688    return obj(dev)->flushCapturesInProgress();
5689}
5690
5691static int HAL2_device_construct_default_request(const struct camera2_device *dev,
5692            int request_template, camera_metadata_t **request)
5693{
5694    ALOGV("DEBUG(%s):", __FUNCTION__);
5695    return obj(dev)->constructDefaultRequest(request_template, request);
5696}
5697
5698static int HAL2_device_allocate_stream(
5699            const struct camera2_device *dev,
5700            // inputs
5701            uint32_t width,
5702            uint32_t height,
5703            int      format,
5704            const camera2_stream_ops_t *stream_ops,
5705            // outputs
5706            uint32_t *stream_id,
5707            uint32_t *format_actual,
5708            uint32_t *usage,
5709            uint32_t *max_buffers)
5710{
5711    ALOGV("(%s): ", __FUNCTION__);
5712    return obj(dev)->allocateStream(width, height, format, stream_ops,
5713                                    stream_id, format_actual, usage, max_buffers);
5714}
5715
5716static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
5717            uint32_t stream_id,
5718            int num_buffers,
5719            buffer_handle_t *buffers)
5720{
5721    ALOGV("DEBUG(%s):", __FUNCTION__);
5722    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
5723}
5724
5725static int HAL2_device_release_stream(
5726        const struct camera2_device *dev,
5727            uint32_t stream_id)
5728{
5729    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
5730    if (!g_camera_vaild)
5731        return 0;
5732    return obj(dev)->releaseStream(stream_id);
5733}
5734
5735static int HAL2_device_allocate_reprocess_stream(
5736           const struct camera2_device *dev,
5737            uint32_t width,
5738            uint32_t height,
5739            uint32_t format,
5740            const camera2_stream_in_ops_t *reprocess_stream_ops,
5741            // outputs
5742            uint32_t *stream_id,
5743            uint32_t *consumer_usage,
5744            uint32_t *max_buffers)
5745{
5746    ALOGV("DEBUG(%s):", __FUNCTION__);
5747    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
5748                                    stream_id, consumer_usage, max_buffers);
5749}
5750
5751static int HAL2_device_allocate_reprocess_stream_from_stream(
5752           const struct camera2_device *dev,
5753            uint32_t output_stream_id,
5754            const camera2_stream_in_ops_t *reprocess_stream_ops,
5755            // outputs
5756            uint32_t *stream_id)
5757{
5758    ALOGV("DEBUG(%s):", __FUNCTION__);
5759    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
5760                                    reprocess_stream_ops, stream_id);
5761}
5762
5763static int HAL2_device_release_reprocess_stream(
5764        const struct camera2_device *dev,
5765            uint32_t stream_id)
5766{
5767    ALOGV("DEBUG(%s):", __FUNCTION__);
5768    return obj(dev)->releaseReprocessStream(stream_id);
5769}
5770
5771static int HAL2_device_trigger_action(const struct camera2_device *dev,
5772           uint32_t trigger_id,
5773            int ext1,
5774            int ext2)
5775{
5776    ALOGV("DEBUG(%s):", __FUNCTION__);
5777    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
5778}
5779
5780static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
5781            camera2_notify_callback notify_cb,
5782            void *user)
5783{
5784    ALOGV("DEBUG(%s):", __FUNCTION__);
5785    return obj(dev)->setNotifyCallback(notify_cb, user);
5786}
5787
5788static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
5789            vendor_tag_query_ops_t **ops)
5790{
5791    ALOGV("DEBUG(%s):", __FUNCTION__);
5792    return obj(dev)->getMetadataVendorTagOps(ops);
5793}
5794
5795static int HAL2_device_dump(const struct camera2_device *dev, int fd)
5796{
5797    ALOGV("DEBUG(%s):", __FUNCTION__);
5798    return obj(dev)->dump(fd);
5799}
5800
5801
5802
5803
5804
5805static int HAL2_getNumberOfCameras()
5806{
5807    ALOGV("(%s): returning 2", __FUNCTION__);
5808    return 2;
5809}
5810
5811
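// Lazily creates the per-camera ExynosCamera2 helper and caches its static
// metadata in mCameraInfo[]; constructStaticInfo() is called twice, first to
// allocate the camera_metadata_t and then to fill in its entries.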
5812static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
5813{
5814    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
5815    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
5816
5817    status_t res;
5818
5819    if (cameraId == 0) {
5820        info->facing = CAMERA_FACING_BACK;
5821        if (!g_camera2[0])
5822            g_camera2[0] = new ExynosCamera2(0);
5823    }
5824    else if (cameraId == 1) {
5825        info->facing = CAMERA_FACING_FRONT;
5826        if (!g_camera2[1])
5827            g_camera2[1] = new ExynosCamera2(1);
5828    }
5829    else
5830        return BAD_VALUE;
5831
5832    info->orientation = 0;
5833    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
5834    if (mCameraInfo[cameraId] == NULL) {
5835        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
5836        if (res != OK) {
5837            ALOGE("%s: Unable to allocate static info: %s (%d)",
5838                    __FUNCTION__, strerror(-res), res);
5839            return res;
5840        }
5841        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
5842        if (res != OK) {
5843            ALOGE("%s: Unable to fill in static info: %s (%d)",
5844                    __FUNCTION__, strerror(-res), res);
5845            return res;
5846        }
5847    }
5848    info->static_camera_characteristics = mCameraInfo[cameraId];
5849    return NO_ERROR;
5850}
5851
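// Dispatch table handed to the framework: SET_METHOD() expands to a designated
// initializer (m : HAL2_device_##m), so every camera2_device_ops entry points at
// the matching trampoline defined above.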
5852#define SET_METHOD(m) m : HAL2_device_##m
5853
5854static camera2_device_ops_t camera2_device_ops = {
5855        SET_METHOD(set_request_queue_src_ops),
5856        SET_METHOD(notify_request_queue_not_empty),
5857        SET_METHOD(set_frame_queue_dst_ops),
5858        SET_METHOD(get_in_progress_count),
5859        SET_METHOD(flush_captures_in_progress),
5860        SET_METHOD(construct_default_request),
5861        SET_METHOD(allocate_stream),
5862        SET_METHOD(register_stream_buffers),
5863        SET_METHOD(release_stream),
5864        SET_METHOD(allocate_reprocess_stream),
5865        SET_METHOD(allocate_reprocess_stream_from_stream),
5866        SET_METHOD(release_reprocess_stream),
5867        SET_METHOD(trigger_action),
5868        SET_METHOD(set_notify_callback),
5869        SET_METHOD(get_metadata_vendor_tag_ops),
5870        SET_METHOD(dump),
5871};
5872
5873#undef SET_METHOD
5874
5875
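// Opens (or reuses) the single device instance. If a different camera ID is
// still open, the call spins until HAL2_camera_device_close() has cleared
// g_cam2_device before allocating a new camera2_device_t.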
5876static int HAL2_camera_device_open(const struct hw_module_t* module,
5877                                  const char *id,
5878                                  struct hw_device_t** device)
5879{
5880    int cameraId = atoi(id);
5881    int openInvalid = 0;
5882
5883    g_camera_vaild = false;
5884    ALOGV("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
5885    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
5886        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
5887        return -EINVAL;
5888    }
5889
5890    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
5891    if (g_cam2_device) {
5892        if (obj(g_cam2_device)->getCameraId() == cameraId) {
5893            ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
5894            goto done;
5895        } else {
5896
5897            while (g_cam2_device)
5898                usleep(10000);
5899        }
5900    }
5901
5902    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
5903    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
5904
5905    if (!g_cam2_device)
5906        return -ENOMEM;
5907
5908    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
5909    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
5910    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
5911    g_cam2_device->common.close   = HAL2_camera_device_close;
5912
5913    g_cam2_device->ops = &camera2_device_ops;
5914
5915    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
5916
5917    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
5918    if (!openInvalid) {
5919        ALOGE("ERR(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
5920        return -ENODEV;
5921    }
5922done:
5923    *device = (hw_device_t *)g_cam2_device;
5924    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
5925    g_camera_vaild = true;
5926
5927    return 0;
5928}
5929
5930
5931static hw_module_methods_t camera_module_methods = {
5932            open : HAL2_camera_device_open
5933};
5934
5935extern "C" {
5936    struct camera_module HAL_MODULE_INFO_SYM = {
5937      common : {
5938          tag                : HARDWARE_MODULE_TAG,
5939          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
5940          hal_api_version    : HARDWARE_HAL_API_VERSION,
5941          id                 : CAMERA_HARDWARE_MODULE_ID,
5942          name               : "Exynos Camera HAL2",
5943          author             : "Samsung Corporation",
5944          methods            : &camera_module_methods,
5945          dso:                NULL,
5946          reserved:           {0},
5947      },
5948      get_number_of_cameras : HAL2_getNumberOfCameras,
5949      get_camera_info       : HAL2_getCameraInfo
5950    };
5951}
5952
5953}; // namespace android
5954